# Copyright (C) 2005, 2006, 2007, 2009 Canonical Ltd
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats.  Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions.  The range
can be given as date/times, which are reduced to revisions before
that time.

In verbose mode we show a summary of what changed in each particular
revision.  Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision.  So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""
from cStringIO import StringIO
from itertools import (
    chain,
    izip,
    )
from warnings import (
    warn,
    )

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
    repository as _mod_repository,
    revision as _mod_revision,
""")

from bzrlib.osutils import (
    format_date,
    get_terminal_encoding,
    terminal_width,
    )
def find_touching_revisions(branch, file_id):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    for revision_id in branch.revision_history():
        this_inv = branch.repository.get_revision_inventory(revision_id)
        if file_id in this_inv:
            this_ie = this_inv[file_id]
            this_path = this_inv.id2path(file_id)
        else:
            this_ie = this_path = None

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?

        if not this_ie and not last_ie:
            # not present in either
            pass
        elif this_ie and not last_ie:
            yield revno, revision_id, "added " + this_path
        elif not this_ie and last_ie:
            yield revno, revision_id, "deleted " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
        elif (this_ie.text_size != last_ie.text_size
              or this_ie.text_sha1 != last_ie.text_sha1):
            yield revno, revision_id, "modified " + this_path

        last_path = this_path


def _enumerate_history(branch):
    for rev_id in branch.revision_history():
        rh.append((revno, rev_id))
class LogRequest(object):
    """Query parameters for logging a branch."""

    def __init__(self, direction='reverse',
                 specific_fileids=None,
                 _match_using_deltas=True,
        """Create a logging request.

        Each of these parameters becomes a public attribute of the object.

        :param direction: 'reverse' (default) is latest to earliest;
          'forward' is earliest to latest.
        :param specific_fileids: If not None, only include revisions
          affecting the specified files, rather than all revisions.
        :param start_revision: If not None, only generate
          revisions >= start_revision
        :param end_revision: If not None, only generate
          revisions <= end_revision
        :param limit: If set, generate only 'limit' revisions, all revisions
          are shown if None or 0.
        :param message_search: If not None, only include revisions with
          matching commit messages
        :param levels: the number of levels of revisions to
          generate; 1 for just the mainline; 0 for all levels.
        :param generate_tags: If True, include tags for matched revisions.
        :param delta_type: Either 'full', 'partial' or None.
          'full' means generate the complete delta - adds/deletes/modifies/etc;
          'partial' means filter the delta using specific_fileids;
          None means do not generate any delta.
        :param diff_type: Either 'full', 'partial' or None.
          'full' means generate the complete diff - adds/deletes/modifies/etc;
          'partial' means filter the diff using specific_fileids;
          None means do not generate any diff.
        :param _match_using_deltas: a private parameter controlling the
          algorithm used for matching specific_fileids. This parameter
          may be removed in the future so bzrlib client code should NOT
          use it.
        """
        self.direction = direction
        self.specific_fileids = specific_fileids
        self.start_revision = start_revision
        self.end_revision = end_revision
        self.message_search = message_search
        self.generate_tags = generate_tags
        self.delta_type = delta_type
        self.diff_type = diff_type
        # Add 'private' attributes for features that may be deprecated
        self._match_using_deltas = _match_using_deltas
        self._allow_single_merge_revision = True
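

# A minimal illustrative sketch (not part of the original module): building a
# LogRequest that logs only the revisions touching one file, with per-file
# deltas. The helper name and file-id below are hypothetical.
def _example_build_file_request(file_id):
    # Newest first, at most 10 revisions, with deltas filtered to the file.
    return LogRequest(direction='reverse',
                      specific_fileids=[file_id],
                      limit=10,
                      delta_type='partial')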


def show_log_request(branch, lf, rqst):
    """Write out human-readable log of commits to this branch.

    :param lf: The LogFormatter object showing the output.
    :param rqst: The LogRequest object specifying the query parameters.
    """
    if getattr(lf, 'begin_log', None):
    _show_log_request(branch, lf, rqst)
    if getattr(lf, 'end_log', None):


def show_log(branch, lf,
             specific_fileid=None,
    """Write out human-readable log of commits to this branch.

    Note: show_log_request() is now the preferred API to this one.
    This function is being retained for backwards compatibility but
    should not be extended with new parameters.

    :param lf: The LogFormatter object showing the output.
    :param specific_fileid: If not None, list only the commits affecting the
      specified file, rather than all commits.
    :param verbose: If True show added/changed/deleted/renamed files.
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param search: If not None, only show revisions with matching commit
    :param limit: If set, shows only 'limit' revisions, all revisions are shown
    :param show_diff: If True, output a diff after each revision.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
        delta_type = 'partial'
        diff_type = 'partial'

    # Build the request and execute it
    rqst = LogRequest(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    show_log_request(branch, lf, rqst)
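

# A minimal illustrative sketch (not part of the original module) of the
# new-style API: build a LogRequest and hand it to show_log_request() with a
# formatter from the registry. The helper name is hypothetical and ``branch``
# is assumed to be an already-opened bzrlib Branch.
def _example_show_recent_history(branch, to_file):
    lf = log_formatter_registry.get_default(branch)(to_file=to_file)
    rqst = LogRequest(direction='reverse', limit=5, levels=1)
    show_log_request(branch, lf, rqst)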


def _show_log_request(branch, lf, rqst):
    """Worker function for show_log_request - see show_log_request."""
    if not isinstance(lf, LogFormatter):
        warn("not a LogFormatter instance: %r" % lf)

    # Tweak the LogRequest based on what the LogFormatter can handle.
    # (There's no point generating stuff if the formatter can't display it.)
    rqst.levels = lf.get_levels()
    if not getattr(lf, 'supports_tags', False):
        rqst.generate_tags = False
    if not getattr(lf, 'supports_delta', False):
        rqst.delta_type = None
    if not getattr(lf, 'supports_diff', False):
        rqst.diff_type = None
    if not getattr(lf, 'supports_merge_revisions', False):
        rqst._allow_single_merge_revision = getattr(lf,
            'supports_single_merge_revision', False)

    # Find and print the interesting revisions
    generator = _LogGenerator(branch, rqst)
    for lr in generator.iter_log_revisions():


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _LogGenerator(object):
    """A generator of log revisions given a branch and a LogRequest."""

    def __init__(self, branch, rqst):
        if rqst.generate_tags and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if rqst.levels != 0 and merge_depth >= rqst.levels:
                diff = self._format_diff(rev, rev_id)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                if log_count >= rqst.limit:

    def _format_diff(self, rev, rev_id):
        diff_type = self.rqst.diff_type
        if diff_type is None:
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.specific_fileids
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.start_revision, self.rqst.end_revision)
        if self.rqst._match_using_deltas:
            return self._log_revision_iterator_using_delta_matching()
        # We're using the per-file-graph algorithm. This scales really
        # well but only makes sense if there is a single file and it's
        file_count = len(self.rqst.specific_fileids)
            raise BzrError("illegal LogRequest: must match-using-deltas "
                "when logging %d files" % file_count)
        return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        generate_merge_revisions = rqst.levels != 1
        delayed_graph_generation = not rqst.specific_fileids and (
            rqst.limit or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.direction, generate_merge_revisions,
            rqst._allow_single_merge_revision,
            delayed_graph_generation=delayed_graph_generation)

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.delta_type, rqst.message_search,
            file_ids=rqst.specific_fileids, direction=rqst.direction)

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.direction, True,
            rqst._allow_single_merge_revision)
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.specific_fileids[0], view_revisions,
            include_merges=rqst.levels != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.delta_type, rqst.message_search)


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions, allow_single_merge_revision,
                         delayed_graph_generation=False):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    br_revno, br_rev_id = branch.last_revision_info()

    # If a single revision is requested, check we can handle it
    generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
        (not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
    if generate_single_revision:
        return _generate_one_revision(branch, end_rev_id, br_rev_id, br_revno,
            allow_single_merge_revision)

    # If we only want to see linear revisions, we can iterate ...
    if not generate_merge_revisions:
        return _generate_flat_revisions(branch, start_rev_id, end_rev_id,
            direction)
    else:
        return _generate_all_revisions(branch, start_rev_id, end_rev_id,
            direction, delayed_graph_generation)


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno,
                           allow_single_merge_revision):
    if rev_id == br_rev_id:
        return [(br_rev_id, br_revno, 0)]
    revno = branch.revision_id_to_dotted_revno(rev_id)
    if len(revno) > 1 and not allow_single_merge_revision:
        # It's a merge revision and the log formatter is
        # completely brain dead. This "feature" of allowing
        # log formatters incapable of displaying dotted revnos
        # ought to be deprecated IMNSHO. IGC 20091022
        raise errors.BzrCommandError('Selected log formatter only'
            ' supports mainline revisions.')
    revno_str = '.'.join(str(n) for n in revno)
    return [(rev_id, revno_str, 0)]


def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    if direction == 'forward':
        result = reversed(result)


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in \
                    _linear_view_revisions(branch, start_rev_id, end_rev_id):
                if _has_merges(branch, rev_id):
                initial_revisions.append((rev_id, revno, depth))
            # No merged revisions found
            if direction == 'reverse':
                return initial_revisions
            elif direction == 'forward':
                return reversed(initial_revisions)
            else:
                raise ValueError('invalid direction %r' % direction)
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
            rebase_initial_depths=direction == 'reverse'))
    if direction == 'reverse':
        return view_revisions
    elif direction == 'forward':
        # Forward means oldest first, adjusting for depth.
        view_revisions = reverse_by_depth(list(view_revisions))
        return _rebase_merge_depth(view_revisions)
    else:
        raise ValueError('invalid direction %r' % direction)


def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]


def _linear_view_revisions(branch, start_rev_id, end_rev_id):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
      is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    if start_rev_id is None and end_rev_id is None:
        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
            yield revision_id, str(cur_revno), 0
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
            revno = branch.revision_id_to_dotted_revno(revision_id)
            revno_str = '.'.join(str(n) for n in revno)
            if not found_start and revision_id == start_rev_id:
                yield revision_id, revno_str, 0
            else:
                yield revision_id, revno_str, 0
        raise _StartNotLinearAncestor()


def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depth: should depths be rebased until a mainline
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule="with-merges")
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if merge_depth < depth_adjustment:
                depth_adjustment = merge_depth
            merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth


def calculate_view_revisions(branch, start_revision, end_revision, direction,
        specific_fileid, generate_merge_revisions, allow_single_merge_revision):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid,
        allow_single_merge_revision))
    view_revisions = _filter_revisions_touching_file_id(branch,
        specific_fileid, view_revisions,
        include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r,n,d in view_revisions])
        view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
    return view_revisions
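

# Illustrative sketch (not part of the original module): _rebase_merge_depth()
# only shifts depths when the whole view starts inside a merge. The revision
# ids below are made up.
def _example_rebase_merge_depth():
    nested_view = [('rev-b', '1.1.2', 2), ('rev-a', '1.1.1', 1)]
    # min depth is 1, so every depth is reduced by 1
    assert _rebase_merge_depth(nested_view) == [
        ('rev-b', '1.1.2', 1), ('rev-a', '1.1.1', 0)]
    # A view that already touches the mainline (depth 0) is returned as is.
    mainline_view = [('rev-c', '2', 0), ('rev-b', '1.1.1', 1)]
    assert _rebase_merge_depth(mainline_view) == mainline_view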


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if type(view_revisions) == list:
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator, file_ids, direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator)
    return log_rev_iterator


def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
      could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
        return log_rev_iterator
    searchRE = re_compile_checked(search, re.IGNORECASE,
        'log message filter')
    return _filter_message_re(searchRE, log_rev_iterator)


def _filter_message_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        for (rev_id, revno, merge_depth), rev, delta in revs:
            if searchRE.search(rev.message):
                new_revs.append(((rev_id, revno, merge_depth), rev, delta))


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
      could be displayed, in lists.
    :param fileids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
        generate_delta, fileids, direction)


def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    fileid_set = set(fileids)
    if direction == 'reverse':
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
        revisions = [rev[1] for rev in revs]
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in izip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in izip(revs, deltas):
                if delta is None or not delta.has_changed():
                _update_fileids(delta, fileid_set, stop_on)
                if delta_type is None:
                elif delta_type == 'full':
                    # If the file matches all the time, rebuilding
                    # a full delta like this in addition to a partial
                    # one could be slow. However, it's likely the
                    # most revisions won't get this far, making it
                    # faster to filter on the partial deltas and
                    # build the occasional full delta than always
                    # building full deltas and filtering those.
                    delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
      fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])
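

# Illustrative sketch (not part of the original module): when walking newest to
# oldest, a searched file-id can be dropped from the set as soon as its 'added'
# entry is seen. The stand-in delta class and ids below are made up.
class _ExampleDelta(object):
    added = [('hello.c', 'hello-id', 'file')]
    removed = []


def _example_update_fileids():
    remaining = set(['hello-id', 'other-id'])
    _update_fileids(_ExampleDelta(), remaining, stop_on='add')
    assert remaining == set(['other-id'])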


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
      could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = repository.get_revisions(revision_ids)
        revs = [(rev[0], revision, rev[2]) for rev, revision in
            izip(revs, revisions)]


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
      could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
    repository = branch.repository
    for batch in log_rev_iterator:
        step = [detail for _, detail in zip(range(num), batch)]
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
      For backwards compatibility this may be a mainline integer revno,
      but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
      For backwards compatibility this may be a mainline integer revno,
      but for merge revision support a RevisionInfo is expected.
    :return: (start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_rev_id = branch.last_revision_info()
    if start_revision is None:
    if isinstance(start_revision, revisionspec.RevisionInfo):
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno or 1
    else:
        branch.check_real_revno(start_revision)
        start_revno = start_revision
        start_rev_id = branch.get_rev_id(start_revno)

    if end_revision is None:
        end_revno = branch_revno
    if isinstance(end_revision, revisionspec.RevisionInfo):
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno or branch_revno
    else:
        branch.check_real_revno(end_revision)
        end_revno = end_revision
        end_rev_id = branch.get_rev_id(end_revno)

    if branch_revno != 0:
        if (start_rev_id == _mod_revision.NULL_REVISION
            or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError('Logging revision 0 is invalid.')
        if start_revno > end_revno:
            raise errors.BzrCommandError("Start revision must be older than "
    return (start_rev_id, end_rev_id)


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
      For backwards compatibility this may be a mainline integer revno,
      but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
      For backwards compatibility this may be a mainline integer revno,
      but for merge revision support a RevisionInfo is expected.
    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # Also map the revisions to rev_ids, to be used in the later filtering
    if start_revision is None:
    if isinstance(start_revision, revisionspec.RevisionInfo):
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno or 1
    else:
        branch.check_real_revno(start_revision)
        start_revno = start_revision

    if end_revision is None:
        end_revno = branch_revno
    if isinstance(end_revision, revisionspec.RevisionInfo):
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno or branch_revno
    else:
        branch.check_real_revno(end_revision)
        end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
        or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError('Logging revision 0 is invalid.')
    if start_revno > end_revno:
        raise errors.BzrCommandError("Start revision must be older than "

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    for revision_id in branch.repository.iter_reverse_revision_history(
            branch_last_revision):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)

    # We walked off the edge of all revisions, so we add a 'None' marker
    mainline_revs.append(None)

    mainline_revs.reverse()
    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
    """Filter view_revisions based on revision ranges.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
      tuples to be filtered.
    :param start_rev_id: If not NONE specifies the first revision to be logged.
      If NONE then all revisions up to the end_rev_id are logged.
    :param end_rev_id: If not NONE specifies the last revision to be logged.
      If NONE then all revisions up to the end of the log are logged.
    :return: The filtered view_revisions.
    """
    # This method is no longer called by the main code path.
    # It may be removed soon. IGC 20090127
    if start_rev_id or end_rev_id:
        revision_ids = [r for r, n, d in view_revisions]
        start_index = revision_ids.index(start_rev_id)
        if start_rev_id == end_rev_id:
            end_index = start_index
        else:
            end_index = revision_ids.index(end_rev_id)
            end_index = len(view_revisions) - 1
        # To include the revisions merged into the last revision,
        # extend end_rev_id down to, but not including, the next rev
        # with the same or lesser merge_depth
        end_merge_depth = view_revisions[end_index][2]
        try:
            for index in xrange(end_index+1, len(view_revisions)+1):
                if view_revisions[index][2] <= end_merge_depth:
                    end_index = index - 1
        except IndexError:
            # if the search falls off the end then log to the end as well
            end_index = len(view_revisions) - 1
        view_revisions = view_revisions[start_index:end_index+1]
    return view_revisions


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
        include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.
    :param file_id: Filter out revisions that do not touch file_id.
    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
    :param include_merges: include merge revisions in the result or not
    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    # indexing layer. We might consider passing in hints as to the known
    # access pattern (sparse/clustered, high success rate/low success
    # rate). This particular access is clustered with a low success rate.
    get_parent_map = branch.repository.texts.get_parent_map
    modified_text_revisions = set()
    for start in xrange(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in xrange(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        current_merge_stack[idx] = None


def get_view_revisions(mainline_revs, rev_nos, branch, direction,
        include_merges=True):
    """Produce an iterator of revisions to show
    :return: an iterator of (revision_id, revno, merge_depth)
    (if there is no revno for a revision, None is supplied)
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated
    # soon. IGC 20090127
    if not include_merges:
        revision_ids = mainline_revs[1:]
        if direction == 'reverse':
            revision_ids.reverse()
        for revision_id in revision_ids:
            yield revision_id, str(rev_nos[revision_id]), 0
    graph = branch.repository.get_graph()
    # This asks for all mainline revisions, which means we only have to spider
    # sideways, rather than depth history. That said, its still size-of-history
    # and should be addressed.
    # mainline_revisions always includes an extra revision at the beginning, so
    parent_map = dict(((key, value) for key, value in
        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
    # filter out ghosts; merge_sort errors on ghosts.
    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
    merge_sorted_revisions = tsort.merge_sort(
        generate_revno=True)

    if direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
    elif direction != 'reverse':
        raise ValueError('invalid direction %r' % direction)

    for (sequence, rev_id, merge_depth, revno, end_of_merge
         ) in merge_sorted_revisions:
        yield rev_id, '.'.join(map(str, revno)), merge_depth


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this,
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    for chunk in zd_revisions:
        result.extend(chunk)
    # Top level call, get rid of the fake revisions that have been added
    result = [r for r in result if r[0] is not None and r[1] is not None]


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None):
        self.revno = str(revno)
        self.merge_depth = merge_depth


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, and if supports_single_merge_revision is
      also not True, then only mainline revisions will be passed to the
      formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_single_merge_revision must be True if this log formatter
      supports logging only a single merge revision. This flag is
      only relevant if supports_merge_revisions is not True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description:
        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
    """

    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
          or None to leave it up to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
          let the log formatter decide.
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug #328007
        self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2 # long format
        self.delta_format = delta_format
        self.levels = levels

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                return self.preferred_levels

    def log_revision(self, revision):
        """
        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)

    def short_author(self, rev):
        name, address = config.parse_username(rev.get_apparent_authors()[0])

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for key, handler in properties_handler_registry.iteritems():
            for key, value in handler(revision).items():
                self.to_file.write(indent + key + ': ' + value + '\n')

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))
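

# A minimal illustrative sketch (not part of the original module): the smallest
# useful LogFormatter subclass. Only log_revision() is required; the
# supports_XXX flags advertise what it can display. The class name is
# hypothetical.
class _ExampleOneLinerLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_tags = True

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write('%s%s %s\n' % (indent, revision.revno,
            revision.rev.get_summary()))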


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        to_file = self.to_file
        to_file.write(indent + '-' * 60 + '\n')
        if revision.revno is not None:
            to_file.write(indent + 'revno: %s\n' % (revision.revno,))
        to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
        to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
        for parent_id in revision.rev.parent_ids:
            to_file.write(indent + 'parent: %s\n' % (parent_id,))
        self.show_properties(revision.rev, indent)

        committer = revision.rev.committer
        authors = revision.rev.get_apparent_authors()
        if authors != [committer]:
            to_file.write(indent + 'author: %s\n' % (", ".join(authors),))
        to_file.write(indent + 'committer: %s\n' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
        to_file.write(indent + 'timestamp: %s\n' % (date_str,))

        to_file.write(indent + 'message:\n')
        if not revision.rev.message:
            to_file.write(indent + ' (no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + ' %s\n' % (l,))
        if revision.delta is not None:
            # We don't respect delta_format for compatibility
            revision.delta.show(to_file, self.show_ids, indent=indent,
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)


class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = ' ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
            else:
                # dotted revno, e.g. 12345.10.55
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        if len(revision.rev.parent_ids) > 1:
            is_merge = ' [merge]'
        tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno, self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
        self.show_properties(revision.rev, indent+offset)
        to_file.write(indent + offset + 'revision-id:%s\n'
                      % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            revision.delta.show(to_file, self.show_ids, indent=indent + offset,
                                short_status=self.delta_format==1)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, ' ')


class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        self._max_chars = terminal_width() - 1

    def truncate(self, str, max_len):
        if len(str) <= max_len:
        return str[:max_len-3]+'...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",

    def message(self, rev):
            return '(no message)'

    def log_revision(self, revision):
        indent = ' ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers counts from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        # show revno only when is not None
        out.append("%s:" % revno)
        out.append(self.truncate(self.short_author(rev), 20))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        tag_str = '{%s}' % (', '.join(tags))
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)


class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               date_fmt='%Y-%m-%d',
        committer_str = revision.rev.committer.replace (' <', '  <')
        to_file.write('%s %s\n\n' % (date_str,committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath,newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))


def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        return self.get(branch.get_config().log_format())


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)


def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
    lf = log_formatter(log_format,
                       show_timezone='original')

    # This is the first index which is different between
    for i in xrange(max(len(new_rh),
        if (len(new_rh) <= i
            or new_rh[i] != old_rh[i]):

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')

    ## TODO: It might be nice to do something like show_log
    ## and show the merged entries. But since this is the
    ## removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*'*60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i+1, 0, None)
        to_file.write('*'*60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
                 direction='forward',
                 start_revision=base_idx+1,
                 end_revision=len(new_rh),


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    return old_history, new_history
    """
    old_revisions = set()
    new_revisions = set()
    new_iter = repository.iter_reverse_revision_history(new_revision_id)
    old_iter = repository.iter_reverse_revision_history(old_revision_id)
    stop_revision = None
    while do_new or do_old:
            try:
                new_revision = new_iter.next()
            except StopIteration:
            new_history.append(new_revision)
            new_revisions.add(new_revision)
            if new_revision in old_revisions:
                stop_revision = new_revision
            try:
                old_revision = old_iter.next()
            except StopIteration:
            old_history.append(old_revision)
            old_revisions.add(old_revision)
            if old_revision in new_revisions:
                stop_revision = old_revision
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)


def _get_info_for_log_files(revisionspec_list, file_list):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
    """
    from builtins import _get_revision_range, safe_relpath_files
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + safe_relpath_files(tree, file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
    if start_rev_info is None and end_rev_info is None:
            tree = b.basis_tree()
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
                # go back to when time began
                try:
                    rev1 = b.get_rev_id(1)
                except errors.NoSuchRevision:
                tree1 = b.repository.revision_tree(rev1)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
            tree = b.basis_tree()
            tree = b.repository.revision_tree(rev_id)
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
                    rev_id = start_rev_info.rev_id
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)


properties_handler_registry = registry.Registry()
properties_handler_registry.register_lazy("foreign",
                                          "show_foreign_properties")
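

# Illustrative sketch (not part of the original module): a custom revision
# property handler. show_properties() calls each registered handler with the
# revision and writes out the {'name': 'value'} pairs it returns. The handler
# and property key below are hypothetical; plugins would register the function
# on properties_handler_registry.
def _example_show_bug_property(revision):
    bugs = revision.properties.get('bugs')
    if bugs:
        return {'bugs': bugs}
    return {}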


# adapters which revision ids to log are filtered. When log is called, the
# log_rev_iterator is adapted through each of these factory methods.
# Plugins are welcome to mutate this list in any way they like - as long
# as the overall behaviour is preserved. At this point there is no extensible
# mechanism for getting parameters to each factory method, and until there is
# this won't be considered a stable api.
log_adapters = [
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show