             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.

    :param verbose: If True, show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision.

    :param end_revision: If not None, only show revisions <= end_revision.

    :param search: If not None, only show revisions with matching commit
        messages.

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
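# A minimal usage sketch (illustrative, not part of the original module): the
# old-style entry point and its new-style equivalent. The branch location and
# output stream are assumptions made for the example only.
#
#   import sys
#   from bzrlib.branch import Branch
#   b = Branch.open('.')
#   lf = LongLogFormatter(to_file=sys.stdout)
#   show_log(b, lf, verbose=True, limit=5)           # old-style call
#   rqst = make_log_request_dict(limit=5, delta_type='full')
#   Logger(b, rqst).show(lf)                         # equivalent new-style call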
# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': 1,
    'generate_tags': True,
    '_match_using_deltas': True,
    }
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=1, generate_tags=True,
                          delta_type=None, diff_type=None,
                          _match_using_deltas=True):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision.

    :param end_revision: If not None, only generate
      revisions <= end_revision.

    :param limit: If set, generate only 'limit' revisions, all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages.

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.
    """
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
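# Illustrative sketch (an assumption, not in the original module): requests
# are plain dictionaries, so the result of make_log_request_dict() can be
# inspected or tweaked before being handed to Logger. The file id below is
# a placeholder.
#
#   rqst = make_log_request_dict(levels=0, message_search='fix',
#       delta_type='partial', specific_fileids=['some-file-id'])
#   rqst['limit'] = 20   # same effect as passing limit=20 above
#   # Logger(branch, rqst).show(lf) would then apply these settings.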
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    # Work on a copy so callers cannot accidentally mutate the shared defaults
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
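# Illustrative sketch (an assumption, not part of the original module): the
# minimal shape of a concrete generator. A real implementation would build
# LogRevision objects from the branch's revisions.
#
#   class _TipOnlyLogGenerator(LogGenerator):
#       """Yield a LogRevision for just the branch tip."""
#
#       def __init__(self, branch):
#           self.branch = branch
#
#       def iter_log_revisions(self):
#           revno, rev_id = self.branch.last_revision_info()
#           rev = self.branch.repository.get_revision(rev_id)
#           yield LogRevision(rev, revno, 0, None)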
class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)
        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
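# Illustrative sketch (an assumption, not in the original module): because
# _generator_factory() is the documented override point, a subclass can swap
# in its own LogGenerator without touching show() or _show_body().
#
#   class TipOnlyLogger(Logger):
#       def _generator_factory(self, branch, rqst):
#           # _TipOnlyLogGenerator is the hypothetical generator sketched
#           # after the LogGenerator class above.
#           return _TipOnlyLogGenerator(branch)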
class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                levels = rqst.get('levels')
                if levels != 0 and merge_depth >= levels:
                    continue
                diff = self._format_diff(rev, rev_id)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                limit = rqst.get('limit')
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id):
        diff_type = self.rqst.get('diff_type')
        if diff_type is None:
            return None
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='')
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.get('direction'), generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation)

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.get('direction'), True)
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    br_revno, br_rev_id = branch.last_revision_info()
    if br_revno == 0:
        return []

    # If a single revision is requested, check we can handle it
    generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
        (not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
    if generate_single_revision:
        return _generate_one_revision(branch, end_rev_id, br_rev_id, br_revno)

    # If we only want to see linear revisions, we can iterate ...
    if not generate_merge_revisions:
        return _generate_flat_revisions(branch, start_rev_id, end_rev_id,
                                        direction)
    else:
        return _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                       direction, delayed_graph_generation)


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno = branch.revision_id_to_dotted_revno(rev_id)
        revno_str = '.'.join(str(n) for n in revno)
        return [(rev_id, revno_str, 0)]
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    if direction == 'forward':
        result = reversed(result)
    return result
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in \
                _linear_view_revisions(branch, start_rev_id, end_rev_id):
                if _has_merges(branch, rev_id):
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                if direction == 'reverse':
                    return initial_revisions
                elif direction == 'forward':
                    return reversed(initial_revisions)
                else:
                    raise ValueError('invalid direction %r' % direction)
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
        rebase_initial_depths=direction == 'reverse'))
    if direction == 'reverse':
        return view_revisions
    elif direction == 'forward':
        # Forward means oldest first, adjusting for depth.
        view_revisions = reverse_by_depth(list(view_revisions))
        return _rebase_merge_depth(view_revisions)
    else:
        raise ValueError('invalid direction %r' % direction)
def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    return True
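# Worked example (illustrative): a start revision at dotted revno (2, 3, 1)
# and an end revision at (2, 3, 5) both have length 3 and share the (2,)
# prefix, so the ancestry is treated as obvious and 1 <= 5 returns True.
# A start at (2, 3, 1) against a mainline end at (7,) is "not obvious" and
# falls back to the more expensive left-hand history walk.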
def _linear_view_revisions(branch, start_rev_id, end_rev_id):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
      is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    if start_rev_id is None and end_rev_id is None:
        cur_revno = br_revno
        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
            yield revision_id, str(cur_revno), 0
            cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
            revno = branch.revision_id_to_dotted_revno(revision_id)
            revno_str = '.'.join(str(n) for n in revno)
            if not found_start and revision_id == start_rev_id:
                yield revision_id, revno_str, 0
                found_start = True
                break
            else:
                yield revision_id, revno_str, 0
        else:
            if not found_start:
                raise _StartNotLinearAncestor()
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depths: should depths be rebased until a mainline
      revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule="with-merges")
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if depth_adjustment:
                if merge_depth < depth_adjustment:
                    depth_adjustment = merge_depth
                merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth
def calculate_view_revisions(branch, start_revision, end_revision, direction,
                             specific_fileid, generate_merge_revisions):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated soon.
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid))
    if specific_fileid:
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)
def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d - min_depth) for r, n, d in view_revisions]
    return view_revisions
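# Worked example (illustrative): a view that starts inside a merge, e.g.
# [(r1, '1.2.3', 2), (r2, '1.2.2', 3), (r3, '1.2.1', 2)], has no depth-0
# entry, so min_depth is 2 and the depths are rebased to 0, 1, 0 for display.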
700
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
701
file_ids=None, direction='reverse'):
702
"""Create a revision iterator for log.
704
:param branch: The branch being logged.
705
:param view_revisions: The revisions being viewed.
706
:param generate_delta: Whether to generate a delta for each revision.
707
Permitted values are None, 'full' and 'partial'.
708
:param search: A user text search string.
709
:param file_ids: If non empty, only revisions matching one or more of
710
the file-ids are to be kept.
711
:param direction: the direction in which view_revisions is sorted
712
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
715
# Convert view_revisions into (view, None, None) groups to fit with
716
# the standard interface here.
717
if type(view_revisions) == list:
718
# A single batch conversion is faster than many incremental ones.
719
# As we have all the data, do a batch conversion.
720
nones = [None] * len(view_revisions)
721
log_rev_iterator = iter([zip(view_revisions, nones, nones)])
724
for view in view_revisions:
725
yield (view, None, None)
726
log_rev_iterator = iter([_convert()])
727
for adapter in log_adapters:
728
# It would be nicer if log adapters were first class objects
729
# with custom parameters. This will do for now. IGC 20090127
730
if adapter == _make_delta_filter:
731
log_rev_iterator = adapter(branch, generate_delta,
732
search, log_rev_iterator, file_ids, direction)
734
log_rev_iterator = adapter(branch, generate_delta,
735
search, log_rev_iterator)
736
return log_rev_iterator
739
def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
740
"""Create a filtered iterator of log_rev_iterator matching on a regex.
742
:param branch: The branch being logged.
743
:param generate_delta: Whether to generate a delta for each revision.
744
:param search: A user text search string.
745
:param log_rev_iterator: An input iterator containing all revisions that
746
could be displayed, in lists.
747
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
751
return log_rev_iterator
752
searchRE = re_compile_checked(search, re.IGNORECASE,
753
'log message filter')
754
return _filter_message_re(searchRE, log_rev_iterator)
757
def _filter_message_re(searchRE, log_rev_iterator):
758
for revs in log_rev_iterator:
760
for (rev_id, revno, merge_depth), rev, delta in revs:
761
if searchRE.search(rev.message):
762
new_revs.append(((rev_id, revno, merge_depth), rev, delta))
766
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
767
fileids=None, direction='reverse'):
768
"""Add revision deltas to a log iterator if needed.
770
:param branch: The branch being logged.
771
:param generate_delta: Whether to generate a delta for each revision.
772
Permitted values are None, 'full' and 'partial'.
773
:param search: A user text search string.
774
:param log_rev_iterator: An input iterator containing all revisions that
775
could be displayed, in lists.
776
:param fileids: If non empty, only revisions matching one or more of
777
the file-ids are to be kept.
778
:param direction: the direction in which view_revisions is sorted
779
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
782
if not generate_delta and not fileids:
783
return log_rev_iterator
784
return _generate_deltas(branch.repository, log_rev_iterator,
785
generate_delta, fileids, direction)
788
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
790
"""Create deltas for each batch of revisions in log_rev_iterator.
792
If we're only generating deltas for the sake of filtering against
793
file-ids, we stop generating deltas once all file-ids reach the
794
appropriate life-cycle point. If we're receiving data newest to
795
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
797
check_fileids = fileids is not None and len(fileids) > 0
799
fileid_set = set(fileids)
800
if direction == 'reverse':
806
for revs in log_rev_iterator:
807
# If we were matching against fileids and we've run out,
808
# there's nothing left to do
809
if check_fileids and not fileid_set:
811
revisions = [rev[1] for rev in revs]
813
if delta_type == 'full' and not check_fileids:
814
deltas = repository.get_deltas_for_revisions(revisions)
815
for rev, delta in izip(revs, deltas):
816
new_revs.append((rev[0], rev[1], delta))
818
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
819
for rev, delta in izip(revs, deltas):
821
if delta is None or not delta.has_changed():
824
_update_fileids(delta, fileid_set, stop_on)
825
if delta_type is None:
827
elif delta_type == 'full':
828
# If the file matches all the time, rebuilding
829
# a full delta like this in addition to a partial
830
# one could be slow. However, it's likely that
831
# most revisions won't get this far, making it
832
# faster to filter on the partial deltas and
833
# build the occasional full delta than always
834
# building full deltas and filtering those.
836
delta = repository.get_revision_delta(rev_id)
837
new_revs.append((rev[0], rev[1], delta))
def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
      fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])
858
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
859
"""Extract revision objects from the repository
861
:param branch: The branch being logged.
862
:param generate_delta: Whether to generate a delta for each revision.
863
:param search: A user text search string.
864
:param log_rev_iterator: An input iterator containing all revisions that
865
could be displayed, in lists.
866
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
869
repository = branch.repository
870
for revs in log_rev_iterator:
871
# r = revision_id, n = revno, d = merge depth
872
revision_ids = [view[0] for view, _, _ in revs]
873
revisions = repository.get_revisions(revision_ids)
874
revs = [(rev[0], revision, rev[2]) for rev, revision in
875
izip(revs, revisions)]
879
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
880
"""Group up a single large batch into smaller ones.
882
:param branch: The branch being logged.
883
:param generate_delta: Whether to generate a delta for each revision.
884
:param search: A user text search string.
885
:param log_rev_iterator: An input iterator containing all revisions that
886
could be displayed, in lists.
887
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
890
repository = branch.repository
892
for batch in log_rev_iterator:
895
step = [detail for _, detail in zip(range(num), batch)]
899
num = min(int(num * 1.5), 200)
902
def _get_revision_limits(branch, start_revision, end_revision):
903
"""Get and check revision limits.
905
:param branch: The branch containing the revisions.
907
:param start_revision: The first revision to be logged.
908
For backwards compatibility this may be a mainline integer revno,
909
but for merge revision support a RevisionInfo is expected.
911
:param end_revision: The last revision to be logged.
912
For backwards compatibility this may be a mainline integer revno,
913
but for merge revision support a RevisionInfo is expected.
915
:return: (start_rev_id, end_rev_id) tuple.
917
branch_revno, branch_rev_id = branch.last_revision_info()
919
if start_revision is None:
922
if isinstance(start_revision, revisionspec.RevisionInfo):
923
start_rev_id = start_revision.rev_id
924
start_revno = start_revision.revno or 1
926
branch.check_real_revno(start_revision)
927
start_revno = start_revision
928
start_rev_id = branch.get_rev_id(start_revno)
931
if end_revision is None:
932
end_revno = branch_revno
934
if isinstance(end_revision, revisionspec.RevisionInfo):
935
end_rev_id = end_revision.rev_id
936
end_revno = end_revision.revno or branch_revno
938
branch.check_real_revno(end_revision)
939
end_revno = end_revision
940
end_rev_id = branch.get_rev_id(end_revno)
942
if branch_revno != 0:
943
if (start_rev_id == _mod_revision.NULL_REVISION
944
or end_rev_id == _mod_revision.NULL_REVISION):
945
raise errors.BzrCommandError('Logging revision 0 is invalid.')
946
if start_revno > end_revno:
947
raise errors.BzrCommandError("Start revision must be older than "
949
return (start_rev_id, end_rev_id)
952
def _get_mainline_revs(branch, start_revision, end_revision):
953
"""Get the mainline revisions from the branch.
955
Generates the list of mainline revisions for the branch.
957
:param branch: The branch containing the revisions.
959
:param start_revision: The first revision to be logged.
960
For backwards compatibility this may be a mainline integer revno,
961
but for merge revision support a RevisionInfo is expected.
963
:param end_revision: The last revision to be logged.
964
For backwards compatibility this may be a mainline integer revno,
965
but for merge revision support a RevisionInfo is expected.
967
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
969
branch_revno, branch_last_revision = branch.last_revision_info()
970
if branch_revno == 0:
971
return None, None, None, None
973
# For mainline generation, map start_revision and end_revision to
974
# mainline revnos. If the revision is not on the mainline choose the
975
# appropriate extreme of the mainline instead - the extra will be
977
# Also map the revisions to rev_ids, to be used in the later filtering
980
if start_revision is None:
983
if isinstance(start_revision, revisionspec.RevisionInfo):
984
start_rev_id = start_revision.rev_id
985
start_revno = start_revision.revno or 1
987
branch.check_real_revno(start_revision)
988
start_revno = start_revision
991
if end_revision is None:
992
end_revno = branch_revno
994
if isinstance(end_revision, revisionspec.RevisionInfo):
995
end_rev_id = end_revision.rev_id
996
end_revno = end_revision.revno or branch_revno
998
branch.check_real_revno(end_revision)
999
end_revno = end_revision
1001
if ((start_rev_id == _mod_revision.NULL_REVISION)
1002
or (end_rev_id == _mod_revision.NULL_REVISION)):
1003
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1004
if start_revno > end_revno:
1005
raise errors.BzrCommandError("Start revision must be older than "
1006
"the end revision.")
1008
if end_revno < start_revno:
1009
return None, None, None, None
1010
cur_revno = branch_revno
1013
for revision_id in branch.repository.iter_reverse_revision_history(
1014
branch_last_revision):
1015
if cur_revno < start_revno:
1016
# We have gone far enough, but we always add 1 more revision
1017
rev_nos[revision_id] = cur_revno
1018
mainline_revs.append(revision_id)
1020
if cur_revno <= end_revno:
1021
rev_nos[revision_id] = cur_revno
1022
mainline_revs.append(revision_id)
1025
# We walked off the edge of all revisions, so we add a 'None' marker
1026
mainline_revs.append(None)
1028
mainline_revs.reverse()
1030
# override the mainline to look like the revision history.
1031
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1034
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
1035
"""Filter view_revisions based on revision ranges.
1037
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1038
tuples to be filtered.
1040
:param start_rev_id: If not NONE specifies the first revision to be logged.
1041
If NONE then all revisions up to the end_rev_id are logged.
1043
:param end_rev_id: If not NONE specifies the last revision to be logged.
1044
If NONE then all revisions up to the end of the log are logged.
1046
:return: The filtered view_revisions.
1048
# This method is no longer called by the main code path.
1049
# It may be removed soon. IGC 20090127
1050
if start_rev_id or end_rev_id:
1051
revision_ids = [r for r, n, d in view_revisions]
1053
start_index = revision_ids.index(start_rev_id)
1056
if start_rev_id == end_rev_id:
1057
end_index = start_index
1060
end_index = revision_ids.index(end_rev_id)
1062
end_index = len(view_revisions) - 1
1063
# To include the revisions merged into the last revision,
1064
# extend end_rev_id down to, but not including, the next rev
1065
# with the same or lesser merge_depth
1066
end_merge_depth = view_revisions[end_index][2]
1068
for index in xrange(end_index+1, len(view_revisions)+1):
1069
if view_revisions[index][2] <= end_merge_depth:
1070
end_index = index - 1
1073
# if the search falls off the end then log to the end as well
1074
end_index = len(view_revisions) - 1
1075
view_revisions = view_revisions[start_index:end_index+1]
1076
return view_revisions
1079
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1080
include_merges=True):
1081
r"""Return the list of revision ids which touch a given file id.
1083
The function filters view_revisions and returns a subset.
1084
This includes the revisions which directly change the file id,
1085
and the revisions which merge these changes. So if the
1097
And 'C' changes a file, then both C and D will be returned. F will not be
1098
returned even though it brings the changes to C into the branch starting
1099
with E. (Note that if we were using F as the tip instead of G, then we
1102
This will also be restricted based on a subset of the mainline.
1104
:param branch: The branch where we can get text revision information.
1106
:param file_id: Filter out revisions that do not touch file_id.
1108
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1109
tuples. This is the list of revisions which will be filtered. It is
1110
assumed that view_revisions is in merge_sort order (i.e. newest
1113
:param include_merges: include merge revisions in the result or not
1115
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1117
# Lookup all possible text keys to determine which ones actually modified
1119
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1121
# Looking up keys in batches of 1000 can cut the time in half, as well as
1122
# memory consumption. GraphIndex *does* like to look for a few keys in
1123
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1124
# TODO: This code needs to be re-evaluated periodically as we tune the
1125
# indexing layer. We might consider passing in hints as to the known
1126
# access pattern (sparse/clustered, high success rate/low success
1127
# rate). This particular access is clustered with a low success rate.
1128
get_parent_map = branch.repository.texts.get_parent_map
1129
modified_text_revisions = set()
1131
for start in xrange(0, len(text_keys), chunk_size):
1132
next_keys = text_keys[start:start + chunk_size]
1133
# Only keep the revision_id portion of the key
1134
modified_text_revisions.update(
1135
[k[1] for k in get_parent_map(next_keys)])
1136
del text_keys, next_keys
1139
# Track what revisions will merge the current revision, replace entries
1140
# with 'None' when they have been added to result
1141
current_merge_stack = [None]
1142
for info in view_revisions:
1143
rev_id, revno, depth = info
1144
if depth == len(current_merge_stack):
1145
current_merge_stack.append(info)
1147
del current_merge_stack[depth + 1:]
1148
current_merge_stack[-1] = info
1150
if rev_id in modified_text_revisions:
1151
# This needs to be logged, along with the extra revisions
1152
for idx in xrange(len(current_merge_stack)):
1153
node = current_merge_stack[idx]
1154
if node is not None:
1155
if include_merges or node[2] == 0:
1157
current_merge_stack[idx] = None
1161
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
1162
include_merges=True):
1163
"""Produce an iterator of revisions to show
1164
:return: an iterator of (revision_id, revno, merge_depth)
1165
(if there is no revno for a revision, None is supplied)
1167
# This method is no longer called by the main code path.
1168
# It is retained for API compatibility and may be deprecated
1169
# soon. IGC 20090127
1170
if not include_merges:
1171
revision_ids = mainline_revs[1:]
1172
if direction == 'reverse':
1173
revision_ids.reverse()
1174
for revision_id in revision_ids:
1175
yield revision_id, str(rev_nos[revision_id]), 0
1177
graph = branch.repository.get_graph()
1178
# This asks for all mainline revisions, which means we only have to spider
1179
# sideways, rather than depth history. That said, its still size-of-history
1180
# and should be addressed.
1181
# mainline_revisions always includes an extra revision at the beginning, so
1183
parent_map = dict(((key, value) for key, value in
1184
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
1185
# filter out ghosts; merge_sort errors on ghosts.
1186
rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
1187
merge_sorted_revisions = tsort.merge_sort(
1191
generate_revno=True)
1193
if direction == 'forward':
1194
# forward means oldest first.
1195
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
1196
elif direction != 'reverse':
1197
raise ValueError('invalid direction %r' % direction)
1199
for (sequence, rev_id, merge_depth, revno, end_of_merge
1200
) in merge_sorted_revisions:
1201
yield rev_id, '.'.join(map(str, revno)), merge_depth
1204
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1205
"""Reverse revisions by depth.
1207
Revisions with a different depth are sorted as a group with the previous
1208
revision of that depth. There may be no topological justification for this,
1209
but it looks much nicer.
1211
# Add a fake revision at start so that we can always attach sub revisions
1212
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1214
for val in merge_sorted_revisions:
1215
if val[2] == _depth:
1216
# Each revision at the current depth becomes a chunk grouping all
1217
# higher depth revisions.
1218
zd_revisions.append([val])
1220
zd_revisions[-1].append(val)
1221
for revisions in zd_revisions:
1222
if len(revisions) > 1:
1223
# We have higher depth revisions, let reverse them locally
1224
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1225
zd_revisions.reverse()
1227
for chunk in zd_revisions:
1228
result.extend(chunk)
1230
# Top level call, get rid of the fake revisions that have been added
1231
result = [r for r in result if r[0] is not None and r[1] is not None]
1235
class LogRevision(object):
1236
"""A revision to be logged (by LogFormatter.log_revision).
1238
A simple wrapper for the attributes of a revision to be logged.
1239
The attributes may or may not be populated, as determined by the
1240
logging options and the log formatter capabilities.
1243
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1244
tags=None, diff=None):
1246
self.revno = str(revno)
1247
self.merge_depth = merge_depth
287
1253
class LogFormatter(object):
288
"""Abstract class to display log messages."""
289
def __init__(self, to_file, show_ids=False, show_timezone='original'):
1254
"""Abstract class to display log messages.
1256
At a minimum, a derived class must implement the log_revision method.
1258
If the LogFormatter needs to be informed of the beginning or end of
1259
a log it should implement the begin_log and/or end_log hook methods.
1261
A LogFormatter should define the following supports_XXX flags
1262
to indicate which LogRevision attributes it supports:
1264
- supports_delta must be True if this log formatter supports delta.
1265
Otherwise the delta attribute may not be populated. The 'delta_format'
1266
attribute describes whether the 'short_status' format (1) or the long
1267
one (2) should be used.
1269
- supports_merge_revisions must be True if this log formatter supports
1270
merge revisions. If not, then only mainline revisions will be passed
1273
- preferred_levels is the number of levels this formatter defaults to.
1274
The default value is zero meaning display all levels.
1275
This value is only relevant if supports_merge_revisions is True.
1277
- supports_tags must be True if this log formatter supports tags.
1278
Otherwise the tags attribute may not be populated.
1280
- supports_diff must be True if this log formatter supports diffs.
1281
Otherwise the diff attribute may not be populated.
1283
Plugins can register functions to show custom revision properties using
1284
the properties_handler_registry. The registered function
1285
must respect the following interface description:
1286
def my_show_properties(properties_dict):
1287
# code that returns a dict {'name':'value'} of the properties
1290
preferred_levels = 0
1292
def __init__(self, to_file, show_ids=False, show_timezone='original',
1293
delta_format=None, levels=None, show_advice=False):
1294
"""Create a LogFormatter.
1296
:param to_file: the file to output to
1297
:param show_ids: if True, revision-ids are to be displayed
1298
:param show_timezone: the timezone to use
1299
:param delta_format: the level of delta information to display
1300
or None to leave it to the formatter to decide
1301
:param levels: the number of levels to display; None or -1 to
1302
let the log formatter decide.
1303
:param show_advice: whether to show advice at the end of the
290
1306
self.to_file = to_file
1307
# 'exact' stream used to show diff, it should print content 'as is'
1308
# and should not try to decode/encode it to unicode to avoid bug #328007
1309
self.to_exact_file = getattr(to_file, 'stream', to_file)
291
1310
self.show_ids = show_ids
292
1311
self.show_timezone = show_timezone
295
def show(self, revno, rev, delta):
1312
if delta_format is None:
1313
# Ensures backward compatibility
1314
delta_format = 2 # long format
1315
self.delta_format = delta_format
1316
self.levels = levels
1317
self._show_advice = show_advice
1318
self._merge_count = 0
1320
def get_levels(self):
1321
"""Get the number of levels to display or 0 for all."""
1322
if getattr(self, 'supports_merge_revisions', False):
1323
if self.levels is None or self.levels == -1:
1324
self.levels = self.preferred_levels
1329
def log_revision(self, revision):
1332
:param revision: The LogRevision to be logged.
296
1334
raise NotImplementedError('not implemented in abstract base')
1336
def show_advice(self):
1337
"""Output user advice, if any, when the log is completed."""
1338
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1339
advice_sep = self.get_advice_separator()
1341
self.to_file.write(advice_sep)
1343
"Use --include-merges or -n0 to see merged revisions.\n")
1345
def get_advice_separator(self):
1346
"""Get the text separating the log from the closing advice."""
1349
def short_committer(self, rev):
1350
name, address = config.parse_username(rev.committer)
1355
def short_author(self, rev):
1356
name, address = config.parse_username(rev.get_apparent_authors()[0])
1361
def merge_marker(self, revision):
1362
"""Get the merge marker to include in the output or '' if none."""
1363
if len(revision.rev.parent_ids) > 1:
1364
self._merge_count += 1
1369
def show_properties(self, revision, indent):
1370
"""Displays the custom properties returned by each registered handler.
1372
If a registered handler raises an error it is propagated.
1374
for key, handler in properties_handler_registry.iteritems():
1375
for key, value in handler(revision).items():
1376
self.to_file.write(indent + key + ': ' + value + '\n')
1378
def show_diff(self, to_file, diff, indent):
1379
for l in diff.rstrip().split('\n'):
1380
to_file.write(indent + '%s\n' % (l,))
303
1383
class LongLogFormatter(LogFormatter):
304
def show(self, revno, rev, delta):
305
from osutils import format_date
1385
supports_merge_revisions = True
1386
preferred_levels = 1
1387
supports_delta = True
1388
supports_tags = True
1389
supports_diff = True
1391
def log_revision(self, revision):
1392
"""Log a revision, either merged or not."""
1393
indent = ' ' * revision.merge_depth
307
1394
to_file = self.to_file
309
print >>to_file, '-' * 60
310
print >>to_file, 'revno:', revno
1395
to_file.write(indent + '-' * 60 + '\n')
1396
if revision.revno is not None:
1397
to_file.write(indent + 'revno: %s%s\n' % (revision.revno,
1398
self.merge_marker(revision)))
1400
to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
311
1401
if self.show_ids:
312
print >>to_file, 'revision-id:', rev.revision_id
313
print >>to_file, 'committer:', rev.committer
315
date_str = format_date(rev.timestamp,
1402
to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
1404
for parent_id in revision.rev.parent_ids:
1405
to_file.write(indent + 'parent: %s\n' % (parent_id,))
1406
self.show_properties(revision.rev, indent)
1408
committer = revision.rev.committer
1409
authors = revision.rev.get_apparent_authors()
1410
if authors != [committer]:
1411
to_file.write(indent + 'author: %s\n' % (", ".join(authors),))
1412
to_file.write(indent + 'committer: %s\n' % (committer,))
1414
branch_nick = revision.rev.properties.get('branch-nick', None)
1415
if branch_nick is not None:
1416
to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))
1418
date_str = format_date(revision.rev.timestamp,
1419
revision.rev.timezone or 0,
317
1420
self.show_timezone)
318
print >>to_file, 'timestamp: %s' % date_str
1421
to_file.write(indent + 'timestamp: %s\n' % (date_str,))
320
print >>to_file, 'message:'
322
print >>to_file, ' (no message)'
1423
to_file.write(indent + 'message:\n')
1424
if not revision.rev.message:
1425
to_file.write(indent + ' (no message)\n')
324
for l in rev.message.split('\n'):
325
print >>to_file, ' ' + l
328
delta.show(to_file, self.show_ids)
1427
message = revision.rev.message.rstrip('\r\n')
1428
for l in message.split('\n'):
1429
to_file.write(indent + ' %s\n' % (l,))
1430
if revision.delta is not None:
1431
# We don't respect delta_format for compatibility
1432
revision.delta.show(to_file, self.show_ids, indent=indent,
1434
if revision.diff is not None:
1435
to_file.write(indent + 'diff:\n')
1436
# Note: we explicitly don't indent the diff (relative to the
1437
# revision information) so that the output can be fed to patch -p0
1438
self.show_diff(self.to_exact_file, revision.diff, indent)
1440
def get_advice_separator(self):
1441
"""Get the text separating the log from the closing advice."""
1442
return '-' * 60 + '\n'
332
1445
class ShortLogFormatter(LogFormatter):
333
def show(self, revno, rev, delta):
334
from bzrlib.osutils import format_date
1447
supports_merge_revisions = True
1448
preferred_levels = 1
1449
supports_delta = True
1450
supports_tags = True
1451
supports_diff = True
1453
def __init__(self, *args, **kwargs):
1454
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1455
self.revno_width_by_depth = {}
1457
def log_revision(self, revision):
1458
# We need two indents: one per depth and one for the information
1459
# relative to that indent. Most mainline revnos are 5 chars or
1460
# less while dotted revnos are typically 11 chars or less. Once
1461
# calculated, we need to remember the offset for a given depth
1462
# as we might be starting from a dotted revno in the first column
1463
# and we want subsequent mainline revisions to line up.
1464
depth = revision.merge_depth
1465
indent = ' ' * depth
1466
revno_width = self.revno_width_by_depth.get(depth)
1467
if revno_width is None:
1468
if revision.revno.find('.') == -1:
1469
# mainline revno, e.g. 12345
1472
# dotted revno, e.g. 12345.10.55
1474
self.revno_width_by_depth[depth] = revno_width
1475
offset = ' ' * (revno_width + 1)
336
1477
to_file = self.to_file
338
print >>to_file, "%5d %s\t%s" % (revno, rev.committer,
339
format_date(rev.timestamp, rev.timezone or 0,
1480
tags = ' {%s}' % (', '.join(revision.tags))
1481
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1482
revision.revno, self.short_author(revision.rev),
1483
format_date(revision.rev.timestamp,
1484
revision.rev.timezone or 0,
1485
self.show_timezone, date_fmt="%Y-%m-%d",
1487
tags, self.merge_marker(revision)))
1488
self.show_properties(revision.rev, indent+offset)
341
1489
if self.show_ids:
342
print >>to_file, ' revision-id:', rev.revision_id
1490
to_file.write(indent + offset + 'revision-id:%s\n'
1491
% (revision.rev.revision_id,))
1492
if not revision.rev.message:
1493
to_file.write(indent + offset + '(no message)\n')
1495
message = revision.rev.message.rstrip('\r\n')
1496
for l in message.split('\n'):
1497
to_file.write(indent + offset + '%s\n' % (l,))
1499
if revision.delta is not None:
1500
revision.delta.show(to_file, self.show_ids, indent=indent + offset,
1501
short_status=self.delta_format==1)
1502
if revision.diff is not None:
1503
self.show_diff(self.to_exact_file, revision.diff, ' ')
1507
class LineLogFormatter(LogFormatter):
1509
supports_merge_revisions = True
1510
preferred_levels = 1
1511
supports_tags = True
1513
def __init__(self, *args, **kwargs):
1514
super(LineLogFormatter, self).__init__(*args, **kwargs)
1515
self._max_chars = terminal_width() - 1
1517
def truncate(self, str, max_len):
1518
if len(str) <= max_len:
1520
return str[:max_len-3]+'...'
1522
def date_string(self, rev):
1523
return format_date(rev.timestamp, rev.timezone or 0,
1524
self.show_timezone, date_fmt="%Y-%m-%d",
1527
def message(self, rev):
343
1528
if not rev.message:
344
print >>to_file, ' (no message)'
346
for l in rev.message.split('\n'):
347
print >>to_file, ' ' + l
349
# TODO: Why not show the modified files in a shorter form as
350
# well? rewrap them single lines of appropriate length
352
delta.show(to_file, self.show_ids)
357
FORMATTERS = {'long': LongLogFormatter,
358
'short': ShortLogFormatter,
1529
return '(no message)'
1533
def log_revision(self, revision):
1534
indent = ' ' * revision.merge_depth
1535
self.to_file.write(self.log_string(revision.revno, revision.rev,
1536
self._max_chars, revision.tags, indent))
1537
self.to_file.write('\n')
1539
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1540
"""Format log info into one string. Truncate tail of string
1541
:param revno: revision number or None.
1542
Revision numbers counts from 1.
1543
:param rev: revision object
1544
:param max_chars: maximum length of resulting string
1545
:param tags: list of tags or None
1546
:param prefix: string to prefix each line
1547
:return: formatted truncated string
1551
# show revno only when is not None
1552
out.append("%s:" % revno)
1553
out.append(self.truncate(self.short_author(rev), 20))
1554
out.append(self.date_string(rev))
1555
if len(rev.parent_ids) > 1:
1556
out.append('[merge]')
1558
tag_str = '{%s}' % (', '.join(tags))
1560
out.append(rev.get_summary())
1561
return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1564
class GnuChangelogLogFormatter(LogFormatter):
1566
supports_merge_revisions = True
1567
supports_delta = True
1569
def log_revision(self, revision):
1570
"""Log a revision, either merged or not."""
1571
to_file = self.to_file
1573
date_str = format_date(revision.rev.timestamp,
1574
revision.rev.timezone or 0,
1576
date_fmt='%Y-%m-%d',
1578
committer_str = revision.rev.committer.replace (' <', ' <')
1579
to_file.write('%s %s\n\n' % (date_str,committer_str))
1581
if revision.delta is not None and revision.delta.has_changed():
1582
for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
1584
to_file.write('\t* %s:\n' % (path,))
1585
for c in revision.delta.renamed:
1586
oldpath,newpath = c[:2]
1587
# For renamed files, show both the old and the new path
1588
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
1591
if not revision.rev.message:
1592
to_file.write('\tNo commit message\n')
1594
message = revision.rev.message.rstrip('\r\n')
1595
for l in message.split('\n'):
1596
to_file.write('\t%s\n' % (l.lstrip(),))
def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        return self.get(branch.get_config().log_format())


log_formatter_registry = LogFormatterRegistry()


log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)
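# Illustrative sketch (an assumption, not part of the original module): a
# plugin can expose a custom formatter through the registry. The class and
# format name below are hypothetical.
#
#   class MyLineFormatter(LogFormatter):
#       def log_revision(self, revision):
#           self.to_file.write('%s %s\n' % (revision.revno,
#               revision.rev.get_summary()))
#
#   register_formatter('my-format', MyLineFormatter)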
def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)


def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)
1655
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
1657
"""Show the change in revision history comparing the old revision history to the new one.
1659
:param branch: The branch where the revisions exist
1660
:param old_rh: The old revision history
1661
:param new_rh: The new revision history
1662
:param to_file: A file to write the results to. If None, stdout will be used
1665
to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
1667
lf = log_formatter(log_format,
1670
show_timezone='original')
1672
# This is the first index which is different between
1675
for i in xrange(max(len(new_rh),
1677
if (len(new_rh) <= i
1679
or new_rh[i] != old_rh[i]):
1683
if base_idx is None:
1684
to_file.write('Nothing seems to have changed\n')
1686
## TODO: It might be nice to do something like show_log
1687
## and show the merged entries. But since this is the
1688
## removed revisions, it shouldn't be as important
1689
if base_idx < len(old_rh):
1690
to_file.write('*'*60)
1691
to_file.write('\nRemoved Revisions:\n')
1692
for i in range(base_idx, len(old_rh)):
1693
rev = branch.repository.get_revision(old_rh[i])
1694
lr = LogRevision(rev, i+1, 0, None)
1696
to_file.write('*'*60)
1697
to_file.write('\n\n')
1698
if base_idx < len(new_rh):
1699
to_file.write('Added Revisions:\n')
1704
direction='forward',
1705
start_revision=base_idx+1,
1706
end_revision=len(new_rh),
1710
def get_history_change(old_revision_id, new_revision_id, repository):
1711
"""Calculate the uncommon lefthand history between two revisions.
1713
:param old_revision_id: The original revision id.
1714
:param new_revision_id: The new revision id.
1715
:param repository: The repository to use for the calculation.
1717
return old_history, new_history
1720
old_revisions = set()
1722
new_revisions = set()
1723
new_iter = repository.iter_reverse_revision_history(new_revision_id)
1724
old_iter = repository.iter_reverse_revision_history(old_revision_id)
1725
stop_revision = None
1728
while do_new or do_old:
1731
new_revision = new_iter.next()
1732
except StopIteration:
1735
new_history.append(new_revision)
1736
new_revisions.add(new_revision)
1737
if new_revision in old_revisions:
1738
stop_revision = new_revision
1742
old_revision = old_iter.next()
1743
except StopIteration:
1746
old_history.append(old_revision)
1747
old_revisions.add(old_revision)
1748
if old_revision in new_revisions:
1749
stop_revision = old_revision
1751
new_history.reverse()
1752
old_history.reverse()
1753
if stop_revision is not None:
1754
new_history = new_history[new_history.index(stop_revision) + 1:]
1755
old_history = old_history[old_history.index(stop_revision) + 1:]
1756
return old_history, new_history
1759
def show_branch_change(branch, output, old_revno, old_revision_id):
1760
"""Show the changes made to a branch.
1762
:param branch: The branch to show changes about.
1763
:param output: A file-like object to write changes to.
1764
:param old_revno: The revno of the old tip.
1765
:param old_revision_id: The revision_id of the old tip.
1767
new_revno, new_revision_id = branch.last_revision_info()
1768
old_history, new_history = get_history_change(old_revision_id,
1771
if old_history == [] and new_history == []:
1772
output.write('Nothing seems to have changed\n')
1775
log_format = log_formatter_registry.get_default(branch)
1776
lf = log_format(show_ids=False, to_file=output, show_timezone='original')
1777
if old_history != []:
1778
output.write('*'*60)
1779
output.write('\nRemoved Revisions:\n')
1780
show_flat_log(branch.repository, old_history, old_revno, lf)
1781
output.write('*'*60)
1782
output.write('\n\n')
1783
if new_history != []:
1784
output.write('Added Revisions:\n')
1785
start_revno = new_revno - len(new_history) + 1
1786
show_log(branch, lf, None, verbose=False, direction='forward',
1787
start_revision=start_revno,)
1790
def show_flat_log(repository, history, last_revno, lf):
1791
"""Show a simple log of the specified history.
1793
:param repository: The repository to retrieve revisions from.
1794
:param history: A list of revision_ids indicating the lefthand history.
1795
:param last_revno: The revno of the last revision_id in the history.
1796
:param lf: The log formatter to use.
1798
start_revno = last_revno - len(history) + 1
1799
revisions = repository.get_revisions(history)
1800
for i, rev in enumerate(revisions):
1801
lr = LogRevision(rev, i + last_revno, 0, None)
1805
def _get_info_for_log_files(revisionspec_list, file_list):
1806
"""Find file-ids and kinds given a list of files and a revision range.
1808
We search for files at the end of the range. If not found there,
1809
we try the start of the range.
1811
:param revisionspec_list: revision range as parsed on the command line
1812
:param file_list: the list of paths given on the command line;
1813
the first of these can be a branch location or a file path,
1814
the remainder must be file paths
1815
:return: (branch, info_list, start_rev_info, end_rev_info) where
1816
info_list is a list of (relative_path, file_id, kind) tuples where
1817
kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
1819
from builtins import _get_revision_range, safe_relpath_files
1820
tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
1821
# XXX: It's damn messy converting a list of paths to relative paths when
1822
# those paths might be deleted ones, they might be on a case-insensitive
1823
# filesystem and/or they might be in silly locations (like another branch).
1824
# For example, what should "log bzr://branch/dir/file1 file2" do? (Is
1825
# file2 implicitly in the same dir as file1 or should its directory be
1826
# taken from the current tree somehow?) For now, this solves the common
1827
# case of running log in a nested directory, assuming paths beyond the
1828
# first one haven't been deleted ...
1830
relpaths = [path] + safe_relpath_files(tree, file_list[1:])
1832
relpaths = [path] + file_list[1:]
1834
start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
1836
if relpaths in ([], [u'']):
1837
return b, [], start_rev_info, end_rev_info
1838
if start_rev_info is None and end_rev_info is None:
1840
tree = b.basis_tree()
1843
file_id = tree.path2id(fp)
1844
kind = _get_kind_for_file_id(tree, file_id)
1846
# go back to when time began
1849
rev1 = b.get_rev_id(1)
1850
except errors.NoSuchRevision:
1855
tree1 = b.repository.revision_tree(rev1)
1857
file_id = tree1.path2id(fp)
1858
kind = _get_kind_for_file_id(tree1, file_id)
1859
info_list.append((fp, file_id, kind))
1861
elif start_rev_info == end_rev_info:
1862
# One revision given - file must exist in it
1863
tree = b.repository.revision_tree(end_rev_info.rev_id)
1865
file_id = tree.path2id(fp)
1866
kind = _get_kind_for_file_id(tree, file_id)
1867
info_list.append((fp, file_id, kind))
1870
# Revision range given. Get the file-id from the end tree.
1871
# If that fails, try the start tree.
1872
rev_id = end_rev_info.rev_id
1874
tree = b.basis_tree()
1876
tree = b.repository.revision_tree(rev_id)
1879
file_id = tree.path2id(fp)
1880
kind = _get_kind_for_file_id(tree, file_id)
1883
rev_id = start_rev_info.rev_id
1885
rev1 = b.get_rev_id(1)
1886
tree1 = b.repository.revision_tree(rev1)
1888
tree1 = b.repository.revision_tree(rev_id)
1889
file_id = tree1.path2id(fp)
1890
kind = _get_kind_for_file_id(tree1, file_id)
1891
info_list.append((fp, file_id, kind))
1892
return b, info_list, start_rev_info, end_rev_info
1895
def _get_kind_for_file_id(tree, file_id):
1896
"""Return the kind of a file-id or None if it doesn't exist."""
1897
if file_id is not None:
1898
return tree.kind(file_id)
1903
properties_handler_registry = registry.Registry()
1904
properties_handler_registry.register_lazy("foreign",
1906
"show_foreign_properties")
1909
# adapters which revision ids to log are filtered. When log is called, the
1910
# log_rev_iterator is adapted through each of these factory methods.
1911
# Plugins are welcome to mutate this list in any way they like - as long
1912
# as the overall behaviour is preserved. At this point there is no extensible
1913
# mechanism for getting parameters to each factory method, and until there is
1914
# this won't be considered a stable api.
1918
# read revision objects
1919
_make_revision_objects,
1920
# filter on log messages
1921
_make_search_filter,
1922
# generate deltas for things we will show