@@ ... @@
 all the changes since the previous revision that touched hello.c.
@@ ... @@
-from __future__ import absolute_import
+from io import BytesIO
-from cStringIO import StringIO
-from itertools import (
 from warnings import (
@@ ... @@
+from .lazy_import import lazy_import
-from brzlib.lazy_import import lazy_import
 lazy_import(globals(), """
@@ ... @@
-    repository as _mod_repository,
     revision as _mod_revision,
@@ ... @@
+from breezy.i18n import gettext, ngettext
-from brzlib.i18n import gettext, ngettext
@@ ... @@
+from .osutils import (
-from brzlib.osutils import (
     format_date_with_offset_in_original_timezone,
     get_diff_header_encoding,
     get_terminal_encoding,
@@ ... @@
+def find_touching_revisions(repository, last_revision, last_tree, last_path):
-def find_touching_revisions(branch, file_id):
     """Yield a description of revisions which affect the file_id.

     Each returned element is (revno, revision_id, description)

     TODO: Perhaps some way to limit this to only particular revisions,
     or to traverse a non-mainline set of revisions?
     """
+    last_verifier = last_tree.get_file_verifier(last_path)
+    graph = repository.get_graph()
+    history = list(graph.iter_lefthand_ancestry(last_revision, []))
+    for revision_id in history:
+        this_tree = repository.revision_tree(revision_id)
+        this_intertree = InterTree.get(this_tree, last_tree)
+        this_path = this_intertree.find_source_path(last_path)
-    graph = branch.repository.get_graph()
-    history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
-        [_mod_revision.NULL_REVISION]))
-    for revision_id in reversed(history):
-        this_inv = branch.repository.get_inventory(revision_id)
-        if this_inv.has_id(file_id):
-            this_ie = this_inv[file_id]
-            this_path = this_inv.id2path(file_id)
-        else:
-            this_ie = this_path = None

         # now we know how it was last time, and how it is in this revision.
         # are those two states effectively the same or not?
+        if this_path is not None and last_path is None:
+            yield revno, revision_id, "deleted " + this_path
+            this_verifier = this_tree.get_file_verifier(this_path)
+        elif this_path is None and last_path is not None:
+            yield revno, revision_id, "added " + last_path
-        if not this_ie and not last_ie:
-            # not present in either
-            pass
-        elif this_ie and not last_ie:
-            yield revno, revision_id, "added " + this_path
-        elif not this_ie and last_ie:
-            yield revno, revision_id, "deleted " + last_path
         elif this_path != last_path:
+            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
+            this_verifier = this_tree.get_file_verifier(this_path)
+        else:
+            this_verifier = this_tree.get_file_verifier(this_path)
+            if (this_verifier != last_verifier):
+                yield revno, revision_id, "modified " + this_path
-            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
-        elif (this_ie.text_size != last_ie.text_size
-              or this_ie.text_sha1 != last_ie.text_sha1):
-            yield revno, revision_id, "modified " + this_path

+        last_verifier = this_verifier
         last_path = this_path
+        last_tree = this_tree
+        if last_path is None:
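For orientation, here is a hypothetical driver for the new find_touching_revisions() signature shown above. It is a sketch only, not part of the diff: it assumes a local branch containing a file called README, derives the repository, last revision, tree and path arguments from the branch, and prints the (revno, revision_id, description) tuples the generator yields.

# Hypothetical usage sketch of the new signature (assumptions: a branch at
# '.', a versioned file named README on its mainline).
from breezy.branch import Branch

b = Branch.open('.')
with b.lock_read():
    last_revision = b.last_revision()
    last_tree = b.repository.revision_tree(last_revision)
    for revno, rev_id, description in find_touching_revisions(
            b.repository, last_revision, last_tree, 'README'):
        print('%s %s' % (revno, description))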
@@ ... @@
 def show_log(branch,
-             specific_fileid=None,
              direction='reverse',
              start_revision=None,
@@ ... @@
     :param match: Dictionary of search lists to use when matching revision
@@ ... @@
-    # Convert old-style parameters to new-style parameters
-    if specific_fileid is not None:
-        file_ids = [specific_fileid]
@@ ... @@
-            delta_type = 'partial'
         delta_type = None
@@ ... @@
-            diff_type = 'partial'
@@ ... @@
+    if isinstance(start_revision, int):
+        try:
+            start_revision = revisionspec.RevisionInfo(branch, start_revision)
+        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
+            raise errors.InvalidRevisionNumber(start_revision)
+    if isinstance(end_revision, int):
+        try:
+            end_revision = revisionspec.RevisionInfo(branch, end_revision)
+        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
+            raise errors.InvalidRevisionNumber(end_revision)
+    if end_revision is not None and end_revision.revno == 0:
+        raise errors.InvalidRevisionNumber(end_revision.revno)

     # Build the request and execute it
+    rqst = make_log_request_dict(
-    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
         start_revision=start_revision, end_revision=end_revision,
         limit=limit, message_search=search,
         delta_type=delta_type, diff_type=diff_type)
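The converted call path can be exercised end to end. The following sketch (an illustration under the assumption of a local branch at '.', not part of the diff) builds the default formatter via log_formatter_registry, as done later in this file, and lets show_log() package its arguments into the request built by make_log_request_dict().

# Hypothetical caller of show_log(); the keyword arguments end up in the
# log request dict assembled above.
import sys

from breezy.branch import Branch
from breezy import log

b = Branch.open('.')
lf_class = log.log_formatter_registry.get_default(b)
lf = lf_class(to_file=sys.stdout, show_ids=False, show_timezone='original')
log.show_log(b, lf, direction='reverse', limit=10)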
@@ ... @@
+def format_signature_validity(rev_id, branch):
-def format_signature_validity(rev_id, repo):
     """get the signature validity

     :param rev_id: revision id to validate
+    :param branch: branch of revision
-    :param repo: repository of revision
     :return: human readable string to print to log
     """
+    from breezy import gpg
-    from brzlib import gpg

+    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
+    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
-    gpg_strategy = gpg.GPGStrategy(None)
-    result = repo.verify_revision_signature(rev_id, gpg_strategy)
     if result[0] == gpg.SIGNATURE_VALID:
         return u"valid signature from {0}".format(result[1])
     if result[0] == gpg.SIGNATURE_KEY_MISSING:
@@ ... @@
         if not isinstance(lf, LogFormatter):
             warn("not a LogFormatter instance: %r" % lf)

+        with self.branch.lock_read():
-        self.branch.lock_read()
             if getattr(lf, 'begin_log', None):
@@ ... @@
             self._show_body(lf)
             if getattr(lf, 'end_log', None):
@@ ... @@
     def _show_body(self, lf):
         """Show the main log output.
@@ ... @@
         # Find and print the interesting revisions
         generator = self._generator_factory(self.branch, rqst)
+        try:
+            for lr in generator.iter_log_revisions():
+        except errors.GhostRevisionUnusableHere:
+            raise errors.BzrCommandError(
+                gettext('Further revision history missing.'))
-        for lr in generator.iter_log_revisions():

     def _generator_factory(self, branch, rqst):
@@ ... @@
             for revs in revision_iterator:
                 for (rev_id, revno, merge_depth), rev, delta in revs:
                     # 0 levels means show everything; merge_depth counts from 0
+                    if (levels != 0 and merge_depth is not None and
+                            merge_depth >= levels):
-                    if levels != 0 and merge_depth >= levels:
                         continue
                     if omit_merges and len(rev.parent_ids) > 1:
                         continue
+                    if rev is None:
+                        raise errors.GhostRevisionUnusableHere(rev_id)
                     if diff_type is None:
                         diff = None
                     else:
                         diff = self._format_diff(rev, rev_id, diff_type)
                     if show_signature:
+                        signature = format_signature_validity(rev_id, self.branch)
-                        signature = format_signature_validity(rev_id,
-                            self.branch.repository)
@@ ... @@
+                    yield LogRevision(
+                        rev, revno, merge_depth, delta,
-                    yield LogRevision(rev, revno, merge_depth, delta,
                         self.rev_tag_dict.get(rev_id), diff, signature)
@@ ... @@
             specific_files = [tree_2.id2path(id) for id in file_ids]
         else:
             specific_files = None
         path_encoding = get_diff_header_encoding()
         diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
+                             new_label='', path_encoding=path_encoding)
-            new_label='', path_encoding=path_encoding)
         return s.getvalue()

     def _create_log_revision_iterator(self):
@@ ... @@
         # Apply the other filters
         return make_log_rev_iterator(self.branch, view_revisions,
+                                     rqst.get('delta_type'), rqst.get('match'),
+                                     file_ids=rqst.get('specific_fileids'),
+                                     direction=rqst.get('direction'))
-            rqst.get('delta_type'), rqst.get('match'),
-            file_ids=rqst.get('specific_fileids'),
-            direction=rqst.get('direction'))

     def _log_revision_iterator_using_per_file_graph(self):
         # Get the base revisions, filtering by the revision range.
@@ ... @@
         if not isinstance(view_revisions, list):
             view_revisions = list(view_revisions)
         view_revisions = _filter_revisions_touching_file_id(self.branch,
+            rqst.get('specific_fileids')[
+            include_merges=rqst.get('levels') != 1)
-            rqst.get('specific_fileids')[0], view_revisions,
-            include_merges=rqst.get('levels') != 1)
         return make_log_rev_iterator(self.branch, view_revisions,
+                                     rqst.get('delta_type'), rqst.get('match'))
-            rqst.get('delta_type'), rqst.get('match'))
@@ ... @@
 def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
@@ ... @@
             '--exclude-common-ancestry requires two different revisions'))
     if direction not in ('reverse', 'forward'):
         raise ValueError(gettext('invalid direction %r') % direction)
+    br_rev_id = branch.last_revision()
+    if br_rev_id == _mod_revision.NULL_REVISION:
-    br_revno, br_rev_id = branch.last_revision_info()
@@ ... @@
     if (end_rev_id and start_rev_id == end_rev_id
         and (not generate_merge_revisions
              or not _has_merges(branch, end_rev_id))):
         # If a single revision is requested, check we can handle it
+        return _generate_one_revision(branch, end_rev_id, br_rev_id,
-        return _generate_one_revision(branch, end_rev_id, br_rev_id,
@@ ... @@
     if not generate_merge_revisions:
         # If we only want to see linear revisions, we can iterate ...
@@ ... @@
         # ancestor of the end limit, check it before outputting anything
         if (direction == 'forward'
             or (start_rev_id and not _is_obvious_ancestor(
+                    branch, start_rev_id, end_rev_id))):
+            iter_revs = list(iter_revs)
-                branch, start_rev_id, end_rev_id))):
-            iter_revs = list(iter_revs)
         if direction == 'forward':
             iter_revs = reversed(iter_revs)
@@ ... @@
     initial_revisions = []
     if delayed_graph_generation:
         try:
+            for rev_id, revno, depth in _linear_view_revisions(
+                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
-            for rev_id, revno, depth in _linear_view_revisions(
-                branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                 if _has_merges(branch, rev_id):
                     # The end_rev_id can be nested down somewhere. We need an
                     # explicit ancestry check. There is an ambiguity here as we
@@ ... @@
     # shown naturally, i.e. just like it is for linear logging. We can easily
     # make forward the exact opposite display, but showing the merge revisions
     # indented at the end seems slightly nicer in that case.
+    view_revisions = itertools.chain(iter(initial_revisions),
+        _graph_view_revisions(branch, start_rev_id, end_rev_id,
+            rebase_initial_depths=(
+                direction == 'reverse'),
+            exclude_common_ancestry=exclude_common_ancestry))
-    view_revisions = chain(iter(initial_revisions),
-        _graph_view_revisions(branch, start_rev_id, end_rev_id,
-            rebase_initial_depths=(direction == 'reverse'),
-            exclude_common_ancestry=exclude_common_ancestry))
     return view_revisions
@@ ... @@
     :param exclude_common_ancestry: Whether the start_rev_id should be part of
         the iterated revisions.
     :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
+        dotted_revno will be None for ghosts
     :raises _StartNotLinearAncestor: if a start_rev_id is specified but
         is not found walking the left-hand history
     """
-    br_revno, br_rev_id = branch.last_revision_info()
     repo = branch.repository
     graph = repo.get_graph()
     if start_rev_id is None and end_rev_id is None:
+        if branch._format.stores_revno() or \
+                config.GlobalStack().get('calculate_revnos'):
+            try:
+                br_revno, br_rev_id = branch.last_revision_info()
+            except errors.GhostRevisionsHaveNoRevno:
+                br_rev_id = branch.last_revision()
@@ ... @@
+            br_rev_id = branch.last_revision()
@@ ... @@
+        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
+            (_mod_revision.NULL_REVISION,))
@@ ... @@
+                revision_id = next(graph_iter)
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
@@ ... @@
+            except StopIteration:
@@ ... @@
+                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
+                if cur_revno is not None:
-        for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
-            (_mod_revision.NULL_REVISION,)):
-            yield revision_id, str(cur_revno), 0
     else:
+        br_rev_id = branch.last_revision()
         if end_rev_id is None:
             end_rev_id = br_rev_id
         found_start = start_rev_id is None
+        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
+            (_mod_revision.NULL_REVISION,))
@@ ... @@
+                revision_id = next(graph_iter)
+            except StopIteration:
@@ ... @@
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
@@ ... @@
+                revno_str = _compute_revno_str(branch, revision_id)
+                if not found_start and revision_id == start_rev_id:
+                    if not exclude_common_ancestry:
+                        yield revision_id, revno_str, 0
-        for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
-            (_mod_revision.NULL_REVISION,)):
-            revno_str = _compute_revno_str(branch, revision_id)
-            if not found_start and revision_id == start_rev_id:
-                if not exclude_common_ancestry:
                     yield revision_id, revno_str, 0
@@ ... @@
+        raise _StartNotLinearAncestor()
-            yield revision_id, revno_str, 0
-        raise _StartNotLinearAncestor()
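The tuple contract documented above, (revision_id, dotted_revno, merge_depth) with the revno string possibly None when revnos are not calculated, can be consumed as in the following sketch. It is hypothetical and assumes the four-argument call shape visible in the hunk; _linear_view_revisions is a module-private helper, so this is illustration rather than a supported API.

# Hypothetical consumer of the iterator contract above (not part of the diff).
from breezy.branch import Branch
from breezy.log import _linear_view_revisions

b = Branch.open('.')
with b.lock_read():
    for rev_id, revno_str, depth in _linear_view_revisions(b, None, None, False):
        # revno_str may be None (e.g. ghosts); fall back to the revision id.
        label = revno_str if revno_str is not None else rev_id.decode('utf-8')
        print('%s%s' % ('  ' * depth, label))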
@@ ... @@
 def _graph_view_revisions(branch, start_rev_id, end_rev_id,
@@ ... @@
     """Adjust depths upwards so the top level is 0."""
     # If either the first or last revision have a merge_depth of 0, we're done
     if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
+        min_depth = min([d for r, n, d in view_revisions])
-        min_depth = min([d for r,n,d in view_revisions])
         if min_depth != 0:
+            view_revisions = [(r, n, d - min_depth)
+                              for r, n, d in view_revisions]
-            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
     return view_revisions


 def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
+                          file_ids=None, direction='reverse'):
-    file_ids=None, direction='reverse'):
     """Create a revision iterator for log.

     :param branch: The branch being logged.
@@ ... @@
     # Convert view_revisions into (view, None, None) groups to fit with
     # the standard interface here.
+    if isinstance(view_revisions, list):
-    if type(view_revisions) == list:
         # A single batch conversion is faster than many incremental ones.
         # As we have all the data, do a batch conversion.
         nones = [None] * len(view_revisions)
+        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
-        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
@@ ... @@
             for view in view_revisions:
@@ ... @@
         # It would be nicer if log adapters were first class objects
         # with custom parameters. This will do for now. IGC 20090127
         if adapter == _make_delta_filter:
+            log_rev_iterator = adapter(
+                branch, generate_delta, search, log_rev_iterator, file_ids,
-            log_rev_iterator = adapter(branch, generate_delta,
-                search, log_rev_iterator, file_ids, direction)
         else:
+            log_rev_iterator = adapter(
+                branch, generate_delta, search, log_rev_iterator)
-            log_rev_iterator = adapter(branch, generate_delta,
-                search, log_rev_iterator)
     return log_rev_iterator
@@ ... @@
     :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
@@ ... @@
         return log_rev_iterator
+    # Use lazy_compile so mapping to InvalidPattern error occurs.
+    searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
+                for k, v in match.items()]
-    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
-                for (k,v) in match.iteritems()]
     return _filter_re(searchRE, log_rev_iterator)
@@ ... @@
 def _match_filter(searchRE, rev):
+        'message': (rev.message,),
+        'committer': (rev.committer,),
+        'author': (rev.get_apparent_authors()),
+        'bugs': list(rev.iter_bugs())
+    strings[''] = [item for inner_list in strings.values()
-               'message': (rev.message,),
-               'committer': (rev.committer,),
-               'author': (rev.get_apparent_authors()),
-               'bugs': list(rev.iter_bugs())
-    strings[''] = [item for inner_list in strings.itervalues()
                    for item in inner_list]
+    for k, v in searchRE:
-    for (k,v) in searchRE:
         if k in strings and not _match_any_filter(strings[k], v):
@@ ... @@
 def _match_any_filter(strings, res):
+    return any(r.search(s) for r in res for s in strings)
-    return any([filter(None, map(re.search, strings)) for re in res])
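The field names handled by _match_filter above line up with the match dictionary that make_log_request_dict() accepts (see the ":param match:" fragment earlier in this diff). A hypothetical request using it might look like the sketch below; the exact matching semantics are whatever the filters above implement, so treat the patterns as illustrative only.

# Hypothetical search request (a sketch, not part of the diff): each key
# names the revision field the regular expressions apply to; the empty key
# is the catch-all built by _match_filter from every field.
from breezy import log

rqst = log.make_log_request_dict(
    match={
        'message': [r'\bfix(es|ed)?\b'],
        'author': [r'@example\.org$'],
        '': [r'CVE-\d{4}-\d+'],
    },
    limit=20)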
@@ ... @@
 def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
+                       fileids=None, direction='reverse'):
-    fileids=None, direction='reverse'):
     """Add revision deltas to a log iterator if needed.

     :param branch: The branch being logged.
@@ ... @@
     if not generate_delta and not fileids:
         return log_rev_iterator
     return _generate_deltas(branch.repository, log_rev_iterator,
+                            generate_delta, fileids, direction)
-        generate_delta, fileids, direction)


 def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
@@ ... @@
     """Create deltas for each batch of revisions in log_rev_iterator.

     If we're only generating deltas for the sake of filtering against
@@ ... @@
         if delta_type == 'full' and not check_fileids:
             deltas = repository.get_deltas_for_revisions(revisions)
+            for rev, delta in zip(revs, deltas):
-            for rev, delta in izip(revs, deltas):
                 new_revs.append((rev[0], rev[1], delta))
         else:
             deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
+            for rev, delta in zip(revs, deltas):
-            for rev, delta in izip(revs, deltas):
                 if check_fileids:
                     if delta is None or not delta.has_changed():
@@ ... @@
       fileids set once their add or remove entry is detected respectively
     """
     if stop_on == 'add':
+        for item in delta.added + delta.copied:
+            if item.file_id in fileids:
+                fileids.remove(item.file_id)
-        for item in delta.added:
-            if item[1] in fileids:
-                fileids.remove(item[1])
     elif stop_on == 'delete':
         for item in delta.removed:
+            if item.file_id in fileids:
+                fileids.remove(item.file_id)
-            if item[1] in fileids:
-                fileids.remove(item[1])
@@ ... @@
 def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
@@ ... @@
     for revs in log_rev_iterator:
         # r = revision_id, n = revno, d = merge depth
         revision_ids = [view[0] for view, _, _ in revs]
+        revisions = dict(repository.iter_revisions(revision_ids))
+        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
-        revisions = repository.get_revisions(revision_ids)
-        revs = [(rev[0], revision, rev[2]) for rev, revision in
-                izip(revs, revisions)]
@@ ... @@
 def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
@@ ... @@
     :return: (start_rev_id, end_rev_id) tuple.
     """
-    branch_revno, branch_rev_id = branch.last_revision_info()
     start_rev_id = None
+    if start_revision is not None:
+        if not isinstance(start_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        start_rev_id = start_revision.rev_id
+        start_revno = start_revision.revno
+    if start_revno is None:
-    if start_revision is None:
         start_revno = 1
-        if isinstance(start_revision, revisionspec.RevisionInfo):
-            start_rev_id = start_revision.rev_id
-            start_revno = start_revision.revno or 1
-        else:
-            branch.check_real_revno(start_revision)
-            start_revno = start_revision
-            start_rev_id = branch.get_rev_id(start_revno)

     end_rev_id = None
+    if end_revision is not None:
+        if not isinstance(end_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        end_rev_id = end_revision.rev_id
+        end_revno = end_revision.revno
-    if end_revision is None:
-        end_revno = branch_revno
-        if isinstance(end_revision, revisionspec.RevisionInfo):
-            end_rev_id = end_revision.rev_id
-            end_revno = end_revision.revno or branch_revno
-        else:
-            branch.check_real_revno(end_revision)
-            end_revno = end_revision
-            end_rev_id = branch.get_rev_id(end_revno)

+    if branch.last_revision() != _mod_revision.NULL_REVISION:
-    if branch_revno != 0:
         if (start_rev_id == _mod_revision.NULL_REVISION
+                or end_rev_id == _mod_revision.NULL_REVISION):
+            raise errors.BzrCommandError(
+                gettext('Logging revision 0 is invalid.'))
+        if end_revno is not None and start_revno > end_revno:
+            raise errors.BzrCommandError(
+                gettext("Start revision must be older than the end revision."))
-            or end_rev_id == _mod_revision.NULL_REVISION):
-            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
-        if start_revno > end_revno:
-            raise errors.BzrCommandError(gettext("Start revision must be "
-                "older than the end revision."))

     return (start_rev_id, end_rev_id)
@@ ... @@
             end_revno = end_revision
@@ ... @@
     if ((start_rev_id == _mod_revision.NULL_REVISION)
+            or (end_rev_id == _mod_revision.NULL_REVISION)):
-        or (end_rev_id == _mod_revision.NULL_REVISION)):
         raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
     if start_revno > end_revno:
         raise errors.BzrCommandError(gettext("Start revision must be older "
+                                             "than the end revision."))
-            "than the end revision."))

     if end_revno < start_revno:
         return None, None, None, None
@@ ... @@
     # rate). This particular access is clustered with a low success rate.
     modified_text_revisions = set()
     chunk_size = 1000
+    for start in range(0, len(text_keys), chunk_size):
-    for start in xrange(0, len(text_keys), chunk_size):
         next_keys = text_keys[start:start + chunk_size]
         # Only keep the revision_id portion of the key
         modified_text_revisions.update(
@@ ... @@
         self.to_file = to_file
         # 'exact' stream used to show diff, it should print content 'as is'
+        # and should not try to decode/encode it to unicode to avoid bug
-        # and should not try to decode/encode it to unicode to avoid bug #328007
         if to_exact_file is not None:
             self.to_exact_file = to_exact_file
         else:
+            # XXX: somewhat hacky; this assumes it's a codec writer; it's
+            # better for code that expects to get diffs to pass in the exact
-            # XXX: somewhat hacky; this assumes it's a codec writer; it's better
-            # for code that expects to get diffs to pass in the exact file
             self.to_exact_file = getattr(to_file, 'stream', to_file)
         self.show_ids = show_ids
         self.show_timezone = show_timezone
         if delta_format is None:
             # Ensures backward compatibility
+            delta_format = 2  # long format
-            delta_format = 2 # long format
         self.delta_format = delta_format
         self.levels = levels
         self._show_advice = show_advice
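Since the constructor above sets the attributes every formatter inherits (to_file, to_exact_file, show_ids, show_timezone, delta_format, levels), a custom formatter is mostly a log_revision() method. The sketch below is hypothetical: it assumes the usual LogFormatter capability flags and the LogRevision fields (revno, rev, tags) used elsewhere in this file, and is not part of the diff.

# A minimal sketch of a LogFormatter subclass.
from breezy import log


class OneLineFormatter(log.LogFormatter):

    # Assumed capability flags; the formatter registry uses these to decide
    # what data to feed the formatter.
    supports_merge_revisions = True
    supports_tags = True

    def log_revision(self, revision):
        tags = ' {%s}' % ', '.join(sorted(revision.tags)) if revision.tags else ''
        self.to_file.write('%s %s%s\n' % (revision.revno or '?',
                                          revision.rev.get_summary(), tags))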
@@ ... @@
         lines = self._foreign_info_properties(revision)
         for key, handler in properties_handler_registry.iteritems():
+            try:
+                lines.extend(self._format_properties(handler(revision)))
+            except Exception:
+                trace.log_exception_quietly()
+                trace.print_exception(sys.exc_info(), self.to_file)
-            lines.extend(self._format_properties(handler(revision)))
@@ ... @@
     def _foreign_info_properties(self, rev):
@@ ... @@
     def show_diff(self, to_file, diff, indent):
+        encoding = get_terminal_encoding()
+        for l in diff.rstrip().split(b'\n'):
+            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
-        for l in diff.rstrip().split('\n'):
-            to_file.write(indent + '%s\n' % (l,))
@@ ... @@
     # Separator between revisions in long format
@@ ... @@
         lines = [_LONG_SEP]
         if revision.revno is not None:
             lines.append('revno: %s%s' % (revision.revno,
+                                          self.merge_marker(revision)))
-                self.merge_marker(revision)))
         if revision.tags:
+            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
-            lines.append('tags: %s' % (', '.join(revision.tags)))
         if self.show_ids or revision.revno is None:
+            lines.append('revision-id: %s' %
+                         (revision.rev.revision_id.decode('utf-8'),))
-            lines.append('revision-id: %s' % (revision.rev.revision_id,))
         if self.show_ids:
             for parent_id in revision.rev.parent_ids:
+                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
-                lines.append('parent: %s' % (parent_id,))

         lines.extend(self.custom_properties(revision.rev))
@@ ... @@
         committer = revision.rev.committer
@@ ... @@
         to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
         if revision.delta is not None:
             # Use the standard status output to display changes
+            from breezy.delta import report_delta
-            from brzlib.delta import report_delta
             report_delta(to_file, revision.delta, short_status=False,
                          show_ids=self.show_ids, indent=indent)
         if revision.diff is not None:
@@ ... @@
         to_file = self.to_file
@@ ... @@
         if revision.tags:
+            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
-            tags = ' {%s}' % (', '.join(revision.tags))
         to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
+            revision.revno or "", self.short_author(
+                revision.rev),
+            format_date(revision.rev.timestamp,
+                        revision.rev.timezone or 0,
+                        self.show_timezone, date_fmt="%Y-%m-%d",
+                        show_offset=False),
+            tags, self.merge_marker(revision)))
+        self.show_properties(revision.rev, indent + offset)
-            revision.revno or "", self.short_author(revision.rev),
-            format_date(revision.rev.timestamp,
-                        revision.rev.timezone or 0,
-                        self.show_timezone, date_fmt="%Y-%m-%d",
-                        show_offset=False),
-            tags, self.merge_marker(revision)))
-        self.show_properties(revision.rev, indent+offset)
         if self.show_ids or revision.revno is None:
             to_file.write(indent + offset + 'revision-id:%s\n'
+                          % (revision.rev.revision_id.decode('utf-8'),))
-                          % (revision.rev.revision_id,))
         if not revision.rev.message:
             to_file.write(indent + offset + '(no message)\n')
@@ ... @@
         if revision.delta is not None:
             # Use the standard status output to display changes
+            from breezy.delta import report_delta
-            from brzlib.delta import report_delta
             report_delta(to_file, revision.delta,
+                         short_status=self.delta_format == 1,
-                         short_status=self.delta_format==1,
                          show_ids=self.show_ids, indent=indent + offset)
         if revision.diff is not None:
             self.show_diff(self.to_exact_file, revision.diff, '      ')
@@ ... @@
     def log_revision(self, revision):
         indent = '  ' * revision.merge_depth
         self.to_file.write(self.log_string(revision.revno, revision.rev,
+                                           self._max_chars, revision.tags, indent))
-            self._max_chars, revision.tags, indent))
         self.to_file.write('\n')

     def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
@@ ... @@
             # show revno only when is not None
             out.append("%s:" % revno)
         if max_chars is not None:
+            out.append(self.truncate(
+                self.short_author(rev), (max_chars + 3) // 4))
-            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
         else:
             out.append(self.short_author(rev))
         out.append(self.date_string(rev))
         if len(rev.parent_ids) > 1:
             out.append('[merge]')
         if tags:
+            tag_str = '{%s}' % (', '.join(sorted(tags)))
-            tag_str = '{%s}' % (', '.join(tags))
             out.append(tag_str)
         out.append(rev.get_summary())
         return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
@@ ... @@
                                show_offset=False)
         committer_str = self.authors(revision.rev, 'first', sep=', ')
         committer_str = committer_str.replace(' <', '  <')
+        to_file.write('%s %s\n\n' % (date_str, committer_str))
-        to_file.write('%s %s\n\n' % (date_str,committer_str))

         if revision.delta is not None and revision.delta.has_changed():
             for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
+                if c.path[0] is None:
+                    path = c.path[1]
+                else:
+                    path = c.path[0]
                 to_file.write('\t* %s:\n' % (path,))
+            for c in revision.delta.renamed + revision.delta.copied:
-            for c in revision.delta.renamed:
-                oldpath,newpath = c[:2]
                 # For renamed files, show both the old and the new path
+                to_file.write('\t* %s:\n\t* %s:\n' % (c.path[0], c.path[1]))
-                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
             to_file.write('\n')

         if not revision.rev.message:
@@ ... @@
     # This is the first index which is different between
     base_idx = None
+    for i in range(max(len(new_rh), len(old_rh))):
-    for i in xrange(max(len(new_rh),
@@ ... @@
         if (len(new_rh) <= i
             or len(old_rh) <= i
+                or new_rh[i] != old_rh[i]):
-            or new_rh[i] != old_rh[i]):
@@ ... @@
     if base_idx is None:
         to_file.write('Nothing seems to have changed\n')
@@ ... @@
+    # TODO: It might be nice to do something like show_log
+    # and show the merged entries. But since this is the
+    # removed revisions, it shouldn't be as important
-    ## TODO: It might be nice to do something like show_log
-    ## and show the merged entries. But since this is the
-    ## removed revisions, it shouldn't be as important
     if base_idx < len(old_rh):
+        to_file.write('*' * 60)
-        to_file.write('*'*60)
         to_file.write('\nRemoved Revisions:\n')
         for i in range(base_idx, len(old_rh)):
             rev = branch.repository.get_revision(old_rh[i])
+            lr = LogRevision(rev, i + 1, 0, None)
-            lr = LogRevision(rev, i+1, 0, None)
             lf.log_revision(lr)
+        to_file.write('*' * 60)
-        to_file.write('*'*60)
         to_file.write('\n\n')
     if base_idx < len(new_rh):
         to_file.write('Added Revisions:\n')
         show_log(branch,
@@ ... @@
                  direction='forward',
+                 start_revision=base_idx + 1,
-                 start_revision=base_idx+1,
                  end_revision=len(new_rh),
@@ ... @@
     log_format = log_formatter_registry.get_default(branch)
     lf = log_format(show_ids=False, to_file=output, show_timezone='original')
     if old_history != []:
+        output.write('*' * 60)
-        output.write('*'*60)
         output.write('\nRemoved Revisions:\n')
         show_flat_log(branch.repository, old_history, old_revno, lf)
+        output.write('*' * 60)
-        output.write('*'*60)
         output.write('\n\n')
     if new_history != []:
         output.write('Added Revisions:\n')
         start_revno = new_revno - len(new_history) + 1
+        show_log(branch, lf, verbose=False, direction='forward',
+                 start_revision=start_revno)
-        show_log(branch, lf, None, verbose=False, direction='forward',
-                 start_revision=start_revno,)


 def show_flat_log(repository, history, last_revno, lf):
@@ ... @@
     :param last_revno: The revno of the last revision_id in the history.
     :param lf: The log formatter to use.
     """
-    start_revno = last_revno - len(history) + 1
     revisions = repository.get_revisions(history)
     for i, rev in enumerate(revisions):
         lr = LogRevision(rev, i + last_revno, 0, None)
         lf.log_revision(lr)
@@ ... @@
+def _get_info_for_log_files(revisionspec_list, file_list, exit_stack):
-def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
     """Find file-ids and kinds given a list of files and a revision range.

     We search for files at the end of the range. If not found there,
@@ ... @@
     :param file_list: the list of paths given on the command line;
         the first of these can be a branch location or a file path,
         the remainder must be file paths
+    :param exit_stack: When the branch returned is read locked,
+        an unlock call will be queued to the exit stack.
-    :param add_cleanup: When the branch returned is read locked,
-        an unlock call will be queued to the cleanup.
     :return: (branch, info_list, start_rev_info, end_rev_info) where
         info_list is a list of (relative_path, file_id, kind) tuples where
         kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
         branch will be read-locked.
     """
+    from breezy.builtins import _get_revision_range
-    from brzlib.builtins import _get_revision_range
     tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
         file_list[0])
+    exit_stack.enter_context(b.lock_read())
-    add_cleanup(b.lock_read().unlock)
     # XXX: It's damn messy converting a list of paths to relative paths when
     # those paths might be deleted ones, they might be on a case-insensitive
     # filesystem and/or they might be in silly locations (like another branch).
@@ ... @@
             tree1 = b.repository.revision_tree(rev_id)
             file_id = tree1.path2id(fp)
+            kind = _get_kind_for_file_id(tree1, fp, file_id)
-            kind = _get_kind_for_file_id(tree1, file_id)
             info_list.append((fp, file_id, kind))
     return b, info_list, start_rev_info, end_rev_info
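The add_cleanup-to-exit_stack change above follows the contract documented in the docstring: the branch comes back read-locked and its unlock is registered on the caller's stack. A hypothetical caller, sketched under the assumptions that the working directory is a branch and that passing None for revisionspec_list means "no revision range"; _get_info_for_log_files is a private helper, so this is illustration only.

# Hypothetical caller (a sketch, not part of the diff): the ExitStack owns
# the read lock taken inside _get_info_for_log_files(), so leaving the
# with-block unlocks the branch.
from contextlib import ExitStack

from breezy.log import _get_info_for_log_files

with ExitStack() as exit_stack:
    b, info_list, start_rev_info, end_rev_info = _get_info_for_log_files(
        None, ['.'], exit_stack)
    for relpath, file_id, kind in info_list:
        print(relpath, kind)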
@@ ... @@
+def _get_kind_for_file_id(tree, path, file_id):
-def _get_kind_for_file_id(tree, file_id):
     """Return the kind of a file-id or None if it doesn't exist."""
     if file_id is not None:
+        return tree.kind(path)
-        return tree.kind(file_id)
@@ ... @@
 properties_handler_registry = registry.Registry()

 # Use the properties handlers to print out bug information if available
@@ ... @@
 def _bugs_properties_handler(revision):
+    fixed_bug_urls = []
+    related_bug_urls = []
+    for bug_url, status in revision.iter_bugs():
+        if status == 'fixed':
+            fixed_bug_urls.append(bug_url)
+        elif status == 'related':
+            related_bug_urls.append(bug_url)
+    ret = {}
+    if fixed_bug_urls:
+        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
+        ret[text] = ' '.join(fixed_bug_urls)
+    if related_bug_urls:
+        text = ngettext('related bug', 'related bugs',
+                        len(related_bug_urls))
+        ret[text] = ' '.join(related_bug_urls)
-    if revision.properties.has_key('bugs'):
-        bug_lines = revision.properties['bugs'].split('\n')
-        bug_rows = [line.split(' ', 1) for line in bug_lines]
-        fixed_bug_urls = [row[0] for row in bug_rows if
-                          len(row) > 1 and row[1] == 'fixed']
-        return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
-                ' '.join(fixed_bug_urls)}

 properties_handler_registry.register('bugs_properties_handler',
                                      _bugs_properties_handler)
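Handlers registered this way are called with the Revision object and return a dict of label-to-text pairs, which the long formatter feeds through _format_properties (see the handler(revision) call earlier in this diff). A hypothetical extra handler, for illustration only and not part of the diff:

# Hypothetical custom properties handler (a sketch): it surfaces a made-up
# 'ticket' revision property as an extra line in the long log output.
def _ticket_properties_handler(revision):
    ticket = revision.properties.get('ticket')
    if not ticket:
        return {}
    return {'ticket': ticket}


properties_handler_registry.register('ticket_properties_handler',
                                     _ticket_properties_handler)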