/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to object_store.py

Fix some more tests.

=== modified file 'object_store.py'

@@ -44 +44 @@
     symlink_to_blob,
     )
 from bzrlib.plugins.git.shamap import (
-    from_repository as idmap_from_repository,
+    from_repository as cache_from_repository,
     )
 
+import posixpath
+import stat
+
 
 def get_object_store(repo, mapping=None):
     git = getattr(repo, "_git", None)
@@ -55 +58 @@
     return BazaarObjectStore(repo, mapping)
 
 
-MAX_INV_CACHE_SIZE = 50 * 1024 * 1024
-
-
-class LRUInventoryCache(object):
+MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024
+
+
+class LRUTreeCache(object):
 
     def __init__(self, repository):
-        def approx_inv_size(inv):
+        def approx_tree_size(tree):
             # Very rough estimate, 1k per inventory entry
-            return len(inv) * 1024
+            return len(tree.inventory) * 1024
         self.repository = repository
-        self._cache = lru_cache.LRUSizeCache(max_size=MAX_INV_CACHE_SIZE,
-            after_cleanup_size=None, compute_size=approx_inv_size)
+        self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
+            after_cleanup_size=None, compute_size=approx_tree_size)
 
-    def get_inventory(self, revid):
+    def revision_tree(self, revid):
         try:
             return self._cache[revid]
         except KeyError:
-            inv = self.repository.get_inventory(revid)
-            self._cache.add(revid, inv)
-            return inv
-
-    def iter_inventories(self, revids):
-        invs = dict([(k, self._cache.get(k)) for k in revids])
-        for inv in self.repository.iter_inventories(
-                [r for r, v in invs.iteritems() if v is None]):
-            invs[inv.revision_id] = inv
-            self._cache.add(inv.revision_id, inv)
-        return (invs[r] for r in revids)
-
-    def get_inventories(self, revids):
-        return list(self.iter_inventories(revids))
-
-    def add(self, revid, inv):
-        self._cache.add(revid, inv)
+            tree = self.repository.revision_tree(revid)
+            self.add(tree)
+            return tree
+
+    def iter_revision_trees(self, revids):
+        trees = dict([(k, self._cache.get(k)) for k in revids])
+        for tree in self.repository.revision_trees(
+                [r for r, v in trees.iteritems() if v is None]):
+            trees[tree.get_revision_id()] = tree
+            self.add(tree)
+        return (trees[r] for r in revids)
+
+    def revision_trees(self, revids):
+        return list(self.iter_revision_trees(revids))
+
+    def add(self, tree):
+        self._cache.add(tree.get_revision_id(), tree)
+
+
+def _find_missing_bzr_revids(get_parent_map, want, have):
+    """Find the revisions that have to be pushed.
+
+    :param get_parent_map: Function that returns the parents for a sequence
+        of revisions.
+    :param want: Revisions the target wants
+    :param have: Revisions the target already has
+    :return: Set of revisions to fetch
+    """
+    pending = want - have
+    processed = set()
+    todo = set()
+    while pending:
+        processed.update(pending)
+        next_map = get_parent_map(pending)
+        next_pending = set()
+        for item in next_map.iteritems():
+            if item[0] in have:
+                continue
+            todo.add(item[0])
+            next_pending.update(p for p in item[1] if p not in processed)
+        pending = next_pending
+    if NULL_REVISION in todo:
+        todo.remove(NULL_REVISION)
+    return todo
 
 
 def _check_expected_sha(expected_sha, object):
@@ -112 +142 @@
             expected_sha))
 
 
-def _inventory_to_objects(inv, parent_invs, parent_invshamaps,
-        unusual_modes, iter_files_bytes, has_ghost_parents):
+def _tree_to_objects(tree, parent_trees, idmap, unusual_modes, dummy_file_name=None):
     """Iterate over the objects that were introduced in a revision.
 
-    :param inv: Inventory to process
-    :param parent_invs: parent inventory SHA maps
-    :param parent_invshamaps: parent inventory SHA Map
+    :param idmap: id map
     :param unusual_modes: Unusual file modes
-    :param iter_files_bytes: Repository.iter_files_bytes-like callback
-    :return: Yields (path, object) entries
+    :param dummy_file_name: File name to use for dummy files
+        in empty directories. None to skip empty directories
+    :return: Yields (path, object, ie) entries
     """
     new_trees = {}
     new_blobs = []
     shamap = {}
-    for path, ie in inv.entries():
-        if ie.kind == "file":
-            if ie.revision != inv.revision_id:
-                for (pinv, pinvshamap) in zip(parent_invs, parent_invshamaps):
+    try:
+        base_tree = parent_trees[0]
+        other_parent_trees = parent_trees[1:]
+    except IndexError:
+        base_tree = tree._repository.revision_tree(NULL_REVISION)
+        other_parent_trees = []
+    def find_unchanged_parent_ie(ie, parent_trees):
+        assert ie.kind in ("symlink", "file")
+        for ptree in parent_trees:
+            try:
+                pie = ptree.inventory[ie.file_id]
+            except errors.NoSuchId:
+                pass
+            else:
+                if (pie.text_sha1 == ie.text_sha1 and
+                    pie.kind == ie.kind and
+                    pie.symlink_target == ie.symlink_target):
+                    return pie
+        raise KeyError
+    for (file_id, path, changed_content, versioned, parent, name, kind,
+         executable) in tree.iter_changes(base_tree):
+        if kind[1] == "file":
+            ie = tree.inventory[file_id]
+            if changed_content:
+                try:
+                    pie = find_unchanged_parent_ie(ie, other_parent_trees)
+                except KeyError:
+                    pass
+                else:
                     try:
-                        pie = pinv[ie.file_id]
-                    except errors.NoSuchId:
-                        pass
-                    else:
-                        if (pie.text_sha1 == ie.text_sha1 and
-                            pie.kind == ie.kind):
-                            shamap[ie.file_id] = pinvshamap.lookup_blob(
-                                pie.file_id, pie.revision)
-                            break
-            if not ie.file_id in shamap:
-                new_blobs.append((path, ie))
-                new_trees[urlutils.dirname(path)] = ie.parent_id
-        elif ie.kind == "symlink":
-            blob = symlink_to_blob(ie)
-            for pinv in parent_invs:
-                try:
-                    pie = pinv[ie.file_id]
-                except errors.NoSuchId:
-                    pass
-                else:
-                    if (ie.kind == pie.kind and
-                        ie.symlink_target == pie.symlink_target):
-                        break
-            else:
-                yield path, blob
-                new_trees[urlutils.dirname(path)] = ie.parent_id
-            shamap[ie.file_id] = blob.id
-        elif ie.kind == "directory":
-            for (pinv, pinvshamap) in zip(parent_invs, parent_invshamaps):
-                try:
-                    pie = pinv[ie.file_id]
-                except errors.NoSuchId:
-                    pass
-                else:
-                    if (pie.kind == ie.kind and
-                        pie.children.keys() == ie.children.keys()):
-                        try:
-                            shamap[ie.file_id] = pinvshamap.lookup_tree(
-                                ie.file_id)
-                        except (NotImplementedError, KeyError):
-                            pass
-                        else:
-                            break
-            else:
-                new_trees[path] = ie.file_id
-        else:
-            raise AssertionError(ie.kind)
+                        shamap[ie.file_id] = idmap.lookup_blob_id(
+                            pie.file_id, pie.revision)
+                    except KeyError:
+                        # no-change merge ?
+                        blob = Blob()
+                        blob.data = tree.get_file_text(ie.file_id)
+                        shamap[ie.file_id] = blob.id
+            if not file_id in shamap:
+                new_blobs.append((path[1], ie))
+            new_trees[posixpath.dirname(path[1])] = parent[1]
+        elif kind[1] == "symlink":
+            ie = tree.inventory[file_id]
+            if changed_content:
+                blob = symlink_to_blob(ie)
+                shamap[file_id] = blob.id
+                try:
+                    find_unchanged_parent_ie(ie, other_parent_trees)
+                except KeyError:
+                    yield path[1], blob, ie
+            new_trees[posixpath.dirname(path[1])] = parent[1]
+        elif kind[1] not in (None, "directory"):
+            raise AssertionError(kind[1])
+        if path[0] is not None:
+            new_trees[posixpath.dirname(path[0])] = parent[0]
 
-    for (path, fid), chunks in iter_files_bytes(
-        [(ie.file_id, ie.revision, (path, ie.file_id))
-            for (path, ie) in new_blobs]):
+    for (path, ie), chunks in tree.iter_files_bytes(
+        [(ie.file_id, (path, ie)) for (path, ie) in new_blobs]):
         obj = Blob()
-        obj.data = "".join(chunks)
-        yield path, obj
-        shamap[fid] = obj.id
+        obj.chunked = chunks
+        yield path, obj, ie
+        shamap[ie.file_id] = obj.id
 
-    for fid in unusual_modes:
-        new_trees[inv.id2path(fid)] = inv[fid].parent_id
+    for path in unusual_modes:
+        parent_path = posixpath.dirname(path)
+        new_trees[parent_path] = tree.path2id(parent_path)
 
     trees = {}
     while new_trees:
         items = new_trees.items()
         new_trees = {}
         for path, file_id in items:
-            parent_id = inv[file_id].parent_id
+            try:
+                parent_id = tree.inventory[file_id].parent_id
+            except errors.NoSuchId:
+                # Directory was removed recursively perhaps ?
+                continue
             if parent_id is not None:
                 parent_path = urlutils.dirname(path)
                 new_trees[parent_path] = parent_id
@@ -205 +239 @@
         try:
             return shamap[ie.file_id]
         except KeyError:
-            # Not all cache backends store the tree information,
-            # calculate again from scratch
-            ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes)
-            if ret is None:
-                return ret
-            return ret.id
+            # FIXME: Should be the same as in parent
+            if ie.kind in ("file", "symlink"):
+                try:
+                    return idmap.lookup_blob_id(ie.file_id, ie.revision)
+                except KeyError:
+                    # no-change merge ?
+                    blob = Blob()
+                    blob.data = tree.get_file_text(ie.file_id)
+                    return blob.id
+            elif ie.kind == "directory":
+                # Not all cache backends store the tree information,
+                # calculate again from scratch
+                ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
+                    dummy_file_name)
+                if ret is None:
+                    return ret
+                return ret.id
+            else:
+                raise AssertionError
 
     for path in sorted(trees.keys(), reverse=True):
-        ie = inv[trees[path]]
+        ie = tree.inventory[trees[path]]
         assert ie.kind == "directory"
-        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes)
+        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
+            dummy_file_name)
         if obj is not None:
-            yield path, obj
+            yield path, obj, ie
             shamap[ie.file_id] = obj.id
 
 
@@ -230 +278 @@
             self.mapping = default_mapping
         else:
             self.mapping = mapping
-        self._idmap = idmap_from_repository(repository)
-        self.start_write_group = self._idmap.start_write_group
-        self.abort_write_group = self._idmap.abort_write_group
-        self.commit_write_group = self._idmap.commit_write_group
-        self.parent_invs_cache = LRUInventoryCache(self.repository)
+        self._cache = cache_from_repository(repository)
+        self._content_cache_types = ("tree")
+        self.start_write_group = self._cache.idmap.start_write_group
+        self.abort_write_group = self._cache.idmap.abort_write_group
+        self.commit_write_group = self._cache.idmap.commit_write_group
+        self.tree_cache = LRUTreeCache(self.repository)
 
     def _update_sha_map(self, stop_revision=None):
         graph = self.repository.get_graph()
@@ -242 +291 @@
             heads = graph.heads(self.repository.all_revision_ids())
         else:
             heads = set([stop_revision])
-        missing_revids = self._idmap.missing_revisions(heads)
+        missing_revids = self._cache.idmap.missing_revisions(heads)
        while heads:
             parents = graph.get_parent_map(heads)
             todo = set()
             for p in parents.values():
                 todo.update([x for x in p if x not in missing_revids])
-            heads = self._idmap.missing_revisions(todo)
+            heads = self._cache.idmap.missing_revisions(todo)
             missing_revids.update(heads)
         if NULL_REVISION in missing_revids:
             missing_revids.remove(NULL_REVISION)
@@ -273 +322 @@
 
     def __iter__(self):
         self._update_sha_map()
-        return iter(self._idmap.sha1s())
+        return iter(self._cache.idmap.sha1s())
 
-    def _revision_to_commit(self, rev, tree_sha):
+    def _reconstruct_commit(self, rev, tree_sha, roundtrip):
         def parent_lookup(revid):
             try:
                 return self._lookup_revision_sha1(revid)
             except errors.NoSuchRevision:
-                trace.warning("Ignoring ghost parent %s", revid)
                 return None
-        return self.mapping.export_commit(rev, tree_sha, parent_lookup)
-
-    def _revision_to_objects(self, rev, inv):
+        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
+            roundtrip)
+
+    def _create_fileid_map_blob(self, inv):
+        # FIXME: This can probably be a lot more efficient,
+        # not all files necessarily have to be processed.
+        file_ids = {}
+        for (path, ie) in inv.iter_entries():
+            if self.mapping.generate_file_id(path) != ie.file_id:
+                file_ids[path] = ie.file_id
+        return self.mapping.export_fileid_map(file_ids)
+
+    def _revision_to_objects(self, rev, tree, roundtrip):
+        """Convert a revision to a set of git objects.
+
+        :param rev: Bazaar revision object
+        :param tree: Bazaar revision tree
+        :param roundtrip: Whether to roundtrip all Bazaar revision data
+        """
         unusual_modes = extract_unusual_modes(rev)
         present_parents = self.repository.has_revisions(rev.parent_ids)
-        has_ghost_parents = (len(rev.parent_ids) < len(present_parents))
-        parent_invs = self.parent_invs_cache.get_inventories(
+        parent_trees = self.tree_cache.revision_trees(
             [p for p in rev.parent_ids if p in present_parents])
-        parent_invshamaps = [self._idmap.get_inventory_sha_map(r) for r in rev.parent_ids if r in present_parents]
-        tree_sha = None
-        for path, obj in _inventory_to_objects(inv, parent_invs,
-                parent_invshamaps, unusual_modes,
-                self.repository.iter_files_bytes, has_ghost_parents):
-            yield path, obj
+        root_tree = None
+        for path, obj, ie in _tree_to_objects(tree, parent_trees,
+                self._cache.idmap, unusual_modes, self.mapping.BZR_DUMMY_FILE):
             if path == "":
-                tree_sha = obj.id
-        if tree_sha is None:
+                root_tree = obj
+                root_ie = ie
+                # Don't yield just yet
+            else:
+                yield path, obj, ie
+        if root_tree is None:
+            # Pointless commit - get the tree sha elsewhere
             if not rev.parent_ids:
-                tree_sha = Tree().id
+                root_tree = Tree()
             else:
-                tree_sha = parent_invshamaps[0][inv.root.file_id]
-        commit_obj = self._revision_to_commit(rev, tree_sha)
+                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
+                root_tree = self[self[base_sha1].tree]
+            root_ie = tree.inventory.root
+        if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
+            b = self._create_fileid_map_blob(tree.inventory)
+            if b is not None:
+                root_tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
+                yield self.mapping.BZR_FILE_IDS_FILE, b, None
+        yield "", root_tree, root_ie
+        commit_obj = self._reconstruct_commit(rev, root_tree.id,
+            roundtrip=roundtrip)
         try:
-            foreign_revid, mapping = mapping_registry.parse_revision_id(rev.revision_id)
+            foreign_revid, mapping = mapping_registry.parse_revision_id(
+                rev.revision_id)
         except errors.InvalidRevisionId:
             pass
         else:
             _check_expected_sha(foreign_revid, commit_obj)
-        yield None, commit_obj
+        yield None, commit_obj, None
+
+    def _get_updater(self, rev):
+        return self._cache.get_updater(rev)
 
     def _update_sha_map_revision(self, revid):
         rev = self.repository.get_revision(revid)
-        inv = self.parent_invs_cache.get_inventory(rev.revision_id)
-        commit_obj = None
-        entries = []
-        for path, obj in self._revision_to_objects(rev, inv):
-            if obj._type == "commit":
-                commit_obj = obj
-            elif obj._type in ("blob", "tree"):
-                file_id = inv.path2id(path)
-                ie = inv[file_id]
-                if obj._type == "blob":
-                    revision = ie.revision
-                else:
-                    revision = revid
-                entries.append((file_id, obj._type, obj.id, revision))
-            else:
-                raise AssertionError
-        self._idmap.add_entries(revid, rev.parent_ids, commit_obj.id,
-            commit_obj.tree, entries)
+        tree = self.tree_cache.revision_tree(rev.revision_id)
+        updater = self._get_updater(rev)
+        for path, obj, ie in self._revision_to_objects(rev, tree,
+            roundtrip=True):
+            updater.add_object(obj, ie)
+        commit_obj = updater.finish()
         return commit_obj.id
 
-    def _get_blob(self, fileid, revision, expected_sha):
+    def _reconstruct_blobs(self, keys):
         """Return a Git Blob object from a fileid and revision stored in bzr.
 
         :param fileid: File id of the text
         :param revision: Revision of the text
         """
-        blob = Blob()
-        chunks = self.repository.iter_files_bytes([(fileid, revision, None)]).next()[1]
-        blob.data = "".join(chunks)
-        if blob.id != expected_sha:
-            # Perhaps it's a symlink ?
-            inv = self.parent_invs_cache.get_inventory(revision)
-            entry = inv[fileid]
-            assert entry.kind == 'symlink'
-            blob = symlink_to_blob(entry)
-        _check_expected_sha(expected_sha, blob)
-        return blob
+        stream = self.repository.iter_files_bytes(
+            ((key[0], key[1], key) for key in keys))
+        for (fileid, revision, expected_sha), chunks in stream:
+            blob = Blob()
+            blob.chunked = chunks
+            if blob.id != expected_sha and blob.data == "":
+                # Perhaps it's a symlink ?
+                tree = self.tree_cache.revision_tree(revision)
+                entry = tree.inventory[fileid]
+                if entry.kind == 'symlink':
+                    blob = symlink_to_blob(entry)
+            _check_expected_sha(expected_sha, blob)
+            yield blob
 
-    def _get_tree(self, fileid, revid, inv, unusual_modes, expected_sha=None):
+    def _reconstruct_tree(self, fileid, revid, inv, unusual_modes,
+        expected_sha=None):
         """Return a Git Tree object from a file id and a revision stored in bzr.
 
         :param fileid: fileid in the tree.
         :param revision: Revision of the tree.
         """
-        invshamap = self._idmap.get_inventory_sha_map(inv.revision_id)
         def get_ie_sha1(entry):
             if entry.kind == "directory":
                 try:
-                    return invshamap.lookup_tree(entry.file_id)
+                    return self._cache.idmap.lookup_tree_id(entry.file_id,
+                        revid)
                 except (NotImplementedError, KeyError):
-                    obj = self._get_tree(entry.file_id, revid, inv,
+                    obj = self._reconstruct_tree(entry.file_id, revid, inv,
                         unusual_modes)
                     if obj is None:
                         return None
                     else:
                         return obj.id
             elif entry.kind in ("file", "symlink"):
-                return invshamap.lookup_blob(entry.file_id, entry.revision)
+                try:
+                    return self._cache.idmap.lookup_blob_id(entry.file_id,
+                        entry.revision)
+                except KeyError:
+                    # no-change merge?
+                    return self._reconstruct_blobs(
+                        [(entry.file_id, entry.revision, None)]).next().id
             else:
                 raise AssertionError("unknown entry kind '%s'" % entry.kind)
-        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes)
+        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes,
+            self.mapping.BZR_DUMMY_FILE)
+        if (inv.root.file_id == fileid and
+            self.mapping.BZR_FILE_IDS_FILE is not None):
+            b = self._create_fileid_map_blob(inv)
+            # If this is the root tree, add the file ids
+            tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
         _check_expected_sha(expected_sha, tree)
         return tree
 
@@ -388 +469 @@
 
     def _lookup_revision_sha1(self, revid):
         """Return the SHA1 matching a Bazaar revision."""
+        from dulwich.protocol import ZERO_SHA
         if revid == NULL_REVISION:
-            return "0" * 40
+            return ZERO_SHA
         try:
-            return self._idmap.lookup_commit(revid)
+            return self._cache.idmap.lookup_commit(revid)
         except KeyError:
             try:
                 return mapping_registry.parse_revision_id(revid)[0]
             except errors.InvalidRevisionId:
-                self._update_sha_map(revid)
-                return self._idmap.lookup_commit(revid)
+                self.repository.lock_read()
+                try:
+                    self._update_sha_map(revid)
+                finally:
+                    self.repository.unlock()
+                return self._cache.idmap.lookup_commit(revid)
 
     def get_raw(self, sha):
         """Get the raw representation of a Git object by SHA1.
@@ -410 +496 @@
     def __contains__(self, sha):
         # See if sha is in map
         try:
-            (type, type_data) = self._lookup_git_sha(sha)
+            (type, type_data) = self.lookup_git_sha(sha)
             if type == "commit":
                 return self.repository.has_revision(type_data[0])
             elif type == "blob":
@@ -422 +508 @@
         except KeyError:
             return False
 
-    def _lookup_git_sha(self, sha):
-        # See if sha is in map
-        try:
-            return self._idmap.lookup_git_sha(sha)
-        except KeyError:
-            # if not, see if there are any unconverted revisions and add them
-            # to the map, search for sha in map again
-            self._update_sha_map()
-            return self._idmap.lookup_git_sha(sha)
+    def lookup_git_shas(self, shas, update_map=True):
+        ret = {}
+        for sha in shas:
+            try:
+                ret[sha] = self._cache.idmap.lookup_git_sha(sha)
+            except KeyError:
+                if update_map:
+                    # if not, see if there are any unconverted revisions and add
+                    # them to the map, search for sha in map again
+                    self._update_sha_map()
+                    update_map = False
+                    try:
+                        ret[sha] = self._cache.idmap.lookup_git_sha(sha)
+                    except KeyError:
+                        pass
+        return ret
+
+    def lookup_git_sha(self, sha, update_map=True):
+        return self.lookup_git_shas([sha], update_map=update_map)[sha]
 
     def __getitem__(self, sha):
-        (type, type_data) = self._lookup_git_sha(sha)
+        if self._cache.content_cache is not None:
+            try:
+                return self._cache.content_cache[sha]
+            except KeyError:
+                pass
+        (type, type_data) = self.lookup_git_sha(sha)
         # convert object to git object
         if type == "commit":
             (revid, tree_sha) = type_data
             try:
                 rev = self.repository.get_revision(revid)
             except errors.NoSuchRevision:
-                trace.mutter('entry for %s %s in shamap: %r, but not found in repository', type, sha, type_data)
+                trace.mutter('entry for %s %s in shamap: %r, but not found in '
+                             'repository', type, sha, type_data)
                 raise KeyError(sha)
-            commit = self._revision_to_commit(rev, tree_sha)
+            commit = self._reconstruct_commit(rev, tree_sha, roundtrip=True)
             _check_expected_sha(sha, commit)
             return commit
         elif type == "blob":
             (fileid, revision) = type_data
-            return self._get_blob(fileid, revision, expected_sha=sha)
+            return self._reconstruct_blobs([(fileid, revision, sha)]).next()
         elif type == "tree":
             (fileid, revid) = type_data
             try:
-                inv = self.parent_invs_cache.get_inventory(revid)
+                tree = self.tree_cache.revision_tree(revid)
                 rev = self.repository.get_revision(revid)
             except errors.NoSuchRevision:
                 trace.mutter('entry for %s %s in shamap: %r, but not found in repository', type, sha, type_data)
                 raise KeyError(sha)
             unusual_modes = extract_unusual_modes(rev)
             try:
-                return self._get_tree(fileid, revid, inv,
+                return self._reconstruct_tree(fileid, revid, tree.inventory,
                     unusual_modes, expected_sha=sha)
             except errors.NoSuchRevision:
                 raise KeyError(sha)
         else:
             raise AssertionError("Unknown object type '%s'" % type)
 
-    def generate_pack_contents(self, have, want):
+    def generate_lossy_pack_contents(self, have, want, progress=None,
+            get_tagged=None):
+        return self.generate_pack_contents(have, want, progress, get_tagged,
+            lossy=True)
+
+    def generate_pack_contents(self, have, want, progress=None,
+            get_tagged=None, lossy=False):
         """Iterate over the contents of a pack file.
 
         :param have: List of SHA1s of objects that should not be sent
         :param want: List of SHA1s of objects that should be sent
         """
         processed = set()
+        ret = self.lookup_git_shas(have + want)
         for commit_sha in have:
             try:
-                (type, (revid, tree_sha)) = self._lookup_git_sha(commit_sha)
+                (type, (revid, tree_sha)) = ret[commit_sha]
             except KeyError:
                 pass
             else:
@@ -484 +593 @@
         for commit_sha in want:
             if commit_sha in have:
                 continue
-            (type, (revid, tree_sha)) = self._lookup_git_sha(commit_sha)
-            assert type == "commit"
-            pending.add(revid)
-        todo = set()
-        while pending:
-            processed.update(pending)
-            next_map = self.repository.get_parent_map(pending)
-            next_pending = set()
-            for item in next_map.iteritems():
-                todo.add(item[0])
-                next_pending.update(p for p in item[1] if p not in processed)
-            pending = next_pending
-        if NULL_REVISION in todo:
-            todo.remove(NULL_REVISION)
+            try:
+                (type, (revid, tree_sha)) = ret[commit_sha]
+            except KeyError:
+                pass
+            else:
+                assert type == "commit"
+                pending.add(revid)
+
+        todo = _find_missing_bzr_revids(self.repository.get_parent_map,
+                                        pending, processed)
         trace.mutter('sending revisions %r', todo)
         ret = []
         pb = ui.ui_factory.nested_progress_bar()
@@ -505 +610 @@
             for i, revid in enumerate(todo):
                 pb.update("generating git objects", i, len(todo))
                 rev = self.repository.get_revision(revid)
-                inv = self.parent_invs_cache.get_inventory(revid)
-                for path, obj in self._revision_to_objects(rev, inv):
+                tree = self.tree_cache.revision_tree(revid)
+                for path, obj, ie in self._revision_to_objects(rev, tree,
+                    roundtrip=not lossy):
                     ret.append((obj, path))
         finally:
             pb.finished()
         return ret
+
+    def add_thin_pack(self):
+        import tempfile
+        import os
+        fd, path = tempfile.mkstemp(suffix=".pack")
+        f = os.fdopen(fd, 'wb')
+        def commit():
+            from dulwich.pack import PackData, Pack
+            from bzrlib.plugins.git.fetch import import_git_objects
+            os.fsync(fd)
+            f.close()
+            if os.path.getsize(path) == 0:
+                return
+            pd = PackData(path)
+            pd.create_index_v2(path[:-5]+".idx", self.object_store.get_raw)
+
+            p = Pack(path[:-5])
+            self.repository.lock_write()
+            try:
+                self.repository.start_write_group()
+                try:
+                    import_git_objects(self.repository, self.mapping,
+                        p.iterobjects(get_raw=self.get_raw),
+                        self.object_store)
+                except:
+                    self.repository.abort_write_group()
+                    raise
+                else:
+                    self.repository.commit_write_group()
+            finally:
+                self.repository.unlock()
+        return f, commit
+
+    # The pack isn't kept around anyway, so no point
+    # in treating full packs different from thin packs
+    add_pack = add_thin_pack
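
The new _find_missing_bzr_revids() helper factors the parent-graph walk out of generate_pack_contents(). A minimal standalone sketch of how it behaves, using a toy dict-backed parent map instead of Repository.get_parent_map; the revision ids and graph below are made up for illustration, NULL_REVISION is replaced by the literal 'null:' and iteritems() by items() so the sketch runs on its own:

    NULL_REVISION = 'null:'

    def find_missing_bzr_revids(get_parent_map, want, have):
        # Walk from the wanted revisions towards the start of history,
        # stopping at revisions the target already has.
        pending = want - have
        processed = set()
        todo = set()
        while pending:
            processed.update(pending)
            next_map = get_parent_map(pending)
            next_pending = set()
            for revid, parents in next_map.items():
                if revid in have:
                    continue
                todo.add(revid)
                next_pending.update(p for p in parents if p not in processed)
            pending = next_pending
        todo.discard(NULL_REVISION)
        return todo

    # Hypothetical three-revision branch: rev-c -> rev-b -> rev-a -> null:
    graph = {'rev-c': ('rev-b',), 'rev-b': ('rev-a',), 'rev-a': (NULL_REVISION,)}
    get_parent_map = lambda revids: dict((r, graph[r]) for r in revids if r in graph)
    # The target already has rev-a and wants rev-c, so rev-b and rev-c are pushed.
    print(sorted(find_missing_bzr_revids(get_parent_map, {'rev-c'}, {'rev-a'})))
    # ['rev-b', 'rev-c']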
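generate_pack_contents() now resolves all of have + want in a single lookup_git_shas() call, and that helper refreshes the SHA map at most once when it hits a miss. A rough standalone sketch of that lookup pattern, with a plain dict standing in for self._cache.idmap and a stub _update_sha_map(); the class name, shas and data here are illustrative only, not the plugin's real API:

    class ToyObjectStore(object):
        """Illustration of the batched lookup; not the real BazaarObjectStore."""

        def __init__(self):
            self._idmap = {'aa' * 20: ('commit', ('rev-a', 'tree-a'))}
            self._unconverted = {'bb' * 20: ('commit', ('rev-b', 'tree-b'))}

        def _update_sha_map(self):
            # Stand-in for converting any not-yet-mapped revisions into the map.
            self._idmap.update(self._unconverted)

        def lookup_git_shas(self, shas, update_map=True):
            ret = {}
            for sha in shas:
                try:
                    ret[sha] = self._idmap[sha]
                except KeyError:
                    if update_map:
                        # Refresh the map once, then retry; later misses are
                        # simply left out of the result instead of raising.
                        self._update_sha_map()
                        update_map = False
                        try:
                            ret[sha] = self._idmap[sha]
                        except KeyError:
                            pass
            return ret

    store = ToyObjectStore()
    found = store.lookup_git_shas(['aa' * 20, 'bb' * 20, 'cc' * 20])
    print(sorted(found))
    # the 'aa...' and 'bb...' shas resolve; 'cc...' is absent from the result

Callers such as generate_pack_contents() then index into the returned dict inside try/except KeyError, so unknown "have" shas are skipped rather than triggering repeated map updates.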