/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to object_store.py

Handle empty metadata.

=== modified file 'object_store.py'
@@ -43,10 +43,13 @@
     mapping_registry,
     symlink_to_blob,
     )
-from bzrlib.plugins.git.shamap import (
+from bzrlib.plugins.git.cache import (
     from_repository as cache_from_repository,
     )

+import posixpath
+import stat
+

 def get_object_store(repo, mapping=None):
     git = getattr(repo, "_git", None)
@@ -55,40 +58,77 @@
     return BazaarObjectStore(repo, mapping)


-MAX_INV_CACHE_SIZE = 50 * 1024 * 1024
-
-
-class LRUInventoryCache(object):
+MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024
+
+
+class LRUTreeCache(object):

     def __init__(self, repository):
-        def approx_inv_size(inv):
+        def approx_tree_size(tree):
             # Very rough estimate, 1k per inventory entry
-            return len(inv) * 1024
+            return len(tree.inventory) * 1024
         self.repository = repository
-        self._cache = lru_cache.LRUSizeCache(max_size=MAX_INV_CACHE_SIZE,
-            after_cleanup_size=None, compute_size=approx_inv_size)
+        self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
+            after_cleanup_size=None, compute_size=approx_tree_size)

-    def get_inventory(self, revid):
+    def revision_tree(self, revid):
         try:
-            return self._cache[revid]
+            tree = self._cache[revid]
         except KeyError:
-            inv = self.repository.get_inventory(revid)
-            self._cache.add(revid, inv)
-            return inv
-
-    def iter_inventories(self, revids):
-        invs = dict([(k, self._cache.get(k)) for k in revids])
-        for inv in self.repository.iter_inventories(
-                [r for r, v in invs.iteritems() if v is None]):
-            invs[inv.revision_id] = inv
-            self._cache.add(inv.revision_id, inv)
-        return (invs[r] for r in revids)
-
-    def get_inventories(self, revids):
-        return list(self.iter_inventories(revids))
-
-    def add(self, revid, inv):
-        self._cache.add(revid, inv)
+            tree = self.repository.revision_tree(revid)
+            self.add(tree)
+        assert tree.get_revision_id() == tree.inventory.revision_id
+        return tree
+
+    def iter_revision_trees(self, revids):
+        trees = {}
+        todo = []
+        for revid in revids:
+            try:
+                tree = self._cache[revid]
+            except KeyError:
+                todo.append(revid)
+            else:
+                assert tree.get_revision_id() == revid
+                assert tree.inventory.revision_id == revid
+                trees[revid] = tree
+        for tree in self.repository.revision_trees(todo):
+            trees[tree.get_revision_id()] = tree
+            self.add(tree)
+        return (trees[r] for r in revids)
+
+    def revision_trees(self, revids):
+        return list(self.iter_revision_trees(revids))
+
+    def add(self, tree):
+        self._cache.add(tree.get_revision_id(), tree)
+
+
+def _find_missing_bzr_revids(get_parent_map, want, have):
+    """Find the revisions that have to be pushed.
+
+    :param get_parent_map: Function that returns the parents for a sequence
+        of revisions.
+    :param want: Revisions the target wants
+    :param have: Revisions the target already has
+    :return: Set of revisions to fetch
+    """
+    pending = want - have
+    processed = set()
+    todo = set()
+    while pending:
+        processed.update(pending)
+        next_map = get_parent_map(pending)
+        next_pending = set()
+        for item in next_map.iteritems():
+            if item[0] in have:
+                continue
+            todo.add(item[0])
+            next_pending.update(p for p in item[1] if p not in processed)
+        pending = next_pending
+    if NULL_REVISION in todo:
+        todo.remove(NULL_REVISION)
+    return todo


 def _check_expected_sha(expected_sha, object):
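
The _find_missing_bzr_revids helper extracted here is a plain breadth-first
walk over the revision graph, so it can be exercised on its own. A minimal
sketch, assuming only the function above; the revision ids and the
get_parent_map stub are invented for illustration:

    NULL_REVISION = "null:"  # bzrlib's sentinel revision id

    def get_parent_map(revids):
        # Stand-in for Repository.get_parent_map over an invented graph.
        graph = {"rev-c": ("rev-b",), "rev-b": ("rev-a",),
                 "rev-a": (NULL_REVISION,)}
        return dict((r, graph[r]) for r in revids if r in graph)

    # The target wants rev-c and already has rev-a, so the walk stops at
    # rev-a and never adds it (or NULL_REVISION) to the result.
    missing = _find_missing_bzr_revids(get_parent_map, set(["rev-c"]),
                                       set(["rev-a"]))
    assert missing == set(["rev-b", "rev-c"])
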
@@ -112,90 +152,98 @@
             expected_sha))


-def _inventory_to_objects(inv, parent_invs, idmap,
-        unusual_modes, iter_files_bytes, has_ghost_parents):
+def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
+                     dummy_file_name=None):
     """Iterate over the objects that were introduced in a revision.

-    :param inv: Inventory to process
-    :param parent_invs: parent inventory SHA maps
     :param idmap: id map
-    :param unusual_modes: Unusual file modes
-    :param iter_files_bytes: Repository.iter_files_bytes-like callback
+    :param parent_trees: Parent revision trees
+    :param unusual_modes: Unusual file modes dictionary
+    :param dummy_file_name: File name to use for dummy files
+        in empty directories. None to skip empty directories
     :return: Yields (path, object, ie) entries
     """
     new_trees = {}
     new_blobs = []
     shamap = {}
-    for path, ie in inv.entries():
-        if ie.kind == "file":
-            if ie.revision != inv.revision_id:
-                for pinv in parent_invs:
+    try:
+        base_tree = parent_trees[0]
+        other_parent_trees = parent_trees[1:]
+    except IndexError:
+        base_tree = tree._repository.revision_tree(NULL_REVISION)
+        other_parent_trees = []
+    def find_unchanged_parent_ie(ie, parent_trees):
+        assert ie.kind in ("symlink", "file")
+        for ptree in parent_trees:
+            try:
+                pie = ptree.inventory[ie.file_id]
+            except errors.NoSuchId:
+                pass
+            else:
+                if (pie.text_sha1 == ie.text_sha1 and
+                    pie.kind == ie.kind and
+                    pie.symlink_target == ie.symlink_target):
+                    return pie
+        raise KeyError
+
+    # Find all the changed blobs
+    for (file_id, path, changed_content, versioned, parent, name, kind,
+         executable) in tree.iter_changes(base_tree):
+        if kind[1] == "file":
+            ie = tree.inventory[file_id]
+            if changed_content:
+                try:
+                    pie = find_unchanged_parent_ie(ie, other_parent_trees)
+                except KeyError:
+                    pass
+                else:
                     try:
-                        pie = pinv[ie.file_id]
-                    except errors.NoSuchId:
-                        pass
-                    else:
-                        if (pie.text_sha1 == ie.text_sha1 and
-                            pie.kind == ie.kind):
-                            shamap[ie.file_id] = idmap.lookup_blob_id(
-                                pie.file_id, pie.revision)
-                            break
-            if not ie.file_id in shamap:
-                new_blobs.append((path, ie))
-                new_trees[urlutils.dirname(path)] = ie.parent_id
-        elif ie.kind == "symlink":
-            blob = symlink_to_blob(ie)
-            for pinv in parent_invs:
-                try:
-                    pie = pinv[ie.file_id]
-                except errors.NoSuchId:
-                    pass
-                else:
-                    if (ie.kind == pie.kind and
-                        ie.symlink_target == pie.symlink_target):
-                        break
-            else:
-                yield path, blob, ie
-                new_trees[urlutils.dirname(path)] = ie.parent_id
-            shamap[ie.file_id] = blob.id
-        elif ie.kind == "directory":
-            for pinv in parent_invs:
-                try:
-                    pie = pinv[ie.file_id]
-                except errors.NoSuchId:
-                    pass
-                else:
-                    if (pie.kind == ie.kind and
-                        pie.children.keys() == ie.children.keys()):
-                        try:
-                            shamap[ie.file_id] = idmap.lookup_tree_id(
-                                ie.file_id, inv.revision_id)
-                        except (NotImplementedError, KeyError):
-                            pass
-                        else:
-                            break
-            else:
-                new_trees[path] = ie.file_id
-        else:
-            raise AssertionError(ie.kind)
+                        shamap[ie.file_id] = idmap.lookup_blob_id(
+                            pie.file_id, pie.revision)
+                    except KeyError:
+                        # no-change merge ?
+                        blob = Blob()
+                        blob.data = tree.get_file_text(ie.file_id)
+                        shamap[ie.file_id] = blob.id
+            if not file_id in shamap:
+                new_blobs.append((path[1], ie))
+            new_trees[posixpath.dirname(path[1])] = parent[1]
+        elif kind[1] == "symlink":
+            ie = tree.inventory[file_id]
+            if changed_content:
+                blob = symlink_to_blob(ie)
+                shamap[file_id] = blob.id
+                try:
+                    find_unchanged_parent_ie(ie, other_parent_trees)
+                except KeyError:
+                    yield path[1], blob, ie
+            new_trees[posixpath.dirname(path[1])] = parent[1]
+        elif kind[1] not in (None, "directory"):
+            raise AssertionError(kind[1])
+        if (path[0] not in (None, "") and
+            parent[0] in tree.inventory and
+            tree.inventory[parent[0]].kind == "directory"):
+            # Removal
+            new_trees[posixpath.dirname(path[0])] = parent[0]

-    for (path, ie), chunks in iter_files_bytes(
-        [(ie.file_id, ie.revision, (path, ie))
-            for (path, ie) in new_blobs]):
+    # Fetch contents of the blobs that were changed
+    for (path, ie), chunks in tree.iter_files_bytes(
+        [(ie.file_id, (path, ie)) for (path, ie) in new_blobs]):
         obj = Blob()
         obj.chunked = chunks
         yield path, obj, ie
         shamap[ie.file_id] = obj.id

-    for fid in unusual_modes:
-        new_trees[inv.id2path(fid)] = inv[fid].parent_id
-
+    for path in unusual_modes:
+        parent_path = posixpath.dirname(path)
+        new_trees[parent_path] = tree.path2id(parent_path)
+
     trees = {}
     while new_trees:
         items = new_trees.items()
         new_trees = {}
         for path, file_id in items:
-            parent_id = inv[file_id].parent_id
+            parent_id = tree.inventory[file_id].parent_id
             if parent_id is not None:
                 parent_path = urlutils.dirname(path)
                 new_trees[parent_path] = parent_id
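
The rewrite walks tree.iter_changes(base_tree) instead of every inventory
entry. iter_changes yields one tuple per changed file whose positional
fields are (old, new) pairs, which is why the new code reads path[1] and
kind[1] for the post-change value and path[0]/parent[0] when recording a
removal. A sketch of the tuple shape only; the values are invented:

    # (file_id, (old_path, new_path), changed_content, versioned, parent,
    #  name, kind, executable) -- each trailing field an (old, new) pair.
    change = ("foo-id", (None, "foo.txt"), True, (False, True),
              (None, "root-id"), (None, "foo.txt"), (None, "file"),
              (False, False))
    (file_id, path, changed_content, versioned, parent, name, kind,
     executable) = change
    assert kind[1] == "file" and path[0] is None  # added in the new tree
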
@@ -205,17 +253,31 @@
         try:
             return shamap[ie.file_id]
         except KeyError:
-            # Not all cache backends store the tree information,
-            # calculate again from scratch
-            ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes)
-            if ret is None:
-                return ret
-            return ret.id
+            # FIXME: Should be the same as in parent
+            if ie.kind in ("file", "symlink"):
+                try:
+                    return idmap.lookup_blob_id(ie.file_id, ie.revision)
+                except KeyError:
+                    # no-change merge ?
+                    blob = Blob()
+                    blob.data = tree.get_file_text(ie.file_id)
+                    return blob.id
+            elif ie.kind == "directory":
+                # Not all cache backends store the tree information,
+                # calculate again from scratch
+                ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
+                    dummy_file_name)
+                if ret is None:
+                    return ret
+                return ret.id
+            else:
+                raise AssertionError

     for path in sorted(trees.keys(), reverse=True):
-        ie = inv[trees[path]]
+        ie = tree.inventory[trees[path]]
         assert ie.kind == "directory"
-        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes)
+        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
+            dummy_file_name)
         if obj is not None:
             yield path, obj, ie
             shamap[ie.file_id] = obj.id
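
The new fallback for a "no-change merge" computes the blob SHA directly
from the file text instead of consulting the id map. With dulwich that is
a one-liner, since a Blob knows its own object id; a minimal sketch (the
contents are invented):

    from dulwich.objects import Blob

    blob = Blob()
    blob.data = "file contents\n"
    print blob.id  # the hex SHA1 git would assign this blob
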
@@ -235,7 +297,7 @@
         self.start_write_group = self._cache.idmap.start_write_group
         self.abort_write_group = self._cache.idmap.abort_write_group
         self.commit_write_group = self._cache.idmap.commit_write_group
-        self.parent_invs_cache = LRUInventoryCache(self.repository)
+        self.tree_cache = LRUTreeCache(self.repository)

     def _update_sha_map(self, stop_revision=None):
         graph = self.repository.get_graph()
@@ -276,34 +338,60 @@
         self._update_sha_map()
         return iter(self._cache.idmap.sha1s())

-    def _revision_to_commit(self, rev, tree_sha):
+    def _reconstruct_commit(self, rev, tree_sha, roundtrip):
         def parent_lookup(revid):
             try:
                 return self._lookup_revision_sha1(revid)
             except errors.NoSuchRevision:
-                trace.warning("Ignoring ghost parent %s", revid)
                 return None
-        return self.mapping.export_commit(rev, tree_sha, parent_lookup)
-
-    def _revision_to_objects(self, rev, inv):
+        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
+            roundtrip)
+
+    def _create_fileid_map_blob(self, inv):
+        # FIXME: This can probably be a lot more efficient,
+        # not all files necessarily have to be processed.
+        file_ids = {}
+        for (path, ie) in inv.iter_entries():
+            if self.mapping.generate_file_id(path) != ie.file_id:
+                file_ids[path] = ie.file_id
+        return self.mapping.export_fileid_map(file_ids)
+
+    def _revision_to_objects(self, rev, tree, roundtrip):
+        """Convert a revision to a set of git objects.
+
+        :param rev: Bazaar revision object
+        :param tree: Bazaar revision tree
+        :param roundtrip: Whether to roundtrip all Bazaar revision data
+        """
         unusual_modes = extract_unusual_modes(rev)
         present_parents = self.repository.has_revisions(rev.parent_ids)
-        has_ghost_parents = (len(rev.parent_ids) < len(present_parents))
-        parent_invs = self.parent_invs_cache.get_inventories(
+        parent_trees = self.tree_cache.revision_trees(
             [p for p in rev.parent_ids if p in present_parents])
-        tree_sha = None
-        for path, obj, ie in _inventory_to_objects(inv, parent_invs,
-                self._cache.idmap, unusual_modes,
-                self.repository.iter_files_bytes, has_ghost_parents):
-            yield path, obj, ie
+        root_tree = None
+        for path, obj, ie in _tree_to_objects(tree, parent_trees,
+                self._cache.idmap, unusual_modes, self.mapping.BZR_DUMMY_FILE):
             if path == "":
-                tree_sha = obj.id
-        if tree_sha is None:
+                root_tree = obj
+                root_ie = ie
+                # Don't yield just yet
+            else:
+                yield path, obj, ie
+        if root_tree is None:
+            # Pointless commit - get the tree sha elsewhere
             if not rev.parent_ids:
-                tree_sha = Tree().id
+                root_tree = Tree()
             else:
-                raise AssertionError
-        commit_obj = self._revision_to_commit(rev, tree_sha)
+                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
+                root_tree = self[self[base_sha1].tree]
+            root_ie = tree.inventory.root
+        if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
+            b = self._create_fileid_map_blob(tree.inventory)
+            if b is not None:
+                root_tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
+                yield self.mapping.BZR_FILE_IDS_FILE, b, None
+        yield "", root_tree, root_ie
+        commit_obj = self._reconstruct_commit(rev, root_tree.id,
+            roundtrip=roundtrip)
         try:
             foreign_revid, mapping = mapping_registry.parse_revision_id(
                 rev.revision_id)
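
_revision_to_objects now holds the root tree back until the end so that,
on roundtrip exports, the file-id map blob can be grafted in before the
tree is yielded. This works because a dulwich Tree accepts item assignment
of a (mode, sha) pair. A minimal sketch, with an invented entry name and
contents:

    import stat
    from dulwich.objects import Blob, Tree

    b = Blob()
    b.data = "invented file-id map serialisation\n"
    t = Tree()
    t[".bzrfileids"] = (stat.S_IFREG | 0644, b.id)  # invented name
    print t.id  # the tree sha now covers the extra entry
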
@@ -318,32 +406,36 @@

     def _update_sha_map_revision(self, revid):
         rev = self.repository.get_revision(revid)
-        inv = self.parent_invs_cache.get_inventory(rev.revision_id)
+        tree = self.tree_cache.revision_tree(rev.revision_id)
         updater = self._get_updater(rev)
-        for path, obj, ie in self._revision_to_objects(rev, inv):
-            updater.add_object(obj, ie)
+        for path, obj, ie in self._revision_to_objects(rev, tree,
+            roundtrip=True):
+            updater.add_object(obj, ie, path)
         commit_obj = updater.finish()
         return commit_obj.id

-    def _get_blob(self, fileid, revision, expected_sha):
+    def _reconstruct_blobs(self, keys):
         """Return a Git Blob object from a fileid and revision stored in bzr.

         :param fileid: File id of the text
         :param revision: Revision of the text
         """
-        blob = Blob()
-        chunks = self.repository.iter_files_bytes([(fileid, revision, None)]).next()[1]
-        blob.chunked = chunks
-        if blob.id != expected_sha:
-            # Perhaps it's a symlink ?
-            inv = self.parent_invs_cache.get_inventory(revision)
-            entry = inv[fileid]
-            assert entry.kind == 'symlink'
-            blob = symlink_to_blob(entry)
-        _check_expected_sha(expected_sha, blob)
-        return blob
+        stream = self.repository.iter_files_bytes(
+            ((key[0], key[1], key) for key in keys))
+        for (fileid, revision, expected_sha), chunks in stream:
+            blob = Blob()
+            blob.chunked = chunks
+            if blob.id != expected_sha and blob.data == "":
+                # Perhaps it's a symlink ?
+                tree = self.tree_cache.revision_tree(revision)
+                entry = tree.inventory[fileid]
+                if entry.kind == 'symlink':
+                    blob = symlink_to_blob(entry)
+            _check_expected_sha(expected_sha, blob)
+            yield blob

-    def _get_tree(self, fileid, revid, inv, unusual_modes, expected_sha=None):
+    def _reconstruct_tree(self, fileid, revid, inv, unusual_modes,
+        expected_sha=None):
         """Return a Git Tree object from a file id and a revision stored in bzr.

         :param fileid: fileid in the tree.
@@ -352,19 +444,32 @@
         def get_ie_sha1(entry):
             if entry.kind == "directory":
                 try:
-                    return self._cache.idmap.lookup_tree_id(entry.file_id)
+                    return self._cache.idmap.lookup_tree_id(entry.file_id,
+                        revid)
                 except (NotImplementedError, KeyError):
-                    obj = self._get_tree(entry.file_id, revid, inv,
+                    obj = self._reconstruct_tree(entry.file_id, revid, inv,
                         unusual_modes)
                     if obj is None:
                         return None
                     else:
                         return obj.id
             elif entry.kind in ("file", "symlink"):
-                return self._cache.idmap.lookup_blob_id(entry.file_id, entry.revision)
+                try:
+                    return self._cache.idmap.lookup_blob_id(entry.file_id,
+                        entry.revision)
+                except KeyError:
+                    # no-change merge?
+                    return self._reconstruct_blobs(
+                        [(entry.file_id, entry.revision, None)]).next().id
             else:
                 raise AssertionError("unknown entry kind '%s'" % entry.kind)
-        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes)
+        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes,
+            self.mapping.BZR_DUMMY_FILE)
+        if (inv.root.file_id == fileid and
+            self.mapping.BZR_FILE_IDS_FILE is not None):
+            b = self._create_fileid_map_blob(inv)
+            # If this is the root tree, add the file ids
+            tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
         _check_expected_sha(expected_sha, tree)
         return tree
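Where _get_blob fetched one (fileid, revision) text per call and asserted
the symlink case, _reconstruct_blobs streams any number of keys through a
single iter_files_bytes call and only falls back to the symlink
representation when the SHA mismatches and the blob came back empty --
presumably the "empty metadata" this revision is about. Callers iterate
the generator; a sketch with invented ids (None means "don't verify"):

    keys = [("file-id-1", "rev-1", None), ("file-id-2", "rev-1", None)]
    for blob in store._reconstruct_blobs(keys):  # store: a BazaarObjectStore
        print blob.id
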
@@ -378,15 +483,20 @@

     def _lookup_revision_sha1(self, revid):
         """Return the SHA1 matching a Bazaar revision."""
+        from dulwich.protocol import ZERO_SHA
         if revid == NULL_REVISION:
-            return "0" * 40
+            return ZERO_SHA
         try:
             return self._cache.idmap.lookup_commit(revid)
         except KeyError:
             try:
                 return mapping_registry.parse_revision_id(revid)[0]
             except errors.InvalidRevisionId:
-                self._update_sha_map(revid)
+                self.repository.lock_read()
+                try:
+                    self._update_sha_map(revid)
+                finally:
+                    self.repository.unlock()
                 return self._cache.idmap.lookup_commit(revid)

     def get_raw(self, sha):
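
ZERO_SHA from dulwich.protocol is the same forty-zero sentinel the old
code spelled out by hand, so the return value is unchanged:

    from dulwich.protocol import ZERO_SHA
    assert ZERO_SHA == "0" * 40

The other change here is that the cache-miss path now takes a read lock
around _update_sha_map() instead of calling it on an unlocked repository.
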
@@ -400,11 +510,11 @@
     def __contains__(self, sha):
         # See if sha is in map
         try:
-            (type, type_data) = self._lookup_git_sha(sha)
+            (type, type_data) = self.lookup_git_sha(sha)
             if type == "commit":
                 return self.repository.has_revision(type_data[0])
             elif type == "blob":
-                return self.repository.texts.has_version(type_data)
+                return self.repository.texts.has_key(type_data)
             elif type == "tree":
                 return self.repository.has_revision(type_data[1])
             else:
522
        except KeyError:
413
523
            return False
414
524
 
415
 
    def _lookup_git_sha(self, sha):
416
 
        # See if sha is in map
417
 
        try:
418
 
            return self._cache.idmap.lookup_git_sha(sha)
419
 
        except KeyError:
420
 
            # if not, see if there are any unconverted revisions and add them
421
 
            # to the map, search for sha in map again
422
 
            self._update_sha_map()
423
 
            return self._cache.idmap.lookup_git_sha(sha)
 
525
    def lookup_git_shas(self, shas, update_map=True):
 
526
        from dulwich.protocol import ZERO_SHA
 
527
        ret = {}
 
528
        for sha in shas:
 
529
            if sha == ZERO_SHA:
 
530
                ret[sha] = ("commit", (NULL_REVISION, None))
 
531
                continue
 
532
            try:
 
533
                ret[sha] = self._cache.idmap.lookup_git_sha(sha)
 
534
            except KeyError:
 
535
                if update_map:
 
536
                    # if not, see if there are any unconverted revisions and add
 
537
                    # them to the map, search for sha in map again
 
538
                    self._update_sha_map()
 
539
                    update_map = False
 
540
                    try:
 
541
                        ret[sha] = self._cache.idmap.lookup_git_sha(sha)
 
542
                    except KeyError:
 
543
                        pass
 
544
        return ret
 
545
 
 
546
    def lookup_git_sha(self, sha, update_map=True):
 
547
        return self.lookup_git_shas([sha], update_map=update_map)[sha]
424
548
 
425
549
    def __getitem__(self, sha):
426
550
        if self._cache.content_cache is not None:
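
lookup_git_shas batches what _lookup_git_sha did one SHA at a time: a
cache miss triggers _update_sha_map() at most once per call (note that
update_map is cleared after the first miss), and SHAs that still cannot
be resolved are left out of the result dict rather than raising. Callers
therefore probe the returned dict; a sketch, assuming sha1 and sha2 hold
hex object ids:

    ret = store.lookup_git_shas([sha1, sha2])  # store: a BazaarObjectStore
    for sha in (sha1, sha2):
        try:
            kind, type_data = ret[sha]
        except KeyError:
            print "unknown object %s" % sha
        else:
            print sha, kind
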
@@ -428,7 +552,7 @@
                 return self._cache.content_cache[sha]
             except KeyError:
                 pass
-        (type, type_data) = self._lookup_git_sha(sha)
+        (type, type_data) = self.lookup_git_sha(sha)
         # convert object to git object
         if type == "commit":
             (revid, tree_sha) = type_data
@@ -438,39 +562,46 @@
                 trace.mutter('entry for %s %s in shamap: %r, but not found in '
                              'repository', type, sha, type_data)
                 raise KeyError(sha)
-            commit = self._revision_to_commit(rev, tree_sha)
+            commit = self._reconstruct_commit(rev, tree_sha, roundtrip=True)
             _check_expected_sha(sha, commit)
             return commit
         elif type == "blob":
             (fileid, revision) = type_data
-            return self._get_blob(fileid, revision, expected_sha=sha)
+            return self._reconstruct_blobs([(fileid, revision, sha)]).next()
         elif type == "tree":
             (fileid, revid) = type_data
             try:
-                inv = self.parent_invs_cache.get_inventory(revid)
+                tree = self.tree_cache.revision_tree(revid)
                 rev = self.repository.get_revision(revid)
             except errors.NoSuchRevision:
                 trace.mutter('entry for %s %s in shamap: %r, but not found in repository', type, sha, type_data)
                 raise KeyError(sha)
             unusual_modes = extract_unusual_modes(rev)
             try:
-                return self._get_tree(fileid, revid, inv, unusual_modes,
-                    expected_sha=sha)
+                return self._reconstruct_tree(fileid, revid, tree.inventory,
+                    unusual_modes, expected_sha=sha)
             except errors.NoSuchRevision:
                 raise KeyError(sha)
         else:
             raise AssertionError("Unknown object type '%s'" % type)

-    def generate_pack_contents(self, have, want):
+    def generate_lossy_pack_contents(self, have, want, progress=None,
+            get_tagged=None):
+        return self.generate_pack_contents(have, want, progress, get_tagged,
+            lossy=True)
+
+    def generate_pack_contents(self, have, want, progress=None,
+            get_tagged=None, lossy=False):
         """Iterate over the contents of a pack file.

         :param have: List of SHA1s of objects that should not be sent
         :param want: List of SHA1s of objects that should be sent
         """
         processed = set()
+        ret = self.lookup_git_shas(have + want)
         for commit_sha in have:
             try:
-                (type, (revid, tree_sha)) = self._lookup_git_sha(commit_sha)
+                (type, (revid, tree_sha)) = ret[commit_sha]
             except KeyError:
                 pass
             else:
@@ -480,20 +611,16 @@
         for commit_sha in want:
             if commit_sha in have:
                 continue
-            (type, (revid, tree_sha)) = self._lookup_git_sha(commit_sha)
-            assert type == "commit"
-            pending.add(revid)
-        todo = set()
-        while pending:
-            processed.update(pending)
-            next_map = self.repository.get_parent_map(pending)
-            next_pending = set()
-            for item in next_map.iteritems():
-                todo.add(item[0])
-                next_pending.update(p for p in item[1] if p not in processed)
-            pending = next_pending
-        if NULL_REVISION in todo:
-            todo.remove(NULL_REVISION)
+            try:
+                (type, (revid, tree_sha)) = ret[commit_sha]
+            except KeyError:
+                pass
+            else:
+                assert type == "commit"
+                pending.add(revid)
+
+        todo = _find_missing_bzr_revids(self.repository.get_parent_map,
+                                        pending, processed)
         trace.mutter('sending revisions %r', todo)
         ret = []
         pb = ui.ui_factory.nested_progress_bar()
@@ -501,9 +628,46 @@
             for i, revid in enumerate(todo):
                 pb.update("generating git objects", i, len(todo))
                 rev = self.repository.get_revision(revid)
-                inv = self.parent_invs_cache.get_inventory(revid)
-                for path, obj, ie in self._revision_to_objects(rev, inv):
+                tree = self.tree_cache.revision_tree(revid)
+                for path, obj, ie in self._revision_to_objects(rev, tree,
+                    roundtrip=not lossy):
                     ret.append((obj, path))
         finally:
             pb.finished()
         return ret
+
+    def add_thin_pack(self):
+        import tempfile
+        import os
+        fd, path = tempfile.mkstemp(suffix=".pack")
+        f = os.fdopen(fd, 'wb')
+        def commit():
+            from dulwich.pack import PackData, Pack
+            from bzrlib.plugins.git.fetch import import_git_objects
+            os.fsync(fd)
+            f.close()
+            if os.path.getsize(path) == 0:
+                return
+            pd = PackData(path)
+            pd.create_index_v2(path[:-5]+".idx", self.object_store.get_raw)
+
+            p = Pack(path[:-5])
+            self.repository.lock_write()
+            try:
+                self.repository.start_write_group()
+                try:
+                    import_git_objects(self.repository, self.mapping,
+                        p.iterobjects(get_raw=self.get_raw),
+                        self.object_store)
+                except:
+                    self.repository.abort_write_group()
+                    raise
+                else:
+                    self.repository.commit_write_group()
+            finally:
+                self.repository.unlock()
+        return f, commit
+
+    # The pack isn't kept around anyway, so no point
+    # in treating full packs different from thin packs
+    add_pack = add_thin_pack
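
The new add_thin_pack follows dulwich's object-store convention of
returning a writable file object plus a commit callback: the caller
streams the raw pack into the file, then calls commit(), which fsyncs,
builds a pack index, and imports the objects into the Bazaar repository
inside a write group. A usage sketch, assuming pack_data holds the raw
pack bytes received from the remote side:

    f, commit = store.add_thin_pack()  # store: a BazaarObjectStore
    f.write(pack_data)
    commit()  # index the pack and import its objects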