/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to object_store.py

Merge changes to avoid inventories.

@@ -21 +21 @@
     Commit,
     Tree,
     sha_to_hex,
+    ZERO_SHA,
     )
 from dulwich.object_store import (
     BaseObjectStore,
@@ -33 +34 @@
     ui,
     urlutils,
     )
+from bzrlib.lock import LogicalLockResult
 from bzrlib.revision import (
     NULL_REVISION,
     )
@@ -40 +42 @@
     StrictTestament3,
     )
 
+from bzrlib.plugins.git.cache import (
+    from_repository as cache_from_repository,
+    )
 from bzrlib.plugins.git.mapping import (
     default_mapping,
     directory_to_tree,
@@ -47 +52 @@
     mapping_registry,
     symlink_to_blob,
     )
-from bzrlib.plugins.git.cache import (
-    from_repository as cache_from_repository,
+from bzrlib.plugins.git.unpeel_map import (
+    UnpeelMap,
     )
 
 import posixpath
@@ -58 +63 @@
 def get_object_store(repo, mapping=None):
     git = getattr(repo, "_git", None)
     if git is not None:
+        git.object_store.unlock = lambda: None
+        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
+        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
         return git.object_store
     return BazaarObjectStore(repo, mapping)
 
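The three lambdas above graft Bazaar's lock protocol onto dulwich's native object store so callers can treat both store types uniformly; LogicalLockResult simply wraps the unlock callable. A minimal sketch of the idea, using a stand-in for bzrlib.lock.LogicalLockResult (the stub classes here are illustrative, not part of the plugin):

    # LockResult stands in for bzrlib.lock.LogicalLockResult: an object whose
    # unlock() releases the lock.
    class LockResult(object):
        def __init__(self, unlock):
            self.unlock = unlock

    class PlainStore(object):
        """Any store without native lock support."""

    store = PlainStore()
    store.unlock = lambda: None
    store.lock_read = lambda: LockResult(lambda: None)
    store.lock_write = lambda: LockResult(lambda: None)

    # Callers can now follow the usual lock/unlock discipline:
    lock = store.lock_read()
    try:
        pass  # read from the store
    finally:
        lock.unlock()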
@@ -69 +77 @@
 
     def __init__(self, repository):
         def approx_tree_size(tree):
-            # Very rough estimate, 1k per inventory entry
-            return len(tree.inventory) * 1024
+            # Very rough estimate, 250 per inventory entry
+            try:
+                inv = tree.root_inventory
+            except AttributeError:
+                inv = tree.inventory
+            return len(inv) * 250
         self.repository = repository
         self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
             after_cleanup_size=None, compute_size=approx_tree_size)
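This try/except is the access pattern the whole branch moves towards: newer bzr trees expose root_inventory while older ones only have inventory, so code probes for the new attribute first. A small self-contained sketch of that fallback (the fake tree classes are purely illustrative):

    def approx_tree_size(tree):
        # Prefer the newer root_inventory attribute, fall back to the old one.
        try:
            inv = tree.root_inventory
        except AttributeError:
            inv = tree.inventory
        # Rough estimate: 250 bytes per inventory entry.
        return len(inv) * 250

    class OldStyleTree(object):
        inventory = ["a", "b"]            # two entries

    class NewStyleTree(object):
        root_inventory = ["a", "b", "c"]  # three entries

    assert approx_tree_size(OldStyleTree()) == 500
    assert approx_tree_size(NewStyleTree()) == 750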
@@ -81 +93 @@
         except KeyError:
             tree = self.repository.revision_tree(revid)
             self.add(tree)
-        assert tree.get_revision_id() == tree.inventory.revision_id
         return tree
 
     def iter_revision_trees(self, revids):
@@ -94 +105 @@
                 todo.append(revid)
             else:
                 assert tree.get_revision_id() == revid
-                assert tree.inventory.revision_id == revid
                 trees[revid] = tree
         for tree in self.repository.revision_trees(todo):
             trees[tree.get_revision_id()] = tree
@@ -105 +115 @@
         return list(self.iter_revision_trees(revids))
 
     def add(self, tree):
-        self._cache.add(tree.get_revision_id(), tree)
-
-
-def _find_missing_bzr_revids(get_parent_map, want, have):
+        self._cache[tree.get_revision_id()] = tree
+
+
+def _find_missing_bzr_revids(graph, want, have):
     """Find the revisions that have to be pushed.
 
     :param get_parent_map: Function that returns the parents for a sequence
@@ -117 +127 @@
     :param have: Revisions the target already has
     :return: Set of revisions to fetch
     """
-    pending = want - have
-    processed = set()
+    handled = set(have)
     todo = set()
-    while pending:
-        processed.update(pending)
-        next_map = get_parent_map(pending)
-        next_pending = set()
-        for item in next_map.iteritems():
-            if item[0] in have:
-                continue
-            todo.add(item[0])
-            next_pending.update(p for p in item[1] if p not in processed)
-        pending = next_pending
+    for rev in want:
+        extra_todo = graph.find_unique_ancestors(rev, handled)
+        todo.update(extra_todo)
+        handled.update(extra_todo)
     if NULL_REVISION in todo:
         todo.remove(NULL_REVISION)
     return todo
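The hand-rolled breadth-first walk over get_parent_map is replaced here by the repository graph's find_unique_ancestors, which yields the ancestors of a wanted revision that are not already accounted for by the handled set. A toy sketch of the new traversal, with a stub graph standing in for bzr's graph object (only find_unique_ancestors is modelled, and in a simplified way):

    NULL_REVISION = "null:"

    class StubGraph(object):
        def __init__(self, ancestors):
            # ancestors: revid -> set of all ancestors, including the revid itself
            self._ancestors = ancestors

        def find_unique_ancestors(self, rev, handled):
            # Simplified: treat 'handled' as a plain exclusion set.
            return set(a for a in self._ancestors[rev] if a not in handled)

    def find_missing(graph, want, have):
        handled = set(have)
        todo = set()
        for rev in want:
            extra_todo = graph.find_unique_ancestors(rev, handled)
            todo.update(extra_todo)
            handled.update(extra_todo)
        todo.discard(NULL_REVISION)
        return todo

    graph = StubGraph({"C": {"A", "B", "C"}})
    assert find_missing(graph, want={"C"}, have={"A"}) == {"B", "C"}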
@@ -176 +179 @@
     except IndexError:
         base_tree = tree._repository.revision_tree(NULL_REVISION)
         other_parent_trees = []
-    def find_unchanged_parent_ie(ie, parent_trees):
-        assert ie.kind in ("symlink", "file")
+    def find_unchanged_parent_ie(file_id, kind, other, parent_trees):
         for ptree in parent_trees:
             try:
-                pie = ptree.inventory[ie.file_id]
+                pkind = ptree.kind(file_id)
             except errors.NoSuchId:
                 pass
             else:
-                if (pie.text_sha1 == ie.text_sha1 and
-                    pie.kind == ie.kind and
-                    pie.symlink_target == ie.symlink_target):
-                    return pie
+                if kind == "file":
+                    if (pkind == "file" and
+                        ptree.get_file_sha1(file_id) == other):
+                        return pie
+                if kind == "symlink":
+                    if (pkind == "symlink" and
+                        ptree.get_symlink_target(file_id) == other):
+                        return pie
         raise KeyError
 
     # Find all the changed blobs
     for (file_id, path, changed_content, versioned, parent, name, kind,
          executable) in tree.iter_changes(base_tree):
         if kind[1] == "file":
-            ie = tree.inventory[file_id]
             if changed_content:
                 try:
-                    pie = find_unchanged_parent_ie(ie, other_parent_trees)
+                    pie = find_unchanged_parent_ie(file_id, kind[1], tree.get_file_sha1(file_id), other_parent_trees)
                 except KeyError:
                     pass
                 else:
                     try:
-                        shamap[ie.file_id] = idmap.lookup_blob_id(
+                        shamap[file_id] = idmap.lookup_blob_id(
                             pie.file_id, pie.revision)
                     except KeyError:
                         # no-change merge ?
                         blob = Blob()
-                        blob.data = tree.get_file_text(ie.file_id)
-                        shamap[ie.file_id] = blob.id
+                        blob.data = tree.get_file_text(file_id)
+                        shamap[file_id] = blob.id
             if not file_id in shamap:
-                new_blobs.append((path[1], ie))
+                new_blobs.append((path[1], file_id))
             new_trees[posixpath.dirname(path[1])] = parent[1]
         elif kind[1] == "symlink":
-            ie = tree.inventory[file_id]
             if changed_content:
-                blob = symlink_to_blob(ie)
+                target = tree.get_symlink_target(file_id)
+                blob = symlink_to_blob(target)
                 shamap[file_id] = blob.id
                 try:
-                    find_unchanged_parent_ie(ie, other_parent_trees)
+                    find_unchanged_parent_ie(file_id, kind[1], target, other_parent_trees)
                 except KeyError:
-                    yield path[1], blob, ie
+                    yield path[1], blob, (file_id, tree.get_file_revision(file_id, path[1]))
             new_trees[posixpath.dirname(path[1])] = parent[1]
         elif kind[1] not in (None, "directory"):
             raise AssertionError(kind[1])
         if (path[0] not in (None, "") and
-            parent[0] in tree.inventory and
-            tree.inventory[parent[0]].kind == "directory"):
+            tree.has_id(parent[0]) and
+            tree.kind(parent[0]) == "directory"):
             # Removal
             new_trees[posixpath.dirname(path[0])] = parent[0]
-    
+
     # Fetch contents of the blobs that were changed
-    for (path, ie), chunks in tree.iter_files_bytes(
-        [(ie.file_id, (path, ie)) for (path, ie) in new_blobs]):
+    for (path, file_id), chunks in tree.iter_files_bytes(
+        [(file_id, (path, file_id)) for (path, file_id) in new_blobs]):
         obj = Blob()
         obj.chunked = chunks
-        yield path, obj, ie
-        shamap[ie.file_id] = obj.id
+        yield path, obj, (file_id, tree.get_file_revision(file_id, path))
+        shamap[file_id] = obj.id
 
     for path in unusual_modes:
         parent_path = posixpath.dirname(path)
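With the inventory entry objects gone, _tree_to_objects identifies blobs purely through tree API calls and yields (path, git_object, (file_id, file_revision)) tuples rather than (path, git_object, inventory_entry). A hypothetical consumer only needs to unpack the key tuple (consume_objects and record_key are illustrative names, not plugin API):

    def consume_objects(objects_iter, record_key):
        # objects_iter yields (path, git_object, bzr_key_data); bzr_key_data is
        # (file_id, file_revision) for blobs, (file_id,) for trees, or None.
        for path, obj, bzr_key_data in objects_iter:
            if bzr_key_data is not None:
                record_key(obj.id, bzr_key_data)

    collected = {}
    blob = type("Obj", (), {"id": "sha1"})()
    consume_objects(
        iter([("foo.txt", blob, ("file-id", "rev-id"))]),
        lambda sha, key: collected.update({sha: key}))
    assert collected == {"sha1": ("file-id", "rev-id")}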
@@ -247 +252 @@
         items = new_trees.items()
         new_trees = {}
         for path, file_id in items:
-            parent_id = tree.inventory[file_id].parent_id
-            if parent_id is not None:
+            if path != "":
                 parent_path = urlutils.dirname(path)
+                parent_id = tree.path2id(parent_path)
                 new_trees[parent_path] = parent_id
             trees[path] = file_id
 
@@ -269 +274 @@
             elif ie.kind == "directory":
                 # Not all cache backends store the tree information,
                 # calculate again from scratch
-                ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
-                    dummy_file_name)
+                ret = directory_to_tree(ie.children, ie_to_hexsha,
+                    unusual_modes, dummy_file_name, ie.parent_id is not None)
                 if ret is None:
                     return ret
                 return ret.id
             else:
                 raise AssertionError
 
+    try:
+        inv = tree.root_inventory
+    except AttributeError:
+        inv = tree.inventory
+
     for path in sorted(trees.keys(), reverse=True):
-        ie = tree.inventory[trees[path]]
-        assert ie.kind == "directory"
-        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
-            dummy_file_name)
+        file_id = trees[path]
+        assert tree.kind(file_id) == 'directory'
+        ie = inv[file_id]
+        obj = directory_to_tree(ie.children, ie_to_hexsha, unusual_modes,
+            dummy_file_name, path == "")
         if obj is not None:
-            yield path, obj, ie
-            shamap[ie.file_id] = obj.id
+            yield path, obj, (file_id, )
+            shamap[file_id] = obj.id
+
+
+class PackTupleIterable(object):
+
+    def __init__(self, store):
+        self.store = store
+        self.store.lock_read()
+        self.objects = {}
+
+    def __del__(self):
+        self.store.unlock()
+
+    def add(self, sha, path):
+        self.objects[sha] = path
+
+    def __len__(self):
+        return len(self.objects)
+
+    def __iter__(self):
+        return ((self.store[object_id], path) for (object_id, path) in
+                self.objects.iteritems())
 
 
 class BazaarObjectStore(BaseObjectStore):
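The new PackTupleIterable records (sha, path) pairs up front but defers fetching the actual objects from the store until iteration, so pack generation no longer has to hold every reconstructed object in a list. A runnable mini version of the same shape (renamed, written to also run on Python 3, and with a purely illustrative stub store):

    class StubStore(dict):
        def lock_read(self):
            pass
        def unlock(self):
            pass

    class PackTuples(object):
        # Same shape as PackTupleIterable above.
        def __init__(self, store):
            self.store = store
            self.store.lock_read()
            self.objects = {}
        def __del__(self):
            self.store.unlock()
        def add(self, sha, path):
            self.objects[sha] = path
        def __len__(self):
            return len(self.objects)
        def __iter__(self):
            return ((self.store[sha], path)
                    for (sha, path) in self.objects.items())

    store = StubStore({"abc123": "blob-object"})
    pack = PackTuples(store)
    pack.add("abc123", "some/path")
    assert list(pack) == [("blob-object", "some/path")]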
@@ -292 +324 @@
 
     def __init__(self, repository, mapping=None):
         self.repository = repository
+        self._map_updated = False
+        self._locked = None
         if mapping is None:
             self.mapping = default_mapping
         else:
             self.mapping = mapping
         self._cache = cache_from_repository(repository)
-        self._content_cache_types = ("tree")
+        self._content_cache_types = ("tree",)
         self.start_write_group = self._cache.idmap.start_write_group
         self.abort_write_group = self._cache.idmap.abort_write_group
         self.commit_write_group = self._cache.idmap.commit_write_group
         self.tree_cache = LRUTreeCache(self.repository)
+        self.unpeel_map = UnpeelMap.from_repository(self.repository)
+
+    def _missing_revisions(self, revisions):
+        return self._cache.idmap.missing_revisions(revisions)
 
     def _update_sha_map(self, stop_revision=None):
+        if not self.is_locked():
+            raise AssertionError()
+        if self._map_updated:
+            return
+        if (stop_revision is not None and
+            not self._missing_revisions([stop_revision])):
+            return
         graph = self.repository.get_graph()
         if stop_revision is None:
-            heads = graph.heads(self.repository.all_revision_ids())
+            all_revids = self.repository.all_revision_ids()
+            missing_revids = self._missing_revisions(all_revids)
         else:
             heads = set([stop_revision])
-        missing_revids = self._cache.idmap.missing_revisions(heads)
-        while heads:
-            parents = graph.get_parent_map(heads)
-            todo = set()
-            for p in parents.values():
-                todo.update([x for x in p if x not in missing_revids])
-            heads = self._cache.idmap.missing_revisions(todo)
-            missing_revids.update(heads)
+            missing_revids = self._missing_revisions(heads)
+            while heads:
+                parents = graph.get_parent_map(heads)
+                todo = set()
+                for p in parents.values():
+                    todo.update([x for x in p if x not in missing_revids])
+                heads = self._missing_revisions(todo)
+                missing_revids.update(heads)
         if NULL_REVISION in missing_revids:
             missing_revids.remove(NULL_REVISION)
         missing_revids = self.repository.has_revisions(missing_revids)
         if not missing_revids:
+            if stop_revision is None:
+                self._map_updated = True
             return
         self.start_write_group()
         try:
@@ -332 +380 @@
                     self._update_sha_map_revision(revid)
             finally:
                 pb.finished()
+            if stop_revision is None:
+                self._map_updated = True
         except:
             self.abort_write_group()
             raise
@@ -342 +392 @@
         self._update_sha_map()
         return iter(self._cache.idmap.sha1s())
 
-    def _reconstruct_commit(self, rev, tree_sha, roundtrip, verifiers):
+    def _reconstruct_commit(self, rev, tree_sha, lossy, verifiers):
         """Reconstruct a Commit object.
 
         :param rev: Revision object
         :param tree_sha: SHA1 of the root tree object
-        :param roundtrip: Whether or not to roundtrip bzr metadata
+        :param lossy: Whether or not to roundtrip bzr metadata
         :param verifiers: Verifiers for the commits
         :return: Commit object
         """
@@ -357 +407 @@
             except errors.NoSuchRevision:
                 return None
         return self.mapping.export_commit(rev, tree_sha, parent_lookup,
-            roundtrip, verifiers)
+            lossy, verifiers)
 
-    def _create_fileid_map_blob(self, inv):
+    def _create_fileid_map_blob(self, tree):
         # FIXME: This can probably be a lot more efficient,
         # not all files necessarily have to be processed.
         file_ids = {}
-        for (path, ie) in inv.iter_entries():
+        for (path, ie) in tree.inventory.iter_entries():
             if self.mapping.generate_file_id(path) != ie.file_id:
                 file_ids[path] = ie.file_id
         return self.mapping.export_fileid_map(file_ids)
 
-    def _revision_to_objects(self, rev, tree, roundtrip):
+    def _revision_to_objects(self, rev, tree, lossy):
         """Convert a revision to a set of git objects.
 
         :param rev: Bazaar revision object
         :param tree: Bazaar revision tree
-        :param roundtrip: Whether to roundtrip all Bazaar revision data
+        :param lossy: Whether to not roundtrip all Bazaar revision data
         """
         unusual_modes = extract_unusual_modes(rev)
         present_parents = self.repository.has_revisions(rev.parent_ids)
         parent_trees = self.tree_cache.revision_trees(
             [p for p in rev.parent_ids if p in present_parents])
         root_tree = None
-        for path, obj, ie in _tree_to_objects(tree, parent_trees,
+        for path, obj, bzr_key_data in _tree_to_objects(tree, parent_trees,
                 self._cache.idmap, unusual_modes, self.mapping.BZR_DUMMY_FILE):
             if path == "":
                 root_tree = obj
-                root_ie = ie
+                root_key_data = bzr_key_data
                 # Don't yield just yet
             else:
-                yield path, obj, ie
+                yield path, obj, bzr_key_data
         if root_tree is None:
             # Pointless commit - get the tree sha elsewhere
             if not rev.parent_ids:
@@ -395 +445 @@
             else:
                 base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                 root_tree = self[self[base_sha1].tree]
-            root_ie = tree.inventory.root
-        if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
-            b = self._create_fileid_map_blob(tree.inventory)
+            root_key_data = (tree.get_root_id(), )
+        if not lossy and self.mapping.BZR_FILE_IDS_FILE is not None:
+            b = self._create_fileid_map_blob(tree)
             if b is not None:
-                root_tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
+                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
+                    (stat.S_IFREG | 0644), b.id)
                 yield self.mapping.BZR_FILE_IDS_FILE, b, None
-        yield "", root_tree, root_ie
-        if roundtrip:
-            testament3 = StrictTestament3(rev, tree.inventory)
+        yield "", root_tree, root_key_data
+        if not lossy:
+            testament3 = StrictTestament3(rev, tree)
             verifiers = { "testament3-sha1": testament3.as_sha1() }
         else:
             verifiers = {}
         commit_obj = self._reconstruct_commit(rev, root_tree.id,
-            roundtrip=roundtrip, verifiers=verifiers)
+            lossy=lossy, verifiers=verifiers)
         try:
             foreign_revid, mapping = mapping_registry.parse_revision_id(
                 rev.revision_id)
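Note that the renamed flag inverts its sense: callers that used to pass roundtrip=True to keep full Bazaar metadata now pass lossy=False. A hypothetical compatibility shim (not part of the plugin) makes the inversion explicit:

    def revision_to_objects_compat(store, rev, tree, roundtrip=True):
        # 'lossy' means "do not round-trip Bazaar metadata", i.e. the
        # negation of the old 'roundtrip' argument.
        return store._revision_to_objects(rev, tree, lossy=not roundtrip)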
@@ -425 +476 @@
         rev = self.repository.get_revision(revid)
         tree = self.tree_cache.revision_tree(rev.revision_id)
         updater = self._get_updater(rev)
-        for path, obj, ie in self._revision_to_objects(rev, tree,
-            roundtrip=True):
+        # FIXME JRV 2011-12-15: Shouldn't we try both values for lossy ?
+        for path, obj, ie in self._revision_to_objects(rev, tree, lossy=(not self.mapping.roundtripping)):
             if isinstance(obj, Commit):
-                testament3 = StrictTestament3(rev, tree.inventory)
+                testament3 = StrictTestament3(rev, tree)
                 ie = { "testament3-sha1": testament3.as_sha1() }
             updater.add_object(obj, ie, path)
         commit_obj = updater.finish()
@@ -454 +505 @@
             _check_expected_sha(expected_sha, blob)
             yield blob
 
-    def _reconstruct_tree(self, fileid, revid, inv, unusual_modes,
+    def _reconstruct_tree(self, fileid, revid, bzr_tree, unusual_modes,
         expected_sha=None):
         """Return a Git Tree object from a file id and a revision stored in bzr.
 
@@ -467 +518 @@
                     return self._cache.idmap.lookup_tree_id(entry.file_id,
                         revid)
                 except (NotImplementedError, KeyError):
-                    obj = self._reconstruct_tree(entry.file_id, revid, inv,
+                    obj = self._reconstruct_tree(entry.file_id, revid, bzr_tree,
                         unusual_modes)
                     if obj is None:
                         return None
@@ -481 +532 @@
                     # no-change merge?
                     return self._reconstruct_blobs(
                         [(entry.file_id, entry.revision, None)]).next().id
+            elif entry.kind == 'tree-reference':
+                # FIXME: Make sure the file id is the root id
+                return self._lookup_revision_sha1(entry.reference_revision)
             else:
                 raise AssertionError("unknown entry kind '%s'" % entry.kind)
-        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes,
-            self.mapping.BZR_DUMMY_FILE)
-        if (inv.root.file_id == fileid and
+        try:
+            inv = bzr_tree.root_inventory
+        except AttributeError:
+            inv = bzr_tree.inventory
+        tree = directory_to_tree(inv[fileid].children,
+                get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
+                bzr_tree.get_root_id() == fileid)
+        if (bzr_tree.get_root_id() == fileid and
             self.mapping.BZR_FILE_IDS_FILE is not None):
-            b = self._create_fileid_map_blob(inv)
+            if tree is None:
+                tree = Tree()
+            b = self._create_fileid_map_blob(bzr_tree)
             # If this is the root tree, add the file ids
-            tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
-        _check_expected_sha(expected_sha, tree)
+            tree[self.mapping.BZR_FILE_IDS_FILE] = (
+                (stat.S_IFREG | 0644), b.id)
+        if tree is not None:
+            _check_expected_sha(expected_sha, tree)
         return tree
 
     def get_parents(self, sha):
@@ -503 +566 @@
 
     def _lookup_revision_sha1(self, revid):
         """Return the SHA1 matching a Bazaar revision."""
-        from dulwich.protocol import ZERO_SHA
         if revid == NULL_REVISION:
             return ZERO_SHA
         try:
@@ -512 +574 @@
             try:
                 return mapping_registry.parse_revision_id(revid)[0]
             except errors.InvalidRevisionId:
-                self.repository.lock_read()
-                try:
-                    self._update_sha_map(revid)
-                finally:
-                    self.repository.unlock()
+                self._update_sha_map(revid)
                 return self._cache.idmap.lookup_commit(revid)
 
     def get_raw(self, sha):
@@ -530 +588 @@
     def __contains__(self, sha):
         # See if sha is in map
         try:
-            (type, type_data) = self.lookup_git_sha(sha)
-            if type == "commit":
-                return self.repository.has_revision(type_data[0])
-            elif type == "blob":
-                return self.repository.texts.has_key(type_data)
-            elif type == "tree":
-                return self.repository.has_revision(type_data[1])
+            for (type, type_data) in self.lookup_git_sha(sha):
+                if type == "commit":
+                    if self.repository.has_revision(type_data[0]):
+                        return True
+                elif type == "blob":
+                    if self.repository.texts.has_key(type_data):
+                        return True
+                elif type == "tree":
+                    if self.repository.has_revision(type_data[1]):
+                        return True
+                else:
+                    raise AssertionError("Unknown object type '%s'" % type)
             else:
-                raise AssertionError("Unknown object type '%s'" % type)
+                return False
         except KeyError:
             return False
 
-    def lookup_git_shas(self, shas, update_map=True):
-        from dulwich.protocol import ZERO_SHA
+    def lock_read(self):
+        self._locked = 'r'
+        self._map_updated = False
+        self.repository.lock_read()
+        return LogicalLockResult(self.unlock)
+
+    def lock_write(self):
+        self._locked = 'r'
+        self._map_updated = False
+        self.repository.lock_write()
+        return LogicalLockResult(self.unlock)
+
+    def is_locked(self):
+        return (self._locked is not None)
+
+    def unlock(self):
+        self._locked = None
+        self._map_updated = False
+        self.repository.unlock()
+
+    def lookup_git_shas(self, shas):
         ret = {}
         for sha in shas:
             if sha == ZERO_SHA:
-                ret[sha] = ("commit", (NULL_REVISION, None, {}))
+                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                 continue
             try:
-                ret[sha] = self._cache.idmap.lookup_git_sha(sha)
+                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
             except KeyError:
-                if update_map:
-                    # if not, see if there are any unconverted revisions and add
-                    # them to the map, search for sha in map again
-                    self._update_sha_map()
-                    update_map = False
-                    try:
-                        ret[sha] = self._cache.idmap.lookup_git_sha(sha)
-                    except KeyError:
-                        pass
+                # if not, see if there are any unconverted revisions and
+                # add them to the map, search for sha in map again
+                self._update_sha_map()
+                try:
+                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
+                except KeyError:
+                    pass
         return ret
 
-    def lookup_git_sha(self, sha, update_map=True):
-        return self.lookup_git_shas([sha], update_map=update_map)[sha]
+    def lookup_git_sha(self, sha):
+        return self.lookup_git_shas([sha])[sha]
 
     def __getitem__(self, sha):
         if self._cache.content_cache is not None:
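Two related changes land in this hunk: BazaarObjectStore now carries its own lock state (lock_read and lock_write set _locked and reset _map_updated, so the sha map is rescanned at most once per lock), and lookup_git_sha(s) now returns a list of (type, type_data) entries per SHA rather than a single tuple, since one git SHA can correspond to more than one Bazaar object. A small sketch of iterating the new multi-valued result (describe_sha and the lambda lookup are stand-ins, not plugin API):

    def describe_sha(lookup, sha):
        # lookup(sha) -> list of (type, type_data) pairs, possibly empty
        descriptions = []
        for kind, type_data in lookup(sha):
            if kind == "commit":
                descriptions.append("commit of revision %s" % type_data[0])
            elif kind in ("blob", "tree"):
                descriptions.append("%s for file id %s" % (kind, type_data[0]))
        return descriptions

    lookup = lambda sha: [("commit", ("rev-1", "tree-sha", {}))]
    assert describe_sha(lookup, "deadbeef") == ["commit of revision rev-1"]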
@@ -572 +652 @@
                 return self._cache.content_cache[sha]
             except KeyError:
                 pass
-        (type, type_data) = self.lookup_git_sha(sha)
-        # convert object to git object
-        if type == "commit":
-            (revid, tree_sha, verifiers) = type_data
-            try:
-                rev = self.repository.get_revision(revid)
-            except errors.NoSuchRevision:
-                trace.mutter('entry for %s %s in shamap: %r, but not found in '
-                             'repository', type, sha, type_data)
-                raise KeyError(sha)
-            commit = self._reconstruct_commit(rev, tree_sha, roundtrip=True,
-                verifiers=verifiers)
-            _check_expected_sha(sha, commit)
-            return commit
-        elif type == "blob":
-            (fileid, revision) = type_data
-            return self._reconstruct_blobs([(fileid, revision, sha)]).next()
-        elif type == "tree":
-            (fileid, revid) = type_data
-            try:
-                tree = self.tree_cache.revision_tree(revid)
-                rev = self.repository.get_revision(revid)
-            except errors.NoSuchRevision:
-                trace.mutter('entry for %s %s in shamap: %r, but not found in repository', type, sha, type_data)
-                raise KeyError(sha)
-            unusual_modes = extract_unusual_modes(rev)
-            try:
-                return self._reconstruct_tree(fileid, revid, tree.inventory,
-                    unusual_modes, expected_sha=sha)
-            except errors.NoSuchRevision:
-                raise KeyError(sha)
+        for (kind, type_data) in self.lookup_git_sha(sha):
+            # convert object to git object
+            if kind == "commit":
+                (revid, tree_sha, verifiers) = type_data
+                try:
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
+                    if revid == NULL_REVISION:
+                        raise AssertionError(
+                            "should not try to look up NULL_REVISION")
+                    trace.mutter('entry for %s %s in shamap: %r, but not '
+                                 'found in repository', kind, sha, type_data)
+                    raise KeyError(sha)
+                # FIXME: the type data should say whether conversion was lossless
+                commit = self._reconstruct_commit(rev, tree_sha,
+                    lossy=(not self.mapping.roundtripping), verifiers=verifiers)
+                _check_expected_sha(sha, commit)
+                return commit
+            elif kind == "blob":
+                (fileid, revision) = type_data
+                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
+                return blobs.next()
+            elif kind == "tree":
+                (fileid, revid) = type_data
+                try:
+                    tree = self.tree_cache.revision_tree(revid)
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
+                    trace.mutter('entry for %s %s in shamap: %r, but not found in '
+                        'repository', kind, sha, type_data)
+                    raise KeyError(sha)
+                unusual_modes = extract_unusual_modes(rev)
+                try:
+                    return self._reconstruct_tree(fileid, revid,
+                        tree, unusual_modes, expected_sha=sha)
+                except errors.NoSuchRevision:
+                    raise KeyError(sha)
+            else:
+                raise AssertionError("Unknown object type '%s'" % kind)
         else:
-            raise AssertionError("Unknown object type '%s'" % type)
+            raise KeyError(sha)
 
     def generate_lossy_pack_contents(self, have, want, progress=None,
             get_tagged=None):
@@ -621 +709 @@
         processed = set()
         ret = self.lookup_git_shas(have + want)
         for commit_sha in have:
+            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
             try:
-                (type, (revid, tree_sha)) = ret[commit_sha]
+                for (type, type_data) in ret[commit_sha]:
+                    assert type == "commit"
+                    processed.add(type_data[0])
             except KeyError:
-                pass
-            else:
-                assert type == "commit"
-                processed.add(revid)
+                trace.mutter("unable to find remote ref %s", commit_sha)
         pending = set()
         for commit_sha in want:
             if commit_sha in have:
                 continue
             try:
-                (type, (revid, tree_sha)) = ret[commit_sha]
+                for (type, type_data) in ret[commit_sha]:
+                    assert type == "commit"
+                    pending.add(type_data[0])
             except KeyError:
                 pass
-            else:
-                assert type == "commit"
-                pending.add(revid)
 
-        todo = _find_missing_bzr_revids(self.repository.get_parent_map,
-                                        pending, processed)
-        trace.mutter('sending revisions %r', todo)
-        ret = []
+        graph = self.repository.get_graph()
+        todo = _find_missing_bzr_revids(graph, pending, processed)
+        ret = PackTupleIterable(self)
         pb = ui.ui_factory.nested_progress_bar()
         try:
             for i, revid in enumerate(todo):
                 pb.update("generating git objects", i, len(todo))
-                rev = self.repository.get_revision(revid)
+                try:
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
+                    continue
                 tree = self.tree_cache.revision_tree(revid)
-                for path, obj, ie in self._revision_to_objects(rev, tree,
-                    roundtrip=not lossy):
-                    ret.append((obj, path))
+                for path, obj, ie in self._revision_to_objects(rev, tree, lossy=lossy):
+                    ret.add(obj.id, path)
+            return ret
         finally:
             pb.finished()
-        return ret
 
     def add_thin_pack(self):
         import tempfile
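Pack generation now peels tag SHAs through the unpeel map before matching them against commits, tolerates refs and revisions it cannot resolve instead of failing, and returns a PackTupleIterable so objects are produced lazily. A sketch of the peel-then-lookup step, with a stub unpeel map (peel_tag(sha, default) returning the peeled commit SHA, as used above; the mapping data is invented for illustration):

    class StubUnpeelMap(object):
        def __init__(self, peeled):
            self._peeled = peeled  # tag sha -> peeled commit sha
        def peel_tag(self, sha, default):
            return self._peeled.get(sha, default)

    unpeel_map = StubUnpeelMap({"tag-sha": "commit-sha"})
    have = ["tag-sha", "plain-commit-sha"]
    peeled = [unpeel_map.peel_tag(sha, sha) for sha in have]
    assert peeled == ["commit-sha", "plain-commit-sha"]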
@@ -677 +765 @@
             try:
                 self.repository.start_write_group()
                 try:
-                    import_git_objects(self.repository, self.mapping, 
+                    import_git_objects(self.repository, self.mapping,
                         p.iterobjects(get_raw=self.get_raw),
                         self.object_store)
                 except: