/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/remotebranch.py

  • Committer: Martin Pool
  • Date: 2005-09-13 23:42:32 UTC
  • mto: (1185.8.2) (974.1.91)
  • mto: This revision was merged to the branch mainline in revision 1390.
  • Revision ID: mbp@sourcefrog.net-20050913234232-4d901f2d843a35f3
- ignore .DS_Store by default

=== modified file 'bzrlib/remotebranch.py'
--- bzrlib/remotebranch.py
+++ bzrlib/remotebranch.py
@@ -26,19 +26,19 @@
 
 import gzip
 from cStringIO import StringIO
+import os
 import urllib2
-
-from errors import BzrError, BzrCheckError
-from branch import Branch, BZR_BRANCH_FORMAT
-from trace import mutter
-
-# velocitynet.com.au transparently proxies connections and thereby
-# breaks keep-alive -- sucks!
-
-
-ENABLE_URLGRABBER = True
-
-from bzrlib.errors import BzrError
+import urlparse
+
+from bzrlib.errors import BzrError, BzrCheckError
+from bzrlib.branch import Branch, BZR_BRANCH_FORMAT
+from bzrlib.trace import mutter
+from bzrlib.xml import serializer_v4
+
+
+ENABLE_URLGRABBER = False
+
+from bzrlib.errors import BzrError, NoSuchRevision
 
 class GetFailed(BzrError):
     def __init__(self, url, status):
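This hunk replaces Python 2 implicit relative imports (from errors import ...), which resolve only from modules inside the bzrlib package directory, with absolute bzrlib-qualified imports that resolve the same way for every caller; it also flips ENABLE_URLGRABBER to False, making the plain-urllib2 code path the default. A minimal Python 2 sketch of why the absolute form is safer, assuming an installed bzrlib (this is not code from the revision):

    # "from errors import BzrError" works only when the importing module
    # itself lives inside bzrlib/, because Python 2 first searches the
    # importing module's own package.  The absolute form below names the
    # package explicitly and so works from any script or session.
    from bzrlib.errors import BzrError

    try:
        raise BzrError('demo')
    except BzrError, e:
        print 'caught: %s' % e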
@@ -47,16 +47,16 @@
         self.status = status
 
 if ENABLE_URLGRABBER:
-    import urlgrabber
-    import urlgrabber.keepalive
-    urlgrabber.keepalive.DEBUG = 0
+    import util.urlgrabber
+    import util.urlgrabber.keepalive
+    util.urlgrabber.keepalive.DEBUG = 0
     def get_url(path, compressed=False):
         try:
             url = path
             if compressed:
                 url += '.gz'
             mutter("grab url %s" % url)
-            url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
+            url_f = util.urlgrabber.urlopen(url, keepalive=1, close_connection=0)
             if url_f.status != 200:
                 raise GetFailed(url, url_f.status)
             if not compressed:
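The hunk truncates at the "if not compressed:" line; given the module's gzip and cStringIO imports, the elided remainder of get_url presumably returns the response unchanged when uncompressed and inflates the .gz variant in memory otherwise. A minimal sketch of such a fetch helper under those assumptions, using plain urllib2 rather than the file's keep-alive urlgrabber wrapper (illustrative, not the revision's code):

    import gzip
    import urllib2
    from cStringIO import StringIO

    def get_url(path, compressed=False):
        # Fetch a store file, optionally asking for its gzipped variant.
        url = path
        if compressed:
            url += '.gz'
        url_f = urllib2.urlopen(url)
        if not compressed:
            return url_f
        # gzip.GzipFile needs a seekable file object, so buffer the
        # compressed body in memory before decompressing.
        return gzip.GzipFile(fileobj=StringIO(url_f.read()))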
@@ -84,8 +84,8 @@
     orig_url = url
     while True:
         try:
-            ff = get_url(url + '/.bzr/branch-format')
-
+            fmt_url = url + '/.bzr/branch-format'
+            ff = get_url(fmt_url)
             fmt = ff.read()
             ff.close()
 
@@ -98,12 +98,17 @@
         except urllib2.URLError:
             pass
 
-        try:
-            idx = url.rindex('/')
-        except ValueError:
-            raise BzrError('no branch root found for URL %s' % orig_url)
-
-        url = url[:idx]
+        scheme, host, path = list(urlparse.urlparse(url))[:3]
+        # discard params, query, fragment
+
+        # strip off one component of the path component
+        idx = path.rfind('/')
+        if idx == -1 or path == '/':
+            raise BzrError('no branch root found for URL %s'
+                           ' or enclosing directories'
+                           % orig_url)
+        path = path[:idx]
+        url = urlparse.urlunparse((scheme, host, path, '', '', ''))
 
 
 
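The old loop trimmed the raw URL string with url.rindex('/'), so once the path was exhausted it went on to probe nonsense prefixes such as 'http:/' and 'http:' before the ValueError finally surfaced as a BzrError. The replacement parses the URL once, strips one path component per iteration, and stops cleanly when the path reaches '/'. A short Python 2 demonstration of one iteration (the host and path are illustrative):

    import urlparse

    url = 'http://example.com/bzr/branch/subdir?x=1'
    scheme, host, path = list(urlparse.urlparse(url))[:3]
    # scheme == 'http', host == 'example.com', path == '/bzr/branch/subdir';
    # params, query and fragment are discarded.

    idx = path.rfind('/')
    path = path[:idx]                       # '/bzr/branch'
    print urlparse.urlunparse((scheme, host, path, '', '', ''))
    # -> http://example.com/bzr/branch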
@@ -154,10 +159,11 @@
 
 
     def get_revision(self, revision_id):
-        from bzrlib.revision import Revision
-        from bzrlib.xml import unpack_xml
-        revf = self.revision_store[revision_id]
-        r = unpack_xml(Revision, revf)
+        try:
+            revf = self.revision_store[revision_id]
+        except KeyError:
+            raise NoSuchRevision(self, revision_id)
+        r = serializer_v4.read_revision(revf)
         if r.revision_id != revision_id:
             raise BzrCheckError('revision stored as {%s} actually contains {%s}'
                                 % (revision_id, r.revision_id))
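get_revision now separates "revision absent" from "store corrupt": a KeyError from the revision store is re-raised as bzrlib's NoSuchRevision, while a stored document whose embedded id disagrees with the requested one still raises BzrCheckError. Callers get one well-named exception instead of a container-level KeyError. A minimal caller-side sketch (describe is a hypothetical helper, not part of the revision):

    from bzrlib.errors import NoSuchRevision

    def describe(branch, revision_id):
        # Catch the domain exception rather than the bare KeyError that
        # used to leak out of the underlying store mapping.
        try:
            rev = branch.get_revision(revision_id)
        except NoSuchRevision:
            return 'unknown revision %s' % revision_id
        return rev.message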
@@ -178,7 +184,7 @@
         p = self._path(fileid)
         try:
             return get_url(p, compressed=True)
-        except:
+        except urllib2.URLError:
             raise KeyError(fileid)
 
 
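Narrowing the bare except: to except urllib2.URLError: means only the expected network failure is translated into "no such file id"; previously a programming error inside get_url (a NameError, say) or even KeyboardInterrupt would have been silently reported as a missing file. A minimal Python 2 illustration of the narrowed pattern (store.fetch is hypothetical):

    import urllib2

    def get_file(store, fileid):
        try:
            return store.fetch(fileid)      # may raise urllib2.URLError
        except urllib2.URLError:
            # Only the expected failure becomes KeyError; anything else
            # (NameError, KeyboardInterrupt, ...) now propagates.
            raise KeyError(fileid)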