Commit d414c731 by rfkelly0

adding initial version of DAVFS - no tests yet...

parent da8f721f
0.4:
* New FS implementations (under fs.contrib):
* BigFS: read contents of a BIG file (C&C game file format)
* DAVFS: access files stored on a remote WebDAV server
0.3:
* New FS implementations:
......
# Copyright (c) 2009-2010, Cloud Matrix Pty. Ltd.
# All rights reserved; available under the terms of the MIT License.
"""
fs.contrib.davfs: FS implementation accessing a WebDAV server.
This module provides a relatively complete WebDAV Level 1 client that exposes
a WebDAV server as an FS object. Locks are not currently supported.
"""
import os
import httplib
import socket
from urlparse import urlparse
import stat as statinfo
from urllib import quote as urlquote
from urllib import unquote as urlunquote
import base64
import re
import datetime
import cookielib
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.remote import RemoteFileBuffer
from fs.contrib.davfs.util import *
from fs.contrib.davfs.xmlobj import *
import errno
_RETRYABLE_ERRORS = [errno.EADDRINUSE]
try:
_RETRYABLE_ERRORS.append(errno.ECONNRESET)
_RETRYABLE_ERRORS.append(errno.ECONNABORTED)
except AttributeError:
_RETRYABLE_ERRORS.append(104)
class DAVFS(FS):
"""Access a remote filesystem via WebDAV.
This FS implementation provides access to a remote filesystem via the
WebDAV protocol. Basic Level 1 WebDAV is supported; locking is not
currently supported, but planned for the future.
HTTP Basic authentication is supported; provide a dict giving username
and password in the "credentials" argument, or a callback for obtaining
one in the "get_credentials" argument.
To use custom HTTP connector classes (e.g. to implement proper certificate
checking for SSL connections) you can replace the factory functions in the
DAVFS.connection_classes dictionary, or provide the "connection_classes"
argument.
"""
connection_classes = {
"http": httplib.HTTPConnection,
"https": httplib.HTTPSConnection,
}
def __init__(self,url,credentials=None,get_credentials=None,thread_synchronize=True,connection_classes=None,timeout=None):
"""DAVFS constructor.
The only required argument is the root url of the remote server. If
authentication is required, provide the 'credentials' keyword argument
and/or the 'get_credentials' keyword argument. The former is a dict
of credentials info, while the latter is a callback function returning
such a dict. Only HTTP Basic Auth is supported at this stage, so the
only useful keys in a credentials dict are 'username' and 'password'.
"""
if not url.endswith("/"):
url = url + "/"
self.url = url
self.timeout = timeout
self.credentials = credentials
self.get_credentials = get_credentials
if connection_classes is not None:
self.connection_classes = self.connection_classes.copy()
self.connection_classes.update(connection_classes)
self._connections = []
self._cookiejar = cookielib.CookieJar()
super(DAVFS,self).__init__(thread_synchronize=thread_synchronize)
# Check that the server speaks WebDAV, and normalize the URL
# after any redirects have been followed.
self.url = url
resp = self._request("/","PROPFIND","",{"Depth":"0"})
try:
if resp.status == 404:
raise ResourceNotFoundError("/",msg="root url gives 404")
if resp.status in (401,403):
raise PermissionDeniedError("listdir")
if resp.status != 207:
msg = "server at %s doesn't speak WebDAV" % (self.url,)
raise RemoteConnectionError("",msg=msg,details=resp)
finally:
resp.close()
self.url = resp.request_url
self._url_p = urlparse(self.url)
def close(self):
for con in self._connections:
con.close()
super(DAVFS,self).close()
def _add_connection(self,con):
self._connections.append(con)
def _del_connection(self,con):
try:
self._connections.remove(con)
except ValueError:
pass
else:
con.close()
def __str__(self):
return '<DAVFS: %s>' % (self.url,)
__repr__ = __str__
def __getstate__(self):
# Python2.5 cannot load pickled urlparse.ParseResult objects.
state = super(DAVFS,self).__getstate__()
del state["_url_p"]
# CookieJar objects contain a lock, so they can't be pickled.
del state["_cookiejar"]
return state
def __setstate__(self,state):
super(DAVFS,self).__setstate__(state)
self._url_p = urlparse(self.url)
self._cookiejar = cookielib.CookieJar()
def _path2url(self,path):
"""Convert a client-side path into a server-side URL."""
path = relpath(normpath(path))
if path.endswith("/"):
path = path[:-1]
if isinstance(path,unicode):
path = path.encode("utf8")
return self.url + urlquote(path)
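# For example (hypothetical values): with self.url == "http://example.com/dav/",
# _path2url("some dir/file.txt") returns "http://example.com/dav/some%20dir/file.txt";
# _url2path() below performs the reverse mapping.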
def _url2path(self,url):
"""Convert a server-side URL into a client-side path."""
path = urlunquote(urlparse(url).path)
root = self._url_p.path
return path[len(root)-1:].decode("utf8")
def _isurl(self,path,url):
"""Check whether the given URL corresponds to the given local path."""
path = normpath(relpath(path))
upath = relpath(normpath(self._url2path(url)))
return path == upath
def _request(self,path,method,body="",headers={}):
"""Issue a HTTP request to the remote server.
This is a simple wrapper around httplib that does basic error and
sanity checking, e.g. following redirects and providing authentication.
"""
url = self._path2url(path)
visited = []
resp = None
try:
resp = self._raw_request(url,method,body,headers)
# Loop to retry for redirects and authentication responses.
while resp.status in (301,302,401,403):
resp.close()
if resp.status in (301,302,):
visited.append(url)
url = resp.getheader("Location",None)
if not url:
raise OperationFailedError(msg="no location header in 301 response")
if url in visited:
raise OperationFailedError(msg="redirection seems to be looping")
if len(visited) > 10:
raise OperationFailedError("too much redirection")
elif resp.status in (401,403):
if self.get_credentials is None:
break
else:
creds = self.get_credentials(self.credentials)
if creds is None:
break
else:
self.credentials = creds
resp = self._raw_request(url,method,body,headers)
except Exception:
if resp is not None:
resp.close()
raise
resp.request_url = url
return resp
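# Internal example (illustrative): the depth-0 PROPFIND that __init__ issues
# against the root looks like this:
#
#   resp = self._request("/", "PROPFIND", "", {"Depth": "0"})
#   print resp.status    # a WebDAV server replies 207 Multi-Status
#   resp.close()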
def _raw_request(self,url,method,body,headers,num_tries=0):
"""Perform a single HTTP request, without any error handling."""
if self.closed:
raise RemoteConnectionError("",msg="FS is closed")
if isinstance(url,basestring):
url = urlparse(url)
if self.credentials is not None:
username = self.credentials.get("username","")
password = self.credentials.get("password","")
if username is not None and password is not None:
creds = "%s:%s" % (username,password,)
creds = "Basic %s" % (base64.b64encode(creds).strip(),)
headers["Authorization"] = creds
(size,chunks) = normalize_req_body(body)
try:
try:
ConClass = self.connection_classes[url.scheme.lower()]
except KeyError:
msg = "unsupported protocol: '%s'" % (url.scheme,)
raise RemoteConnectionError(msg=msg)
con = ConClass(url.hostname,url.port,timeout=self.timeout)
self._add_connection(con)
try:
con.putrequest(method,url.path)
if size is not None:
con.putheader("Content-Length",str(size))
for hdr,val in headers.iteritems():
con.putheader(hdr,val)
self._cookiejar.add_cookie_header(FakeReq(con,url.scheme,url.path))
con.endheaders()
for chunk in chunks:
con.send(chunk)
if self.closed:
raise RemoteConnectionError("",msg="FS is closed")
resp = con.getresponse()
self._cookiejar.extract_cookies(FakeResp(resp),FakeReq(con,url.scheme,url.path))
except Exception, e:
self._del_connection(con)
raise
else:
old_close = resp.close
def new_close():
old_close()
self._del_connection(con)
resp.close = new_close
return resp
except socket.error, e:
if e.args[0] in _RETRYABLE_ERRORS:
if num_tries < 3:
num_tries += 1
return self._raw_request(url,method,body,headers,num_tries)
try:
msg = e.args[1]
except IndexError:
msg = str(e)
raise RemoteConnectionError("",msg=msg,details=e)
def setcontents(self,path,contents):
resp = self._request(path,"PUT",contents)
resp.close()
if resp.status == 405:
raise ResourceInvalidError(path)
if resp.status == 409:
raise ParentDirectoryMissingError(path)
if resp.status not in (200,201):
raise_generic_error(response,"setcontents",path)
def open(self,path,mode="r"):
# Truncate the file if requested
mode = mode.replace("b","").replace("t","")
contents = ""
if "w" in mode:
self.setcontents(path,contents)
else:
contents = self._request(path,"GET")
if contents.status == 404:
# Create the file if it's missing
if "w" not in mode and "a" not in mode:
contents.close()
raise ResourceNotFoundError(path)
contents = ""
self.setcontents(path,contents)
elif contents.status != 200:
contents.close()
raise_generic_error(resp,"open",path)
if mode == "r-":
contents.size = contents.getheader("Content-Length",None)
if contents.size is not None:
try:
contents.size = int(contents.size)
except ValueError:
contents.size = None
return contents
try:
return RemoteFileBuffer(self,path,mode,contents)
finally:
if hasattr(contents,"close"):
contents.close()
def exists(self,path):
response = self._request(path,"PROPFIND","",{"Depth":"0"})
response.close()
if response.status == 207:
return True
if response.status == 404:
return False
raise_generic_error(response,"exists",path)
def isdir(self,path):
pf = propfind(prop="<prop xmlns='DAV:'><resourcetype /></prop>")
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"0"})
try:
if response.status == 404:
return False
if response.status != 207:
raise_generic_error(response,"isdir",path)
body = response.read()
msres = multistatus.parse(body)
for res in msres.responses:
if self._isurl(path,res.href):
for ps in res.propstats:
if ps.props.getElementsByTagNameNS("DAV:","collection"):
return True
return False
finally:
response.close()
def isfile(self,path):
pf = propfind(prop="<prop xmlns='DAV:'><resourcetype /></prop>")
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"0"})
try:
if response.status == 404:
return False
if response.status != 207:
raise_generic_error(response,"isfile",path)
msres = multistatus.parse(response.read())
for res in msres.responses:
if self._isurl(path,res.href):
for ps in res.propstats:
rt = ps.props.getElementsByTagNameNS("DAV:","resourcetype")
cl = ps.props.getElementsByTagNameNS("DAV:","collection")
if rt and not cl:
return True
return False
finally:
response.close()
def listdir(self,path="./",wildcard=None,full=False,absolute=False,info=False,dirs_only=False,files_only=False):
if info:
pf = propfind(prop="<prop xmlns='DAV:'><resourcetype /><getcontentlength /><getlastmodified /><getetag /></prop>")
else:
pf = propfind(prop="<prop xmlns='DAV:'><resourcetype /></prop>")
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"1"})
try:
if response.status == 404:
raise ResourceNotFoundError(path)
if response.status != 207:
raise_generic_error(response,"listdir",path)
entries = []
msres = multistatus.parse(response.read())
dir_ok = False
for res in msres.responses:
if self._isurl(path,res.href):
# The directory itself, check it's actually a directory
for ps in res.propstats:
if ps.props.getElementsByTagNameNS("DAV:","collection"):
dir_ok = True
break
else:
# An entry in the directory, check if it's of the
# appropriate type and add to entries list as required.
if info:
e_info = self._info_from_propfind(res)
e_info["name"] = basename(self._url2path(res.href))
else:
# TODO: technically, should use displayname for this
e_info = basename(self._url2path(res.href))
if dirs_only:
for ps in res.propstats:
if ps.props.getElementsByTagNameNS("DAV:","collection"):
entries.append(e_info)
break
elif files_only:
for ps in res.propstats:
if ps.props.getElementsByTagNameNS("DAV:","collection"):
break
else:
entries.append(e_info)
else:
entries.append(e_info)
if not dir_ok:
raise ResourceInvalidError(path)
return self._listdir_helper(path,entries,wildcard,full,absolute,False,False)
finally:
response.close()
def makedir(self,path,recursive=False,allow_recreate=False):
response = self._request(path,"MKCOL")
response.close()
if response.status == 201:
return True
if response.status == 409:
if not recursive:
raise ParentDirectoryMissingError(path)
self.makedir(dirname(path),recursive=True,allow_recreate=True)
self.makedir(path,recursive=False,allow_recreate=allow_recreate)
return True
if response.status == 405:
if not self.isdir(path):
raise ResourceInvalidError(path)
if not allow_recreate:
raise DestinationExistsError(path)
return True
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"makedir",path)
def remove(self,path):
if self.isdir(path):
raise ResourceInvalidError(path)
response = self._request(path,"DELETE")
response.close()
if response.status == 405:
raise ResourceInvalidError(path)
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"remove",path)
return True
def removedir(self,path,recursive=False,force=False):
if self.isfile(path):
raise ResourceInvalidError(path)
if not force and self.listdir(path):
raise DirectoryNotEmptyError(path)
response = self._request(path,"DELETE")
response.close()
if response.status == 405:
raise ResourceInvalidError(path)
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"removedir",path)
if recursive and path not in ("","/"):
try:
self.removedir(dirname(path),recursive=True)
except DirectoryNotEmptyError:
pass
return True
def rename(self,src,dst):
self._move(src,dst)
def getinfo(self,path):
info = {}
info["name"] = basename(path)
pf = propfind(prop="<prop xmlns='DAV:'><resourcetype /><getcontentlength /><getlastmodified /><getetag /></prop>")
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"0"})
try:
if response.status != 207:
raise_generic_error(response,"getinfo",path)
msres = multistatus.parse(response.read())
for res in msres.responses:
if self._isurl(path,res.href):
info.update(self._info_from_propfind(res))
if "st_mode" not in info:
info["st_mode"] = 0700 | statinfo.S_IFREG
return info
finally:
response.close()
def _info_from_propfind(self,res):
info = {}
for ps in res.propstats:
findElements = ps.props.getElementsByTagNameNS
# TODO: should check for status of the propfind first...
# check for directory indicator
if findElements("DAV:","collection"):
info["st_mode"] = 0700 | statinfo.S_IFDIR
# check for content length
cl = findElements("DAV:","getcontentlength")
if cl:
cl = "".join(c.nodeValue for c in cl[0].childNodes)
try:
info["size"] = int(cl)
except ValueError:
pass
# check for last modified time
lm = findElements("DAV:","getlastmodified")
if lm:
lm = "".join(c.nodeValue for c in lm[0].childNodes)
try:
# TODO: more robust datetime parsing
fmt = "%a, %d %b %Y %H:%M:%S GMT"
mtime = datetime.datetime.strptime(lm,fmt)
info["modified_time"] = mtime
except ValueError:
pass
# check for etag
etag = findElements("DAV:","getetag")
if etag:
etag = "".join(c.nodeValue for c in etag[0].childNodes)
if etag:
info["etag"] = etag
if "st_mode" not in info:
info["st_mode"] = 0700 | statinfo.S_IFREG
return info
def copy(self,src,dst,overwrite=False,chunk_size=None):
if self.isdir(src):
msg = "Source is not a file: %(path)s"
raise ResourceInvalidError(src, msg=msg)
self._copy(src,dst,overwrite=overwrite)
def copydir(self,src,dst,overwrite=False,ignore_errors=False,chunk_size=0):
if self.isfile(src):
msg = "Source is not a directory: %(path)s"
raise ResourceInvalidError(src, msg=msg)
self._copy(src,dst,overwrite=overwrite)
def _copy(self,src,dst,overwrite=False):
headers = {"Destination":self._path2url(dst)}
if overwrite:
headers["Overwrite"] = "T"
else:
headers["Overwrite"] = "F"
response = self._request(src,"COPY",headers=headers)
response.close()
if response.status == 412:
raise DestinationExistsError(dst)
if response.status == 409:
raise ParentDirectoryMissingError(dst)
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"copy",path)
def move(self,src,dst,overwrite=False,chunk_size=None):
if self.isdir(src):
msg = "Source is not a file: %(path)s"
raise ResourceInvalidError(src, msg=msg)
self._move(src,dst,overwrite=overwrite)
def movedir(self,src,dst,overwrite=False,ignore_errors=False,chunk_size=0):
if self.isfile(src):
msg = "Source is not a directory: %(path)s"
raise ResourceInvalidError(src, msg=msg)
self._move(src,dst,overwrite=overwrite)
def _move(self,src,dst,overwrite=False):
headers = {"Destination":self._path2url(dst)}
if overwrite:
headers["Overwrite"] = "T"
else:
headers["Overwrite"] = "F"
response = self._request(src,"MOVE",headers=headers)
response.close()
if response.status == 412:
raise DestinationExistsError(dst)
if response.status == 409:
raise ParentDirectoryMissingError(dst)
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"move",path)
@staticmethod
def _split_xattr(name):
"""Split extended attribute name into (namespace,localName) pair."""
idx = len(name)-1
while idx >= 0 and name[idx].isalnum():
idx -= 1
return (name[:idx+1],name[idx+1:])
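# For example: _split_xattr("DAV:getetag") returns ("DAV:", "getetag") and
# _split_xattr("http://example.com/ns/color") returns
# ("http://example.com/ns/", "color") -- the trailing run of alphanumeric
# characters becomes the local name, everything before it the namespace.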
def getxattr(self,path,name,default=None):
(namespaceURI,localName) = self._split_xattr(name)
# TODO: encode xml character entities in the namespace
if namespaceURI:
pf = propfind(prop="<prop xmlns='"+namespaceURI+"'><"+localName+" /></prop>")
else:
pf = propfind(prop="<prop><"+localName+" /></prop>")
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"0"})
try:
if response.status != 207:
raise_generic_error(response,"getxattr",path)
msres = multistatus.parse(response.read())
finally:
response.close()
for res in msres.responses:
if self._isurl(path,res.href):
for ps in res.propstats:
if namespaceURI:
findElements = ps.props.getElementsByTagNameNS
propNode = findElements(namespaceURI,localName)
else:
findElements = ps.props.getElementsByTagName
propNode = findElements(localName)
if propNode:
propNode = propNode[0]
if ps.status.code == 200:
return "".join(c.toxml() for c in propNode.childNodes)
if ps.status.code == 404:
return default
raise OperationFailedError("getxattr",details=response)
return default
def setxattr(self,path,name,value):
(namespaceURI,localName) = self._split_xattr(name)
# TODO: encode xml character entities in the namespace
if namespaceURI:
p = "<%s xmlns='%s'>%s</%s>" % (localName,namespaceURI,value,localName)
else:
p = "<%s>%s</%s>" % (localName,value,localName)
pu = propertyupdate()
pu.commands.append(set(props="<prop>"+p+"</prop>"))
response = self._request(path,"PROPPATCH",pu.render(),{"Depth":"0"})
response.close()
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"setxattr",path)
def delxattr(self,path,name):
(namespaceURI,localName) = self._split_xattr(name)
# TODO: encode xml character entities in the namespace
if namespaceURI:
p = "<%s xmlns='%s' />" % (localName,namespaceURI,)
else:
p = "<%s />" % (localName,)
pu = propertyupdate()
pu.commands.append(remove(props="<prop>"+p+"</prop>"))
response = self._request(path,"PROPPATCH",pu.render(),{"Depth":"0"})
response.close()
if response.status < 200 or response.status >= 300:
raise_generic_error(response,"delxattr",path)
def listxattrs(self,path):
pf = propfind(propname=True)
response = self._request(path,"PROPFIND",pf.render(),{"Depth":"0"})
try:
if response.status != 207:
raise_generic_error(response,"listxattrs",path)
msres = multistatus.parse(response.read())
finally:
response.close()
props = []
for res in msres.responses:
if self._isurl(path,res.href):
for ps in res.propstats:
for node in ps.props.childNodes:
if node.nodeType != node.ELEMENT_NODE:
continue
if node.namespaceURI:
if node.namespaceURI == "DAV:":
continue
propname = node.namespaceURI + node.localName
else:
propname = node.nodeName
props.append(propname)
return props
# TODO: getxattrs() and setxattrs()
def raise_generic_error(response,opname,path):
if response.status == 404:
raise ResourceNotFoundError(path,details=response)
if response.status in (401,403):
raise PermissionDeniedError(opname,details=response)
if response.status == 423:
raise ResourceLockedError(path,opname=opname,details=response)
if response.status == 501:
raise UnsupportedError(opname,details=response)
raise OperationFailedError(opname,details=response)
# Copyright (c) 2009-2010, Cloud Matrix Pty. Ltd.
# All rights reserved; available under the terms of the MIT License.
"""
fs.contrib.davfs.util: utils for FS WebDAV implementation.
"""
import os
import re
import cookielib
def get_fileno(file):
"""Get the os-level fileno of a file-like object.
This function decodes several common file wrapper structures in an attempt
to determine the underlying OS-level fileno for an object.
"""
while not hasattr(file,"fileno"):
if hasattr(file,"file"):
file = file.file
elif hasattr(file,"_file"):
file = file._file
elif hasattr(file,"_fileobj"):
file = file._fileobj
else:
raise AttributeError
return file.fileno()
def file_chunks(f,chunk_size=1024*64):
"""Generator yielding chunks of a file.
This provides a simple way to iterate through binary data chunks from
a file. Recall that using a file directly as an iterator generates the
*lines* from that file, which is useless and very inefficient for binary
data.
"""
chunk = f.read(chunk_size)
while chunk:
yield chunk
chunk = f.read(chunk_size)
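# Illustrative use (the file path is made up): streaming an upload body in
# 64kB pieces without reading the whole file into memory:
#
#   f = open("/tmp/big.bin", "rb")
#   for chunk in file_chunks(f):
#       con.send(chunk)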
def normalize_req_body(body,chunk_size=1024*64):
"""Convert given request body into (size,data_iter) pair.
This function is used to accept a variety of different inputs in HTTP
requests, converting them to a standard format.
"""
if hasattr(body,"read"):
try:
size = int(body.size)
except (AttributeError,TypeError):
try:
size = os.fstat(get_fileno(body)).st_size
except (AttributeError,OSError):
size = None
return (size,file_chunks(body,chunk_size))
else:
body = str(body)
return (len(body),[body])
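# For example: normalize_req_body("hello") returns (5, ["hello"]), while
# normalize_req_body(open("/tmp/data.bin", "rb")) (an invented path) returns
# the size reported by os.fstat together with a generator of 64kB chunks.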
class FakeReq:
"""Compatability interface to use cookielib with raw httplib objects."""
def __init__(self,connection,scheme,path):
self.connection = connection
self.scheme = scheme
self.path = path
def get_full_url(self):
return self.scheme + "://" + self.connection.host + self.path
def get_type(self):
return self.scheme
def get_host(self):
return self.connection.host
def is_unverifiable(self):
return True
def get_origin_req_host(self):
return self.connection.host
def has_header(self,header):
return False
def add_unredirected_header(self,header,value):
self.connection.putheader(header,value)
class FakeResp:
"""Compatability interface to use cookielib with raw httplib objects."""
def __init__(self,response):
self.response = response
def info(self):
return self
def getheaders(self,header):
header = header.lower()
headers = self.response.getheaders()
return [v for (h,v) in headers if h.lower() == header]
# The standard cookielib cookie parser doesn't seem to handle multiple
# cookies correctly, so we replace it with a better version. This code
# is a tweaked version of the cookielib function of the same name.
#
_test_cookie = "sessionid=e9c9b002befa93bd865ce155270307ef; Domain=.cloud.me; expires=Wed, 10-Feb-2010 03:27:20 GMT; httponly; Max-Age=1209600; Path=/, sessionid_https=None; Domain=.cloud.me; expires=Wed, 10-Feb-2010 03:27:20 GMT; httponly; Max-Age=1209600; Path=/; secure"
if len(cookielib.parse_ns_headers([_test_cookie])) != 2:
def parse_ns_headers(ns_headers):
"""Improved parser for netscape-style cookies.
This version can handle multiple cookies in a single header.
"""
known_attrs = ("expires", "domain", "path", "secure","port", "max-age")
result = []
for ns_header in ns_headers:
pairs = []
version_set = False
for ii, param in enumerate(re.split(r"(;\s)|(,\s(?=[a-zA-Z0-9_\-]+=))", ns_header)):
if param is None:
continue
param = param.rstrip()
if param == "" or param[0] == ";":
continue
if param[0] == ",":
if pairs:
if not version_set:
pairs.append(("version", "0"))
result.append(pairs)
pairs = []
continue
if "=" not in param:
k, v = param, None
else:
k, v = re.split(r"\s*=\s*", param, 1)
k = k.lstrip()
if ii != 0:
lc = k.lower()
if lc in known_attrs:
k = lc
if k == "version":
# This is an RFC 2109 cookie.
version_set = True
if k == "expires":
# convert expires date to seconds since epoch
if v.startswith('"'): v = v[1:]
if v.endswith('"'): v = v[:-1]
v = cookielib.http2time(v) # None if invalid
pairs.append((k, v))
if pairs:
if not version_set:
pairs.append(("version", "0"))
result.append(pairs)
return result
cookielib.parse_ns_headers = parse_ns_headers
assert len(cookielib.parse_ns_headers([_test_cookie])) == 2
# Copyright (c) 2009-2010, Cloud Matrix Pty. Ltd.
# All rights reserved; available under the terms of the MIT License.
"""
fs.contrib.davfs.xmlobj: dexml model definitions for WebDAV
This module defines the various XML element structures for WebDAV as a set
of dexml.Model subclasses.
"""
from urlparse import urlparse, urlunparse
from httplib import responses as STATUS_CODE_TEXT
STATUS_CODE_TEXT[207] = "Multi-Status"
import dexml
from dexml import fields
Error = dexml.Error
class _davbase(dexml.Model):
"""Base class for all davfs XML models."""
class meta:
namespace = "DAV:"
namespace_prefix = "D"
class HrefField(fields.String):
"""Field representing a <href> tag."""
def __init__(self,*args,**kwds):
kwds["tagname"] = "href"
super(HrefField,self).__init__(*args,**kwds)
def parse_value(self,value):
url = urlparse(value.encode("UTF-8"))
return urlunparse((url.scheme,url.netloc,url.path,url.params,url.query,url.fragment))
def render_value(self,value):
url = urlparse(value.encode("UTF-8"))
return urlunparse((url.scheme,url.netloc,url.path,url.params,url.query,url.fragment))
class TimeoutField(fields.Field):
"""Field representing a WebDAV timeout value."""
def __init__(self,*args,**kwds):
if "tagname" not in kwds:
kwds["tagname"] = "timeout"
super(TimeoutField,self).__init__(*args,**kwds)
@classmethod
def parse_value(cls,value):
if value == "Infinite":
return None
if value.startswith("Second-"):
return int(value[len("Second-"):])
raise ValueError("invalid timeout specifier: %s" % (value,))
def render_value(self,value):
if value is None:
return "Infinite"
else:
return "Second-" + str(value)
class StatusField(fields.Value):
"""Field representing a WebDAV status-line value.
The value may be set as either a string or an integer, and is converted
into a StatusString instance.
"""
def __init__(self,*args,**kwds):
kwds["tagname"] = "status"
super(StatusField,self).__init__(*args,**kwds)
def __get__(self,instance,owner):
val = super(StatusField,self).__get__(instance,owner)
if val is not None:
val = StatusString(val,instance,self)
return val
def __set__(self,instance,value):
if isinstance(value,basestring):
# sanity check it
bits = value.split(" ")
if len(bits) < 3 or bits[0] != "HTTP/1.1":
raise ValueError("Not a valid status: '%s'" % (value,))
int(bits[1])
elif isinstance(value,int):
# convert it to a message
value = StatusString._value_for_code(value)
super(StatusField,self).__set__(instance,value)
class StatusString(str):
"""Special string representing a HTTP status line.
It's a string, but it exposes the integer attribute "code" giving just
the actual response code.
"""
def __new__(cls,val,inst,owner):
return str.__new__(cls,val)
def __init__(self,val,inst,owner):
self._owner = owner
self._inst = inst
@staticmethod
def _value_for_code(code):
msg = STATUS_CODE_TEXT.get(code,"UNKNOWN STATUS CODE")
return "HTTP/1.1 %d %s" % (code,msg)
def _get_code(self):
return int(self.split(" ")[1])
def _set_code(self,code):
newval = self._value_for_code(code)
self._owner.__set__(self._inst,newval)
code = property(_get_code,_set_code)
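# For example: assigning the integer 207 to a StatusField stores the string
# "HTTP/1.1 207 Multi-Status", and the .code attribute of the resulting
# StatusString reads back as 207.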
class multistatus(_davbase):
"""XML model for a multi-status response message."""
responses = fields.List("response",minlength=1)
description = fields.String(tagname="responsedescription",required=False)
class response(_davbase):
"""XML model for an individual response in a multi-status message."""
href = HrefField()
# TODO: ensure only one of hrefs/propstats
hrefs = fields.List(HrefField(),minlength=1,required=False)
status = StatusField(required=False)
propstats = fields.List("propstat",minlenth=1,required=False)
description = fields.String(tagname="responsedescription",required=False)
class propstat(_davbase):
"""XML model for a propstat response message."""
props = fields.XmlNode(tagname="prop",encoding="UTF-8")
status = StatusField()
description = fields.String(tagname="responsedescription",required=False)
class propfind(_davbase):
"""XML model for a propfind request message."""
allprop = fields.Boolean(tagname="allprop",required=False)
propname = fields.Boolean(tagname="propname",required=False)
prop = fields.XmlNode(tagname="prop",required=False,encoding="UTF-8")
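# For example (the exact serialisation is up to dexml, so this is indicative
# only): propfind(prop="<prop xmlns='DAV:'><resourcetype /></prop>").render()
# produces a <D:propfind xmlns:D="DAV:"> document wrapping that <prop> node,
# which is the request body DAVFS sends for its isdir()/isfile()/listdir()
# queries.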
class propertyupdate(_davbase):
"""XML model for a propertyupdate request message."""
commands = fields.List(fields.Choice("remove","set"))
class remove(_davbase):
"""XML model for a propertyupdate remove command."""
props = fields.XmlNode(tagname="prop",encoding="UTF-8")
class set(_davbase):
"""XML model for a propertyupdate set command."""
props = fields.XmlNode(tagname="prop",encoding="UTF-8")
class lockdiscovery(_davbase):
"""XML model for a lockdiscovery request message."""
locks = fields.List("activelock")
class activelock(_davbase):
"""XML model for an activelock response message."""
lockscope = fields.Model("lockscope")
locktype = fields.Model("locktype")
depth = fields.String(tagname="depth")
owner = fields.XmlNode(tagname="owner",encoding="UTF-8")
timeout = TimeoutField()
locktoken = fields.Model("locktoken")
class lockscope(_davbase):
"""XML model for a lockscope response message."""
shared = fields.Boolean(tagname="shared",empty_only=True)
exclusive = fields.Boolean(tagname="exclusive",empty_only=True)
class locktoken(_davbase):
"""XML model for a locktoken response message."""
tokens = fields.List(HrefField())
class lockentry(_davbase):
"""XML model for a lockentry response message."""
lockscope = fields.Model("lockscope")
locktype = fields.Model("locktype")
class lockinfo(_davbase):
"""XML model for a lockinfo response message."""
lockscope = fields.Model("lockscope")
locktype = fields.Model("locktype")
owner = fields.XmlNode(tagname="owner",encoding="UTF-8")
class locktype(_davbase):
"""XML model for a locktype response message."""
type = fields.XmlNode(encoding="UTF-8")