Commit 3ea4efe1 by willmcgugan@gmail.com

Change of API (fs.open, fs.setcontents, fs.getcontents) to support the io module in Py2.6+ and Py3

parent c6391b6f
......@@ -19,8 +19,8 @@ __version__ = "0.4.1"
__author__ = "Will McGugan (will@willmcgugan.com)"
# provide these by default so people can use 'fs.path.basename' etc.
import errors
import path
from fs import errors
from fs import path
_thread_synchronize_default = True
def set_thread_synchronize_default(sync):
......
......@@ -33,7 +33,7 @@ class UserDataFS(OSFS):
"""
app_dirs = AppDirs(appname, appauthor, version, roaming)
super(self.__class__, self).__init__(app_dirs.user_data_dir, create=create)
super(UserDataFS, self).__init__(app_dirs.user_data_dir, create=create)
class SiteDataFS(OSFS):
......@@ -48,7 +48,7 @@ class SiteDataFS(OSFS):
"""
app_dirs = AppDirs(appname, appauthor, version, roaming)
super(self.__class__, self).__init__(app_dirs.site_data_dir, create=create)
super(SiteDataFS, self).__init__(app_dirs.site_data_dir, create=create)
class UserCacheFS(OSFS):
......@@ -63,7 +63,7 @@ class UserCacheFS(OSFS):
"""
app_dirs = AppDirs(appname, appauthor, version, roaming)
super(self.__class__, self).__init__(app_dirs.user_cache_dir, create=create)
super(UserCacheFS, self).__init__(app_dirs.user_cache_dir, create=create)
class UserLogFS(OSFS):
......@@ -78,10 +78,11 @@ class UserLogFS(OSFS):
"""
app_dirs = AppDirs(appname, appauthor, version, roaming)
super(self.__class__, self).__init__(app_dirs.user_log_dir, create=create)
super(UserLogFS, self).__init__(app_dirs.user_log_dir, create=create)
if __name__ == "__main__":
udfs = UserDataFS('sexytime', appauthor='pyfs')
udfs = UserDataFS('exampleapp', appauthor='pyfs')
print udfs
udfs2 = UserDataFS('sexytime2', appauthor='pyfs', create=False)
udfs2 = UserDataFS('exampleapp2', appauthor='pyfs', create=False)
print udfs2
......@@ -37,9 +37,10 @@ from fs.path import *
from fs.errors import *
from fs.local_functools import wraps
import compatibility
import six
from six import b
class DummyLock(object):
"""A dummy lock object that doesn't do anything.
......@@ -373,7 +374,7 @@ class FS(object):
"""
return self.getpathurl(path, allow_none=True) is not None
def open(self, path, mode="r", **kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Open a the given path as a file-like object.
:param path: a path to file that should be opened
......@@ -394,7 +395,7 @@ class FS(object):
"""
raise UnsupportedError("open file")
def safeopen(self, path, mode="r", **kwargs):
def safeopen(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Like :py:meth:`~fs.base.FS.open`, but returns a
:py:class:`~fs.base.NullFile` if the file could not be opened.
......@@ -414,7 +415,7 @@ class FS(object):
"""
try:
f = self.open(path, mode, **kwargs)
f = self.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
except ResourceNotFoundError:
return NullFile()
return f
......@@ -457,7 +458,8 @@ class FS(object):
for f in self.listdir():
yield f
def listdir(self, path="./",
def listdir(self,
path="./",
wildcard=None,
full=False,
absolute=False,
......@@ -489,7 +491,8 @@ class FS(object):
"""
raise UnsupportedError("list directory")
def listdirinfo(self, path="./",
def listdirinfo(self,
path="./",
wildcard=None,
full=False,
absolute=False,
......@@ -517,6 +520,7 @@ class FS(object):
"""
path = normpath(path)
def getinfo(p):
try:
if full or absolute:
......@@ -534,7 +538,8 @@ class FS(object):
dirs_only=dirs_only,
files_only=files_only)]
def _listdir_helper(self, path,
def _listdir_helper(self,
path,
entries,
wildcard=None,
full=False,
......@@ -556,7 +561,7 @@ class FS(object):
if wildcard is not None:
if not callable(wildcard):
wildcard_re = re.compile(fnmatch.translate(wildcard))
wildcard = lambda fn:bool (wildcard_re.match(fn))
wildcard = lambda fn: bool(wildcard_re.match(fn))
entries = [p for p in entries if wildcard(p)]
if dirs_only:
......@@ -574,7 +579,8 @@ class FS(object):
return entries
def ilistdir(self, path="./",
def ilistdir(self,
path="./",
wildcard=None,
full=False,
absolute=False,
......@@ -594,7 +600,8 @@ class FS(object):
dirs_only=dirs_only,
files_only=files_only))
def ilistdirinfo(self, path="./",
def ilistdirinfo(self,
path="./",
wildcard=None,
full=False,
absolute=False,
......@@ -748,40 +755,94 @@ class FS(object):
return "No description available"
return sys_path
def getcontents(self, path, mode='rb', encoding=None, errors=None, newline=None):
    """Returns the contents of a file as a string.

    :param path: A path of file to read
    :param mode: mode to open the file with (must contain 'r' to be readable)
    :param encoding: text encoding to use if the file is opened in text mode
    :param errors: unicode error policy to use if the file is opened in text mode
    :param newline: newline handling to use if the file is opened in text mode
    :rtype: str
    :returns: file contents
    :raises ValueError: if `mode` does not contain 'r'

    """
    if 'r' not in mode:
        raise ValueError("mode must contain 'r' to be readable")
    f = None
    try:
        f = self.open(path, mode=mode, encoding=encoding, errors=errors, newline=newline)
        contents = f.read()
        return contents
    finally:
        # Close even if read() raises; f stays None if open() itself failed.
        if f is not None:
            f.close()
def _setcontents(self,
                 path,
                 data,
                 encoding=None,
                 errors=None,
                 chunk_size=1024 * 64,
                 progress_callback=None,
                 finished_callback=None):
    """Does the work of setcontents. Factored out, so that `setcontents_async` can use it.

    :param path: a path of the file to create
    :param data: a text/bytes string, or a file-like object with a `read` method
    :param encoding: encoding used when writing text data to the destination file
    :param errors: unicode error policy used when writing text data
    :param chunk_size: number of bytes to copy per iteration when `data` is file-like
    :param progress_callback: called periodically with the number of bytes written
    :param finished_callback: called once when all data has been written
    :returns: number of bytes (characters, for text data) written

    """
    if progress_callback is None:
        progress_callback = lambda bytes_written: None
    if finished_callback is None:
        finished_callback = lambda: None

    if not data:
        # Nothing to write -- just make sure an empty file exists.
        progress_callback(0)
        self.createfile(path)
        finished_callback()
        return 0

    bytes_written = 0
    progress_callback(0)

    if hasattr(data, 'read'):
        # File-like source: copy in chunks. Peek at the first chunk to
        # decide between text and binary mode for the destination file.
        read = data.read
        chunk = read(chunk_size)
        if isinstance(chunk, six.text_type):
            f = self.open(path, 'wt', encoding=encoding, errors=errors)
        else:
            f = self.open(path, 'wb')
        write = f.write
        try:
            while chunk:
                write(chunk)
                bytes_written += len(chunk)
                progress_callback(bytes_written)
                chunk = read(chunk_size)
        finally:
            f.close()
    else:
        # In-memory source: a single write in the appropriate mode.
        if isinstance(data, six.text_type):
            with self.open(path, 'wt', encoding=encoding, errors=errors) as f:
                f.write(data)
                bytes_written += len(data)
        else:
            with self.open(path, 'wb') as f:
                f.write(data)
                bytes_written += len(data)
        progress_callback(bytes_written)

    finished_callback()
    return bytes_written
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024 * 64):
    """A convenience method to create a new file from a string or file-like object.

    :param path: a path of the file to create
    :param data: a string or bytes object containing the contents for the new file
    :param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
    :param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
    :param chunk_size: Number of bytes to read in a chunk, if the implementation has to resort to a read / copy loop

    """
    # Forward the caller's chunk_size; a hard-coded 1024 * 64 literal here
    # would silently ignore the chunk_size argument.
    return self._setcontents(path, data, encoding=encoding, errors=errors,
                             chunk_size=chunk_size)
def setcontents_async(self,
path,
data,
encoding=None,
errors=None,
chunk_size=1024 * 64,
progress_callback=None,
finished_callback=None,
......@@ -793,6 +854,8 @@ class FS(object):
:param path: a path of the file to create
:param data: a string or a file-like object containing the contents for the new file
:param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
:param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
:param chunk_size: Number of bytes to read and write in a chunk
:param progress_callback: A function that is called periodically
with the number of bytes written.
......@@ -805,9 +868,16 @@ class FS(object):
"""
finished_event = threading.Event()
def do_setcontents():
try:
compatibility.copy_file_to_fs(data, self, path, chunk_size=chunk_size, progress_callback=progress_callback, finished_callback=finished_callback)
self._setcontents(path,
data,
encoding=encoding,
errors=errors,
chunk_size=1024 * 64,
progress_callback=progress_callback,
finished_callback=finished_callback)
except Exception, e:
if error_callback is not None:
error_callback(e)
......@@ -817,7 +887,6 @@ class FS(object):
threading.Thread(target=do_setcontents).start()
return finished_event
def createfile(self, path, wipe=False):
"""Creates an empty file if it doesn't exist
......@@ -835,7 +904,6 @@ class FS(object):
if f is not None:
f.close()
def opendir(self, path):
"""Opens a directory and returns a FS object representing its contents.
......@@ -897,19 +965,18 @@ class FS(object):
return self.listdir(path, *args, **kwargs)
if wildcard is None:
wildcard = lambda f:True
wildcard = lambda f: True
elif not callable(wildcard):
wildcard_re = re.compile(fnmatch.translate(wildcard))
wildcard = lambda fn:bool (wildcard_re.match(fn))
wildcard = lambda fn: bool(wildcard_re.match(fn))
if dir_wildcard is None:
dir_wildcard = lambda f:True
dir_wildcard = lambda f: True
elif not callable(dir_wildcard):
dir_wildcard_re = re.compile(fnmatch.translate(dir_wildcard))
dir_wildcard = lambda fn:bool (dir_wildcard_re.match(fn))
dir_wildcard = lambda fn: bool(dir_wildcard_re.match(fn))
if search == "breadth":
dirs = [path]
dirs_append = dirs.append
dirs_pop = dirs.pop
......@@ -1005,7 +1072,6 @@ class FS(object):
for p, _files in self.walk(path, dir_wildcard=wildcard, search=search, ignore_errors=ignore_errors):
yield p
def getsize(self, path):
"""Returns the size (in bytes) of a resource.
......@@ -1207,6 +1273,7 @@ class FS(object):
with self._lock:
if not self.isdir(src):
raise ResourceInvalidError(src, msg="Source is not a directory: %(path)s")
def copyfile_noerrors(src, dst, **kwargs):
try:
return self.copy(src, dst, **kwargs)
......@@ -1227,13 +1294,10 @@ class FS(object):
self.makedir(dst, allow_recreate=True)
for dirname, filenames in self.walk(src):
dst_dirname = relpath(frombase(src, abspath(dirname)))
dst_dirpath = pathjoin(dst, dst_dirname)
self.makedir(dst_dirpath, allow_recreate=True, recursive=True)
for filename in filenames:
src_filename = pathjoin(dirname, filename)
dst_filename = pathjoin(dst_dirpath, filename)
copyfile(src_filename, dst_filename, overwrite=overwrite, chunk_size=chunk_size)
......@@ -1248,9 +1312,9 @@ class FS(object):
"""
with self._lock:
path = normpath(path)
iter_dir = iter(self.listdir(path))
iter_dir = iter(self.ilistdir(path))
try:
iter_dir.next()
next(iter_dir)
except StopIteration:
return True
return False
......@@ -1326,7 +1390,7 @@ class FS(object):
return m
def flags_to_mode(flags):
def flags_to_mode(flags, binary=True):
"""Convert an os.O_* flag bitmask into an FS mode string."""
if flags & os.O_WRONLY:
if flags & os.O_TRUNC:
......@@ -1346,6 +1410,10 @@ def flags_to_mode(flags):
mode = "r"
if flags & os.O_EXCL:
mode += "x"
if binary:
mode += 'b'
else:
mode += 't'
return mode
......@@ -8,10 +8,11 @@ Not for general usage, the functionality in this file is exposed elsewhere
import six
from six import PY3
def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callback=None, finished_callback=None):
"""Copy data from a string or a file-like object to a given fs/path"""
if progress_callback is None:
progress_callback = lambda bytes_written:None
progress_callback = lambda bytes_written: None
bytes_written = 0
f = None
try:
......@@ -19,7 +20,7 @@ def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callb
if hasattr(data, "read"):
read = data.read
chunk = read(chunk_size)
if PY3 and isinstance(chunk, six.text_type):
if isinstance(chunk, six.text_type):
f = dst_fs.open(dst_path, 'w')
else:
f = dst_fs.open(dst_path, 'wb')
......@@ -30,7 +31,7 @@ def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callb
progress_callback(bytes_written)
chunk = read(chunk_size)
else:
if PY3 and isinstance(data, six.text_type):
if isinstance(data, six.text_type):
f = dst_fs.open(dst_path, 'w')
else:
f = dst_fs.open(dst_path, 'wb')
......
......@@ -112,10 +112,10 @@ class ArchiveFS(FS):
return SizeUpdater(entry, self.archive.writestream(path))
@synchronize
def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
    """Return the entire contents of a file within the archive.

    :param path: path of the file to read
    :raises ResourceNotFoundError: if the path does not exist

    """
    if not self.exists(path):
        raise ResourceNotFoundError(path)
    # Context manager guarantees the archive stream is closed after reading.
    with self.open(path, mode, encoding=encoding, errors=errors, newline=newline) as f:
        return f.read()
def desc(self, path):
......
......@@ -41,11 +41,13 @@ from fs.base import *
from fs.path import *
from fs.errors import *
from fs.remote import RemoteFileBuffer
from fs import iotools
from fs.contrib.davfs.util import *
from fs.contrib.davfs import xmlobj
from fs.contrib.davfs.xmlobj import *
import six
from six import b
import errno
......@@ -343,8 +345,10 @@ class DAVFS(FS):
msg = str(e)
raise RemoteConnectionError("",msg=msg,details=e)
def setcontents(self,path, contents, chunk_size=1024*64):
resp = self._request(path,"PUT",contents)
def setcontents(self,path, data=b'', encoding=None, errors=None, chunk_size=1024 * 64):
if isinstance(data, six.text_type):
data = data.encode(encoding=encoding, errors=errors)
resp = self._request(path, "PUT", data)
resp.close()
if resp.status == 405:
raise ResourceInvalidError(path)
......@@ -353,7 +357,8 @@ class DAVFS(FS):
if resp.status not in (200,201,204):
raise_generic_error(resp,"setcontents",path)
def open(self,path,mode="r"):
@iotools.filelike_to_stream
def open(self,path,mode="r", **kwargs):
mode = mode.replace("b","").replace("t","")
# Truncate the file if requested
contents = b("")
......
......@@ -77,9 +77,9 @@ from fs.wrapfs import WrapFS
try:
import libdokan
except (NotImplementedError,EnvironmentError,ImportError,NameError,):
except (NotImplementedError, EnvironmentError, ImportError, NameError,):
is_available = False
sys.modules.pop("fs.expose.dokan.libdokan",None)
sys.modules.pop("fs.expose.dokan.libdokan", None)
libdokan = None
else:
is_available = True
......@@ -325,9 +325,9 @@ class FSOperations(object):
"""
self._files_lock.acquire()
try:
(f2,path,lock) = self._files_by_handle[fh]
(f2, path, lock) = self._files_by_handle[fh]
assert f2.closed
self._files_by_handle[fh] = (f,path,lock)
self._files_by_handle[fh] = (f, path, lock)
return fh
finally:
self._files_lock.release()
......@@ -336,7 +336,7 @@ class FSOperations(object):
"""Unregister the given file handle."""
self._files_lock.acquire()
try:
(f,path,lock) = self._files_by_handle.pop(fh)
(f, path, lock) = self._files_by_handle.pop(fh)
del self._files_size_written[path][fh]
if not self._files_size_written[path]:
del self._files_size_written[path]
......@@ -368,7 +368,7 @@ class FSOperations(object):
locks = self._active_locks[path]
except KeyError:
return 0
for (lh,lstart,lend) in locks:
for (lh, lstart, lend) in locks:
if info is not None and info.contents.Context == lh:
continue
if lstart >= offset + length:
......@@ -423,7 +423,8 @@ class FSOperations(object):
# Try to open the requested file. It may actually be a directory.
info.contents.Context = 1
try:
f = self.fs.open(path,mode)
f = self.fs.open(path, mode)
print path, mode, repr(f)
except ResourceInvalidError:
info.contents.IsDirectory = True
except FSError:
......@@ -434,7 +435,7 @@ class FSOperations(object):
else:
raise
else:
info.contents.Context = self._reg_file(f,path)
info.contents.Context = self._reg_file(f, path)
return retcode
@timeout_protect
......@@ -468,7 +469,7 @@ class FSOperations(object):
self.fs.removedir(path)
self._pending_delete.remove(path)
else:
(file,_,lock) = self._get_file(info.contents.Context)
(file, _, lock) = self._get_file(info.contents.Context)
lock.acquire()
try:
file.close()
......@@ -484,7 +485,7 @@ class FSOperations(object):
@handle_fs_errors
def CloseFile(self, path, info):
if info.contents.Context >= MIN_FH:
(file,_,lock) = self._get_file(info.contents.Context)
(file, _, lock) = self._get_file(info.contents.Context)
lock.acquire()
try:
file.close()
......@@ -497,20 +498,20 @@ class FSOperations(object):
@handle_fs_errors
def ReadFile(self, path, buffer, nBytesToRead, nBytesRead, offset, info):
    """Dokan callback: read up to nBytesToRead bytes at `offset` into `buffer`."""
    path = normpath(path)
    (file, _, lock) = self._get_file(info.contents.Context)
    lock.acquire()
    try:
        errno = self._check_lock(path, offset, nBytesToRead, info)
        if errno:
            return errno
        # This may be called after Cleanup, meaning we
        # need to re-open the file.
        if file.closed:
            file = self.fs.open(path, file.mode)
            self._rereg_file(info.contents.Context, file)
        file.seek(offset)
        data = file.read(nBytesToRead)
        ctypes.memmove(buffer, ctypes.create_string_buffer(data), len(data))
        nBytesRead[0] = len(data)
    finally:
        lock.release()
......@@ -520,23 +521,23 @@ class FSOperations(object):
def WriteFile(self, path, buffer, nBytesToWrite, nBytesWritten, offset, info):
path = normpath(path)
fh = info.contents.Context
(file,_,lock) = self._get_file(fh)
(file, _, lock) = self._get_file(fh)
lock.acquire()
try:
errno = self._check_lock(path,offset,nBytesToWrite,info)
errno = self._check_lock(path, offset, nBytesToWrite, info)
if errno:
return errno
# This may be called after Cleanup, meaning we
# need to re-open the file.
if file.closed:
file = self.fs.open(path,file.mode)
self._rereg_file(info.contents.Context,file)
file = self.fs.open(path, file.mode)
self._rereg_file(info.contents.Context, file)
if info.contents.WriteToEndOfFile:
file.seek(0,os.SEEK_END)
file.seek(0, os.SEEK_END)
else:
file.seek(offset)
data = ctypes.create_string_buffer(nBytesToWrite)
ctypes.memmove(data,buffer,nBytesToWrite)
ctypes.memmove(data, buffer, nBytesToWrite)
file.write(data.raw)
nBytesWritten[0] = len(data.raw)
try:
......@@ -554,7 +555,7 @@ class FSOperations(object):
@handle_fs_errors
def FlushFileBuffers(self, path, info):
path = normpath(path)
(file,_,lock) = self._get_file(info.contents.Context)
(file, _, lock) = self._get_file(info.contents.Context)
lock.acquire()
try:
file.flush()
......@@ -567,7 +568,7 @@ class FSOperations(object):
path = normpath(path)
finfo = self.fs.getinfo(path)
data = buffer.contents
self._info2finddataw(path,finfo,data,info)
self._info2finddataw(path, finfo, data, info)
try:
written_size = max(self._files_size_written[path].values())
except KeyError:
......@@ -583,26 +584,25 @@ class FSOperations(object):
@handle_fs_errors
def FindFiles(self, path, fillFindData, info):
    """Dokan callback: enumerate directory entries, skipping pending deletes."""
    path = normpath(path)
    for (nm, finfo) in self.fs.listdirinfo(path):
        fpath = pathjoin(path, nm)
        if self._is_pending_delete(fpath):
            continue
        data = self._info2finddataw(fpath, finfo)
        fillFindData(ctypes.byref(data), info)
@timeout_protect
@handle_fs_errors
def FindFilesWithPattern(self, path, pattern, fillFindData, info):
    """Dokan callback: enumerate entries matching `pattern`, skipping pending deletes."""
    path = normpath(path)
    for (nm, finfo) in self.fs.listdirinfo(path):
        fpath = pathjoin(path, nm)
        if self._is_pending_delete(fpath):
            continue
        # Let the Dokan library apply Windows wildcard-matching semantics.
        if not libdokan.DokanIsNameInExpression(pattern, nm, True):
            continue
        data = self._info2finddataw(fpath, finfo, None)
        fillFindData(ctypes.byref(data), info)
@timeout_protect
@handle_fs_errors
......@@ -648,7 +648,7 @@ class FSOperations(object):
def DeleteDirectory(self, path, info):
path = normpath(path)
for nm in self.fs.listdir(path):
if not self._is_pending_delete(pathjoin(path,nm)):
if not self._is_pending_delete(pathjoin(path, nm)):
raise DirectoryNotEmptyError(path)
self._pending_delete.add(path)
# the actual delete takes place in self.CloseFile()
......@@ -658,7 +658,7 @@ class FSOperations(object):
def MoveFile(self, src, dst, overwrite, info):
# Close the file if we have an open handle to it.
if info.contents.Context >= MIN_FH:
(file,_,lock) = self._get_file(info.contents.Context)
(file, _, lock) = self._get_file(info.contents.Context)
lock.acquire()
try:
file.close()
......@@ -668,15 +668,15 @@ class FSOperations(object):
src = normpath(src)
dst = normpath(dst)
if info.contents.IsDirectory:
self.fs.movedir(src,dst,overwrite=overwrite)
self.fs.movedir(src, dst, overwrite=overwrite)
else:
self.fs.move(src,dst,overwrite=overwrite)
self.fs.move(src, dst, overwrite=overwrite)
@timeout_protect
@handle_fs_errors
def SetEndOfFile(self, path, length, info):
path = normpath(path)
(file,_,lock) = self._get_file(info.contents.Context)
(file, _, lock) = self._get_file(info.contents.Context)
lock.acquire()
try:
pos = file.tell()
......@@ -684,7 +684,7 @@ class FSOperations(object):
file.seek(length)
file.truncate()
if pos < length:
file.seek(min(pos,length))
file.seek(min(pos, length))
finally:
lock.release()
......@@ -694,15 +694,15 @@ class FSOperations(object):
# It's better to pretend an operation is possible and have it fail
# than to pretend an operation will fail when it's actually possible.
large_amount = 100 * 1024*1024*1024
nBytesFree[0] = self.fs.getmeta("free_space",large_amount)
nBytesTotal[0] = self.fs.getmeta("total_space",2*large_amount)
nBytesFree[0] = self.fs.getmeta("free_space", large_amount)
nBytesTotal[0] = self.fs.getmeta("total_space", 2 * large_amount)
nBytesAvail[0] = nBytesFree[0]
@handle_fs_errors
def GetVolumeInformation(self, vnmBuf, vnmSz, sNum, maxLen, flags, fnmBuf, fnmSz, info):
nm = ctypes.create_unicode_buffer(self.volname[:vnmSz-1])
sz = (len(nm.value)+1) * ctypes.sizeof(ctypes.c_wchar)
ctypes.memmove(vnmBuf,nm,sz)
sz = (len(nm.value) + 1) * ctypes.sizeof(ctypes.c_wchar)
ctypes.memmove(vnmBuf, nm, sz)
if sNum:
sNum[0] = 0
if maxLen:
......@@ -710,8 +710,8 @@ class FSOperations(object):
if flags:
flags[0] = 0
nm = ctypes.create_unicode_buffer(self.fsname[:fnmSz-1])
sz = (len(nm.value)+1) * ctypes.sizeof(ctypes.c_wchar)
ctypes.memmove(fnmBuf,nm,sz)
sz = (len(nm.value) + 1) * ctypes.sizeof(ctypes.c_wchar)
ctypes.memmove(fnmBuf, nm, sz)
@timeout_protect
@handle_fs_errors
......@@ -731,10 +731,10 @@ class FSOperations(object):
except KeyError:
locks = self._active_locks[path] = []
else:
errno = self._check_lock(path,offset,length,None,locks)
errno = self._check_lock(path, offset, length, None, locks)
if errno:
return errno
locks.append((info.contents.Context,offset,end))
locks.append((info.contents.Context, offset, end))
return 0
@timeout_protect
......@@ -747,7 +747,7 @@ class FSOperations(object):
except KeyError:
return -ERROR_NOT_LOCKED
todel = []
for i,(lh,lstart,lend) in enumerate(locks):
for i, (lh, lstart, lend) in enumerate(locks):
if info.contents.Context == lh:
if lstart == offset:
if lend == offset + length:
......@@ -762,10 +762,10 @@ class FSOperations(object):
def Unmount(self, info):
pass
def _info2attrmask(self,path,info,hinfo=None):
def _info2attrmask(self, path, info, hinfo=None):
"""Convert a file/directory info dict to a win32 file attribute mask."""
attrs = 0
st_mode = info.get("st_mode",None)
st_mode = info.get("st_mode", None)
if st_mode:
if statinfo.S_ISDIR(st_mode):
attrs |= FILE_ATTRIBUTE_DIRECTORY
......
......@@ -26,6 +26,7 @@ from pyftpdlib import ftpserver
from fs.path import *
from fs.osfs import OSFS
from fs.errors import convert_fs_errors
from fs import iotools
# Get these once so we can reuse them:
......@@ -96,8 +97,9 @@ class FTPFS(ftpserver.AbstractedFS):
@convert_fs_errors
@decode_args
@iotools.filelike_to_stream
def open(self, path, mode, **kwargs):
    """Open `path` on the wrapped FS; filelike_to_stream adapts the result
    to an io-module style stream for the FTP server."""
    return self.fs.open(path, mode, **kwargs)
@convert_fs_errors
def chdir(self, path):
......
......@@ -70,11 +70,11 @@ from six import PY3
from six import b
try:
#if PY3:
# import fuse3 as fuse
#else:
# import fuse
import fuse_ctypes as fuse
if PY3:
from fs.expose.fuse import fuse_ctypes as fuse
else:
from fs.expose.fuse import fuse3 as fuse
except NotImplementedError:
raise ImportError("FUSE found but not usable")
try:
......@@ -116,7 +116,6 @@ def handle_fs_errors(func):
return wrapper
class FSOperations(Operations):
"""FUSE Operations interface delegating all activities to an FS object."""
......@@ -186,13 +185,13 @@ class FSOperations(Operations):
# I haven't figured out how to distinguish between "w" and "w+".
# Go with the most permissive option.
mode = flags_to_mode(fi.flags)
fh = self._reg_file(self.fs.open(path,mode),path)
fh = self._reg_file(self.fs.open(path, mode), path)
fi.fh = fh
fi.keep_cache = 0
@handle_fs_errors
def flush(self, path, fh):
(file,_,lock) = self._get_file(fh)
(file, _, lock) = self._get_file(fh)
lock.acquire()
try:
file.flush()
......@@ -209,12 +208,12 @@ class FSOperations(Operations):
path = path.decode(NATIVE_ENCODING)
name = name.decode(NATIVE_ENCODING)
try:
value = self.fs.getxattr(path,name)
value = self.fs.getxattr(path, name)
except AttributeError:
raise UnsupportedError("getxattr")
else:
if value is None:
raise OSError(errno.ENODATA,"no attribute '%s'" % (name,))
raise OSError(errno.ENODATA, "no attribute '%s'" % (name,))
return value
@handle_fs_errors
......@@ -245,13 +244,13 @@ class FSOperations(Operations):
def open(self, path, fi):
path = path.decode(NATIVE_ENCODING)
mode = flags_to_mode(fi.flags)
fi.fh = self._reg_file(self.fs.open(path,mode),path)
fi.fh = self._reg_file(self.fs.open(path, mode), path)
fi.keep_cache = 0
return 0
@handle_fs_errors
def read(self, path, size, offset, fh):
(file,_,lock) = self._get_file(fh)
(file, _, lock) = self._get_file(fh)
lock.acquire()
try:
file.seek(offset)
......@@ -264,9 +263,9 @@ class FSOperations(Operations):
def readdir(self, path, fh=None):
    """FUSE callback: list directory entries (with stat info) plus '.' and '..'."""
    path = path.decode(NATIVE_ENCODING)
    entries = ['.', '..']
    for (nm, info) in self.fs.listdirinfo(path):
        self._fill_stat_dict(pathjoin(path, nm), info)
        entries.append((nm.encode(NATIVE_ENCODING), info, 0))
    return entries
@handle_fs_errors
......@@ -275,7 +274,7 @@ class FSOperations(Operations):
@handle_fs_errors
def release(self, path, fh):
(file,_,lock) = self._get_file(fh)
(file, _, lock) = self._get_file(fh)
lock.acquire()
try:
file.close()
......@@ -288,7 +287,7 @@ class FSOperations(Operations):
path = path.decode(NATIVE_ENCODING)
name = name.decode(NATIVE_ENCODING)
try:
return self.fs.delxattr(path,name)
return self.fs.delxattr(path, name)
except AttributeError:
raise UnsupportedError("removexattr")
......@@ -297,12 +296,12 @@ class FSOperations(Operations):
old = old.decode(NATIVE_ENCODING)
new = new.decode(NATIVE_ENCODING)
try:
self.fs.rename(old,new)
self.fs.rename(old, new)
except FSError:
if self.fs.isdir(old):
self.fs.movedir(old,new)
self.fs.movedir(old, new)
else:
self.fs.move(old,new)
self.fs.move(old, new)
@handle_fs_errors
def rmdir(self, path):
......@@ -314,7 +313,7 @@ class FSOperations(Operations):
path = path.decode(NATIVE_ENCODING)
name = name.decode(NATIVE_ENCODING)
try:
return self.fs.setxattr(path,name,value)
return self.fs.setxattr(path, name, value)
except AttributeError:
raise UnsupportedError("setxattr")
......@@ -326,18 +325,18 @@ class FSOperations(Operations):
def truncate(self, path, length, fh=None):
path = path.decode(NATIVE_ENCODING)
if fh is None and length == 0:
self.fs.open(path,"wb").close()
self.fs.open(path, "wb").close()
else:
if fh is None:
f = self.fs.open(path,"rb+")
if not hasattr(f,"truncate"):
f = self.fs.open(path, "rb+")
if not hasattr(f, "truncate"):
raise UnsupportedError("truncate")
f.truncate(length)
else:
(file,_,lock) = self._get_file(fh)
(file, _, lock) = self._get_file(fh)
lock.acquire()
try:
if not hasattr(file,"truncate"):
if not hasattr(file, "truncate"):
raise UnsupportedError("truncate")
file.truncate(length)
finally:
......@@ -371,7 +370,7 @@ class FSOperations(Operations):
@handle_fs_errors
def write(self, path, data, offset, fh):
(file,path,lock) = self._get_file(fh)
(file, path, lock) = self._get_file(fh)
lock.acquire()
try:
file.seek(offset)
......@@ -385,7 +384,7 @@ class FSOperations(Operations):
def _get_stat_dict(self, path):
"""Build a 'stat' dictionary for the given file."""
info = self.fs.getinfo(path)
self._fill_stat_dict(path,info)
self._fill_stat_dict(path, info)
return info
def _fill_stat_dict(self, path, info):
......@@ -395,13 +394,13 @@ class FSOperations(Operations):
for k in private_keys:
del info[k]
# Basic stuff that is constant for all paths
info.setdefault("st_ino",0)
info.setdefault("st_dev",0)
info.setdefault("st_uid",uid)
info.setdefault("st_gid",gid)
info.setdefault("st_rdev",0)
info.setdefault("st_blksize",1024)
info.setdefault("st_blocks",1)
info.setdefault("st_ino", 0)
info.setdefault("st_dev", 0)
info.setdefault("st_uid", uid)
info.setdefault("st_gid", gid)
info.setdefault("st_rdev", 0)
info.setdefault("st_blksize", 1024)
info.setdefault("st_blocks", 1)
# The interesting stuff
if 'st_mode' not in info:
if self.fs.isdir(path):
......@@ -412,11 +411,11 @@ class FSOperations(Operations):
if not statinfo.S_ISDIR(mode) and not statinfo.S_ISREG(mode):
if self.fs.isdir(path):
info["st_mode"] = mode | statinfo.S_IFDIR
info.setdefault("st_nlink",2)
info.setdefault("st_nlink", 2)
else:
info["st_mode"] = mode | statinfo.S_IFREG
info.setdefault("st_nlink",1)
for (key1,key2) in [("st_atime","accessed_time"),("st_mtime","modified_time"),("st_ctime","created_time")]:
info.setdefault("st_nlink", 1)
for (key1, key2) in [("st_atime", "accessed_time"), ("st_mtime", "modified_time"), ("st_ctime", "created_time")]:
if key1 not in info:
if key2 in info:
info[key1] = time.mktime(info[key2].timetuple())
......@@ -467,6 +466,7 @@ def mount(fs, path, foreground=False, ready_callback=None, unmount_callback=None
ready_callback()
if unmount_callback:
orig_unmount = mp.unmount
def new_unmount():
orig_unmount()
unmount_callback()
......@@ -492,7 +492,8 @@ def unmount(path):
args = ["fusermount", "-u", path]
for num_tries in xrange(3):
p = subprocess.Popen(args, stderr=subprocess.PIPE,
p = subprocess.Popen(args,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode == 0:
......@@ -544,7 +545,7 @@ class MountProcess(subprocess.Popen):
def __init__(self, fs, path, fuse_opts={}, nowait=False, **kwds):
self.path = path
if nowait or kwds.get("close_fds",False):
if nowait or kwds.get("close_fds", False):
if PY3:
cmd = "from pickle import loads;"
else:
......@@ -553,11 +554,11 @@ class MountProcess(subprocess.Popen):
cmd = cmd + 'data = loads(%s); '
cmd = cmd + 'from fs.expose.fuse import MountProcess; '
cmd = cmd + 'MountProcess._do_mount_nowait(data)'
cmd = cmd % (repr(cPickle.dumps((fs,path,fuse_opts),-1)),)
cmd = [sys.executable,"-c",cmd]
super(MountProcess,self).__init__(cmd,**kwds)
cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts), -1)),)
cmd = [sys.executable, "-c", cmd]
super(MountProcess, self).__init__(cmd, **kwds)
else:
(r,w) = os.pipe()
(r, w) = os.pipe()
if PY3:
cmd = "from pickle import loads;"
else:
......@@ -566,15 +567,18 @@ class MountProcess(subprocess.Popen):
cmd = cmd + 'data = loads(%s); '
cmd = cmd + 'from fs.expose.fuse import MountProcess; '
cmd = cmd + 'MountProcess._do_mount_wait(data)'
cmd = cmd % (repr(cPickle.dumps((fs,path,fuse_opts,r,w),-1)),)
cmd = [sys.executable,"-c",cmd]
super(MountProcess,self).__init__(cmd,**kwds)
cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts, r, w), -1)),)
cmd = [sys.executable, "-c", cmd]
super(MountProcess, self).__init__(cmd, **kwds)
os.close(w)
byte = os.read(r, 1)
if byte != b("S"):
err_text = os.read(r, 20)
self.terminate()
raise RuntimeError("FUSE error: " + os.read(r,20).decode(NATIVE_ENCODING))
if hasattr(err_text, 'decode'):
err_text = err_text.decode(NATIVE_ENCODING)
raise RuntimeError("FUSE error: " + err_text)
def unmount(self):
"""Cleanly unmount the FUSE filesystem, terminating this subprocess."""
......@@ -586,7 +590,7 @@ class MountProcess(subprocess.Popen):
unmount(self.path)
except OSError:
pass
tmr = threading.Timer(self.unmount_timeout,killme)
tmr = threading.Timer(self.unmount_timeout, killme)
tmr.start()
self.wait()
tmr.cancel()
......@@ -594,56 +598,60 @@ class MountProcess(subprocess.Popen):
if not hasattr(subprocess.Popen, "terminate"):
def terminate(self):
"""Gracefully terminate the subprocess."""
os.kill(self.pid,signal.SIGTERM)
os.kill(self.pid, signal.SIGTERM)
if not hasattr(subprocess.Popen, "kill"):
def kill(self):
"""Forcibly terminate the subprocess."""
os.kill(self.pid,signal.SIGKILL)
os.kill(self.pid, signal.SIGKILL)
@staticmethod
def _do_mount_nowait(data):
"""Perform the specified mount, return without waiting."""
(fs,path,opts) = data
fs, path, opts = data
opts["foreground"] = True
def unmount_callback():
fs.close()
opts["unmount_callback"] = unmount_callback
mount(fs,path,*opts)
mount(fs, path, *opts)
@staticmethod
def _do_mount_wait(data):
"""Perform the specified mount, signalling when ready."""
(fs,path,opts,r,w) = data
fs, path, opts, r, w = data
os.close(r)
opts["foreground"] = True
successful = []
def ready_callback():
successful.append(True)
os.write(w, b("S"))
os.close(w)
opts["ready_callback"] = ready_callback
def unmount_callback():
fs.close()
opts["unmount_callback"] = unmount_callback
try:
mount(fs,path,**opts)
mount(fs, path, **opts)
except Exception, e:
os.write(w,b("E")+b(e))
os.write(w, b("E") + unicode(e).encode('ascii', errors='replace'))
os.close(w)
else:
if not successful:
os.write(w,b("E"))
os.write(w, b("EMount unsuccessful"))
os.close(w)
if __name__ == "__main__":
import os, os.path
import os
import os.path
from fs.tempfs import TempFS
mount_point = os.path.join(os.environ["HOME"], "fs.expose.fuse")
if not os.path.exists(mount_point):
os.makedirs(mount_point)
def ready_callback():
print "READY"
mount(TempFS(), mount_point, foreground=True, ready_callback=ready_callback)
......@@ -221,8 +221,8 @@ class SFTPHandle(paramiko.SFTPHandle):
"""
def __init__(self, owner, path, flags):
super(SFTPHandle,self).__init__(flags)
mode = flags_to_mode(flags) + "b"
super(SFTPHandle, self).__init__(flags)
mode = flags_to_mode(flags)
self.owner = owner
if not isinstance(path, unicode):
path = path.decode(self.owner.encoding)
......
......@@ -18,9 +18,11 @@ an FS object, which can then be exposed using whatever server you choose
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
from datetime import datetime
import base64
import six
from six import PY3, b
from six import PY3
class RPCFSInterface(object):
"""Wrapper to expose an FS via a XML-RPC compatible interface.
......@@ -40,26 +42,23 @@ class RPCFSInterface(object):
must return something that can be represented in ASCII. The default
is base64-encoded UTF-8.
"""
if PY3:
return path
return path.encode("utf8").encode("base64")
#return path
return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')
def decode_path(self, path):
"""Decode paths arriving over the wire."""
if PY3:
return path
return path.decode("base64").decode("utf8")
return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')
def getmeta(self, meta_name):
meta = self.fs.getmeta(meta_name)
if isinstance(meta, basestring):
meta = meta.decode('base64')
meta = self.decode_path(meta)
return meta
def getmeta_default(self, meta_name, default):
meta = self.fs.getmeta(meta_name, default)
if isinstance(meta, basestring):
meta = meta.decode('base64')
meta = self.decode_path(meta)
return meta
def hasmeta(self, meta_name):
......@@ -72,7 +71,7 @@ class RPCFSInterface(object):
def set_contents(self, path, data):
path = self.decode_path(path)
self.fs.setcontents(path,data.data)
self.fs.setcontents(path, data.data)
def exists(self, path):
path = self.decode_path(path)
......@@ -88,7 +87,7 @@ class RPCFSInterface(object):
def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
path = self.decode_path(path)
entries = self.fs.listdir(path,wildcard,full,absolute,dirs_only,files_only)
entries = self.fs.listdir(path, wildcard, full, absolute, dirs_only, files_only)
return [self.encode_path(e) for e in entries]
def makedir(self, path, recursive=False, allow_recreate=False):
......@@ -149,7 +148,7 @@ class RPCFSInterface(object):
dst = self.decode_path(dst)
return self.fs.copy(src, dst, overwrite, chunk_size)
def move(self,src,dst,overwrite=False,chunk_size=16384):
def move(self, src, dst, overwrite=False, chunk_size=16384):
src = self.decode_path(src)
dst = self.decode_path(dst)
return self.fs.move(src, dst, overwrite, chunk_size)
......@@ -187,11 +186,10 @@ class RPCFSServer(SimpleXMLRPCServer):
if logRequests is not None:
kwds['logRequests'] = logRequests
self.serve_more_requests = True
SimpleXMLRPCServer.__init__(self,addr,**kwds)
SimpleXMLRPCServer.__init__(self, addr, **kwds)
self.register_instance(RPCFSInterface(fs))
def serve_forever(self):
"""Override serve_forever to allow graceful shutdown."""
while self.serve_more_requests:
self.handle_request()
......@@ -531,7 +531,7 @@ class FileLikeBase(object):
self._assert_mode("w-")
# If we were previously reading, ensure position is correct
if self._rbuffer is not None:
self.seek(0,1)
self.seek(0, 1)
# If we're actually behind the apparent position, we must also
# write the data in the gap.
if self._sbuffer:
......@@ -544,14 +544,16 @@ class FileLikeBase(object):
string = self._do_read(s) + string
except NotReadableError:
raise NotSeekableError("File not readable, could not complete simulation of seek")
self.seek(0,0)
self.seek(0, 0)
if self._wbuffer:
string = self._wbuffer + string
leftover = self._write(string)
if leftover is None or isinstance(leftover, int):
self._wbuffer = b("")
return len(string) - (leftover or 0)
else:
self._wbuffer = leftover
return len(string) - len(leftover)
def writelines(self,seq):
"""Write a sequence of lines to the file."""
......@@ -660,7 +662,7 @@ class FileWrapper(FileLikeBase):
return data
def _write(self,string,flushing=False):
return self.wrapped_file.write(string)
self.wrapped_file.write(string)
def _seek(self,offset,whence):
self.wrapped_file.seek(offset,whence)
......
......@@ -14,6 +14,7 @@ import fs
from fs.base import *
from fs.errors import *
from fs.path import pathsplit, abspath, dirname, recursepath, normpath, pathjoin, isbase
from fs import iotools
from ftplib import FTP, error_perm, error_temp, error_proto, error_reply
......@@ -1152,8 +1153,9 @@ class FTPFS(FS):
url = 'ftp://%s@%s%s' % (credentials, self.host.rstrip('/'), abspath(path))
return url
@iotools.filelike_to_stream
@ftperrors
def open(self, path, mode='r'):
def open(self, path, mode, buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
path = normpath(path)
mode = mode.lower()
if self.isdir(path):
......@@ -1168,19 +1170,21 @@ class FTPFS(FS):
return f
@ftperrors
def setcontents(self, path, data, chunk_size=1024*64):
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024*64):
path = normpath(path)
if isinstance(data, basestring):
data = StringIO(data)
data = iotools.make_bytes_io(data, encoding=encoding, errors=errors)
self.refresh_dircache(dirname(path))
self.ftp.storbinary('STOR %s' % _encode(path), data, blocksize=chunk_size)
@ftperrors
def getcontents(self, path, mode="rb"):
def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
path = normpath(path)
contents = StringIO()
self.ftp.retrbinary('RETR %s' % _encode(path), contents.write, blocksize=1024*64)
return contents.getvalue()
data = contents.getvalue()
if 'b' in data:
return data
return iotools.decode_binary(data, encoding=encoding, errors=errors)
@ftperrors
def exists(self, path):
......
......@@ -8,9 +8,12 @@ fs.httpfs
from fs.base import FS
from fs.path import normpath
from fs.errors import ResourceNotFoundError, UnsupportedError
from fs.filelike import FileWrapper
from fs import iotools
from urllib2 import urlopen, URLError
from datetime import datetime
from fs.filelike import FileWrapper
class HTTPFS(FS):
......@@ -22,8 +25,8 @@ class HTTPFS(FS):
"""
_meta = {'read_only':True,
'network':True,}
_meta = {'read_only': True,
'network': True}
def __init__(self, url):
"""
......@@ -38,7 +41,8 @@ class HTTPFS(FS):
url = '%s/%s' % (self.root_url.rstrip('/'), path.lstrip('/'))
return url
def open(self, path, mode="r"):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
if '+' in mode or 'w' in mode or 'a' in mode:
raise UnsupportedError('write')
......
from __future__ import unicode_literals
from __future__ import print_function
import io
from functools import wraps
import six
class RawWrapper(object):
"""Convert a Python 2 style file-like object in to a IO object"""
    def __init__(self, f, mode=None, name=None):
        """Wrap the file-like object *f*.

        :param f: the underlying file-like object to wrap
        :param mode: file mode string; taken from ``f.mode`` when not given
        :param name: optional name to report for this stream
        """
        self._f = f
        # True when f is already an io-module object and can be delegated to.
        self.is_io = isinstance(f, io.IOBase)
        if mode is None and hasattr(f, 'mode'):
            mode = f.mode
        self.mode = mode
        self.name = name
        self.closed = False
        super(RawWrapper, self).__init__()
def __repr__(self):
......@@ -35,12 +39,18 @@ class RawWrapper(object):
return self._f.seek(offset, whence)
def readable(self):
if hasattr(self._f, 'readable'):
return self._f.readable()
return 'r' in self.mode
def writable(self):
if hasattr(self._f, 'writeable'):
return self._fs.writeable()
return 'w' in self.mode
def seekable(self):
if hasattr(self._f, 'seekable'):
return self._f.seekable()
try:
self.seek(0, io.SEEK_CUR)
except IOError:
......@@ -51,11 +61,14 @@ class RawWrapper(object):
    def tell(self):
        """Return the current position in the wrapped file."""
        return self._f.tell()
def truncate(self, size):
def truncate(self, size=None):
return self._f.truncate(size)
def write(self, data):
if self.is_io:
return self._f.write(data)
self._f.write(data)
return len(data)
def read(self, n=-1):
if n == -1:
......@@ -63,21 +76,21 @@ class RawWrapper(object):
return self._f.read(n)
def read1(self, n=-1):
if self.is_io:
return self.read1(n)
return self.read(n)
    def readall(self):
        """Read and return all remaining data from the wrapped file."""
        return self._f.read()
def readinto(self, b):
if self.is_io:
return self._f.readinto(b)
data = self._f.read(len(b))
bytes_read = len(data)
b[:len(data)] = data
return bytes_read
def write(self, b):
bytes_written = self._f.write(b)
return bytes_written
    def writelines(self, sequence):
        """Write an iterable of lines to the wrapped file."""
        return self._f.writelines(sequence)
......@@ -87,6 +100,32 @@ class RawWrapper(object):
    def __exit__(self, *args, **kwargs):
        # Close on context-manager exit regardless of exception state.
        self.close()
    def __iter__(self):
        # Delegate iteration (e.g. over lines) to the wrapped file.
        return iter(self._f)
def filelike_to_stream(f):
    """Decorator for ``open``-style methods that return Python 2 file-like
    objects.

    Wraps the decorated method so that its return value is converted (via
    ``make_stream``) into an io-module style stream honouring the standard
    ``mode`` / ``buffering`` / ``encoding`` / ``errors`` / ``newline`` /
    ``line_buffering`` arguments.
    """
    @wraps(f)
    def wrapper(self, path, mode='rt', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        # Call the original open() to obtain the raw file-like object.
        file_like = f(self,
                      path,
                      mode=mode,
                      buffering=buffering,
                      encoding=encoding,
                      errors=errors,
                      newline=newline,
                      line_buffering=line_buffering,
                      **kwargs)
        # Wrap it in an io stream with the same text/binary settings.
        return make_stream(path,
                           file_like,
                           mode=mode,
                           buffering=buffering,
                           encoding=encoding,
                           errors=errors,
                           newline=newline,
                           line_buffering=line_buffering)
    return wrapper
def make_stream(name,
f,
......@@ -95,9 +134,8 @@ def make_stream(name,
encoding=None,
errors=None,
newline=None,
closefd=True,
line_buffering=False,
**params):
**kwargs):
"""Take a Python 2.x binary file and returns an IO Stream"""
r, w, a, binary = 'r' in mode, 'w' in mode, 'a' in mode, 'b' in mode
if '+' in mode:
......@@ -122,6 +160,51 @@ def make_stream(name,
return io_object
def decode_binary(data, encoding=None, errors=None, newline=None):
    """Decode the bytes *data* exactly as reading them from a text-mode
    file would (including universal-newline translation)."""
    binary_file = io.BytesIO(data)
    text_file = io.TextIOWrapper(binary_file, encoding=encoding, errors=errors, newline=newline)
    return text_file.read()
def make_bytes_io(data, encoding=None, errors=None):
    """Return a binary file-like object for *data*.

    *data* may be bytes, text (which is encoded to bytes), or an open file.
    Files already opened in binary mode are returned unchanged; other files
    are read in full and their contents wrapped in a BytesIO.

    Fixes two Py3 defects: ``basestring`` does not exist on Python 3 (the
    six type tuple works on both), and ``str.encode`` rejects None for its
    encoding/errors arguments, so defaults are substituted explicitly.
    """
    if hasattr(data, 'mode') and 'b' in data.mode:
        # Already an open binary file; pass it through untouched.
        return data
    if not isinstance(data, (six.binary_type, six.text_type)):
        # It's a file, but we don't know if it's binary; slurp the contents.
        # TODO: Is there a better way than reading the entire file?
        data = data.read() or b''
    if isinstance(data, six.text_type):
        # Text must be encoded to bytes before it can back a BytesIO.
        data = data.encode(encoding or 'utf-8', errors or 'strict')
    return io.BytesIO(data)
def copy_file_to_fs(f, fs, path, encoding=None, errors=None, progress_callback=None, chunk_size=64 * 1024):
    """Copy an open file to a path on an FS.

    Reads *f* in chunks and writes them to *path* on *fs*, choosing text or
    binary mode based on the type of the first chunk read.  Returns the
    number of bytes (or characters, for text sources) written.

    :param progress_callback: optional callable invoked with the running
        total after each chunk is written
    """
    if progress_callback is None:
        progress_callback = lambda bytes_written: None
    read = f.read
    # Read one chunk up front to discover whether the source yields text.
    chunk = read(chunk_size)
    if isinstance(chunk, six.text_type):
        f = fs.open(path, 'wt', encoding=encoding, errors=errors)
    else:
        f = fs.open(path, 'wb')
    write = f.write
    bytes_written = 0
    try:
        while chunk:
            write(chunk)
            bytes_written += len(chunk)
            progress_callback(bytes_written)
            chunk = read(chunk_size)
    finally:
        # 'f' was rebound above: only the destination file is closed here;
        # closing the source file remains the caller's responsibility.
        f.close()
    return bytes_written
if __name__ == "__main__":
print("Reading a binary file")
bin_file = open('tests/data/UTF-8-demo.txt', 'rb')
......
......@@ -17,6 +17,7 @@ from fs.base import *
from fs.errors import *
from fs import _thread_synchronize_default
from fs.filelike import StringIO
from fs import iotools
from os import SEEK_END
import threading
......@@ -408,8 +409,10 @@ class MemoryFS(FS):
# for f in file_dir_entry.open_files[:]:
# f.close()
@synchronize
def open(self, path, mode="r", **kwargs):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
path = normpath(path)
filepath, filename = pathsplit(path)
parent_dir_entry = self._get_dir_entry(filepath)
......@@ -455,7 +458,7 @@ class MemoryFS(FS):
raise ResourceNotFoundError(path)
if dir_entry.isdir():
raise ResourceInvalidError(path,msg="That's a directory, not a file: %(path)s")
raise ResourceInvalidError(path, msg="That's a directory, not a file: %(path)s")
pathname, dirname = pathsplit(path)
parent_dir = self._get_dir_entry(pathname)
......@@ -628,27 +631,43 @@ class MemoryFS(FS):
dst_dir_entry.xattrs.update(src_xattrs)
@synchronize
def getcontents(self, path, mode="rb"):
def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
dir_entry = self._get_dir_entry(path)
if dir_entry is None:
raise ResourceNotFoundError(path)
if not dir_entry.isfile():
raise ResourceInvalidError(path, msg="not a file: %(path)s")
return dir_entry.data or b('')
data = dir_entry.data or b('')
if 'b' not in mode:
return iotools.decode_binary(data, encoding=encoding, errors=errors, newline=newline)
return data
@synchronize
def setcontents(self, path, data, chunk_size=1024*64):
if not isinstance(data, six.binary_type):
return super(MemoryFS, self).setcontents(path, data, chunk_size)
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024*64):
if isinstance(data, six.binary_type):
if not self.exists(path):
self.open(path, 'wb').close()
dir_entry = self._get_dir_entry(path)
if not dir_entry.isfile():
raise ResourceInvalidError('Not a directory %(path)s', path)
new_mem_file = StringIO()
new_mem_file.write(data)
dir_entry.mem_file = new_mem_file
return len(data)
return super(MemoryFS, self).setcontents(path, data=data, encoding=encoding, errors=errors, chunk_size=chunk_size)
# if isinstance(data, six.text_type):
# return super(MemoryFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
# if not self.exists(path):
# self.open(path, 'wb').close()
# dir_entry = self._get_dir_entry(path)
# if not dir_entry.isfile():
# raise ResourceInvalidError('Not a directory %(path)s', path)
# new_mem_file = StringIO()
# new_mem_file.write(data)
# dir_entry.mem_file = new_mem_file
@synchronize
def setxattr(self, path, key, value):
......
......@@ -46,6 +46,7 @@ from fs.base import *
from fs.errors import *
from fs.path import *
from fs import _thread_synchronize_default
from fs import iotools
class DirMount(object):
......@@ -286,7 +287,7 @@ class MountFS(FS):
def makedir(self, path, recursive=False, allow_recreate=False):
fs, _mount_path, delegate_path = self._delegate(path)
if fs is self or fs is None:
raise UnsupportedError("make directory", msg="Can only makedir for mounted paths" )
raise UnsupportedError("make directory", msg="Can only makedir for mounted paths")
if not delegate_path:
if allow_recreate:
return
......@@ -295,7 +296,7 @@ class MountFS(FS):
return fs.makedir(delegate_path, recursive=recursive, allow_recreate=allow_recreate)
@synchronize
def open(self, path, mode="r", **kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
obj = self.mount_tree.get(path, None)
if type(obj) is MountFS.FileMount:
callable = obj.open_callable
......@@ -309,20 +310,24 @@ class MountFS(FS):
return fs.open(delegate_path, mode, **kwargs)
@synchronize
def setcontents(self, path, data, chunk_size=64*1024):
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
obj = self.mount_tree.get(path, None)
if type(obj) is MountFS.FileMount:
return super(MountFS,self).setcontents(path, data, chunk_size=chunk_size)
return super(MountFS, self).setcontents(path,
data,
encoding=encoding,
errors=errors,
chunk_size=chunk_size)
fs, _mount_path, delegate_path = self._delegate(path)
if fs is self or fs is None:
raise ParentDirectoryMissingError(path)
return fs.setcontents(delegate_path, data, chunk_size)
return fs.setcontents(delegate_path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
@synchronize
def createfile(self, path, wipe=False):
obj = self.mount_tree.get(path, None)
if type(obj) is MountFS.FileMount:
return super(MountFS,self).createfile(path, wipe=wipe)
return super(MountFS, self).createfile(path, wipe=wipe)
fs, _mount_path, delegate_path = self._delegate(path)
if fs is self or fs is None:
raise ParentDirectoryMissingError(path)
......@@ -430,7 +435,7 @@ class MountFS(FS):
"""Unmounts a path.
:param path: Path to unmount
:return: True if a dir was unmounted, False if the path was already unmounted
:return: True if a path was unmounted, False if the path was already unmounted
:rtype: bool
"""
......
......@@ -238,14 +238,14 @@ class MultiFS(FS):
return "%s, on %s (%s)" % (fs.desc(path), name, fs)
@synchronize
def open(self, path, mode="r", **kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
if 'w' in mode or '+' in mode or 'a' in mode:
if self.writefs is None:
raise OperationFailedError('open', path=path, msg="No writeable FS set")
return self.writefs.open(path, mode)
return self.writefs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
for fs in self:
if fs.exists(path):
fs_file = fs.open(path, mode, **kwargs)
fs_file = fs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
return fs_file
raise ResourceNotFoundError(path)
......
......@@ -250,7 +250,7 @@ class OpenerRegistry(object):
return fs, fs_path or ''
def open(self, fs_url, mode='rb'):
def open(self, fs_url, mode='r', **kwargs):
"""Opens a file from a given FS url
If you intend to do a lot of file manipulation, it would likely be more
......@@ -271,15 +271,14 @@ class OpenerRegistry(object):
file_object.fs = fs
return file_object
def getcontents(self, fs_url, mode="rb"):
def getcontents(self, fs_url, node='rb', encoding=None, errors=None, newline=None):
"""Gets the contents from a given FS url (if it references a file)
:param fs_url: a FS URL e.g. ftp://ftp.mozilla.org/README
"""
fs, path = self.parse(fs_url)
return fs.getcontents(path, mode)
return fs.getcontents(path, mode, encoding=encoding, errors=errors, newline=newline)
def opendir(self, fs_url, writeable=True, create_dir=False):
"""Opens an FS object from an FS URL
......
......@@ -20,6 +20,7 @@ import sys
import errno
import datetime
import platform
import io
from fs.base import *
from fs.path import *
......@@ -76,16 +77,15 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
methods in the os and os.path modules.
"""
_meta = { 'thread_safe' : True,
'network' : False,
'virtual' : False,
'read_only' : False,
'unicode_paths' : os.path.supports_unicode_filenames,
'case_insensitive_paths' : os.path.normcase('Aa') == 'aa',
'atomic.makedir' : True,
'atomic.rename' : True,
'atomic.setcontents' : False,
}
_meta = {'thread_safe': True,
'network': False,
'virtual': False,
'read_only': False,
'unicode_paths': os.path.supports_unicode_filenames,
'case_insensitive_paths': os.path.normcase('Aa') == 'aa',
'atomic.makedir': True,
'atomic.rename': True,
'atomic.setcontents': False}
if platform.system() == 'Windows':
_meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
......@@ -215,11 +215,11 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
return super(OSFS, self).getmeta(meta_name, default)
@convert_os_errors
def open(self, path, mode="r", **kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
mode = ''.join(c for c in mode if c in 'rwabt+')
sys_path = self.getsyspath(path)
try:
return open(sys_path, mode, kwargs.get("buffering", -1))
return io.open(sys_path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline)
except EnvironmentError, e:
# Win32 gives EACCES when opening a directory.
if sys.platform == "win32" and e.errno in (errno.EACCES,):
......@@ -228,8 +228,8 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
raise
@convert_os_errors
def setcontents(self, path, contents, chunk_size=64 * 1024):
return super(OSFS, self).setcontents(path, contents, chunk_size)
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64 * 1024):
return super(OSFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
@convert_os_errors
def exists(self, path):
......
......@@ -41,6 +41,7 @@ _SENTINAL = object()
from six import PY3, b
class RemoteFileBuffer(FileWrapper):
"""File-like object providing buffer for local file operations.
......@@ -82,7 +83,7 @@ class RemoteFileBuffer(FileWrapper):
self._readlen = 0 # How many bytes already loaded from rfile
self._rfile = None # Reference to remote file object
self._eof = False # Reached end of rfile?
if getattr(fs,"_lock",None) is not None:
if getattr(fs, "_lock", None) is not None:
self._lock = fs._lock.__class__()
else:
self._lock = threading.RLock()
......@@ -315,8 +316,8 @@ class ConnectionManagerFS(LazyFS):
self._poll_sleeper = threading.Event()
self.connected = connected
def setcontents(self, path, data, chunk_size=64*1024):
return self.wrapped_fs.setcontents(path, data, chunk_size=chunk_size)
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
return self.wrapped_fs.setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
def __getstate__(self):
state = super(ConnectionManagerFS,self).__getstate__()
......@@ -536,12 +537,12 @@ class CacheFSMixin(FS):
except KeyError:
pass
def open(self,path,mode="r",**kwds):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
# Try to validate the entry using the cached info
try:
ci = self.__get_cached_info(path)
except KeyError:
if path in ("","/"):
if path in ("", "/"):
raise ResourceInvalidError(path)
try:
ppath = dirname(path)
......@@ -549,38 +550,38 @@ class CacheFSMixin(FS):
except KeyError:
pass
else:
if not fs.utils.isdir(super(CacheFSMixin,self),ppath,pci.info):
if not fs.utils.isdir(super(CacheFSMixin, self), ppath, pci.info):
raise ResourceInvalidError(path)
if pci.has_full_children:
raise ResourceNotFoundError(path)
else:
if not fs.utils.isfile(super(CacheFSMixin,self),path,ci.info):
if not fs.utils.isfile(super(CacheFSMixin, self), path, ci.info):
raise ResourceInvalidError(path)
f = super(CacheFSMixin,self).open(path,mode,**kwds)
f = super(CacheFSMixin, self).open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
if "w" in mode or "a" in mode or "+" in mode:
with self.__cache_lock:
self.__cache.clear(path)
f = self._CacheInvalidatingFile(self,path,f,mode)
f = self._CacheInvalidatingFile(self, path, f, mode)
return f
class _CacheInvalidatingFile(FileWrapper):
def __init__(self,owner,path,wrapped_file,mode=None):
def __init__(self, owner, path, wrapped_file, mode=None):
self.path = path
sup = super(CacheFSMixin._CacheInvalidatingFile,self)
sup.__init__(wrapped_file,mode)
sup = super(CacheFSMixin._CacheInvalidatingFile, self)
sup.__init__(wrapped_file, mode)
self.owner = owner
def _write(self,string,flushing=False):
def _write(self, string, flushing=False):
with self.owner._CacheFSMixin__cache_lock:
self.owner._CacheFSMixin__cache.clear(self.path)
sup = super(CacheFSMixin._CacheInvalidatingFile,self)
return sup._write(string,flushing=flushing)
def _truncate(self,size):
sup = super(CacheFSMixin._CacheInvalidatingFile, self)
return sup._write(string, flushing=flushing)
def _truncate(self, size):
with self.owner._CacheFSMixin__cache_lock:
self.owner._CacheFSMixin__cache.clear(self.path)
sup = super(CacheFSMixin._CacheInvalidatingFile,self)
sup = super(CacheFSMixin._CacheInvalidatingFile, self)
return sup._truncate(size)
def exists(self,path):
def exists(self, path):
try:
self.getinfo(path)
except ResourceNotFoundError:
......@@ -588,7 +589,7 @@ class CacheFSMixin(FS):
else:
return True
def isdir(self,path):
def isdir(self, path):
try:
self.__cache.iternames(path).next()
return True
......@@ -601,9 +602,9 @@ class CacheFSMixin(FS):
except ResourceNotFoundError:
return False
else:
return fs.utils.isdir(super(CacheFSMixin,self),path,info)
return fs.utils.isdir(super(CacheFSMixin, self), path, info)
def isfile(self,path):
def isfile(self, path):
try:
self.__cache.iternames(path).next()
return False
......@@ -616,17 +617,17 @@ class CacheFSMixin(FS):
except ResourceNotFoundError:
return False
else:
return fs.utils.isfile(super(CacheFSMixin,self),path,info)
return fs.utils.isfile(super(CacheFSMixin, self), path, info)
def getinfo(self,path):
def getinfo(self, path):
try:
ci = self.__get_cached_info(path)
if not ci.has_full_info:
raise KeyError
info = ci.info
except KeyError:
info = super(CacheFSMixin,self).getinfo(path)
self.__set_cached_info(path,CachedInfo(info))
info = super(CacheFSMixin, self).getinfo(path)
self.__set_cached_info(path, CachedInfo(info))
return info
def listdir(self,path="",*args,**kwds):
......@@ -670,9 +671,9 @@ class CacheFSMixin(FS):
def getsize(self,path):
return self.getinfo(path)["size"]
def setcontents(self, path, contents=b(""), chunk_size=64*1024):
supsc = super(CacheFSMixin,self).setcontents
res = supsc(path, contents, chunk_size=chunk_size)
    def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
        # Write through to the wrapped FS, then invalidate this path's cache
        # entry and replace it with a fresh file stub.
        supsc = super(CacheFSMixin, self).setcontents
        # NOTE(review): encoding and errors are hard-coded to None here
        # instead of forwarding the caller's arguments — this looks like a
        # bug; confirm and pass encoding=encoding, errors=errors.
        res = supsc(path, data, encoding=None, errors=None, chunk_size=chunk_size)
        with self.__cache_lock:
            self.__cache.clear(path)
            self.__cache[path] = CachedInfo.new_file_stub()
......
......@@ -10,36 +10,42 @@ class from the :mod:`fs.expose.xmlrpc` module.
import xmlrpclib
import socket
import threading
import base64
from fs.base import *
from fs.errors import *
from fs.path import *
from fs import iotools
from fs.filelike import StringIO
import six
from six import PY3, b
def re_raise_faults(func):
"""Decorator to re-raise XML-RPC faults as proper exceptions."""
def wrapper(*args,**kwds):
def wrapper(*args, **kwds):
try:
return func(*args,**kwds)
return func(*args, **kwds)
except (xmlrpclib.Fault), f:
#raise
# Make sure it's in a form we can handle
print f.faultString
bits = f.faultString.split(" ")
if bits[0] not in ["<type","<class"]:
if bits[0] not in ["<type", "<class"]:
raise f
# Find the class/type object
bits = " ".join(bits[1:]).split(">:")
cls = bits[0]
msg = ">:".join(bits[1:])
cls = cls.strip('\'')
print "-" + cls
cls = _object_by_name(cls)
# Re-raise using the remainder of the fault code as message
if cls:
if issubclass(cls,FSError):
if issubclass(cls, FSError):
raise cls('', msg=msg)
else:
raise cls(msg)
......@@ -49,7 +55,7 @@ def re_raise_faults(func):
return wrapper
def _object_by_name(name,root=None):
def _object_by_name(name, root=None):
"""Look up an object by dotted-name notation."""
bits = name.split(".")
if root is None:
......@@ -59,11 +65,11 @@ def _object_by_name(name,root=None):
try:
obj = __builtins__[bits[0]]
except KeyError:
obj = __import__(bits[0],globals())
obj = __import__(bits[0], globals())
else:
obj = getattr(root,bits[0])
obj = getattr(root, bits[0])
if len(bits) > 1:
return _object_by_name(".".join(bits[1:]),obj)
return _object_by_name(".".join(bits[1:]), obj)
else:
return obj
......@@ -71,11 +77,11 @@ def _object_by_name(name,root=None):
class ReRaiseFaults:
"""XML-RPC proxy wrapper that re-raises Faults as proper Exceptions."""
def __init__(self,obj):
def __init__(self, obj):
self._obj = obj
def __getattr__(self,attr):
val = getattr(self._obj,attr)
def __getattr__(self, attr):
val = getattr(self._obj, attr)
if callable(val):
val = re_raise_faults(val)
self.__dict__[attr] = val
......@@ -120,9 +126,9 @@ class RPCFS(FS):
kwds = dict(allow_none=True, use_datetime=True)
if self._transport is not None:
proxy = xmlrpclib.ServerProxy(self.uri,self._transport,**kwds)
proxy = xmlrpclib.ServerProxy(self.uri, self._transport, **kwds)
else:
proxy = xmlrpclib.ServerProxy(self.uri,**kwds)
proxy = xmlrpclib.ServerProxy(self.uri, **kwds)
return ReRaiseFaults(proxy)
......@@ -134,7 +140,7 @@ class RPCFS(FS):
@synchronize
def __getstate__(self):
state = super(RPCFS,self).__getstate__()
state = super(RPCFS, self).__getstate__()
try:
del state['proxy']
except KeyError:
......@@ -152,15 +158,11 @@ class RPCFS(FS):
must return something that can be represented in ASCII. The default
is base64-encoded UTF8.
"""
if PY3:
return path
return path.encode("utf8").encode("base64")
return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')
def decode_path(self, path):
"""Decode paths arriving over the wire."""
if PY3:
return path
return path.decode("base64").decode("utf8")
return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')
@synchronize
def getmeta(self, meta_name, default=NoDefaultMeta):
......@@ -170,7 +172,7 @@ class RPCFS(FS):
meta = self.proxy.getmeta_default(meta_name, default)
if isinstance(meta, basestring):
# To allow transport of meta with invalid xml chars (like null)
meta = meta.encode('base64')
meta = self.encode_path(meta)
return meta
@synchronize
......@@ -178,37 +180,40 @@ class RPCFS(FS):
return self.proxy.hasmeta(meta_name)
@synchronize
def open(self, path, mode="r"):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
# TODO: chunked transport of large files
path = self.encode_path(path)
epath = self.encode_path(path)
if "w" in mode:
self.proxy.set_contents(path,xmlrpclib.Binary(b("")))
self.proxy.set_contents(epath, xmlrpclib.Binary(b("")))
if "r" in mode or "a" in mode or "+" in mode:
try:
data = self.proxy.get_contents(path, "rb").data
data = self.proxy.get_contents(epath, "rb").data
except IOError:
if "w" not in mode and "a" not in mode:
raise ResourceNotFoundError(path)
if not self.isdir(dirname(path)):
raise ParentDirectoryMissingError(path)
self.proxy.set_contents(path,xmlrpclib.Binary(b("")))
self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
else:
data = b("")
f = StringIO(data)
if "a" not in mode:
f.seek(0,0)
f.seek(0, 0)
else:
f.seek(0,2)
f.seek(0, 2)
oldflush = f.flush
oldclose = f.close
oldtruncate = f.truncate
def newflush():
self._lock.acquire()
try:
oldflush()
self.proxy.set_contents(path,xmlrpclib.Binary(f.getvalue()))
self.proxy.set_contents(epath, xmlrpclib.Binary(f.getvalue()))
finally:
self._lock.release()
def newclose():
self._lock.acquire()
try:
......@@ -216,6 +221,7 @@ class RPCFS(FS):
oldclose()
finally:
self._lock.release()
def newtruncate(size=None):
self._lock.acquire()
try:
......@@ -248,24 +254,32 @@ class RPCFS(FS):
def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
    """List the contents of a directory on the remote filesystem.

    Paths are base64-encoded for transport and decoded on return.  A
    callable wildcard cannot be marshalled over XML-RPC, so in that case
    the unfiltered listing is fetched and the filter (plus any ``full``
    or ``absolute`` adjustment) is applied locally.
    """
    enc_path = self.encode_path(path)
    if not callable(wildcard):
        # A string (or None) wildcard can be applied server-side.
        entries = self.proxy.listdir(enc_path,
                                     wildcard,
                                     full,
                                     absolute,
                                     dirs_only,
                                     files_only)
        entries = [self.decode_path(e) for e in entries]
    else:
        # Callable wildcard: fetch plain names and filter client-side.
        entries = self.proxy.listdir(enc_path,
                                     None,
                                     False,
                                     False,
                                     dirs_only,
                                     files_only)
        entries = [self.decode_path(e) for e in entries]
        entries = [e for e in entries if wildcard(e)]
        if full:
            entries = [relpath(pathjoin(path, e)) for e in entries]
        elif absolute:
            entries = [abspath(pathjoin(path, e)) for e in entries]
    return entries
@synchronize
def makedir(self, path, recursive=False, allow_recreate=False):
    """Create a directory on the remote filesystem.

    :param path: directory to create (base64-encoded for transport)
    :param recursive: create intermediate directories as required
    :param allow_recreate: do not error if the directory already exists
    """
    path = self.encode_path(path)
    return self.proxy.makedir(path, recursive, allow_recreate)
@synchronize
def remove(self, path):
......@@ -275,13 +289,13 @@ class RPCFS(FS):
@synchronize
def removedir(self, path, recursive=False, force=False):
    """Remove a directory on the remote filesystem.

    :param path: directory to remove (base64-encoded for transport)
    :param recursive: also remove empty ancestor directories
    :param force: remove the directory even if it is not empty
    """
    path = self.encode_path(path)
    return self.proxy.removedir(path, recursive, force)
@synchronize
def rename(self, src, dst):
    """Rename ``src`` to ``dst`` on the remote filesystem.

    Both paths are base64-encoded before being sent over the wire.
    """
    src = self.encode_path(src)
    dst = self.encode_path(dst)
    return self.proxy.rename(src, dst)
@synchronize
def settimes(self, path, accessed_time, modified_time):
......@@ -302,19 +316,19 @@ class RPCFS(FS):
def getxattr(self, path, attr, default=None):
    """Return the named extended attribute for a path.

    Both the path and the attribute name are encoded for transport;
    ``default`` is returned by the wrapped fs when the attribute is absent.
    """
    path = self.encode_path(path)
    attr = self.encode_path(attr)
    return self.fs.getxattr(path, attr, default)
@synchronize
def setxattr(self, path, attr, value):
    """Set the named extended attribute for a path.

    The path and attribute name are encoded for transport; the value is
    forwarded unchanged to the wrapped fs.
    """
    path = self.encode_path(path)
    attr = self.encode_path(attr)
    return self.fs.setxattr(path, attr, value)
@synchronize
def delxattr(self, path, attr):
    """Delete the named extended attribute for a path.

    The path and attribute name are encoded for transport.
    """
    path = self.encode_path(path)
    attr = self.encode_path(attr)
    return self.fs.delxattr(path, attr)
@synchronize
def listxattrs(self, path):
......@@ -325,13 +339,13 @@ class RPCFS(FS):
def copy(self, src, dst, overwrite=False, chunk_size=16384):
    """Copy ``src`` to ``dst`` on the remote filesystem.

    :param overwrite: replace ``dst`` if it already exists
    :param chunk_size: transfer chunk size, forwarded to the server
    """
    src = self.encode_path(src)
    dst = self.encode_path(dst)
    return self.proxy.copy(src, dst, overwrite, chunk_size)
@synchronize
def move(self, src, dst, overwrite=False, chunk_size=16384):
    """Move ``src`` to ``dst`` on the remote filesystem.

    :param overwrite: replace ``dst`` if it already exists
    :param chunk_size: transfer chunk size, forwarded to the server
    """
    src = self.encode_path(src)
    dst = self.encode_path(dst)
    return self.proxy.move(src, dst, overwrite, chunk_size)
@synchronize
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
......@@ -343,6 +357,4 @@ class RPCFS(FS):
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
    """Recursively copy the directory ``src`` to ``dst`` on the remote fs.

    :param overwrite: replace existing destination entries
    :param ignore_errors: continue past per-file errors on the server
    :param chunk_size: transfer chunk size, forwarded to the server
    """
    src = self.encode_path(src)
    dst = self.encode_path(dst)
    return self.proxy.copydir(src, dst, overwrite, ignore_errors, chunk_size)
......@@ -26,7 +26,9 @@ from fs.path import *
from fs.errors import *
from fs.remote import *
from fs.filelike import LimitBytesFile
from fs import iotools
import six
# Boto is not thread-safe, so we need to use a per-thread S3 connection.
if hasattr(threading,"local"):
......@@ -253,9 +255,9 @@ class S3FS(FS):
k = self._s3bukt.get_key(s3path)
# Is there AllUsers group with READ permissions?
is_public = True in [grant.permission == 'READ' and \
is_public = True in [grant.permission == 'READ' and
grant.uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
for grant in k.get_acl().acl.grants ]
for grant in k.get_acl().acl.grants]
url = k.generate_url(expires, force_http=is_public)
......@@ -270,11 +272,14 @@ class S3FS(FS):
return url
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
    """Set the contents of a file on S3 from bytes or text.

    :param path: path of the file to write
    :param data: bytes (uploaded as-is) or text (encoded first)
    :param encoding: codec used to encode text data; when None the
        ``str.encode`` default applies
    :param errors: error-handling scheme for encoding text data
    :param chunk_size: accepted for interface compatibility with the
        base FS class; the upload here is a single synchronous call
    """
    s3path = self._s3path(path)
    if isinstance(data, six.text_type):
        # Text must become bytes before upload.  str.encode() rejects
        # explicit None arguments, so fall back to its documented
        # defaults ('utf-8' codec, 'strict' error handling).
        data = data.encode(encoding or 'utf-8', errors or 'strict')
    self._sync_set_contents(s3path, data)
def open(self,path,mode="r"):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Open the named file in the given mode.
This method downloads the file contents into a local temporary file
......
......@@ -19,6 +19,8 @@ from fs.base import *
from fs.path import *
from fs.errors import *
from fs.utils import isdir, isfile
from fs import iotools
class WrongHostKeyError(RemoteConnectionError):
pass
......@@ -108,7 +110,6 @@ class SFTPFS(FS):
if other authentication is not succesful
"""
credentials = dict(username=username,
password=password,
pkey=pkey)
......@@ -300,12 +301,12 @@ class SFTPFS(FS):
self._transport.close()
self.closed = True
def _normpath(self, path):
    """Normalise *path* and anchor it below the session root.

    Byte paths are decoded with the connection's encoding.  Raises
    PathError if the normalised result would escape ``self.root_path``.
    """
    if not isinstance(path, unicode):
        path = path.decode(self.encoding)
    npath = pathjoin(self.root_path, relpath(normpath(path)))
    # Reject paths that resolve outside the configured root (e.g. '..').
    if not isprefix(self.root_path, npath):
        raise PathError(path, msg="Path is outside root: %(path)s")
    return npath
def getpathurl(self, path, allow_none=False):
......@@ -325,17 +326,19 @@ class SFTPFS(FS):
@synchronize
@convert_os_errors
def open(self,path,mode="rb",bufsize=-1):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, bufsize=-1, **kwargs):
npath = self._normpath(path)
if self.isdir(path):
msg = "that's a directory: %(path)s"
raise ResourceInvalidError(path,msg=msg)
raise ResourceInvalidError(path, msg=msg)
# paramiko implements its own buffering and write-back logic,
# so we don't need to use a RemoteFileBuffer here.
f = self.client.open(npath,mode,bufsize)
f = self.client.open(npath, mode, bufsize)
# Unfortunately it has a broken truncate() method.
# TODO: implement this as a wrapper
old_truncate = f.truncate
def new_truncate(size=None):
if size is None:
size = f.tell()
......@@ -354,7 +357,7 @@ class SFTPFS(FS):
@synchronize
@convert_os_errors
def exists(self,path):
def exists(self, path):
if path in ('', '/'):
return True
npath = self._normpath(path)
......@@ -369,7 +372,7 @@ class SFTPFS(FS):
@synchronize
@convert_os_errors
def isdir(self,path):
if path in ('', '/'):
if normpath(path) in ('', '/'):
return True
npath = self._normpath(path)
try:
......@@ -378,7 +381,7 @@ class SFTPFS(FS):
if getattr(e,"errno",None) == 2:
return False
raise
return statinfo.S_ISDIR(stat.st_mode)
return statinfo.S_ISDIR(stat.st_mode) != 0
@synchronize
@convert_os_errors
......@@ -390,7 +393,7 @@ class SFTPFS(FS):
if getattr(e,"errno",None) == 2:
return False
raise
return statinfo.S_ISREG(stat.st_mode)
return statinfo.S_ISREG(stat.st_mode) != 0
@synchronize
@convert_os_errors
......
......@@ -10,13 +10,14 @@ import os
import os.path
import time
import tempfile
import platform
from fs.base import synchronize
from fs.osfs import OSFS
from fs.errors import *
from fs import _thread_synchronize_default
class TempFS(OSFS):
"""Create a Filesystem in a temporary directory (with tempfile.mkdtemp),
......@@ -38,7 +39,7 @@ class TempFS(OSFS):
self.identifier = identifier
self.temp_dir = temp_dir
self.dir_mode = dir_mode
self._temp_dir = tempfile.mkdtemp(identifier or "TempFS",dir=temp_dir)
self._temp_dir = tempfile.mkdtemp(identifier or "TempFS", dir=temp_dir)
self._cleaned = False
super(TempFS, self).__init__(self._temp_dir, dir_mode=dir_mode, thread_synchronize=thread_synchronize)
......@@ -65,6 +66,7 @@ class TempFS(OSFS):
# dir_mode=self.dir_mode,
# thread_synchronize=self.thread_synchronize)
@synchronize
def close(self):
"""Removes the temporary directory.
......@@ -73,13 +75,13 @@ class TempFS(OSFS):
Note that once this method has been called, the FS object may
no longer be used.
"""
super(TempFS,self).close()
super(TempFS, self).close()
# Depending on how resources are freed by the OS, there could
# be some transient errors when freeing a TempFS soon after it
# was used. If they occur, do a small sleep and try again.
try:
self._close()
except (ResourceLockedError,ResourceInvalidError):
except (ResourceLockedError, ResourceInvalidError):
time.sleep(0.5)
self._close()
......@@ -97,20 +99,23 @@ class TempFS(OSFS):
try:
# shutil.rmtree doesn't handle long paths on win32,
# so we walk the tree by hand.
entries = os.walk(self.root_path,topdown=False)
for (dir,dirnames,filenames) in entries:
entries = os.walk(self.root_path, topdown=False)
for (dir, dirnames, filenames) in entries:
for filename in filenames:
try:
os_remove(os.path.join(dir,filename))
os_remove(os.path.join(dir, filename))
except ResourceNotFoundError:
pass
for dirname in dirnames:
try:
os_rmdir(os.path.join(dir,dirname))
os_rmdir(os.path.join(dir, dirname))
except ResourceNotFoundError:
pass
try:
os.rmdir(self.root_path)
except OSError:
pass
self._cleaned = True
finally:
self._lock.release()
super(TempFS,self).close()
super(TempFS, self).close()
......@@ -11,7 +11,7 @@ from __future__ import with_statement
# be captured by nose and reported appropriately
import sys
import logging
#logging.basicConfig(level=logging.ERROR, stream=sys.stdout)
logging.basicConfig(level=logging.ERROR, stream=sys.stdout)
from fs.base import *
from fs.path import *
......@@ -20,7 +20,8 @@ from fs.filelike import StringIO
import datetime
import unittest
import os, os.path
import os
import os.path
import pickle
import random
import copy
......@@ -34,6 +35,7 @@ except ImportError:
import six
from six import PY3, b
class FSTestCases(object):
"""Base suite of testcases for filesystem implementations.
......@@ -80,7 +82,6 @@ class FSTestCases(object):
except NoMetaError:
self.assertFalse(self.fs.hasmeta(meta_name))
def test_root_dir(self):
self.assertTrue(self.fs.isdir(""))
self.assertTrue(self.fs.isdir("/"))
......@@ -94,10 +95,10 @@ class FSTestCases(object):
except NoSysPathError:
pass
else:
self.assertTrue(isinstance(syspath,unicode))
syspath = self.fs.getsyspath("/",allow_none=True)
self.assertTrue(isinstance(syspath, unicode))
syspath = self.fs.getsyspath("/", allow_none=True)
if syspath is not None:
self.assertTrue(isinstance(syspath,unicode))
self.assertTrue(isinstance(syspath, unicode))
def test_debug(self):
str(self.fs)
......@@ -119,49 +120,54 @@ class FSTestCases(object):
assert False, "ResourceInvalidError was not raised"
def test_writefile(self):
    """Writing creates a file that can be read back and overwritten."""
    self.assertRaises(ResourceNotFoundError, self.fs.open, "test1.txt")
    f = self.fs.open("test1.txt", "wb")
    f.write(b("testing"))
    f.close()
    self.assertTrue(self.check("test1.txt"))
    f = self.fs.open("test1.txt", "rb")
    self.assertEquals(f.read(), b("testing"))
    f.close()
    # Re-opening in 'wb' mode must truncate and replace the contents.
    f = self.fs.open("test1.txt", "wb")
    f.write(b("test file overwrite"))
    f.close()
    self.assertTrue(self.check("test1.txt"))
    f = self.fs.open("test1.txt", "rb")
    self.assertEquals(f.read(), b("test file overwrite"))
    f.close()
def test_setcontents(self):
    """setcontents() accepts bytes and file-like data, with and without chunking."""
    # setcontents() should accept both a string...
    self.fs.setcontents("hello", b("world"))
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
    # ...and a file-like object
    self.fs.setcontents("hello", StringIO(b("to you, good sir!")))
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))
    # Repeat with a chunk size smaller than the data to exercise chunking.
    self.fs.setcontents("hello", b("world"), chunk_size=2)
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
    self.fs.setcontents("hello", StringIO(b("to you, good sir!")), chunk_size=2)
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))
def test_setcontents_async(self):
    """setcontents_async() matches setcontents() semantics once waited on."""
    # setcontents_async() should accept both a string...
    self.fs.setcontents_async("hello", b("world")).wait()
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
    # ...and a file-like object
    self.fs.setcontents_async("hello", StringIO(b("to you, good sir!"))).wait()
    self.assertEquals(self.fs.getcontents("hello"), b("to you, good sir!"))
    # Repeat with a small chunk size to exercise chunked writes.
    self.fs.setcontents_async("hello", b("world"), chunk_size=2).wait()
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
    self.fs.setcontents_async("hello", StringIO(b("to you, good sir!")), chunk_size=2).wait()
    self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))
def test_isdir_isfile(self):
self.assertFalse(self.fs.exists("dir1"))
......@@ -182,7 +188,7 @@ class FSTestCases(object):
def test_listdir(self):
def check_unicode(items):
for item in items:
self.assertTrue(isinstance(item,unicode))
self.assertTrue(isinstance(item, unicode))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
......@@ -206,7 +212,7 @@ class FSTestCases(object):
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
# contents are not inadvertantly included
self.fs.makedir("p/1/2/3",recursive=True)
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
......@@ -218,7 +224,7 @@ class FSTestCases(object):
contains_a = self.fs.listdir(wildcard="*a*")
self.assertEqual(sorted(dirs_only), [u"p", u"q"])
self.assertEqual(sorted(files_only), [u"a", u"b", u"bar", u"foo"])
self.assertEqual(sorted(contains_a), [u"a",u"bar"])
self.assertEqual(sorted(contains_a), [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
......@@ -237,16 +243,17 @@ class FSTestCases(object):
self.assertEqual(sorted(d4), [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError,self.fs.listdir,"zebra")
self.assertRaises(ResourceInvalidError,self.fs.listdir,"foo")
self.assertRaises(ResourceNotFoundError, self.fs.listdir, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdir, "foo")
def test_listdirinfo(self):
def check_unicode(items):
for (nm,info) in items:
self.assertTrue(isinstance(nm,unicode))
def check_equal(items,target):
names = [nm for (nm,info) in items]
self.assertEqual(sorted(names),sorted(target))
for (nm, info) in items:
self.assertTrue(isinstance(nm, unicode))
def check_equal(items, target):
names = [nm for (nm, info) in items]
self.assertEqual(sorted(names), sorted(target))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
......@@ -271,7 +278,7 @@ class FSTestCases(object):
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
# contents are not inadvertantly included
self.fs.makedir("p/1/2/3",recursive=True)
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
......@@ -283,7 +290,7 @@ class FSTestCases(object):
contains_a = self.fs.listdirinfo(wildcard="*a*")
check_equal(dirs_only, [u"p", u"q"])
check_equal(files_only, [u"a", u"b", u"bar", u"foo"])
check_equal(contains_a, [u"a",u"bar"])
check_equal(contains_a, [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
......@@ -302,20 +309,20 @@ class FSTestCases(object):
check_equal(d4, [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError,self.fs.listdirinfo,"zebra")
self.assertRaises(ResourceInvalidError,self.fs.listdirinfo,"foo")
self.assertRaises(ResourceNotFoundError, self.fs.listdirinfo, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdirinfo, "foo")
def test_walk(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
sorted_walk = sorted([(d,sorted(fs)) for (d,fs) in self.fs.walk()])
sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
self.assertEquals(sorted_walk,
[("/",["a.txt","b.txt"]),
("/foo",["c"])])
[("/", ["a.txt", "b.txt"]),
("/foo", ["c"])])
# When searching breadth-first, shallow entries come first
found_a = False
for _,files in self.fs.walk(search="breadth"):
for _, files in self.fs.walk(search="breadth"):
if "a.txt" in files:
found_a = True
if "c" in files:
......@@ -323,12 +330,13 @@ class FSTestCases(object):
assert found_a, "breadth search order was wrong"
# When searching depth-first, deep entries come first
found_c = False
for _,files in self.fs.walk(search="depth"):
for _, files in self.fs.walk(search="depth"):
if "c" in files:
found_c = True
if "a.txt" in files:
break
assert found_c, "depth search order was wrong: " + str(list(self.fs.walk(search="depth")))
assert found_c, "depth search order was wrong: " + \
str(list(self.fs.walk(search="depth")))
def test_walk_wildcard(self):
self.fs.setcontents('a.txt', b('hello'))
......@@ -338,7 +346,7 @@ class FSTestCases(object):
for dir_path, paths in self.fs.walk(wildcard='*.txt'):
for path in paths:
self.assert_(path.endswith('.txt'))
for dir_path, paths in self.fs.walk(wildcard=lambda fn:fn.endswith('.txt')):
for dir_path, paths in self.fs.walk(wildcard=lambda fn: fn.endswith('.txt')):
for path in paths:
self.assert_(path.endswith('.txt'))
......@@ -347,22 +355,28 @@ class FSTestCases(object):
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
self.fs.makeopendir('.svn').setcontents('ignored', b(''))
for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn:not fn.endswith('.svn')):
for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn: not fn.endswith('.svn')):
for path in paths:
self.assert_('.svn' not in path)
def test_walkfiles(self):
    """walkfiles() yields all files and honours wildcard filters."""
    self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
    self.fs.makeopendir('foo').setcontents('b', b('123'))
    self.assertEquals(sorted(self.fs.walkfiles()), ["/bar/a.txt", "/foo/b"])
    self.assertEquals(sorted(self.fs.walkfiles(dir_wildcard="*foo*")), ["/foo/b"])
    self.assertEquals(sorted(self.fs.walkfiles(wildcard="*.txt")), ["/bar/a.txt"])
def test_walkdirs(self):
    """walkdirs() yields every directory and honours wildcard filters."""
    self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
    self.fs.makeopendir('foo').makeopendir("baz").setcontents('b', b('123'))
    self.assertEquals(sorted(self.fs.walkdirs()), ["/", "/bar", "/foo", "/foo/baz"])
    self.assertEquals(sorted(self.fs.walkdirs(wildcard="*foo*")), ["/", "/foo", "/foo/baz"])
def test_unicode(self):
alpha = u"\N{GREEK SMALL LETTER ALPHA}"
......@@ -371,32 +385,33 @@ class FSTestCases(object):
self.fs.setcontents(alpha+"/a", b(''))
self.fs.setcontents(alpha+"/"+beta, b(''))
self.assertTrue(self.check(alpha))
self.assertEquals(sorted(self.fs.listdir(alpha)),["a",beta])
self.assertEquals(sorted(self.fs.listdir(alpha)), ["a", beta])
def test_makedir(self):
    """makedir() creates dirs and raises the documented errors."""
    check = self.check
    self.fs.makedir("a")
    self.assertTrue(check("a"))
    # Missing parents require recursive=True.
    self.assertRaises(ParentDirectoryMissingError, self.fs.makedir, "a/b/c")
    self.fs.makedir("a/b/c", recursive=True)
    self.assert_(check("a/b/c"))
    self.fs.makedir("foo/bar/baz", recursive=True)
    self.assert_(check("foo/bar/baz"))
    self.fs.makedir("a/b/child")
    self.assert_(check("a/b/child"))
    # Recreating an existing dir requires allow_recreate=True.
    self.assertRaises(DestinationExistsError, self.fs.makedir, "/a/b")
    self.fs.makedir("/a/b", allow_recreate=True)
    # A path occupied by a file can never become a directory.
    self.fs.setcontents("/a/file", b(''))
    self.assertRaises(ResourceInvalidError, self.fs.makedir, "a/file")
def test_remove(self):
self.fs.setcontents("a.txt", b(''))
self.assertTrue(self.check("a.txt"))
self.fs.remove("a.txt")
self.assertFalse(self.check("a.txt"))
self.assertRaises(ResourceNotFoundError,self.fs.remove,"a.txt")
self.assertRaises(ResourceNotFoundError, self.fs.remove, "a.txt")
self.fs.makedir("dir1")
self.assertRaises(ResourceInvalidError,self.fs.remove,"dir1")
self.assertRaises(ResourceInvalidError, self.fs.remove, "dir1")
self.fs.setcontents("/dir1/a.txt", b(''))
self.assertTrue(self.check("dir1/a.txt"))
self.fs.remove("dir1/a.txt")
......@@ -431,10 +446,11 @@ class FSTestCases(object):
self.assert_(check("foo/file.txt"))
# Ensure that force=True works as expected
self.fs.makedir("frollic/waggle", recursive=True)
self.fs.setcontents("frollic/waddle.txt",b("waddlewaddlewaddle"))
self.assertRaises(DirectoryNotEmptyError,self.fs.removedir,"frollic")
self.assertRaises(ResourceInvalidError,self.fs.removedir,"frollic/waddle.txt")
self.fs.removedir("frollic",force=True)
self.fs.setcontents("frollic/waddle.txt", b("waddlewaddlewaddle"))
self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "frollic")
self.assertRaises(
ResourceInvalidError, self.fs.removedir, "frollic/waddle.txt")
self.fs.removedir("frollic", force=True)
self.assert_(not check("frollic"))
# Test removing unicode dirs
kappa = u"\N{GREEK CAPITAL LETTER KAPPA}"
......@@ -443,59 +459,64 @@ class FSTestCases(object):
self.fs.removedir(kappa)
self.assertRaises(ResourceNotFoundError, self.fs.removedir, kappa)
self.assert_(not self.fs.isdir(kappa))
self.fs.makedir(pathjoin("test",kappa),recursive=True)
self.assert_(check(pathjoin("test",kappa)))
self.fs.removedir("test",force=True)
self.fs.makedir(pathjoin("test", kappa), recursive=True)
self.assert_(check(pathjoin("test", kappa)))
self.fs.removedir("test", force=True)
self.assert_(not check("test"))
def test_rename(self):
    """rename() moves files and directories, within and across dirs."""
    check = self.check
    # test renaming a file in the same directory
    self.fs.setcontents("foo.txt", b("Hello, World!"))
    self.assert_(check("foo.txt"))
    self.fs.rename("foo.txt", "bar.txt")
    self.assert_(check("bar.txt"))
    self.assert_(not check("foo.txt"))
    # test renaming a directory in the same directory
    self.fs.makedir("dir_a")
    self.fs.setcontents("dir_a/test.txt", b("testerific"))
    self.assert_(check("dir_a"))
    self.fs.rename("dir_a", "dir_b")
    self.assert_(check("dir_b"))
    self.assert_(check("dir_b/test.txt"))
    self.assert_(not check("dir_a/test.txt"))
    self.assert_(not check("dir_a"))
    # test renaming a file into a different directory
    self.fs.makedir("dir_a")
    self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
    self.assert_(not check("dir_b/test.txt"))
    self.assert_(check("dir_a/test.txt"))
    # test renaming a file into a non-existent directory
    self.assertRaises(ParentDirectoryMissingError,
                      self.fs.rename, "dir_a/test.txt", "nonexistent/test.txt")
def test_info(self):
    """getinfo() reports size and raises for missing/invalid paths."""
    test_str = b("Hello, World!")
    self.fs.setcontents("info.txt", test_str)
    info = self.fs.getinfo("info.txt")
    self.assertEqual(info['size'], len(test_str))
    self.fs.desc("info.txt")
    self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "notafile")
    # A file can never act as a parent directory of another resource.
    self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "info.txt/inval")
def test_getsize(self):
    """getsize() returns the byte length of a file's contents."""
    test_str = b("*") * 23
    self.fs.setcontents("info.txt", test_str)
    size = self.fs.getsize("info.txt")
    self.assertEqual(size, len(test_str))
def test_movefile(self):
check = self.check
contents = b("If the implementation is hard to explain, it's a bad idea.")
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path,contents)
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents,contents)
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
......@@ -513,21 +534,23 @@ class FSTestCases(object):
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt")
self.assertRaises(DestinationExistsError,self.fs.move,"foo/bar/a.txt","/c.txt")
self.assertRaises(
DestinationExistsError, self.fs.move, "foo/bar/a.txt", "/c.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
self.fs.move("foo/bar/a.txt","/c.txt",overwrite=True)
self.fs.move("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(not check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
def test_movedir(self):
check = self.check
contents = b("If the implementation is hard to explain, it's a bad idea.")
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
self.assertRaises(ResourceNotFoundError,self.fs.movedir,"a","b")
self.assertRaises(ResourceNotFoundError, self.fs.movedir, "a", "b")
self.fs.makedir("a")
self.fs.makedir("b")
makefile("a/1.txt")
......@@ -553,34 +576,37 @@ class FSTestCases(object):
self.assert_(not check("a"))
self.fs.makedir("a")
self.assertRaises(DestinationExistsError,self.fs.movedir,"copy of a","a")
self.fs.movedir("copy of a","a",overwrite=True)
self.assertRaises(
DestinationExistsError, self.fs.movedir, "copy of a", "a")
self.fs.movedir("copy of a", "a", overwrite=True)
self.assert_(not check("copy of a"))
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/3.txt"))
self.assert_(check("a/foo/bar/baz.txt"))
def test_cant_copy_from_os(self):
    """copy() must reject a system path that lies outside this FS."""
    sys_executable = os.path.abspath(os.path.realpath(sys.executable))
    self.assertRaises(FSError, self.fs.copy, sys_executable, "py.exe")
def test_copyfile(self):
check = self.check
contents = b("If the implementation is hard to explain, it's a bad idea.")
def makefile(path,contents=contents):
self.fs.setcontents(path,contents)
def checkcontents(path,contents=contents):
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path, contents=contents):
self.fs.setcontents(path, contents)
def checkcontents(path, contents=contents):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents,contents)
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
#import rpdb2; rpdb2.start_embedded_debugger('password');
# import rpdb2; rpdb2.start_embedded_debugger('password');
self.fs.copy("foo/bar/a.txt", "foo/b.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
......@@ -592,23 +618,26 @@ class FSTestCases(object):
self.assert_(check("/c.txt"))
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt",b("different contents"))
self.assert_(checkcontents("foo/bar/a.txt",b("different contents")))
self.assertRaises(DestinationExistsError,self.fs.copy,"foo/bar/a.txt","/c.txt")
makefile("foo/bar/a.txt", b("different contents"))
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assertRaises(
DestinationExistsError, self.fs.copy, "foo/bar/a.txt", "/c.txt")
self.assert_(checkcontents("/c.txt"))
self.fs.copy("foo/bar/a.txt","/c.txt",overwrite=True)
self.assert_(checkcontents("foo/bar/a.txt",b("different contents")))
self.assert_(checkcontents("/c.txt",b("different contents")))
self.fs.copy("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assert_(checkcontents("/c.txt", b("different contents")))
def test_copydir(self):
check = self.check
contents = b("If the implementation is hard to explain, it's a bad idea.")
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path,contents)
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path)
self.assertEqual(check_contents,contents)
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("a")
......@@ -632,8 +661,8 @@ class FSTestCases(object):
self.assert_(check("a/foo/bar/baz.txt"))
checkcontents("a/1.txt")
self.assertRaises(DestinationExistsError,self.fs.copydir,"a","b")
self.fs.copydir("a","b",overwrite=True)
self.assertRaises(DestinationExistsError, self.fs.copydir, "a", "b")
self.fs.copydir("a", "b", overwrite=True)
self.assert_(check("b/1.txt"))
self.assert_(check("b/2.txt"))
self.assert_(check("b/3.txt"))
......@@ -642,9 +671,11 @@ class FSTestCases(object):
def test_copydir_with_dotfile(self):
check = self.check
contents = b("If the implementation is hard to explain, it's a bad idea.")
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path,contents)
self.fs.setcontents(path, contents)
self.fs.makedir("a")
makefile("a/1.txt")
......@@ -663,7 +694,7 @@ class FSTestCases(object):
def test_readwriteappendseek(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents,check_contents)
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
test_strings = [b("Beautiful is better than ugly."),
b("Explicit is better than implicit."),
......@@ -688,11 +719,11 @@ class FSTestCases(object):
self.assert_(checkcontents("b.txt", test_strings[0]))
f3 = self.fs.open("b.txt", "ab")
# On win32, tell() gives zero until you actually write to the file
#self.assertEquals(f3.tell(),len(test_strings[0]))
# self.assertEquals(f3.tell(),len(test_strings[0]))
f3.write(test_strings[1])
self.assertEquals(f3.tell(),len(test_strings[0])+len(test_strings[1]))
self.assertEquals(f3.tell(), len(test_strings[0])+len(test_strings[1]))
f3.write(test_strings[2])
self.assertEquals(f3.tell(),len(all_strings))
self.assertEquals(f3.tell(), len(all_strings))
f3.close()
self.assert_(checkcontents("b.txt", all_strings))
f4 = self.fs.open("b.txt", "wb")
......@@ -723,46 +754,45 @@ class FSTestCases(object):
def test_truncate(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents,check_contents)
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
self.fs.setcontents("hello",b("world"))
checkcontents("hello",b("world"))
self.fs.setcontents("hello",b("hi"))
checkcontents("hello",b("hi"))
self.fs.setcontents("hello",b("1234567890"))
checkcontents("hello",b("1234567890"))
with self.fs.open("hello","rb+") as f:
self.fs.setcontents("hello", b("world"))
checkcontents("hello", b("world"))
self.fs.setcontents("hello", b("hi"))
checkcontents("hello", b("hi"))
self.fs.setcontents("hello", b("1234567890"))
checkcontents("hello", b("1234567890"))
with self.fs.open("hello", "rb+") as f:
f.truncate(7)
checkcontents("hello",b("1234567"))
with self.fs.open("hello","rb+") as f:
checkcontents("hello", b("1234567"))
with self.fs.open("hello", "rb+") as f:
f.seek(5)
f.truncate()
checkcontents("hello",b("12345"))
checkcontents("hello", b("12345"))
def test_truncate_to_larger_size(self):
with self.fs.open("hello","wb") as f:
with self.fs.open("hello", "wb") as f:
f.truncate(30)
self.assertEquals(self.fs.getsize("hello"), 30)
# Some file systems (FTPFS) don't support both reading and writing
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("hello","rb+") as f:
with self.fs.open("hello", "rb+") as f:
f.seek(25)
f.write(b("123456"))
with self.fs.open("hello","rb") as f:
with self.fs.open("hello", "rb") as f:
f.seek(25)
self.assertEquals(f.read(),b("123456"))
self.assertEquals(f.read(), b("123456"))
def test_write_past_end_of_file(self):
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("write_at_end","wb") as f:
with self.fs.open("write_at_end", "wb") as f:
f.seek(25)
f.write(b("EOF"))
with self.fs.open("write_at_end","rb") as f:
self.assertEquals(f.read(),b("\x00")*25 + b("EOF"))
with self.fs.open("write_at_end", "rb") as f:
self.assertEquals(f.read(), b("\x00")*25 + b("EOF"))
def test_with_statement(self):
# This is a little tricky since 'with' is actually new syntax.
......@@ -775,7 +805,7 @@ class FSTestCases(object):
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += "self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)"
code = compile(code,"<string>",'exec')
code = compile(code, "<string>", 'exec')
eval(code)
# A 'with' statement raising an error
contents = "testing the with statement"
......@@ -783,42 +813,43 @@ class FSTestCases(object):
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += " raise ValueError\n"
code = compile(code,"<string>",'exec')
self.assertRaises(ValueError,eval,code,globals(),locals())
self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)
code = compile(code, "<string>", 'exec')
self.assertRaises(ValueError, eval, code, globals(), locals())
self.assertEquals(self.fs.getcontents('f.txt', 'rb'), contents)
def test_pickling(self):
if self.fs.getmeta('pickle_contents', True):
self.fs.setcontents("test1",b("hello world"))
self.fs.setcontents("test1", b("hello world"))
fs2 = pickle.loads(pickle.dumps(self.fs))
self.assert_(fs2.isfile("test1"))
fs3 = pickle.loads(pickle.dumps(self.fs,-1))
fs3 = pickle.loads(pickle.dumps(self.fs, -1))
self.assert_(fs3.isfile("test1"))
else:
# Just make sure it doesn't throw an exception
fs2 = pickle.loads(pickle.dumps(self.fs))
def test_big_file(self):
"""Test handling of a big file (1MB)"""
chunk_size = 1024 * 256
num_chunks = 4
def chunk_stream():
"""Generate predictable-but-randomy binary content."""
r = random.Random(0)
randint = r.randint
int2byte = six.int2byte
for _i in xrange(num_chunks):
c = b("").join(int2byte(randint(0,255)) for _j in xrange(chunk_size//8))
c = b("").join(int2byte(randint(
0, 255)) for _j in xrange(chunk_size//8))
yield c * 8
f = self.fs.open("bigfile","wb")
f = self.fs.open("bigfile", "wb")
try:
for chunk in chunk_stream():
f.write(chunk)
finally:
f.close()
chunks = chunk_stream()
f = self.fs.open("bigfile","rb")
f = self.fs.open("bigfile", "rb")
try:
try:
while True:
......@@ -854,17 +885,19 @@ class FSTestCases(object):
self.assertRaises(RemoveRootError, self.fs.removedir, "/")
# May be disabled - see end of file
class ThreadingTestCases(object):
"""Testcases for thread-safety of FS implementations."""
# These are either too slow to be worth repeating,
# or cannot possibly break cross-thread.
_dont_retest = ("test_pickling","test_multiple_overwrite",)
_dont_retest = ("test_pickling", "test_multiple_overwrite",)
__lock = threading.RLock()
def _yield(self):
#time.sleep(0.001)
# time.sleep(0.001)
# Yields without a delay
time.sleep(0)
......@@ -874,7 +907,7 @@ class ThreadingTestCases(object):
def _unlock(self):
self.__lock.release()
def _makeThread(self,func,errors):
def _makeThread(self, func, errors):
def runThread():
try:
func()
......@@ -884,74 +917,79 @@ class ThreadingTestCases(object):
thread.daemon = True
return thread
def _runThreads(self,*funcs):
def _runThreads(self, *funcs):
check_interval = sys.getcheckinterval()
sys.setcheckinterval(1)
try:
errors = []
threads = [self._makeThread(f,errors) for f in funcs]
threads = [self._makeThread(f, errors) for f in funcs]
for t in threads:
t.start()
for t in threads:
t.join()
for (c,e,t) in errors:
raise c,e,t
for (c, e, t) in errors:
raise e, None, t
finally:
sys.setcheckinterval(check_interval)
def test_setcontents_threaded(self):
def setcontents(name,contents):
f = self.fs.open(name,"wb")
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("thread1.txt",c)
self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'),c)
setcontents("thread1.txt", c)
self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)
def thread2():
c = b("thread2 was 'ere")
setcontents("thread2.txt",c)
self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'),c)
self._runThreads(thread1,thread2)
setcontents("thread2.txt", c)
self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
def test_setcontents_threaded_samefile(self):
def setcontents(name,contents):
f = self.fs.open(name,"wb")
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("threads.txt",c)
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"),["threads.txt"])
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread2():
c = b("thread2 was 'ere")
setcontents("threads.txt",c)
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"),["threads.txt"])
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread3():
c = b("thread3 was 'ere")
setcontents("threads.txt",c)
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"),["threads.txt"])
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
try:
self._runThreads(thread1,thread2,thread3)
self._runThreads(thread1, thread2, thread3)
except ResourceLockedError:
# that's ok, some implementations don't support concurrent writes
pass
def test_cases_in_separate_dirs(self):
class TestCases_in_subdir(self.__class__,unittest.TestCase):
class TestCases_in_subdir(self.__class__, unittest.TestCase):
"""Run all testcases against a subdir of self.fs"""
def __init__(this,subdir):
def __init__(this, subdir):
super(TestCases_in_subdir, this).__init__("test_listdir")
this.subdir = subdir
for meth in dir(this):
......@@ -959,113 +997,136 @@ class ThreadingTestCases(object):
continue
if meth in self._dont_retest:
continue
if not hasattr(FSTestCases,meth):
if not hasattr(FSTestCases, meth):
continue
if self.fs.exists(subdir):
self.fs.removedir(subdir,force=True)
self.fs.removedir(subdir, force=True)
self.assertFalse(self.fs.isdir(subdir))
self.assertTrue(self.fs.isdir("/"))
self.fs.makedir(subdir)
self._yield()
getattr(this,meth)()
getattr(this, meth)()
@property
def fs(this):
return self.fs.opendir(this.subdir)
def check(this,p):
return self.check(pathjoin(this.subdir,relpath(p)))
def check(this, p):
return self.check(pathjoin(this.subdir, relpath(p)))
def thread1():
TestCases_in_subdir("thread1")
def thread2():
TestCases_in_subdir("thread2")
def thread3():
TestCases_in_subdir("thread3")
self._runThreads(thread1,thread2,thread3)
self._runThreads(thread1, thread2, thread3)
def test_makedir_winner(self):
errors = []
def makedir():
try:
self.fs.makedir("testdir")
except DestinationExistsError, e:
errors.append(e)
def makedir_noerror():
try:
self.fs.makedir("testdir",allow_recreate=True)
self.fs.makedir("testdir", allow_recreate=True)
except DestinationExistsError, e:
errors.append(e)
def removedir():
try:
self.fs.removedir("testdir")
except (ResourceNotFoundError,ResourceLockedError), e:
except (ResourceNotFoundError, ResourceLockedError), e:
errors.append(e)
# One thread should succeed, one should error
self._runThreads(makedir,makedir)
self.assertEquals(len(errors),1)
self._runThreads(makedir, makedir)
self.assertEquals(len(errors), 1)
self.fs.removedir("testdir")
# One thread should succeed, two should error
errors = []
self._runThreads(makedir,makedir,makedir)
self._runThreads(makedir, makedir, makedir)
if len(errors) != 2:
raise AssertionError(errors)
self.fs.removedir("testdir")
# All threads should succeed
errors = []
self._runThreads(makedir_noerror,makedir_noerror,makedir_noerror)
self.assertEquals(len(errors),0)
self._runThreads(makedir_noerror, makedir_noerror, makedir_noerror)
self.assertEquals(len(errors), 0)
self.assertTrue(self.fs.isdir("testdir"))
self.fs.removedir("testdir")
# makedir() can beat removedir() and vice-versa
errors = []
self._runThreads(makedir,removedir)
self._runThreads(makedir, removedir)
if self.fs.isdir("testdir"):
self.assertEquals(len(errors),1)
self.assertFalse(isinstance(errors[0],DestinationExistsError))
self.assertEquals(len(errors), 1)
self.assertFalse(isinstance(errors[0], DestinationExistsError))
self.fs.removedir("testdir")
else:
self.assertEquals(len(errors),0)
self.assertEquals(len(errors), 0)
def test_concurrent_copydir(self):
self.fs.makedir("a")
self.fs.makedir("a/b")
self.fs.setcontents("a/hello.txt",b("hello world"))
self.fs.setcontents("a/guido.txt",b("is a space alien"))
self.fs.setcontents("a/b/parrot.txt",b("pining for the fiords"))
self.fs.setcontents("a/hello.txt", b("hello world"))
self.fs.setcontents("a/guido.txt", b("is a space alien"))
self.fs.setcontents("a/b/parrot.txt", b("pining for the fiords"))
def copydir():
self._yield()
self.fs.copydir("a","copy of a")
self.fs.copydir("a", "copy of a")
def copydir_overwrite():
self._yield()
self.fs.copydir("a","copy of a",overwrite=True)
self.fs.copydir("a", "copy of a", overwrite=True)
# This should error out since we're not overwriting
self.assertRaises(DestinationExistsError,self._runThreads,copydir,copydir)
self.assertRaises(
DestinationExistsError, self._runThreads, copydir, copydir)
self.assert_(self.fs.isdir('a'))
self.assert_(self.fs.isdir('a'))
copydir_overwrite()
self.assert_(self.fs.isdir('a'))
# This should run to completion and give a valid state, unless
# files get locked when written to.
try:
self._runThreads(copydir_overwrite,copydir_overwrite)
self._runThreads(copydir_overwrite, copydir_overwrite)
except ResourceLockedError:
pass
self.assertTrue(self.fs.isdir("copy of a"))
self.assertTrue(self.fs.isdir("copy of a/b"))
self.assertEqual(self.fs.getcontents("copy of a/b/parrot.txt", 'rb'),b("pining for the fiords"))
self.assertEqual(self.fs.getcontents("copy of a/hello.txt", 'rb'),b("hello world"))
self.assertEqual(self.fs.getcontents("copy of a/guido.txt", 'rb'),b("is a space alien"))
self.assertEqual(self.fs.getcontents(
"copy of a/b/parrot.txt", 'rb'), b("pining for the fiords"))
self.assertEqual(self.fs.getcontents(
"copy of a/hello.txt", 'rb'), b("hello world"))
self.assertEqual(self.fs.getcontents(
"copy of a/guido.txt", 'rb'), b("is a space alien"))
def test_multiple_overwrite(self):
contents = [b("contents one"),b("contents the second"),b("number three")]
contents = [b("contents one"), b(
"contents the second"), b("number three")]
def thread1():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread1.txt",c)
self.assertEquals(self.fs.getsize("thread1.txt"),len(c))
self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'),c)
self.fs.setcontents("thread1.txt", c)
self.assertEquals(self.fs.getsize("thread1.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread1.txt", 'rb'), c)
def thread2():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread2.txt",c)
self.assertEquals(self.fs.getsize("thread2.txt"),len(c))
self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'),c)
self._runThreads(thread1,thread2)
self.fs.setcontents("thread2.txt", c)
self.assertEquals(self.fs.getsize("thread2.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
# Uncomment to temporarily disable threading tests
#class ThreadingTestCases(object):
# class ThreadingTestCases(object):
# _dont_retest = ()
......@@ -6,7 +6,8 @@
import unittest
import sys
import os, os.path
import os
import os.path
import socket
import threading
import time
......@@ -32,6 +33,12 @@ try:
except ImportError:
if not PY3:
raise
import logging
logging.getLogger('paramiko').setLevel(logging.ERROR)
logging.getLogger('paramiko.transport').setLevel(logging.ERROR)
class TestSFTPFS(TestRPCFS):
__test__ = not PY3
......@@ -55,7 +62,7 @@ except ImportError:
pass
else:
from fs.osfs import OSFS
class TestFUSE(unittest.TestCase,FSTestCases,ThreadingTestCases):
class TestFUSE(unittest.TestCase, FSTestCases, ThreadingTestCases):
def setUp(self):
self.temp_fs = TempFS()
......@@ -64,7 +71,7 @@ else:
self.mounted_fs = self.temp_fs.opendir("root")
self.mount_point = self.temp_fs.getsyspath("mount")
self.fs = OSFS(self.temp_fs.getsyspath("mount"))
self.mount_proc = fuse.mount(self.mounted_fs,self.mount_point)
self.mount_proc = fuse.mount(self.mounted_fs, self.mount_point)
def tearDown(self):
self.mount_proc.unmount()
......@@ -76,7 +83,7 @@ else:
fuse.unmount(self.mount_point)
self.temp_fs.close()
def check(self,p):
def check(self, p):
return self.mounted_fs.exists(p)
......
......@@ -12,6 +12,7 @@ from fs.zipfs import ZipFS
from six import b
class TestFSImportHook(unittest.TestCase):
def setUp(self):
......@@ -140,4 +141,3 @@ class TestFSImportHook(unittest.TestCase):
sys.path_hooks.remove(FSImportHook)
sys.path.pop()
t.close()
from __future__ import unicode_literals
from fs import iotools
import io
import unittest
from os.path import dirname, join, abspath
# Py2/Py3 compatibility: on Python 3 the ``unicode`` builtin is gone,
# so alias it to ``str`` there; on Python 2 the name lookup succeeds
# and the existing builtin is kept.
try:
    unicode
except NameError:
    unicode = str
class OpenFilelike(object):
    """Expose a factory of file-like objects behind an ``open()`` method.

    Used to exercise the ``iotools.filelike_to_stream`` decorator: ``open``
    accepts the full io-style signature but ignores everything except the
    factory, returning a fresh file-like object which the decorator then
    wraps in an ``io`` stream.
    """

    def __init__(self, make_f):
        # ``make_f`` is a zero-argument callable returning a file-like object.
        self.make_f = make_f

    @iotools.filelike_to_stream
    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        # Keep a reference to the raw file-like so ``__exit__`` can close it.
        # (The original referenced ``self.f`` without ever assigning it, which
        # would raise AttributeError if this object were used as a context
        # manager.)
        self.f = self.make_f()
        return self.f

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self.f.close()
class TestIOTools(unittest.TestCase):
    """Tests for the ``fs.iotools`` stream-wrapping helpers."""

    def get_bin_file(self):
        # Open the bundled UTF-8 sample document in binary mode, so the
        # tests can compare raw-bytes reads against decoded-text reads.
        path = join(dirname(abspath(__file__)), 'data/UTF-8-demo.txt')
        return io.open(path, 'rb')

    def test_make_stream(self):
        """Test make_stream"""
        # Reading the raw file yields bytes...
        with self.get_bin_file() as f:
            text = f.read()
            self.assert_(isinstance(text, bytes))

        # ...but wrapping it with make_stream in text mode ('rt') yields
        # decoded unicode text.
        with self.get_bin_file() as f:
            with iotools.make_stream("data/UTF-8-demo.txt", f, 'rt') as f2:
                text = f2.read()
                self.assert_(isinstance(text, unicode))

    def test_decorator(self):
        """Test filelike_to_stream decorator"""
        o = OpenFilelike(self.get_bin_file)
        # Binary mode passes bytes through unchanged.
        with o.open('file', 'rb') as f:
            text = f.read()
            self.assert_(isinstance(text, bytes))

        # Text mode decorator-wrapped open returns decoded unicode.
        with o.open('file', 'rt') as f:
            text = f.read()
            self.assert_(isinstance(text, unicode))
......@@ -2,10 +2,11 @@ from fs.mountfs import MountFS
from fs.memoryfs import MemoryFS
import unittest
class TestMultiFS(unittest.TestCase):
class TestMountFS(unittest.TestCase):
def test_auto_close(self):
"""Test MultiFS auto close is working"""
"""Test MountFS auto close is working"""
multi_fs = MountFS()
m1 = MemoryFS()
m2 = MemoryFS()
......@@ -18,7 +19,7 @@ class TestMultiFS(unittest.TestCase):
self.assert_(m2.closed)
def test_no_auto_close(self):
"""Test MultiFS auto close can be disabled"""
"""Test MountFS auto close can be disabled"""
multi_fs = MountFS(auto_close=False)
m1 = MemoryFS()
m2 = MemoryFS()
......@@ -32,7 +33,7 @@ class TestMultiFS(unittest.TestCase):
def test_mountfile(self):
"""Test mounting a file"""
quote = """If you wish to make an apple pie from scratch, you must first invent the universe."""
quote = b"""If you wish to make an apple pie from scratch, you must first invent the universe."""
mem_fs = MemoryFS()
mem_fs.makedir('foo')
mem_fs.setcontents('foo/bar.txt', quote)
......@@ -58,11 +59,11 @@ class TestMultiFS(unittest.TestCase):
# Check changes are written back
mem_fs.setcontents('foo/bar.txt', 'baz')
self.assertEqual(mount_fs.getcontents('bar.txt'), 'baz')
self.assertEqual(mount_fs.getcontents('bar.txt'), b'baz')
self.assertEqual(mount_fs.getsize('bar.txt'), len('baz'))
# Check changes are written to the original fs
self.assertEqual(mem_fs.getcontents('foo/bar.txt'), 'baz')
self.assertEqual(mem_fs.getcontents('foo/bar.txt'), b'baz')
self.assertEqual(mem_fs.getsize('foo/bar.txt'), len('baz'))
# Check unmount
......
......@@ -24,23 +24,27 @@ from fs.local_functools import wraps
from six import PY3, b
class RemoteTempFS(TempFS):
"""
Simple filesystem implementing setfilecontents
for RemoteFileBuffer tests
"""
def open(self, path, mode='rb', write_on_flush=True):
def open(self, path, mode='rb', write_on_flush=True, **kwargs):
if 'a' in mode or 'r' in mode or '+' in mode:
f = super(RemoteTempFS, self).open(path, 'rb')
f = super(RemoteTempFS, self).open(path, mode='rb', **kwargs)
f = TellAfterCloseFile(f)
else:
f = None
return RemoteFileBuffer(self, path, mode, f,
return RemoteFileBuffer(self,
path,
mode,
f,
write_on_flush=write_on_flush)
def setcontents(self, path, data, chunk_size=64*1024):
f = super(RemoteTempFS, self).open(path, 'wb')
def setcontents(self, path, data, encoding=None, errors=None, chunk_size=64*1024):
f = super(RemoteTempFS, self).open(path, 'wb', encoding=encoding, errors=errors, chunk_size=chunk_size)
if getattr(data, 'read', False):
f.write(data.read())
else:
......@@ -51,7 +55,7 @@ class RemoteTempFS(TempFS):
class TellAfterCloseFile(object):
"""File-like object that allows calling tell() after it's been closed."""
def __init__(self,file):
def __init__(self, file):
self._finalpos = None
self.file = file
......@@ -65,8 +69,8 @@ class TellAfterCloseFile(object):
return self._finalpos
return self.file.tell()
def __getattr__(self,attr):
return getattr(self.file,attr)
def __getattr__(self, attr):
return getattr(self.file, attr)
class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
......@@ -315,8 +319,8 @@ class DisconnectingFS(WrapFS):
time.sleep(random.random()*0.1)
self._connected = not self._connected
def setcontents(self, path, contents=b(''), chunk_size=64*1024):
return self.wrapped_fs.setcontents(path, contents)
def setcontents(self, path, data=b(''), encoding=None, errors=None, chunk_size=64*1024):
return self.wrapped_fs.setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
def close(self):
if not self.closed:
......
......@@ -29,6 +29,10 @@ if sys.platform == "win32":
else:
watch_win32 = None
import logging
logging.getLogger('pyinotify').setLevel(logging.ERROR)
import six
from six import PY3, b
......@@ -53,7 +57,7 @@ class WatcherTestCases:
self.watchfs._poll_cond.wait()
self.watchfs._poll_cond.release()
else:
time.sleep(2)#0.5)
time.sleep(2)
def assertEventOccurred(self,cls,path=None,event_list=None,**attrs):
if not self.checkEventOccurred(cls,path,event_list,**attrs):
......@@ -222,4 +226,3 @@ class TestWatchers_MemoryFS_polling(TestWatchers_MemoryFS):
def setUp(self):
self.fs = memoryfs.MemoryFS()
self.watchfs = ensure_watchable(self.fs,poll_interval=0.1)
......@@ -17,6 +17,7 @@ from fs import zipfs
from six import PY3, b
class TestReadZipFS(unittest.TestCase):
def setUp(self):
......@@ -46,20 +47,22 @@ class TestReadZipFS(unittest.TestCase):
def test_reads(self):
def read_contents(path):
f = self.fs.open(path)
f = self.fs.open(path, 'rb')
contents = f.read()
return contents
def check_contents(path, expected):
self.assert_(read_contents(path)==expected)
self.assert_(read_contents(path) == expected)
check_contents("a.txt", b("Hello, World!"))
check_contents("1.txt", b("1"))
check_contents("foo/bar/baz.txt", b("baz"))
def test_getcontents(self):
def read_contents(path):
return self.fs.getcontents(path)
return self.fs.getcontents(path, 'rb')
def check_contents(path, expected):
self.assert_(read_contents(path)==expected)
self.assert_(read_contents(path) == expected)
check_contents("a.txt", b("Hello, World!"))
check_contents("1.txt", b("1"))
check_contents("foo/bar/baz.txt", b("baz"))
......@@ -82,7 +85,7 @@ class TestReadZipFS(unittest.TestCase):
dir_list = self.fs.listdir(path)
self.assert_(sorted(dir_list) == sorted(expected))
for item in dir_list:
self.assert_(isinstance(item,unicode))
self.assert_(isinstance(item, unicode))
check_listing('/', ['a.txt', '1.txt', 'foo', 'b.txt'])
check_listing('foo', ['second.txt', 'bar'])
check_listing('foo/bar', ['baz.txt'])
......
......@@ -72,6 +72,7 @@ def copyfile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
if src_lock is not None:
src_lock.release()
def copyfile_non_atomic(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1024):
"""A non atomic version of copyfile (will not block other threads using src_fs or dst_fst)
......
......@@ -291,29 +291,36 @@ class WatchableFS(WatchableFSMixin,WrapFS):
that might be made through other interfaces to the same filesystem.
"""
def __init__(self,*args,**kwds):
super(WatchableFS,self).__init__(*args,**kwds)
def __init__(self, *args, **kwds):
super(WatchableFS, self).__init__(*args, **kwds)
def close(self):
super(WatchableFS,self).close()
super(WatchableFS, self).close()
self.notify_watchers(CLOSED)
def open(self,path,mode="r",**kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
existed = self.wrapped_fs.isfile(path)
f = super(WatchableFS,self).open(path,mode,**kwargs)
f = super(WatchableFS, self).open(path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
line_buffering=line_buffering,
**kwargs)
if not existed:
self.notify_watchers(CREATED,path)
self.notify_watchers(ACCESSED,path)
return WatchedFile(f,self,path,mode)
self.notify_watchers(CREATED, path)
self.notify_watchers(ACCESSED, path)
return WatchedFile(f, self, path, mode)
def setcontents(self, path, data=b(''), chunk_size=64*1024):
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
existed = self.wrapped_fs.isfile(path)
ret = super(WatchableFS, self).setcontents(path, data, chunk_size=chunk_size)
if not existed:
self.notify_watchers(CREATED,path)
self.notify_watchers(ACCESSED,path)
self.notify_watchers(CREATED, path)
self.notify_watchers(ACCESSED, path)
if data:
self.notify_watchers(MODIFIED,path,True)
self.notify_watchers(MODIFIED, path, True)
return ret
def createfile(self, path):
......
......@@ -150,21 +150,21 @@ class WrapFS(FS):
return self.wrapped_fs.hassyspath(self._encode(path))
@rewrite_errors
def open(self, path, mode="r", **kwargs):
def open(self, path, mode='r', **kwargs):
(mode, wmode) = self._adjust_mode(mode)
f = self.wrapped_fs.open(self._encode(path), wmode, **kwargs)
return self._file_wrap(f, mode)
@rewrite_errors
def setcontents(self, path, data, chunk_size=64*1024):
def setcontents(self, path, data, encoding=None, errors=None, chunk_size=64*1024):
# We can't pass setcontents() through to the wrapped FS if the
# wrapper has defined a _file_wrap method, as it would bypass
# the file contents wrapping.
#if self._file_wrap.im_func is WrapFS._file_wrap.im_func:
if getattr(self.__class__, '_file_wrap', None) is getattr(WrapFS, '_file_wrap', None):
return self.wrapped_fs.setcontents(self._encode(path), data, chunk_size=chunk_size)
return self.wrapped_fs.setcontents(self._encode(path), data, encoding=encoding, errors=errors, chunk_size=chunk_size)
else:
return super(WrapFS,self).setcontents(path, data, chunk_size=chunk_size)
return super(WrapFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
@rewrite_errors
def createfile(self, path):
......
......@@ -58,14 +58,20 @@ class LimitSizeFS(WrapFS):
raise NoSysPathError(path)
return None
def open(self, path, mode="r"):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
path = relpath(normpath(path))
with self._size_lock:
try:
size = self.getsize(path)
except ResourceNotFoundError:
size = 0
f = super(LimitSizeFS,self).open(path,mode)
f = super(LimitSizeFS,self).open(path,
mode=mode,
buffering=buffering,
errors=errors,
newline=newline,
line_buffering=line_buffering,
**kwargs)
if "w" not in mode:
self._set_file_size(path,None,1)
else:
......
......@@ -10,6 +10,7 @@ from fs.base import NoDefaultMeta
from fs.wrapfs import WrapFS
from fs.errors import UnsupportedError, NoSysPathError
class ReadOnlyFS(WrapFS):
""" Makes a FS object read only. Any operation that could potentially modify
the underlying file system will throw an UnsupportedError
......@@ -38,11 +39,18 @@ class ReadOnlyFS(WrapFS):
return None
raise NoSysPathError(path)
def open(self, path, mode='r', **kwargs):
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
""" Only permit read access """
if 'w' in mode or 'a' in mode or '+' in mode:
raise UnsupportedError('write')
return super(ReadOnlyFS, self).open(path, mode, **kwargs)
return super(ReadOnlyFS, self).open(path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
line_buffering=line_buffering,
**kwargs)
def _no_can_do(self, *args, **kwargs):
""" Replacement method for methods that can modify the file system """
......
......@@ -21,7 +21,7 @@ class SubFS(WrapFS):
def __init__(self, wrapped_fs, sub_dir):
self.sub_dir = abspath(normpath(sub_dir))
super(SubFS,self).__init__(wrapped_fs)
super(SubFS, self).__init__(wrapped_fs)
def _encode(self, path):
return pathjoin(self.sub_dir, relpath(normpath(path)))
......@@ -44,7 +44,7 @@ class SubFS(WrapFS):
return self.wrapped_fs.desc(self.sub_dir)
return '%s!%s' % (self.wrapped_fs.desc(self.sub_dir), path)
def setcontents(self, path, data, chunk_size=64*1024):
def setcontents(self, path, data, encoding=None, errors=None, chunk_size=64*1024):
path = self._encode(path)
return self.wrapped_fs.setcontents(path, data, chunk_size=chunk_size)
......@@ -62,11 +62,11 @@ class SubFS(WrapFS):
path = normpath(path)
if path in ('', '/'):
raise RemoveRootError(path)
super(SubFS,self).removedir(path,force=force)
super(SubFS, self).removedir(path, force=force)
if recursive:
try:
if dirname(path) not in ('', '/'):
self.removedir(dirname(path),recursive=True)
self.removedir(dirname(path), recursive=True)
except DirectoryNotEmptyError:
pass
......
......@@ -13,6 +13,7 @@ from fs.base import *
from fs.path import *
from fs.errors import *
from fs.filelike import StringIO
from fs import iotools
from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED, BadZipfile, LargeZipFile
from memoryfs import MemoryFS
......@@ -21,6 +22,7 @@ import tempfs
from six import PY3
class ZipOpenError(CreateFailedError):
"""Thrown when the zip file could not be opened"""
pass
......@@ -76,13 +78,13 @@ class _ExceptionProxy(object):
class ZipFS(FS):
"""A FileSystem that represents a zip file."""
_meta = { 'thread_safe' : True,
'virtual' : False,
'read_only' : False,
'unicode_paths' : True,
'case_insensitive_paths' : False,
'network' : False,
'atomic.setcontents' : False
_meta = {'thread_safe': True,
'virtual': False,
'read_only': False,
'unicode_paths': True,
'case_insensitive_paths': False,
'network': False,
'atomic.setcontents': False
}
def __init__(self, zip_file, mode="r", compression="deflated", allow_zip_64=False, encoding="CP437", thread_synchronize=True):
......@@ -189,7 +191,8 @@ class ZipFS(FS):
self.zf = _ExceptionProxy()
@synchronize
def open(self, path, mode="r", **kwargs):
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
path = normpath(relpath(path))
if 'r' in mode:
......@@ -222,7 +225,7 @@ class ZipFS(FS):
raise ValueError("Mode must contain be 'r' or 'w'")
@synchronize
def getcontents(self, path, mode="rb"):
def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
if not self.exists(path):
raise ResourceNotFoundError(path)
path = normpath(relpath(path))
......@@ -232,7 +235,9 @@ class ZipFS(FS):
raise ResourceNotFoundError(path)
except RuntimeError:
raise OperationFailedError("read file", path=path, msg="3 Zip file must be opened with 'r' or 'a' to read")
if 'b' in mode:
return contents
return iotools.decode_binary(contents, encoding=encoding, errors=errors, newline=newline)
@synchronize
def _on_write_close(self, filename):
......
......@@ -28,7 +28,6 @@ classifiers = [
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
......@@ -49,12 +48,12 @@ setup(install_requires=['distribute', 'six'],
version=VERSION,
description="Filesystem abstraction",
long_description=long_desc,
license = "BSD",
license="BSD",
author="Will McGugan",
author_email="will@willmcgugan.com",
url="http://code.google.com/p/pyfilesystem/",
download_url="http://code.google.com/p/pyfilesystem/downloads/list",
platforms = ['any'],
platforms=['any'],
packages=['fs',
'fs.expose',
'fs.expose.dokan',
......@@ -68,8 +67,8 @@ setup(install_requires=['distribute', 'six'],
'fs.contrib.davfs',
'fs.contrib.tahoelafs',
'fs.commands'],
package_data={'fs': ['tests/data/*.txt']},
scripts=['fs/commands/%s' % command for command in COMMANDS],
classifiers=classifiers,
**extra
)
[tox]
envlist = py25,py26,py27,py31,py32,pypy
envlist = py26,py27,py31,py32,pypy
sitepackages = False
[testenv]
......@@ -10,24 +10,11 @@ deps = distribute
boto
nose
mako
python-libarchive
pyftpdlib
changedir=.tox
commands = nosetests fs.tests -v \
[]
[testenv:py25]
deps = distribute
six
dexml
paramiko
boto
nose
mako
python-libarchive
pyftpdlib
simplejson
[testenv:py32]
commands = nosetests fs.tests -v \
[]
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment