Commit 754fcc98 by willmcgugan

First stab at a Python3 port

parent 87a736d5
@@ -35,6 +35,10 @@
 from fs.path import *
 from fs.errors import *
 from fs.local_functools import wraps

+import compatibility
+import six
+from six import PY3
+
 class DummyLock(object):
     """A dummy lock object that doesn't do anything.
@@ -703,7 +707,7 @@ class FS(object):
         return sys_path

-    def getcontents(self, path):
+    def getcontents(self, path, mode="rb"):
         """Returns the contents of a file as a string.

         :param path: A path of file to read
@@ -712,7 +716,7 @@ class FS(object):
         """
         f = None
         try:
-            f = self.open(path, "rb")
+            f = self.open(path, mode)
             contents = f.read()
             return contents
         finally:
@@ -731,23 +735,7 @@ class FS(object):
         if not data:
             self.createfile(path)
         else:
-            f = None
-            try:
-                f = self.open(path, 'wb')
-                if hasattr(data, "read"):
-                    read = data.read
-                    write = f.write
-                    chunk = read(chunk_size)
-                    while chunk:
-                        write(chunk)
-                        chunk = read(chunk_size)
-                else:
-                    f.write(data)
-                if hasattr(f, 'flush'):
-                    f.flush()
-            finally:
-                if f is not None:
-                    f.close()
+            compatibility.copy_file_to_fs(data, self, path, chunk_size=chunk_size)

     def setcontents_async(self,
                           path,
@@ -777,7 +765,53 @@ class FS(object):

         if progress_callback is None:
             progress_callback = lambda bytes_written:None

+        finished_event = threading.Event()
+
         def do_setcontents():
+            if PY3:
+                try:
+                    f = None
+                    try:
+                        progress_callback(0)
+                        if hasattr(data, "read"):
+                            bytes_written = 0
+                            read = data.read
+                            chunk = read(chunk_size)
+                            if isinstance(chunk, six.text_type):
+                                f = self.open(path, 'w')
+                            else:
+                                f = self.open(path, 'wb')
+                            write = f.write
+                            while chunk:
+                                write(chunk)
+                                bytes_written += len(chunk)
+                                progress_callback(bytes_written)
+                                chunk = read(chunk_size)
+                        else:
+                            if isinstance(data, six.text_type):
+                                f = self.open(path, 'w')
+                            else:
+                                f = self.open(path, 'wb')
+                            f.write(data)
+                            progress_callback(len(data))
+                        if finished_callback is not None:
+                            finished_callback()
+                    finally:
+                        if f is not None:
+                            f.close()
+                except Exception, e:
+                    if error_callback is not None:
+                        error_callback(e)
+                    raise
+                finally:
+                    finished_event.set()
+            else:
                 try:
                     f = None
                     try:
@@ -812,7 +846,6 @@ class FS(object):
                     finally:
                         finished_event.set()

-        finished_event = threading.Event()
         threading.Thread(target=do_setcontents).start()

         return finished_event
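Note on the hunk above: the threading.Event is now created before the worker thread is started (previously it was created just ahead of Thread(...).start()), and it is still what the method returns, so callers can block on it. A minimal usage sketch, assuming any FS subclass and the callback keywords shown in the diff:

    import io
    from fs.memoryfs import MemoryFS

    def report(bytes_written):
        pass  # e.g. drive a progress bar

    mem = MemoryFS()
    event = mem.setcontents_async("hello.txt", io.BytesIO(b"hello world"),
                                  progress_callback=report)
    event.wait()  # returns once do_setcontents() has called finished_event.set()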
......
"""
Some functions for Python3 compatibility.
Not for general usage, the functionality in this file is exposed elsewhere
"""
import six
from six import PY3

if PY3:
    def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024):
        """Copy data from a string or a file-like object to a given fs/path"""
        if hasattr(data, "read"):
            read = data.read
            chunk = read(chunk_size)
            f = None
            try:
                if isinstance(chunk, six.text_type):
                    f = dst_fs.open(dst_path, 'w')
                else:
                    f = dst_fs.open(dst_path, 'wb')
                write = f.write
                while chunk:
                    write(chunk)
                    chunk = read(chunk_size)
            finally:
                if f is not None:
                    f.close()
        else:
            f = None
            try:
                if isinstance(data, six.text_type):
                    f = dst_fs.open(dst_path, 'w')
                else:
                    f = dst_fs.open(dst_path, 'wb')
                f.write(data)
            finally:
                if f is not None:
                    f.close()

else:
    def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024):
        """Copy data from a string or a file-like object to a given fs/path"""
        f = None
        try:
            f = dst_fs.open(dst_path, 'wb')
            if hasattr(data, "read"):
                read = data.read
                write = f.write
                chunk = read(chunk_size)
                while chunk:
                    write(chunk)
                    chunk = read(chunk_size)
            else:
                f.write(data)
            if hasattr(f, 'flush'):
                f.flush()
        finally:
            if f is not None:
                f.close()
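For orientation, here is how the new helper is intended to be used. This is a hypothetical sketch: the `fs.compatibility` import path is an assumption based on the bare `import compatibility` added to the base module above.

    from fs.memoryfs import MemoryFS
    from fs import compatibility  # assumed location: fs/compatibility.py

    mem = MemoryFS()
    # a bytes payload selects mode 'wb'; on Python 3 a text payload would select 'w'
    compatibility.copy_file_to_fs(b"binary payload", mem, "/data.bin")
    print(mem.getcontents("/data.bin"))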
@@ -544,7 +544,7 @@ class MountProcess(subprocess.Popen):
             os.close(w)
             if os.read(r,1) != "S":
                 self.terminate()
-                raise RuntimeError("FUSE error: " + os.read(r,20))
+                raise RuntimeError("FUSE error: " + os.read(r,20)).decode(NATIVE_ENCODING)

     def unmount(self):
         """Cleanly unmount the FUSE filesystem, terminating this subprocess."""
......
@@ -33,11 +33,6 @@
 Other useful classes include:

 import tempfile as _tempfile

-try:
-    from cStringIO import StringIO as _StringIO
-except ImportError:
-    from StringIO import StringIO as _StringIO
-
 import fs

@@ -50,6 +45,17 @@ class NotSeekableError(IOError):
 class NotTruncatableError(IOError):
     pass

+import six
+from six import PY3, b
+
+if PY3:
+    _StringIO = six.BytesIO
+else:
+    try:
+        from cStringIO import StringIO as _StringIO
+    except ImportError:
+        from StringIO import StringIO as _StringIO
+
 class FileLikeBase(object):
     """Base class for implementing file-like objects.

@@ -265,14 +271,14 @@ class FileLikeBase(object):
         if self.closed:
             raise IOError("File has been closed")
         if self._check_mode("w-") and self._wbuffer is not None:
-            buffered = ""
+            buffered = b("")
             if self._sbuffer:
                 buffered = buffered + self._sbuffer
                 self._sbuffer = None
             buffered = buffered + self._wbuffer
             self._wbuffer = None
             leftover = self._write(buffered,flushing=True)
-            if leftover:
+            if leftover and not isinstance(leftover, int):
                 raise IOError("Could not flush write buffer.")

     def close(self):
@@ -306,7 +312,7 @@ class FileLikeBase(object):
         next() returning subsequent lines from the file.
         """
         ln = self.readline()
-        if ln == "":
+        if ln == b(""):
             raise StopIteration()
         return ln

@@ -442,19 +448,19 @@ class FileLikeBase(object):
                 data = [self._rbuffer]
             else:
                 data = []
-            self._rbuffer = ""
+            self._rbuffer = b("")
             newData = self._read()
             while newData is not None:
                 data.append(newData)
                 newData = self._read()
-            output = "".join(data)
+            output = b("").join(data)
         # Otherwise, we need to return a specific amount of data
         else:
             if self._rbuffer:
                 newData = self._rbuffer
                 data = [newData]
             else:
-                newData = ""
+                newData = b("")
                 data = []
             sizeSoFar = len(newData)
             while sizeSoFar < size:
@@ -463,20 +469,20 @@ class FileLikeBase(object):
                     break
                 data.append(newData)
                 sizeSoFar += len(newData)
-            data = "".join(data)
+            data = b("").join(data)
             if sizeSoFar > size:
                 # read too many bytes, store in the buffer
                 self._rbuffer = data[size:]
                 data = data[:size]
             else:
-                self._rbuffer = ""
+                self._rbuffer = b("")
             output = data
         return output

     def _do_read_rest(self):
         """Private method to read the file through to EOF."""
         data = self._do_read(self._bufsize)
-        while data != "":
+        while data != b(""):
             data = self._do_read(self._bufsize)

     def readline(self,size=-1):
@@ -488,11 +494,11 @@ class FileLikeBase(object):
             nextBit = self.read(self._bufsize)
             bits.append(nextBit)
             sizeSoFar += len(nextBit)
-            if nextBit == "":
+            if nextBit == b(""):
                 break
             if size > 0 and sizeSoFar >= size:
                 break
-            indx = nextBit.find("\n")
+            indx = nextBit.find(b("\n"))
             # If not found, return whole string up to <size> length
             # Any leftovers are pushed onto front of buffer
             if indx == -1:
@@ -508,7 +514,7 @@ class FileLikeBase(object):
             extra = bits[-1][indx:]
             bits[-1] = bits[-1][:indx]
             self._rbuffer = extra + self._rbuffer
-        return "".join(bits)
+        return b("").join(bits)

     def readlines(self,sizehint=-1):
         """Return a list of all lines in the file."""
@@ -542,8 +548,8 @@ class FileLikeBase(object):
         if self._wbuffer:
             string = self._wbuffer + string
         leftover = self._write(string)
-        if leftover is None:
-            self._wbuffer = ""
+        if leftover is None or isinstance(leftover, int):
+            self._wbuffer = b("")
         else:
             self._wbuffer = leftover
@@ -649,7 +655,7 @@ class FileWrapper(FileLikeBase):

     def _read(self,sizehint=-1):
         data = self.wrapped_file.read(sizehint)
-        if data == "":
+        if data == b(""):
             return None
         return data
@@ -694,7 +700,7 @@ class StringIO(FileWrapper):
         if size > curlen:
             self.wrapped_file.seek(curlen)
             try:
-                self.wrapped_file.write("\x00"*(size-curlen))
+                self.wrapped_file.write(b("\x00")*(size-curlen))
             finally:
                 self.wrapped_file.seek(pos)
@@ -715,7 +721,8 @@ class SpooledTemporaryFile(FileWrapper):
         try:
             stf_args = (max_size,mode,bufsize) + args
             wrapped_file = _tempfile.SpooledTemporaryFile(*stf_args,**kwds)
-            wrapped_file._file = StringIO()
+            #wrapped_file._file = StringIO()
+            wrapped_file._file = six.BytesIO()
             self.__is_spooled = True
         except AttributeError:
             ntf_args = (mode,bufsize) + args
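The many `b("...")` wrappers above come from six: `six.b()` yields a byte string on both interpreters, which is what the internal read/write buffers of FileLikeBase must now hold. A quick illustration, not part of the patch:

    from six import b

    empty = b("")      # str on Python 2, bytes on Python 3
    newline = b("\n")
    assert isinstance(empty, bytes)          # bytes is an alias of str on Python 2
    assert b("abc") + newline == b("abc\n")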
......
@@ -1170,7 +1170,7 @@ class FTPFS(FS):
         self.ftp.storbinary('STOR %s' % _encode(path), data, blocksize=chunk_size)

     @ftperrors
-    def getcontents(self, path):
+    def getcontents(self, path, mode="rb"):
         path = normpath(path)
         contents = StringIO()
         self.ftp.retrbinary('RETR %s' % _encode(path), contents.write, blocksize=1024*64)
......
@@ -20,6 +20,9 @@
 from fs.filelike import StringIO
 from os import SEEK_END
 import threading

+import six
+
 def _check_mode(mode, mode_chars):
     for c in mode_chars:
@@ -264,10 +267,11 @@ class MemoryFS(FS):
     def __str__(self):
         return "<MemoryFS>"

-    __repr__ = __str__
+    def __repr__(self):
+        return "MemoryFS()"

     def __unicode__(self):
-        return unicode(self.__str__())
+        return "<MemoryFS>"

     @synchronize
     def _get_dir_entry(self, dirpath):
@@ -600,7 +604,7 @@ class MemoryFS(FS):
             dst_dir_entry.xattrs.update(src_xattrs)

     @synchronize
-    def getcontents(self, path):
+    def getcontents(self, path, mode="rb"):
         dir_entry = self._get_dir_entry(path)
         if dir_entry is None:
             raise ResourceNotFoundError(path)
......
@@ -91,7 +91,7 @@ class MountFS(FS):
     __repr__ = __str__

     def __unicode__(self):
-        return unicode(self.__str__())
+        return u"<%s [%s]>" % (self.__class__.__name__,self.mount_tree.items(),)

     def _delegate(self, path):
         path = abspath(normpath(path))
......
@@ -270,7 +270,7 @@ class OpenerRegistry(object):
             file_object.fs = fs
         return file_object

-    def getcontents(self, fs_url):
+    def getcontents(self, fs_url, mode="rb"):
         """Gets the contents from a given FS url (if it references a file)

         :param fs_url: a FS URL e.g. ftp://ftp.mozilla.org/README
@@ -278,7 +278,7 @@ class OpenerRegistry(object):
         """
         fs, path = self.parse(fs_url)
-        return fs.getcontents(path)
+        return fs.getcontents(path, mode)

     def opendir(self, fs_url, writeable=True, create_dir=False):
         """Opens an FS object from an FS URL
......
@@ -205,7 +205,8 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):

     @convert_os_errors
     def open(self, path, mode="r", **kwargs):
-        mode = filter(lambda c: c in "rwabt+",mode)
+        #mode = filter(lambda c: c in "rwabt+",mode)
+        mode = ''.join(c for c in mode if c in 'rwabt+')
         sys_path = self.getsyspath(path)
         try:
             return open(sys_path, mode, kwargs.get("buffering", -1))
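The reason for this change: on Python 3, `filter()` returns a lazy iterator rather than a string, so its result can no longer be handed to the builtin `open()` as a mode. Joining a generator expression behaves identically on Python 2 and 3:

    mode = "rb+U"
    cleaned = ''.join(c for c in mode if c in 'rwabt+')
    assert cleaned == "rb+"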
......
@@ -39,6 +39,7 @@
 from fs import SEEK_SET, SEEK_CUR, SEEK_END

 _SENTINAL = object()

+from six import PY3, b

 class RemoteFileBuffer(FileWrapper):
     """File-like object providing buffer for local file operations.
@@ -186,7 +187,7 @@ class RemoteFileBuffer(FileWrapper):
             self.wrapped_file.seek(curpos)

     def _read(self, length=None):
-        if length < 0:
+        if length is not None and length < 0:
             length = None
         with self._lock:
             self._fillbuffer(length)
@@ -668,7 +669,7 @@ class CacheFSMixin(FS):
     def getsize(self,path):
         return self.getinfo(path)["size"]

-    def setcontents(self, path, contents="", chunk_size=64*1024):
+    def setcontents(self, path, contents=b(""), chunk_size=64*1024):
         supsc = super(CacheFSMixin,self).setcontents
         res = supsc(path, contents, chunk_size=chunk_size)
         with self.__cache_lock:
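The `_read()` guard changes because ordering comparisons against `None` raise `TypeError` on Python 3, whereas CPython 2 quietly treated `None` as smaller than any number. In isolation:

    length = None
    # Python 2: None < 0 is True, so length would be reset to None anyway.
    # Python 3: None < 0 raises TypeError, hence the explicit None check first.
    if length is not None and length < 0:
        length = None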
......
@@ -155,11 +155,11 @@ class S3FS(FS):
         super(S3FS,self).__setstate__(state)
         self._tlocal = thread_local()

-    def __str__(self):
+    def __repr__(self):
         args = (self.__class__.__name__,self._bucket_name,self._prefix)
         return '<%s: %s:%s>' % args

-    __repr__ = __str__
+    __str__ = __repr__

     def _s3path(self,path):
         """Get the absolute path to a file stored in S3."""
......
@@ -177,7 +177,7 @@ class SFTPFS(FS):
             self._transport = connection

     def __unicode__(self):
-        return '<SFTPFS: %s>' % self.desc('/')
+        return u'<SFTPFS: %s>' % self.desc('/')

     @classmethod
     def _agent_auth(cls, transport, username):
......
@@ -49,10 +49,10 @@ class TempFS(OSFS):
         self._cleaned = False
         super(TempFS, self).__init__(self._temp_dir, dir_mode=dir_mode, thread_synchronize=thread_synchronize)

-    def __str__(self):
+    def __repr__(self):
         return '<TempFS: %s>' % self._temp_dir

-    __repr__ = __str__
+    __str__ = __repr__

     def __unicode__(self):
         return u'<TempFS: %s>' % self._temp_dir
......
@@ -20,6 +20,10 @@
 from fs.errors import *
 from fs import rpcfs
 from fs.expose.xmlrpc import RPCFSServer

+import six
+from six import PY3, b
+
 class TestRPCFS(unittest.TestCase, FSTestCases, ThreadingTestCases):

     def makeServer(self,fs,addr):
@@ -106,7 +110,7 @@ class TestRPCFS(unittest.TestCase, FSTestCases, ThreadingTestCases):
                 sock = socket.socket(af, socktype, proto)
                 sock.settimeout(.1)
                 sock.connect(sa)
-                sock.send("\n")
+                sock.send(b("\n"))
             except socket.error, e:
                 pass
             finally:
@@ -114,10 +118,16 @@ class TestRPCFS(unittest.TestCase, FSTestCases, ThreadingTestCases):
                     sock.close()

-from fs import sftpfs
-from fs.expose.sftp import BaseSFTPServer
+try:
+    from fs import sftpfs
+    from fs.expose.sftp import BaseSFTPServer
+except ImportError:
+    if not PY3:
+        raise

 class TestSFTPFS(TestRPCFS):

+    __test__ = not PY3
+
     def makeServer(self,fs,addr):
         return BaseSFTPServer(addr,fs)
@@ -209,7 +219,7 @@ if dokan.is_available:
     def test_safety_wrapper(self):
         rawfs = MemoryFS()
         safefs = dokan.Win32SafetyFS(rawfs)
-        rawfs.setcontents("autoRun.inf","evilcodeevilcode")
+        rawfs.setcontents("autoRun.inf", b("evilcodeevilcode"))
         self.assertTrue(safefs.exists("_autoRun.inf"))
         self.assertTrue("autoRun.inf" not in safefs.listdir("/"))
         safefs.setcontents("file:stream","test")
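The test changes follow one pattern: imports of backends whose dependencies are not yet available on Python 3 are wrapped in try/except so the module still imports, and the affected test classes set `__test__` so nose skips them there. A simplified, hypothetical sketch of that pattern (the real TestSFTPFS derives from TestRPCFS):

    import unittest
    from six import PY3

    try:
        from fs import sftpfs                      # needs paramiko, unavailable on Python 3 here
        from fs.expose.sftp import BaseSFTPServer
    except ImportError:
        if not PY3:
            raise                                  # on Python 2 a missing backend is a real failure

    class TestSFTPFS(unittest.TestCase):           # simplified stand-in for the real class
        __test__ = not PY3                         # nose collects the class only when this is True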
......
@@ -12,9 +12,13 @@
 import time
 from os.path import abspath
 import urllib

+from six import PY3
+
 try:
     from pyftpdlib import ftpserver
 except ImportError:
-    raise ImportError("Requires pyftpdlib <http://code.google.com/p/pyftpdlib/>")
+    if not PY3:
+        raise ImportError("Requires pyftpdlib <http://code.google.com/p/pyftpdlib/>")

 from fs.path import *
@@ -24,6 +28,8 @@ from fs import ftpfs
 ftp_port = 30000
 class TestFTPFS(unittest.TestCase, FSTestCases, ThreadingTestCases):

+    __test__ = not PY3
+
     def setUp(self):
         global ftp_port
         ftp_port += 1
......
@@ -22,6 +22,7 @@
 from fs.tempfs import TempFS
 from fs.path import *
 from fs.local_functools import wraps

+from six import PY3, b

 class RemoteTempFS(TempFS):
     """
@@ -79,7 +80,7 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         self.fs.close()
         self.fakeOff()

-    def fake_setcontents(self, path, content='', chunk_size=16*1024):
+    def fake_setcontents(self, path, content=b(''), chunk_size=16*1024):
         ''' Fake replacement for RemoteTempFS setcontents() '''
         raise self.FakeException("setcontents should not be called here!")
@@ -99,8 +100,8 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         '''
         Tests on-demand loading of remote content in RemoteFileBuffer
         '''
-        contents = "Tristatricettri stribrnych strikacek strikalo" + \
-                   "pres tristatricettri stribrnych strech."
+        contents = b("Tristatricettri stribrnych strikacek strikalo") + \
+                   b("pres tristatricettri stribrnych strech.")
         f = self.fs.open('test.txt', 'wb')
         f.write(contents)
         f.close()
@@ -136,10 +137,10 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         # Rollback position 5 characters before eof
         f._rfile.seek(len(contents[:-5]))
         # Write 10 new characters (will make contents longer for 5 chars)
-        f.write(u'1234567890')
+        f.write(b('1234567890'))
         f.flush()

         # We are on the end of file (and buffer not serve anything anymore)
-        self.assertEquals(f.read(), '')
+        self.assertEquals(f.read(), b(''))
         f.close()

         self.fakeOn()
@@ -147,7 +148,7 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         # Check if we wrote everything OK from
         # previous writing over the remote buffer edge
         f = self.fs.open('test.txt', 'rb')
-        self.assertEquals(f.read(), contents[:-5] + u'1234567890')
+        self.assertEquals(f.read(), contents[:-5] + b('1234567890'))
         f.close()
         self.fakeOff()
@@ -161,19 +162,19 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         '''
         self.fakeOn()

         f = self.fs.open('test.txt', 'wb', write_on_flush=True)
-        f.write('Sample text')
+        f.write(b('Sample text'))
         self.assertRaises(self.FakeException, f.flush)
-        f.write('Second sample text')
+        f.write(b('Second sample text'))
         self.assertRaises(self.FakeException, f.close)
         self.fakeOff()
         f.close()
         self.fakeOn()

         f = self.fs.open('test.txt', 'wb', write_on_flush=False)
-        f.write('Sample text')
+        f.write(b('Sample text'))
         # FakeException is not raised, because setcontents is not called
         f.flush()
-        f.write('Second sample text')
+        f.write(b('Second sample text'))
         self.assertRaises(self.FakeException, f.close)
         self.fakeOff()
@@ -183,8 +184,8 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         back to remote destination and opened file is still
         in good condition.
         '''
-        contents = "Zlutoucky kun upel dabelske ody."
-        contents2 = 'Ententyky dva spaliky cert vyletel z elektriky'
+        contents = b("Zlutoucky kun upel dabelske ody.")
+        contents2 = b('Ententyky dva spaliky cert vyletel z elektriky')

         f = self.fs.open('test.txt', 'wb')
         f.write(contents)
@@ -195,17 +196,17 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
         self.assertEquals(f.read(10), contents[:10])
         self.assertEquals(f._rfile.tell(), 10)
         # Write garbage to file to mark it as _changed
-        f.write('x')
+        f.write(b('x'))
         # This should read the rest of file and store file back to again.
         f.flush()
         f.seek(0)
         # Try if we have unocrrupted file locally...
-        self.assertEquals(f.read(), contents[:10] + 'x' + contents[11:])
+        self.assertEquals(f.read(), contents[:10] + b('x') + contents[11:])
         f.close()

         # And if we have uncorrupted file also on storage
         f = self.fs.open('test.txt', 'rb')
-        self.assertEquals(f.read(), contents[:10] + 'x' + contents[11:])
+        self.assertEquals(f.read(), contents[:10] + b('x') + contents[11:])
         f.close()

         # Now try it again, but write garbage behind edge of remote file
@@ -243,7 +244,7 @@ class TestCacheFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
         self.fs.cache_timeout = None
         try:
             self.assertFalse(self.fs.isfile("hello"))
-            self.wrapped_fs.setcontents("hello","world")
+            self.wrapped_fs.setcontents("hello",b("world"))
             self.assertTrue(self.fs.isfile("hello"))
             self.wrapped_fs.remove("hello")
             self.assertTrue(self.fs.isfile("hello"))
@@ -257,11 +258,11 @@ class TestCacheFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
         self.fs.cache_timeout = None
         try:
             self.assertFalse(self.fs.isfile("hello"))
-            self.wrapped_fs.setcontents("hello","world")
+            self.wrapped_fs.setcontents("hello",b("world"))
             self.assertTrue(self.fs.isfile("hello"))
             self.wrapped_fs.remove("hello")
             self.assertTrue(self.fs.isfile("hello"))
-            self.wrapped_fs.setcontents("hello","world")
+            self.wrapped_fs.setcontents("hello",b("world"))
             self.assertTrue(self.fs.isfile("hello"))
             self.fs.remove("hello")
             self.assertFalse(self.fs.isfile("hello"))
@@ -315,7 +316,7 @@ class DisconnectingFS(WrapFS):
             time.sleep(random.random()*0.1)
             self._connected = not self._connected

-    def setcontents(self, path, contents='', chunk_size=64*1024):
+    def setcontents(self, path, contents=b(''), chunk_size=64*1024):
         return self.wrapped_fs.setcontents(path, contents)

     def close(self):
......
@@ -13,7 +13,13 @@
 import unittest
 from fs.tests import FSTestCases, ThreadingTestCases
 from fs.path import *

-from fs import s3fs
+from six import PY3
+
+try:
+    from fs import s3fs
+except ImportError:
+    if not PY3:
+        raise

 class TestS3FS(unittest.TestCase,FSTestCases,ThreadingTestCases):

     # Disable the tests by default
......
@@ -29,6 +29,8 @@ if sys.platform == "win32":
 else:
     watch_win32 = None

+import six
+from six import PY3, b
+
 class WatcherTestCases:
     """Testcases for filesystems providing change watcher support.
@@ -88,11 +90,11 @@ class WatcherTestCases:

     def test_watch_readfile(self):
         self.setupWatchers()
-        self.fs.setcontents("hello","hello world")
+        self.fs.setcontents("hello", b("hello world"))
         self.assertEventOccurred(CREATED,"/hello")
         self.clearCapturedEvents()
         old_atime = self.fs.getinfo("hello").get("accessed_time")
-        self.assertEquals(self.fs.getcontents("hello"),"hello world")
+        self.assertEquals(self.fs.getcontents("hello"), b("hello world"))
         if not isinstance(self.watchfs,PollingWatchableFS):
             # Help it along by updting the atime.
             # TODO: why is this necessary?
@@ -121,17 +123,17 @@ class WatcherTestCases:

     def test_watch_writefile(self):
         self.setupWatchers()
-        self.fs.setcontents("hello","hello world")
+        self.fs.setcontents("hello", b("hello world"))
         self.assertEventOccurred(CREATED,"/hello")
         self.clearCapturedEvents()
-        self.fs.setcontents("hello","hello again world")
+        self.fs.setcontents("hello", b("hello again world"))
         self.assertEventOccurred(MODIFIED,"/hello")

     def test_watch_single_file(self):
-        self.fs.setcontents("hello","hello world")
+        self.fs.setcontents("hello", b("hello world"))
         events = []
         self.watchfs.add_watcher(events.append,"/hello",(MODIFIED,))
-        self.fs.setcontents("hello","hello again world")
+        self.fs.setcontents("hello", b("hello again world"))
         self.fs.remove("hello")
         self.waitForEvents()
         for evt in events:
@@ -140,10 +142,10 @@ class WatcherTestCases:

     def test_watch_single_file_remove(self):
         self.fs.makedir("testing")
-        self.fs.setcontents("testing/hello","hello world")
+        self.fs.setcontents("testing/hello", b("hello world"))
         events = []
         self.watchfs.add_watcher(events.append,"/testing/hello",(REMOVED,))
-        self.fs.setcontents("testing/hello","hello again world")
+        self.fs.setcontents("testing/hello", b("hello again world"))
         self.waitForEvents()
         self.fs.remove("testing/hello")
         self.waitForEvents()
@@ -154,7 +156,7 @@ class WatcherTestCases:
     def test_watch_iter_changes(self):
         changes = iter_changes(self.watchfs)
         self.fs.makedir("test1")
-        self.fs.setcontents("test1/hello","hello world")
+        self.fs.setcontents("test1/hello", b("hello world"))
         self.waitForEvents()
         self.fs.removedir("test1",force=True)
         self.waitForEvents()
......
@@ -15,11 +15,15 @@
 import tempfile
 from fs import osfs
 from fs.errors import *
 from fs.path import *
-from fs import wrapfs
+
+import six
+from six import PY3, b
+
+from fs import wrapfs

 class TestWrapFS(unittest.TestCase, FSTestCases, ThreadingTestCases):

+    __test__ = False
+
     def setUp(self):
         self.temp_dir = tempfile.mkdtemp(u"fstest")
         self.fs = wrapfs.WrapFS(osfs.OSFS(self.temp_dir))
@@ -67,7 +71,7 @@ class TestLimitSizeFS(TestWrapFS):
         for i in xrange(1024*2):
             try:
                 total_written += 1030
-                self.fs.setcontents("file"+str(i),"C"*1030)
+                self.fs.setcontents("file %i" % i, b("C")*1030)
             except StorageSpaceError:
                 self.assertTrue(total_written > 1024*1024*2)
                 self.assertTrue(total_written < 1024*1024*2 + 1030)
......
@@ -120,8 +120,8 @@ class WrapFS(FS):
     def __unicode__(self):
         return u"<%s: %s>" % (self.__class__.__name__,self.wrapped_fs,)

-    def __str__(self):
-        return unicode(self).encode(sys.getdefaultencoding(),"replace")
+    #def __str__(self):
+    #    return unicode(self).encode(sys.getdefaultencoding(),"replace")

     @rewrite_errors
@@ -155,10 +155,11 @@ class WrapFS(FS):
         # We can't pass setcontents() through to the wrapped FS if the
         # wrapper has defined a _file_wrap method, as it would bypass
         # the file contents wrapping.
-        if self._file_wrap.im_func is WrapFS._file_wrap.im_func:
+        #if self._file_wrap.im_func is WrapFS._file_wrap.im_func:
+        if getattr(self.__class__, '_file_wrap', None) is getattr(WrapFS, '_file_wrap', None):
             return self.wrapped_fs.setcontents(self._encode(path), data, chunk_size=chunk_size)
         else:
-            return super(WrapFS,self).setcontents(path, data, chunk_size)
+            return super(WrapFS,self).setcontents(path, data, chunk_size=chunk_size)

     @rewrite_errors
     def createfile(self, path):
......
@@ -95,7 +95,7 @@ class LimitSizeFS(WrapFS):
     def setcontents(self, path, data, chunk_size=64*1024):
         f = None
         try:
-            f = self.open(path, 'w')
+            f = self.open(path, 'wb')
             if hasattr(data, 'read'):
                 chunk = data.read(chunk_size)
                 while chunk:
......
@@ -37,7 +37,7 @@ class SubFS(WrapFS):
         return u'<SubFS: %s/%s>' % (self.wrapped_fs, self.sub_dir.lstrip('/'))

     def __repr__(self):
-        return str(self)
+        return "SubFS(%r, %r)" % (self.wrapped_fs, self.sub_dir)

     def desc(self, path):
         if path in ('', '/'):
......
@@ -142,7 +142,7 @@ class ZipFS(FS):
         return "<ZipFS: %s>" % self.zip_path

     def __unicode__(self):
-        return unicode(self.__str__())
+        return u"<ZipFS: %s>" % self.zip_path

     def _parse_resource_list(self):
         for path in self.zf.namelist():
@@ -209,7 +209,7 @@ class ZipFS(FS):
             raise ValueError("Mode must contain be 'r' or 'w'")

     @synchronize
-    def getcontents(self, path):
+    def getcontents(self, path, mode="rb"):
         if not self.exists(path):
             raise ResourceNotFoundError(path)
         path = normpath(relpath(path))
......
 #!/usr/bin/env python

-from distutils.core import setup
-from fs import __version__ as VERSION
+#from distribute_setup import use_setuptools
+#use_setuptools()
+
+from setuptools import setup
+import sys
+PY3 = sys.version_info >= (3,)
+
+VERSION = "0.4.1"

 COMMANDS = ['fscat',
             'fscp',
@@ -17,7 +23,7 @@ COMMANDS = ['fscat',

 classifiers = [
-    'Development Status :: 3 - Alpha',
+    "Development Status :: 5 - Production/Stable",
     'Intended Audience :: Developers',
     'License :: OSI Approved :: BSD License',
     'Operating System :: OS Independent',
@@ -30,7 +36,12 @@ long_desc = """Pyfilesystem is a module that provides a simplified common interf
 Even if you only need to work with file and directories on the local hard-drive, Pyfilesystem can simplify your code and make it more robust -- with the added advantage that you can change where the files are located by changing a single line of code.
 """

-setup(name='fs',
+extra = {}
+if PY3:
+    extra["use_2to3"] = True
+
+setup(install_requires=['distribute'],
+      name='fs',
       version=VERSION,
       description="Filesystem abstraction",
       long_description=long_desc,
@@ -55,5 +66,6 @@ setup(name='fs',
                 'fs.commands'],
       scripts=['fs/commands/%s' % command for command in COMMANDS],
       classifiers=classifiers,
+      **extra
       )
 [tox]
-envlist = py25,py26,py27
+envlist = py25,py26,py27,py32

 [testenv]
-deps = dexml
+deps = distribute
+       six
+       dexml
        paramiko
        boto
        nose
        mako
        pyftpdlib
-commands = nosetests -v \
+changedir=.tox
+commands = nosetests fs.tests -v \
     []

 [testenv:py25]
-deps = dexml
+deps = distribute
+       six
+       dexml
        paramiko
        boto
        nose
        mako
        pyftpdlib
        simplejson
+
+[testenv:py32]
+commands = nosetests fs.tests -v \
+    []
+deps = distribute
+       six
+       dexml
+       nose