Commit d9986180 by willmcgugan

Major ftp improvements

parent 7f1e44a3
......@@ -419,7 +419,7 @@ class FS(object):
:raises ResourceInvalidError: If the path exists, but is not a directory
"""
path = normpath(path)
def getinfo(p):
try:
if full or absolute:
......@@ -783,7 +783,7 @@ class FS(object):
raise OperationFailedError("get size of resource", path)
return size
def copy(self, src, dst, overwrite=False, chunk_size=16384):
def copy(self, src, dst, overwrite=False, chunk_size=1024*64):
"""Copies a file from src to dst.
:param src: the source path
......
......@@ -6,14 +6,14 @@ from fs.commands.runner import Command
from collections import defaultdict
import sys
class FSList(Command):
class FSls(Command):
usage = """fsls [OPTIONS]... [PATH]
List contents of [PATH]"""
def get_optparse(self):
optparse = super(FSList, self).get_optparse()
optparse = super(FSls, self).get_optparse()
optparse.add_option('-u', '--full', dest='fullpath', action="store_true", default=False,
help="output full path", metavar="FULL")
optparse.add_option('-s', '--syspath', dest='syspath', action="store_true", default=False,
......@@ -38,8 +38,10 @@ List contents of [PATH]"""
dir_paths = []
file_paths = []
fs_used = set()
for fs_url in args:
fs, path = self.open_fs(fs_url)
fs_used.add(fs)
path = path or '.'
wildcard = None
......@@ -62,6 +64,12 @@ List contents of [PATH]"""
full=options.fullpath,
files_only=True)
try:
for fs in fs_used:
fs.close()
except FSError:
pass
if options.syspath:
dir_paths = [fs.getsyspath(path, allow_none=True) or path for path in dir_paths]
file_paths = [fs.getsyspath(path, allow_none=True) or path for path in file_paths]
......@@ -154,7 +162,7 @@ List contents of [PATH]"""
output('\n')
def run():
return FSList().run()
return FSls().run()
if __name__ == "__main__":
sys.exit(run())
......@@ -22,6 +22,8 @@ Mounts a file system on a system path"""
help="run the mount process in the foreground", metavar="FOREGROUND")
optparse.add_option('-u', '--unmount', dest='unmount', action="store_true", default=False,
help="unmount path", metavar="UNMOUNT")
optparse.add_option('-n', '--nocache', dest='nocache', action="store_true", default=False,
help="do not cache network filesystems", metavar="NOCACHE")
return optparse
......@@ -59,6 +61,8 @@ Mounts a file system on a system path"""
return 1
fs = fs.opendir(path)
path = '/'
if not options.nocache:
fs.cache_hint(True)
if not os.path.exists(mount_path):
os.makedirs(mount_path)
from fs.expose import fuse
......@@ -67,9 +71,10 @@ Mounts a file system on a system path"""
mount_path,
foreground=True)
else:
if not os.fork():
mp = fuse.mount(fs,
mount_path,
foreground=False)
foreground=True)
......
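The hunk above changes fsmount so that, instead of asking fuse.mount() to daemonize itself, it forks first and runs the mount in the foreground of the child process. A minimal sketch of that fork-to-background pattern, using only the standard library (run_in_background and the sample task are illustrative, not part of this commit):

    import os
    import sys

    def run_in_background(task):
        # os.fork() returns 0 in the child and the child's pid in the parent,
        # so the parent returns immediately while the child runs the blocking task.
        pid = os.fork()
        if pid == 0:
            task()       # e.g. a foreground FUSE mount loop that blocks until unmount
            sys.exit(0)  # never let the child fall back into the caller's code
        return pid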
......@@ -246,13 +246,9 @@ class FSOperations(Operations):
def readdir(self, path, fh=None):
path = path.decode(NATIVE_ENCODING)
entries = ['.', '..']
#print
#print self.fs
for (nm,info) in self.fs.listdirinfo(path):
#print "*", repr(nm), info
self._fill_stat_dict(pathjoin(path,nm),info)
entries.append((nm.encode(NATIVE_ENCODING),info,0))
#print
return entries
@handle_fs_errors
......
......@@ -11,7 +11,7 @@ __all__ = ['FTPFS']
import fs
from fs.base import *
from fs.errors import *
from fs.path import pathsplit, abspath, dirname, recursepath, normpath
from fs.path import pathsplit, abspath, dirname, recursepath, normpath, pathjoin, isbase
from fs.remote import RemoteFileBuffer
from ftplib import FTP, error_perm, error_temp, error_proto, error_reply
......@@ -545,7 +545,7 @@ class _FTPFile(object):
self.file_size = ftpfs.getsize(path)
self.conn = None
path = _encode(path)
path = _encode(abspath(path))
#self._lock = ftpfs._lock
self._start_file(mode, path)
......@@ -610,6 +610,7 @@ class _FTPFile(object):
remaining_data -= chunk_size
self.write_pos += chunk_size
def __enter__(self):
return self
......@@ -618,7 +619,7 @@ class _FTPFile(object):
#@synchronize
def flush(self):
return
self.ftpfs._on_file_written(self.path)
@synchronize
def seek(self, pos, where=fs.SEEK_SET):
......@@ -670,6 +671,7 @@ class _FTPFile(object):
@synchronize
def truncate(self, size=None):
self.ftpfs._on_file_written(self.path)
# Inefficient, but I don't know how else to implement this
if size is None:
size = self.tell()
......@@ -697,6 +699,8 @@ class _FTPFile(object):
@synchronize
def close(self):
if 'w' in self.mode or 'a' in self.mode or '+' in self.mode:
self.ftpfs._on_file_written(self.path)
if self.conn is not None:
self.conn.close()
self.conn = None
......@@ -704,12 +708,11 @@ class _FTPFile(object):
if self.ftp is not None:
self.ftp.close()
self.closed = True
if 'w' in self.mode or 'a' in self.mode:
self.ftpfs._on_file_written(self.path)
def __iter__(self):
return self.next()
@synchronize
def next(self):
""" Line iterator
......@@ -758,8 +761,6 @@ def ftperrors(f):
self._leave_dircache()
finally:
self._lock.release()
if not self.use_dircache:
self.clear_dircache()
return ret
return deco
......@@ -769,10 +770,20 @@ def _encode(s):
return s.encode('utf-8')
return s
class _DirCache(dict):
def __init__(self):
super(_DirCache, self).__init__()
self.count = 0
def addref(self):
self.count += 1
return self.count
class FTPFS(FS):
def decref(self):
self.count -= 1
return self.count
_locals = threading.local()
class FTPFS(FS):
_meta = { 'network' : True,
'virtual': False,
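The new _DirCache above is a dict with a reference count: _enter_dircache()/_leave_dircache() (further down in this diff) bump and release it, so cached listings are trusted only while a cached operation is in flight, or while cache_hint(True) keeps them pinned. A rough standalone sketch of the same enter/leave pattern (all names here are illustrative):

    class RefCountedCache(dict):
        """dict plus a counter recording how many callers currently want caching."""
        def __init__(self):
            super(RefCountedCache, self).__init__()
            self.count = 0
        def addref(self):
            self.count += 1
            return self.count
        def decref(self):
            self.count -= 1
            return self.count

    cache = RefCountedCache()

    def listing(path, fetch):
        # only trust cached entries while at least one caller holds a reference
        if cache.count and path in cache:
            return cache[path]
        result = fetch(path)
        cache[path] = result
        return result

    cache.addref()
    try:
        listing('/pub', lambda p: ['a.txt', 'b.txt'])   # fetched and cached
        listing('/pub', lambda p: ['a.txt', 'b.txt'])   # served from the cache
    finally:
        if not cache.decref():
            cache.clear()   # drop everything once the last reference is released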
......@@ -799,7 +810,7 @@ class FTPFS(FS):
:param timeout: Timeout in seconds
:param port: Port to connection (default is 21)
:param dircache: If True then directory information will be cached,
which will speed up operations such as getinfo, isdi, isfile, but
which will speed up operations such as getinfo, isdir, isfile, but
changes to the ftp file structure will not be visible until
`~fs.ftpfs.FTPFS.clear_dircache` is called
:param dircache: If True directory information will be cached for fast access
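With dircache enabled, repeated getinfo/isdir/isfile calls are answered from the cached LIST output, and changes made outside this FTPFS instance stay invisible until clear_dircache() is called. A hedged usage sketch, modelled on the constructor call in the updated tests (host, credentials and paths are placeholders):

    from fs.ftpfs import FTPFS

    ftp_fs = FTPFS('ftp.example.org', 'user', 'secret', port=21,
                   dircache=True, timeout=5.0)
    ftp_fs.cache_hint(True)          # keep listings cached across calls as well
    print ftp_fs.listdir('/')        # first call populates the cache
    print ftp_fs.isdir('/pub')       # answered from the cached listing
    ftp_fs.clear_dircache('/pub')    # force a re-read if the server changed underneath
    ftp_fs.close()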
......@@ -814,55 +825,47 @@ class FTPFS(FS):
self.passwd = passwd
self.acct = acct
self.timeout = timeout
self.default_timeout = timeout is _GLOBAL_DEFAULT_TIMEOUT
self.use_dircache = dircache
self.get_dircache()
self._lock = threading.RLock()
self._init_dircache()
self._cache_hint = False
self._locals._ftp = None
self._thread_ftps = set()
try:
self.ftp
except FSError:
self.closed = True
raise
def _init_dircache(self):
self.dircache = _DirCache()
@synchronize
def cache_hint(self, enabled):
self._cache_hint = enabled
self._cache_hint = bool(enabled)
@synchronize
def _enter_dircache(self):
self.get_dircache()
count = getattr(self._locals, '_dircache_count', 0)
count += 1
self._locals._dircache_count = count
self.dircache.addref()
@synchronize
def _leave_dircache(self):
self._locals._dircache_count -= 1
if not self._locals._dircache_count and not self._cache_hint:
self.dircache.decref()
if self.use_dircache:
if not self.dircache.count and not self._cache_hint:
self.clear_dircache()
assert self._locals._dircache_count >= 0, "dircache count should never be negative"
@synchronize
def get_dircache(self):
dircache = getattr(self._locals, '_dircache', None)
if dircache is None:
dircache = {}
self._locals._dircache = dircache
self._locals._dircache_count = 0
return dircache
else:
self.clear_dircache()
assert self.dircache.count >= 0, "dircache count should never be negative"
@synchronize
def _on_file_written(self, path):
self.clear_dircache(dirname(path))
self.refresh_dircache(dirname(path))
@synchronize
def _readdir(self, path):
dircache = self.get_dircache()
dircache_count = self._locals._dircache_count
if dircache_count:
cached_dirlist = dircache.get(path)
path = normpath(path)
if self.dircache.count:
cached_dirlist = self.dircache.get(path)
if cached_dirlist is not None:
return cached_dirlist
dirlist = {}
......@@ -870,19 +873,19 @@ class FTPFS(FS):
parser = FTPListDataParser()
def on_line(line):
#print repr(line)
if not isinstance(line, unicode):
line = line.decode('utf-8')
info = parser.parse_line(line)
if info:
info = info.__dict__
if info['name'] not in ('.', '..'):
dirlist[info['name']] = info
try:
self.ftp.dir(_encode(path), on_line)
except error_reply:
pass
dircache[path] = dirlist
self.dircache[path] = dirlist
return dirlist
......@@ -895,15 +898,28 @@ class FTPFS(FS):
None (the default)
"""
dircache = self.get_dircache()
if not paths:
dircache.clear()
self.dircache.clear()
else:
remove_paths = []
dircache = self.dircache
paths = [normpath(path) for path in paths]
for cached_path in dircache.keys():
for path in paths:
dircache.pop(path, None)
if isbase(cached_path, path):
dircache.pop(cached_path, None)
break
@synchronize
def refresh_dircache(self, *paths):
for path in paths:
path = abspath(normpath(path))
self.dircache.pop(path, None)
@synchronize
def _check_path(self, path):
path = normpath(path)
base, fname = pathsplit(abspath(path))
dirlist = self._readdir(base)
if fname and fname not in dirlist:
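This commit splits cache invalidation in two: clear_dircache() drops a directory and everything below it (using the new isbase() helper), while refresh_dircache() pops only the exact paths named, which is what the write paths (_on_file_written, open, remove, rename) now use. A small sketch of the difference, assuming a plain dict keyed by normalised paths:

    cache = {'/a': {}, '/a/b': {}, '/a/b/c': {}, '/d': {}}

    def isbase(p1, p2):
        # mirrors fs.path.isbase from this commit: is p2 an ancestor of (or equal to) p1?
        p1 = p1.rstrip('/') + '/'
        p2 = p2.rstrip('/') + '/'
        return p1 == p2 or p1.startswith(p2)

    def clear(*paths):
        # recursive: forget the path and every cached descendant
        for cached in list(cache):
            if any(isbase(cached, p) for p in paths):
                cache.pop(cached, None)

    def refresh(*paths):
        # shallow: forget only the directories named
        for p in paths:
            cache.pop(p, None)

    refresh('/a/b')   # leaves '/a' and '/a/b/c' cached
    clear('/a')       # drops '/a' and '/a/b/c' as well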
......@@ -911,31 +927,30 @@ class FTPFS(FS):
return dirlist, fname
def _get_dirlist(self, path):
path = normpath(path)
base, fname = pathsplit(abspath(path))
dirlist = self._readdir(base)
return dirlist, fname
@synchronize
@ftperrors
def get_ftp(self):
if getattr(self._locals, '_ftp', None) is None:
self._locals._ftp = self._open_ftp()
ftp = self._locals._ftp
self._thread_ftps.add(ftp)
return self._locals._ftp
@synchronize
def set_ftp(self, ftp):
self._locals._ftp = ftp
ftp = property(get_ftp, set_ftp)
if self.closed:
return None
if not getattr(self, '_ftp', None):
self._ftp = self._open_ftp()
return self._ftp
@synchronize
ftp = property(get_ftp)
@ftperrors
def _open_ftp(self):
try:
ftp = FTP()
if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
ftp.connect(self.host, self.port, self.timeout)
else:
if self.default_timeout:
ftp.connect(self.host, self.port)
else:
ftp.connect(self.host, self.port, self.timeout)
ftp.login(self.user, self.passwd, self.acct)
except socket_error, e:
raise RemoteConnectionError(str(e), details=e)
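The rewritten get_ftp() drops the per-thread connection bookkeeping (_locals, _thread_ftps) in favour of a single lazily created connection stored on the instance; the error handling further down sets self._ftp back to None so the next access reconnects. A generic sketch of that lazy-reconnect property pattern (the class and its _open() stub are illustrative only):

    class LazyConnection(object):
        def __init__(self):
            self._conn = None
            self.closed = False

        def _open(self):
            # stands in for FTP.connect()/login(); may raise on failure
            return object()

        @property
        def conn(self):
            if self.closed:
                return None
            if self._conn is None:      # first use, or cleared after an error
                self._conn = self._open()
            return self._conn

        def on_error(self):
            # mirrors the commit: a socket or temporary FTP error forgets the
            # connection so the next attribute access transparently reconnects
            self._conn = None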
......@@ -943,14 +958,16 @@ class FTPFS(FS):
def __getstate__(self):
state = super(FTPFS, self).__getstate__()
del state["_thread_ftps"]
del state['_lock']
state.pop('_ftp', None)
return state
def __setstate__(self,state):
super(FTPFS, self).__setstate__(state)
self._thread_ftps = set()
self.ftp
self._init_dircache()
self._lock = threading.RLock()
#self._ftp = None
#self.ftp
def __str__(self):
return '<FTPFS %s>' % self.host
......@@ -969,11 +986,13 @@ class FTPFS(FS):
"""
if isinstance(exception, socket_error):
self._ftp = None
raise RemoteConnectionError(str(exception), details=exception)
elif isinstance(exception, error_temp):
code, message = str(exception).split(' ', 1)
raise RemoteConnectionError(str(exception), path=path, msg="FTP error: %s (see details)" % str(exception), details=exception)
self._ftp = None
raise RemoteConnectionError(str(exception), path=path, msg="FTP error: %s" % str(exception), details=exception)
elif isinstance(exception, error_perm):
code, message = str(exception).split(' ', 1)
......@@ -982,16 +1001,17 @@ class FTPFS(FS):
raise ResourceNotFoundError(path)
if code == 552:
raise StorageSpaceError
raise PermissionDeniedError(str(exception), path=path, msg="FTP error: %s (see details)" % str(exception), details=exception)
raise PermissionDeniedError(str(exception), path=path, msg="FTP error: %s" % str(exception), details=exception)
raise exception
@ftperrors
def close(self):
for ftp in self._thread_ftps:
ftp.close()
self._thread_ftps.clear()
if not self.closed:
try:
self.ftp.close()
except FSError:
pass
self.closed = True
@ftperrors
......@@ -1002,30 +1022,29 @@ class FTPFS(FS):
if 'r' in mode:
if not self.isfile(path):
raise ResourceNotFoundError(path)
if 'w' in mode or 'a' in mode:
self.clear_dircache(dirname(path))
if 'w' in mode or 'a' in mode or '+' in mode:
self.refresh_dircache(dirname(path))
ftp = self._open_ftp()
f = _FTPFile(self, ftp, path, mode)
f = _FTPFile(self, ftp, normpath(path), mode)
return f
#remote_f = RemoteFileBuffer(self, path, mode, rfile = f)
#return remote_f
@ftperrors
def setcontents(self, path, data, chunk_size=8192):
path = normpath(path)
if isinstance(data, basestring):
data = StringIO(data)
self.ftp.storbinary('STOR %s' % _encode(normpath(path)), data, blocksize=chunk_size)
self.refresh_dircache(dirname(path))
self.ftp.storbinary('STOR %s' % _encode(path), data, blocksize=chunk_size)
@ftperrors
def getcontents(self, path, chunk_size=8192):
if not self.exists(path):
raise ResourceNotFoundError(path=path)
def getcontents(self, path):
contents = StringIO()
self.ftp.retrbinary('RETR %s' % _encode(normpath(path)), contents.write, blocksize=chunk_size)
self.ftp.retrbinary('RETR %s' % _encode(normpath(path)), contents.write, blocksize=1024*16)
return contents.getvalue()
@ftperrors
def exists(self, path):
path = normpath(path)
if path in ('', '/'):
return True
dirlist, fname = self._get_dirlist(path)
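setcontents() and getcontents() above are thin wrappers over ftplib's STOR and RETR commands. A standalone sketch of the same calls against a plain ftplib.FTP connection (server address, credentials and paths are placeholders):

    from StringIO import StringIO
    from ftplib import FTP

    ftp = FTP()
    ftp.connect('ftp.example.org', 21)
    ftp.login('user', 'secret')

    # upload: storbinary reads the file-like object in blocksize-sized chunks
    ftp.storbinary('STOR /pub/hello.txt', StringIO('hello world'), blocksize=8192)

    # download: retrbinary invokes the callback once per received block
    contents = StringIO()
    ftp.retrbinary('RETR /pub/hello.txt', contents.write, blocksize=1024 * 16)
    print contents.getvalue()

    ftp.quit()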
......@@ -1033,6 +1052,7 @@ class FTPFS(FS):
@ftperrors
def isdir(self, path):
path = normpath(path)
if path in ('', '/'):
return True
dirlist, fname = self._get_dirlist(path)
......@@ -1043,6 +1063,7 @@ class FTPFS(FS):
@ftperrors
def isfile(self, path):
path = normpath(path)
if path in ('', '/'):
return False
dirlist, fname = self._get_dirlist(path)
......@@ -1054,6 +1075,7 @@ class FTPFS(FS):
@ftperrors
def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
path = normpath(path)
self.clear_dircache(path)
if not self.exists(path):
raise ResourceNotFoundError(path)
if not self.isdir(path):
......@@ -1062,13 +1084,37 @@ class FTPFS(FS):
return self._listdir_helper(path, paths, wildcard, full, absolute, dirs_only, files_only)
@ftperrors
def listdirinfo(self, path="./",
wildcard=None,
full=False,
absolute=False,
dirs_only=False,
files_only=False):
path = normpath(path)
def getinfo(p):
try:
if full or absolute:
return self.getinfo(p)
else:
return self.getinfo(pathjoin(path,p))
except FSError:
return {}
return [(p, getinfo(p))
for p in self.listdir(path,
wildcard=wildcard,
full=full,
absolute=absolute,
dirs_only=dirs_only,
files_only=files_only)]
@ftperrors
def makedir(self, path, recursive=False, allow_recreate=False):
if path in ('', '/'):
return
def checkdir(path):
self.clear_dircache(dirname(path), path)
self.clear_dircache(dirname(path))
try:
self.ftp.mkd(_encode(path))
except error_reply:
......@@ -1101,7 +1147,7 @@ class FTPFS(FS):
raise ResourceNotFoundError(path)
if not self.isfile(path):
raise ResourceInvalidError(path)
self.clear_dircache(dirname(path))
self.refresh_dircache(dirname(path))
self.ftp.delete(_encode(path))
@ftperrors
......@@ -1125,7 +1171,7 @@ class FTPFS(FS):
self.removedir(rpath, force=force)
except FSError:
pass
self.clear_dircache(dirname(path), path)
self.clear_dircache(dirname(path))
self.ftp.rmd(_encode(path))
except error_reply:
pass
......@@ -1134,17 +1180,19 @@ class FTPFS(FS):
self.removedir(dirname(path), recursive=True)
except DirectoryNotEmptyError:
pass
self.clear_dircache(dirname(path), path)
@ftperrors
def rename(self, src, dst):
self.clear_dircache(dirname(src), dirname(dst), src, dst)
try:
self.refresh_dircache(dirname(src), dirname(dst))
self.ftp.rename(_encode(src), _encode(dst))
except error_perm, exception:
code, message = str(exception).split(' ', 1)
if code == "550":
if not self.exists(dirname(dst)):
raise ParentDirectoryMissingError(dst)
raise
except error_reply:
pass
......@@ -1162,7 +1210,7 @@ class FTPFS(FS):
def getsize(self, path):
size = None
if self._locals._dircache_count:
if self.dircache.count:
dirlist, fname = self._check_path(path)
size = dirlist[fname].get('size')
......@@ -1190,23 +1238,45 @@ class FTPFS(FS):
if not overwrite and self.exists(dst):
raise DestinationExistsError(dst)
self.clear_dircache(dirname(src), dirname(dst))
#self.refresh_dircache(dirname(src), dirname(dst))
try:
self.rename(src, dst)
except error_reply:
pass
except:
self.copy(src, dst)
self.copy(src, dst, overwrite=overwrite)
self.remove(src)
finally:
self.refresh_dircache(src, dirname(src), dst, dirname(dst))
@ftperrors
def copy(self, src, dst, overwrite=False, chunk_size=1024*64):
if not self.isfile(src):
if self.isdir(src):
raise ResourceInvalidError(src, msg="Source is not a file: %(path)s")
raise ResourceNotFoundError(src)
if not overwrite and self.exists(dst):
raise DestinationExistsError(dst)
src_file = None
try:
src_file = self.open(src, "rb")
ftp = self._open_ftp()
ftp.voidcmd('TYPE I')
ftp.storbinary('STOR %s' % _encode((normpath(dst))), src_file, blocksize=chunk_size)
finally:
self.refresh_dircache(dirname(dst))
if src_file is not None:
src_file.close()
@ftperrors
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
self.clear_dircache(src, dst, dirname(src), dirname(dst))
self.clear_dircache(dirname(src), dirname(dst))
super(FTPFS, self).movedir(src, dst, overwrite, ignore_errors, chunk_size)
@ftperrors
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
self.clear_dircache(src, dst, dirname(src), dirname(dst))
self.clear_dircache(dirname(dst))
super(FTPFS, self).copydir(src, dst, overwrite, ignore_errors, chunk_size)
......
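The new copy() streams the source file (opened over one FTP connection) straight into a STOR on a second connection, and move() now falls back to copy-plus-remove when the server rejects the rename. A hedged sketch of the underlying two-connection pattern using plain ftplib (host, credentials and paths are placeholders; error handling is omitted):

    from ftplib import FTP

    def ftp_copy(host, user, passwd, src, dst, chunk_size=1024 * 64):
        # one connection RETRs the source while a second STORs the destination
        reader = FTP(host, user, passwd)
        writer = FTP(host, user, passwd)
        try:
            reader.voidcmd('TYPE I')             # binary mode for the download
            writer.voidcmd('TYPE I')             # binary mode for the upload
            src_conn = reader.transfercmd('RETR %s' % src)
            src_file = src_conn.makefile('rb')   # file-like view of the data socket
            writer.storbinary('STOR %s' % dst, src_file, blocksize=chunk_size)
            src_file.close()
            src_conn.close()
            reader.voidresp()                    # consume the RETR completion reply
        finally:
            reader.close()
            writer.close()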
......@@ -103,14 +103,12 @@ class OpenerRegistry(object):
fs_url = _expand_syspath(fs_url)
path = ''
fs_name, fs_name_params = self.parse_name(fs_name)
opener = self.get_opener(fs_name)
if fs_url is None:
raise OpenerError("Unable to parse '%s'" % orig_url)
fs, fs_path = opener.get_fs(self, fs_name, fs_name_params, fs_url, writeable, create)
if fs_path and iswildcard(fs_path):
......@@ -119,11 +117,6 @@ class OpenerRegistry(object):
fs = fs.opendir(pathname)
return fs, resourcename
#pathname, resourcename = pathsplit(fs_path or '')
#if pathname and resourcename:
# fs = fs.opendir(pathname)
# fs_path = resourcename
fs_path = join(fs_path, path)
pathname, resourcename = pathsplit(fs_path or '')
......@@ -198,13 +191,10 @@ class ZipOpener(Opener):
zip_fs, zip_path = registry.parse(fs_path)
if zip_path is None:
raise OpenerError('File required for zip opener')
if create:
open_mode = 'wb'
if append_zip:
if writeable:
open_mode = 'r+b'
else:
open_mode = 'rb'
zip_file = zip_fs.open(zip_path, mode=open_mode)
username, password, fs_path = registry.parse_credentials(fs_path)
......@@ -213,17 +203,11 @@ class ZipOpener(Opener):
if zip_file is None:
zip_file = fs_path
if append_zip:
mode = 'a'
elif create:
mode = 'w'
else:
mode = 'r'
if writeable:
mode = 'w'
else:
mode = 'a'
allow_zip_64 = fs_name == 'zip64'
allow_zip_64 = fs_name.endswith('64')
zipfs = ZipFS(zip_file, mode=mode, allow_zip_64=allow_zip_64)
return zipfs, None
......@@ -365,7 +349,11 @@ class TempOpener(Opener):
@classmethod
def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create):
from fs.tempfs import TempFS
return TempFS(identifier=fs_name_params, temp_dir=fs_path), None
fs = TempFS(identifier=fs_name_params)
if create and fs_path:
fs = fs.makeopendir(fs_path)
fs_path = pathsplit(fs_path)
return fs, fs_path
opener = OpenerRegistry([OSFSOpener,
......@@ -382,7 +370,7 @@ opener = OpenerRegistry([OSFSOpener,
def main():
#fs, path = opener.parse('zip:zip://~/zips.zip!t.zip!')
fs, path = opener.parse('rpc://127.0.0.1/a/*.JPG')
fs, path = opener.parse('ftp://releases.mozilla.org/welcome.msg')
print fs, path
......
......@@ -266,6 +266,10 @@ def issamedir(path1, path2):
"""
return pathsplit(normpath(path1))[0] == pathsplit(normpath(path2))[0]
def isbase(path1, path2):
p1 = forcedir(abspath(path1))
p2 = forcedir(abspath(path2))
return p1 == p2 or p1.startswith(p2)
def isprefix(path1, path2):
"""Return true is path1 is a prefix of path2.
......
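The new isbase() helper added to fs/path.py above reports whether the second path is a base of the first, i.e. an ancestor directory or the same directory; forcedir() makes the comparison respect whole path segments. A few illustrative calls:

    from fs.path import isbase

    print isbase('/foo/bar/baz', '/foo')   # True: /foo is an ancestor of /foo/bar/baz
    print isbase('/foo', '/foo')           # True: a path counts as its own base
    print isbase('/foobar', '/foo')        # False: matching is per segment, not per character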
......@@ -108,6 +108,7 @@ class RPCFS(FS):
self._transport = transport
self.proxy = self._make_proxy()
FS.__init__(self,thread_synchronize=False)
self.isdir('/')
def _make_proxy(self):
kwds = dict(allow_none=True)
......
......@@ -39,6 +39,9 @@ class TempFS(OSFS):
default uses "TempFS"
"""
self.identifier = identifier
self.temp_dir = temp_dir
self.dir_mode = dir_mode
self._temp_dir = tempfile.mkdtemp(identifier or "TempFS",dir=temp_dir)
self._cleaned = False
super(TempFS, self).__init__(self._temp_dir, dir_mode=dir_mode, thread_synchronize=thread_synchronize)
......@@ -51,6 +54,13 @@ class TempFS(OSFS):
def __unicode__(self):
return u'<TempFS: %s>' % self._temp_dir
def __setstate__(self, state):
state = super(TempFS, self).__setstate__(state)
self._temp_dir = tempfile.mkdtemp(self.identifier or "TempFS", dir=self.temp_dir)
super(TempFS, self).__init__(self._temp_dir,
dir_mode=self.dir_mode,
thread_synchronize=self.thread_synchronize)
def close(self):
"""Removes the temporary directory.
......
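TempFS now keeps its constructor arguments (identifier, temp_dir, dir_mode) so that unpickling can create a fresh temporary directory instead of pointing at one that may no longer exist. A hedged round-trip sketch of what that implies:

    import pickle
    from fs.tempfs import TempFS

    tmp = TempFS()
    tmp.setcontents('hello.txt', 'hello')
    clone = pickle.loads(pickle.dumps(tmp))    # __setstate__ above builds a brand new temp dir
    print clone.getsyspath('/') != tmp.getsyspath('/')   # the clone lives elsewhere on disk
    clone.close()
    tmp.close()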
......@@ -486,6 +486,7 @@ class FSTestCases(object):
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
#import rpdb2; rpdb2.start_embedded_debugger('password');
self.fs.copy("foo/bar/a.txt", "foo/b.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
......
......@@ -32,8 +32,9 @@ class TestFTPFS(unittest.TestCase, FSTestCases, ThreadingTestCases):
self.ftp_server = subprocess.Popen([sys.executable, abspath(__file__), self.temp_dir, str(use_port)])
# Need to sleep to allow ftp server to start
time.sleep(.1)
self.fs = ftpfs.FTPFS('127.0.0.1', 'user', '12345', port=use_port, timeout=5.0)
time.sleep(.2)
self.fs = ftpfs.FTPFS('127.0.0.1', 'user', '12345', dircache=True, port=use_port, timeout=5.0)
self.fs.cache_hint(True)
def tearDown(self):
......
......@@ -54,13 +54,22 @@ def copyfile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
return
src = None
dst = None
try:
# Chunk copy
src = src_fs.open(src_path, 'rb')
dst_fs.setcontents(dst_path, src, chunk_size=chunk_size)
dst = src_fs.open(dst_path, 'wb')
write = dst.write
read = src.read
chunk = read(chunk_size)
while chunk:
write(chunk)
chunk = read(chunk_size)
finally:
if src is not None and hasattr(src, 'close'):
if src is not None:
src.close()
if dst is not None:
dst.close()
def movefile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1024):
......@@ -90,13 +99,22 @@ def movefile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
return
src = None
dst = None
try:
# Chunk copy
src = src_fs.open(src_path, 'rb')
dst_fs.setcontents(dst_path, src, chunk_size=chunk_size)
dst = src_fs.open(dst_path, 'wb')
write = dst.write
read = src.read
chunk = read(chunk_size)
while chunk:
write(chunk)
chunk = read(chunk_size)
finally:
if src is not None and hasattr(src, 'close'):
if src is not None:
src.close()
if dst is not None:
dst.close()
src_fs.remove(src_path)
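Both copyfile() and movefile() replace the setcontents() shortcut with an explicit loop: the destination is opened for writing and data is pumped across in fixed-size chunks. A self-contained sketch of that loop (StringIO stands in for the opened source and destination files):

    from StringIO import StringIO

    def chunk_copy(src, dst, chunk_size=64 * 1024):
        # read/write in fixed-size chunks until the source is exhausted
        read = src.read
        write = dst.write
        chunk = read(chunk_size)
        while chunk:
            write(chunk)
            chunk = read(chunk_size)

    src = StringIO('x' * 200000)
    dst = StringIO()
    chunk_copy(src, dst)
    assert dst.getvalue() == 'x' * 200000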
......@@ -416,7 +434,7 @@ def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None, hi
if is_dir:
write('%s %s' % (wrap_prefix(prefix + '--'), wrap_dirname(item)))
if max_levels is not None and len(levels) >= max_levels:
if max_levels is not None and len(levels) + 1 >= max_levels:
pass
#write(wrap_prefix(prefix[:-1] + ' ') + wrap_error('max recursion levels reached'))
else:
......