Commit 4dff3320 by willmcgugan

Changed syntax for commands to be more URL-like; optimized sftpfs to use fewer queries for listdir

parent f1f224c1
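The commands below (fscp, fsls, fstree) resolve their path arguments through fs.opener, which is what "more URL-like" refers to. A hedged sketch of the intended invocation style; the command names and URL schemes are illustrative assumptions, not taken from this diff:

# Hypothetical command-line usage (URL schemes are examples only):
#
#   fsls ftp://ftp.example.org/pub
#   fscp readme.txt ftp://ftp.example.org/uploads/
#   fstree -a zip://archive.zip
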
from fs.opener import opener
from fs.utils import copyfile, copystructure
from fs.path import pathjoin
from fs.path import pathjoin, iswildcard
from fs.errors import FSError
from fs.commands.runner import Command
import sys
......@@ -92,7 +92,7 @@ Copy SOURCE to DESTINATION"""
if src_path is None:
src_path = '/'
if self.is_wildcard(src_path):
if iswildcard(src_path):
for file_path in src_fs.listdir(wildcard=src_path, full=True):
copy_fs_paths.append((self.FILE, src_fs, file_path, file_path))
......
#!/usr/bin/env python
from fs.opener import opener
from fs.path import pathsplit, abspath, isdotfile
from fs.path import pathsplit, abspath, isdotfile, iswildcard
from fs.commands.runner import Command
from collections import defaultdict
import sys
......@@ -43,10 +43,10 @@ List contents of [PATH]"""
path = path or '.'
wildcard = None
if self.is_wildcard(path):
if iswildcard(path):
path, wildcard = pathsplit(path)
if fs.isfile(path):
if path != '.' and fs.isfile(path):
if not options.dirsonly:
file_paths.append(path)
else:
......
......@@ -15,6 +15,8 @@ Recursively display the contents of PATH in an ascii tree"""
optparse = super(FSTree, self).get_optparse()
optparse.add_option('-d', '--depth', dest='depth', type="int", default=5,
help="Maximum depth to display", metavar="DEPTH")
optparse.add_option('-a', '--all', dest='all', action='store_true', default=False,
help="do not hide dot files")
return optparse
def do_run(self, options, args):
......@@ -31,7 +33,8 @@ Recursively display the contents of PATH in an ascii tree"""
print_fs(fs, path or '',
file_out=self.output_file,
max_levels=options.depth,
terminal_colors=self.is_terminal())
terminal_colors=self.is_terminal(),
hide_dotfiles=not options.all)
def run():
return FSTree().run()
......
......@@ -2,7 +2,7 @@ import sys
from optparse import OptionParser
from fs.opener import opener, OpenerError
from fs.errors import FSError
from fs.path import splitext, pathsplit, isdotfile
from fs.path import splitext, pathsplit, isdotfile, iswildcard
import platform
from collections import defaultdict
......@@ -55,11 +55,6 @@ class Command(object):
self.terminal_width = w
self.name = self.__class__.__name__.lower()
def is_wildcard(self, path):
if path is None:
return False
return '*' in path or '?' in path
def is_terminal(self):
try:
return self.output_file.isatty()
......@@ -111,7 +106,7 @@ class Command(object):
if path is None:
return [], []
pathname, resourcename = pathsplit(path)
if self.is_wildcard(resourcename):
if iswildcard(resourcename):
dir_paths = fs.listdir(pathname,
wildcard=resourcename,
absolute=True,
......@@ -137,7 +132,7 @@ class Command(object):
resources = []
for fs, path in fs_paths:
if self.is_wildcard(path):
if path and iswildcard(path):
if not files_only:
dir_paths = fs.listdir(wildcard=path, dirs_only=True)
for path in dir_paths:
......@@ -227,8 +222,8 @@ class Command(object):
if self.is_terminal():
self.output("\n")
return 0
except ValueError:
pass
#except ValueError:
# pass
except SystemExit:
return 0
except IOError:
......
......@@ -751,8 +751,7 @@ class FTPFS(FS):
def __init__(self, host='', user='', passwd='', acct='', timeout=_GLOBAL_DEFAULT_TIMEOUT,
port=21,
dircache=True,
max_buffer_size=128*1024*1024):
dircache=True):
""" Connect to a FTP server.
:param host: Host to connect to
......@@ -766,7 +765,6 @@ class FTPFS(FS):
changes to the ftp file structure will not be visible until
`~fs.ftpfs.FTPFS.clear_dircache` is called
:param dircache: If True directory information will be cached for fast access
:param max_buffer_size: Number of bytes to hold before blocking write operations
"""
......@@ -782,8 +780,6 @@ class FTPFS(FS):
self.use_dircache = dircache
self.get_dircache()
self.max_buffer_size = max_buffer_size
self._cache_hint = False
self._locals._ftp = None
self._thread_ftps = set()
......
......@@ -156,7 +156,7 @@ class DirEntry(object):
self.locks += 1
def unlock(self):
self.locks -=1
self.locks -= 1
assert self.locks >=0, "Lock / Unlock mismatch!"
def desc_contents(self):
......@@ -494,7 +494,7 @@ class MemoryFS(FS):
if dir_entry is None:
raise ResourceNotFoundError(path)
if dir_entry.isfile():
raise ResourceInvalidError(path,msg="that's a file, not a directory: %(path)s")
raise ResourceInvalidError(path, msg="not a directory: %(path)s")
paths = dir_entry.contents.keys()
for (i,p) in enumerate(paths):
if not isinstance(p,unicode):
......@@ -522,7 +522,7 @@ class MemoryFS(FS):
return info
@synchronize
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=1024*64):
src_dir_entry = self._get_dir_entry(src)
if src_dir_entry is None:
raise ResourceNotFoundError(src)
......@@ -533,7 +533,7 @@ class MemoryFS(FS):
dst_dir_entry.xattrs.update(src_xattrs)
@synchronize
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=1024*64):
src_dir_entry = self._get_dir_entry(src)
if src_dir_entry is None:
raise ResourceNotFoundError(src)
......@@ -544,7 +544,7 @@ class MemoryFS(FS):
dst_dir_entry.xattrs.update(src_xattrs)
@synchronize
def copy(self, src, dst, overwrite=False, chunk_size=16384):
def copy(self, src, dst, overwrite=False, chunk_size=1024*64):
src_dir_entry = self._get_dir_entry(src)
if src_dir_entry is None:
raise ResourceNotFoundError(src)
......@@ -555,7 +555,7 @@ class MemoryFS(FS):
dst_dir_entry.xattrs.update(src_xattrs)
@synchronize
def move(self, src, dst, overwrite=False, chunk_size=16384):
def move(self, src, dst, overwrite=False, chunk_size=1024*64):
src_dir_entry = self._get_dir_entry(src)
if src_dir_entry is None:
raise ResourceNotFoundError(src)
......@@ -566,6 +566,27 @@ class MemoryFS(FS):
dst_dir_entry.xattrs.update(src_xattrs)
@synchronize
def getcontents(self, path):
dir_entry = self._get_dir_entry(path)
if dir_entry is None:
raise ResourceNotFoundError(path)
if not dir_entry.isfile():
raise ResourceInvalidError(path, msg="not a directory: %(path)s")
return dir_entry.data or ''
@synchronize
def setcontents(self, path, data, chunk_size=1024*64):
if not isinstance(data, str):
return super(MemoryFS, self).setcontents(path, data, chunk_size)
if not self.exists(path):
self.open(path, 'w').close()
dir_entry = self._get_dir_entry(path)
if not dir_entry.isfile():
raise ResourceInvalidError(path, msg="not a file: %(path)s")
dir_entry.data = data
@synchronize
def setxattr(self, path, key, value):
dir_entry = self._dir_entry(path)
key = unicode(key)
......
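For context, a minimal sketch of the MemoryFS fast paths added above; the path and contents are illustrative. A str passed to setcontents is stored directly on the DirEntry (non-string data falls back to the chunked base-class implementation), and getcontents returns the stored bytes in one call:

from fs.memoryfs import MemoryFS

mem = MemoryFS()
mem.setcontents('/hello.txt', 'hello world')   # str data: stored directly, no chunk loop
data = mem.getcontents('/hello.txt')           # -> 'hello world'
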
......@@ -515,3 +515,16 @@ class PathMap(object):
def names(self,root="/"):
return list(self.iternames(root))
_wild_chars = frozenset('*?[]!{}')
def iswildcard(path):
"""Check if a path ends with a wildcard
>>> iswildcard('foo/bar/baz.*')
True
>>> iswildcard('foo/bar')
False
"""
assert path is not None
base_chars = frozenset(basename(path))
return not base_chars.isdisjoint(_wild_chars)
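A short sketch of how the commands combine the new iswildcard with pathsplit (mirroring the fsls change above; the path literal is illustrative):

from fs.path import pathsplit, iswildcard

path = 'projects/*.py'       # illustrative user-supplied path
wildcard = None
if iswildcard(path):
    # split off the wildcard component, leaving the directory to list
    path, wildcard = pathsplit(path)   # -> ('projects', '*.py')
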
......@@ -9,12 +9,15 @@ Filesystem accessing an SFTP server (via paramiko)
import datetime
import stat as statinfo
import threading
import os
import paramiko
from getpass import getuser
from binascii import hexlify
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.utils import isdir, isfile
# SFTPClient appears to not be thread-safe, so we use an instance per thread
if hasattr(threading, "local"):
......@@ -58,6 +61,7 @@ class SFTPFS(FS):
'atomic.setcontents' : False
}
def __init__(self, connection, root_path="/", encoding=None, **credentials):
"""SFTPFS constructor.
......@@ -88,18 +92,81 @@ class SFTPFS(FS):
self._tlocal = thread_local()
self._transport = None
self._client = None
hostname = None
if isinstance(connection, basestring):
hostname = connection
else:
try:
hostname, port = connection
except ValueError:
pass
hostkeytype = None
hostkey = None
if hostname is not None:
try:
host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
except IOError:
try:
# try ~/ssh/ too, because windows can't have a folder named ~/.ssh/
host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/ssh/known_hosts'))
except IOError:
host_keys = {}
if host_keys.has_key(hostname):
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
credentials['hostkey'] = hostkey
if not credentials.get('username'):
credentials['username'] = getuser()
super(SFTPFS, self).__init__()
if isinstance(connection,paramiko.Channel):
self._transport = None
self._client = paramiko.SFTPClient(connection)
else:
if not isinstance(connection,paramiko.Transport):
if not isinstance(connection, paramiko.Transport):
connection = paramiko.Transport(connection)
self._owns_transport = True
try:
if not connection.is_authenticated():
connection.connect(**credentials)
if not connection.is_authenticated():
self._agent_auth(connection, credentials.get('username'))
if not connection.is_authenticated():
connection.close()
raise RemoteConnectionError('No auth')
except paramiko.AuthenticationException:
raise RemoteConnectionError('Auth rejected')
self._transport = connection
self.root_path = abspath(normpath(root_path))
super(SFTPFS, self).__init__()
@classmethod
def _agent_auth(cls, transport, username):
"""
Attempt to authenticate to the given transport using any of the private
keys available from an SSH agent.
"""
agent = paramiko.Agent()
agent_keys = agent.get_keys()
if len(agent_keys) == 0:
return False
for key in agent_keys:
try:
transport.auth_publickey(username, key)
return key
except paramiko.SSHException:
pass
return None
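# Illustration (not part of this commit's diff): with the agent fallback above,
# an SFTPFS can be constructed from a hostname alone and still authenticate
# when a running SSH agent holds a usable key, e.g.
#
#     fs = SFTPFS('sftp.example.org', root_path='/home/will')   # hypothetical host/path
#
# If neither the supplied credentials nor the agent authenticate the
# transport, the constructor raises RemoteConnectionError.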
def __del__(self):
self.close()
......@@ -184,6 +251,8 @@ class SFTPFS(FS):
@convert_os_errors
def isdir(self,path):
if path == '/':
return True
npath = self._normpath(path)
try:
stat = self.client.stat(npath)
......@@ -209,6 +278,10 @@ class SFTPFS(FS):
npath = self._normpath(path)
try:
paths = self.client.listdir(npath)
if dirs_only or files_only:
path_attrs = self.client.listdir_attr(npath)
else:
path_attrs = None
except IOError, e:
if getattr(e,"errno",None) == 2:
if self.isfile(path):
......@@ -217,10 +290,72 @@ class SFTPFS(FS):
elif self.isfile(path):
raise ResourceInvalidError(path,msg="Can't list directory contents of a file: %(path)s")
raise
if path_attrs is not None:
if dirs_only:
filter_paths = []
for path, attr in zip(paths, path_attrs):
if isdir(self, path, attr.__dict__):
filter_paths.append(path)
paths = filter_paths
elif files_only:
filter_paths = []
for path, attr in zip(paths, path_attrs):
if isfile(self, path, attr.__dict__):
filter_paths.append(path)
paths = filter_paths
for (i,p) in enumerate(paths):
if not isinstance(p,unicode):
paths[i] = p.decode(self.encoding)
return self._listdir_helper(path, paths, wildcard, full, absolute, dirs_only, files_only)
return self._listdir_helper(path, paths, wildcard, full, absolute, False, False)
@convert_os_errors
def listdirinfo(self,path="./",wildcard=None,full=False,absolute=False,dirs_only=False,files_only=False):
npath = self._normpath(path)
try:
paths = self.client.listdir(npath)
attrs = self.client.listdir_attr(npath)
attrs_map = dict(zip(paths, attrs))
except IOError, e:
if getattr(e,"errno",None) == 2:
if self.isfile(path):
raise ResourceInvalidError(path,msg="Can't list directory contents of a file: %(path)s")
raise ResourceNotFoundError(path)
elif self.isfile(path):
raise ResourceInvalidError(path,msg="Can't list directory contents of a file: %(path)s")
raise
if dirs_only:
filter_paths = []
for path, attr in zip(paths, attrs):
if isdir(self, path, attr.__dict__):
filter_paths.append(path)
paths = filter_paths
elif files_only:
filter_paths = []
for path, attr in zip(paths, attrs):
if isfile(self, path, attr.__dict__):
filter_paths.append(path)
paths = filter_paths
for (i, p) in enumerate(paths):
if not isinstance(p, unicode):
paths[i] = p.decode(self.encoding)
def getinfo(p):
resourcename = basename(p)
info = attrs_map.get(resourcename)
if info is None:
return self.getinfo(pathjoin(path, p))
return self._extract_info(info.__dict__)
return [(p, getinfo(p)) for p in
self._listdir_helper(path, paths, wildcard, full, absolute, False, False)]
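# Note (explanatory comment, not part of the original source): listdir_attr()
# returns stat results for every entry in a single SFTP request, so the
# dirs_only / files_only filters and listdirinfo() above no longer issue one
# stat() round-trip per entry -- the "fewer queries for listdir" mentioned in
# the commit message.  For example:
#
#     fs.listdir('/', dirs_only=True)   # one listdir + one listdir_attr, not N stats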
@convert_os_errors
def makedir(self,path,recursive=False,allow_recreate=False):
......@@ -335,6 +470,23 @@ class SFTPFS(FS):
raise ParentDirectoryMissingError(dst,msg="Destination directory does not exist: %(path)s")
raise
_info_vars = frozenset('st_size st_uid st_gid st_mode st_atime st_mtime'.split())
@classmethod
def _extract_info(cls, stats):
fromtimestamp = datetime.datetime.fromtimestamp
info = dict((k, v) for k, v in stats.iteritems() if k in cls._info_vars)
info['size'] = info['st_size']
ct = info.get('st_ctime')
if ct is not None:
info['created_time'] = fromtimestamp(ct)
at = info.get('st_atime')
if at is not None:
info['accessed_time'] = fromtimestamp(at)
mt = info.get('st_mtime')
if mt is not None:
info['modified_time'] = fromtimestamp(mt)
return info
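# Illustration (values invented): given a paramiko SFTPAttributes whose
# __dict__ contains st_size=1024, st_mode, st_atime and st_mtime,
# _extract_info() returns the matching st_* fields plus 'size' and the
# 'accessed_time' / 'modified_time' datetimes that FS.getinfo() callers expect.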
@convert_os_errors
def getinfo(self, path):
npath = self._normpath(path)
......
......@@ -56,10 +56,13 @@ def copyfile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
src = None
try:
# Chunk copy
if src_fs.getsize(src_path) < chunk_size:
src = src_fs.getcontents(src_path)
else:
src = src_fs.open(src_path, 'rb')
dst_fs.setcontents(dst_path, src, chunk_size=chunk_size)
finally:
if src is not None:
if src is not None and hasattr(src, 'close'):
src.close()
......@@ -89,14 +92,18 @@ def movefile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
FS._shutil_movefile(src_syspath, dst_syspath)
return
src = None
try:
# Chunk copy
if src_fs.getsize(src_path) < chunk_size:
src = src_fs.getcontents(src_path)
else:
src = src_fs.open(src_path, 'rb')
dst_fs.setcontents(dst_path, src, chunk_size=chunk_size)
src_fs.remove(src_path)
finally:
if src is not None:
if src is not None and hasattr(src, 'close'):
src.close()
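# Note (explanatory comment, not part of the original source): files smaller
# than chunk_size are read with getcontents() and passed to setcontents() as a
# single string, avoiding the open/iterate/close cycle; larger files still
# stream through an open file object, hence the hasattr(src, 'close') guard.
#
#     copyfile(src_fs, '/small.txt', dst_fs, '/small.txt', chunk_size=64*1024)  # illustrative paths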
def movedir(fs1, fs2, overwrite=False, ignore_errors=False, chunk_size=64*1024):
......@@ -324,7 +331,7 @@ def find_duplicates(fs,
paths = list(set(paths).difference(dups))
def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None):
def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None, hide_dotfiles=False):
"""Prints a filesystem listing to stdout (including sub dirs). Useful as a debugging aid.
Be careful about printing an OSFS, or any other large filesystem.
Without max_levels set, this function will traverse the entire directory tree.
......@@ -343,13 +350,14 @@ def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None):
:param file_out: File object to write output to (defaults to sys.stdout)
:param terminal_colors: If True, terminal color codes will be written, set to False for non-console output.
The default (None) will select an appropriate setting for the platform.
:param hide_dotfiles: if True, files and directories beginning with '.' will be hidden from the listing
"""
if file_out is None:
file_out = sys.stdout
file_encoding = getattr(file_out, 'encoding', 'utf-8')
file_encoding = getattr(file_out, 'encoding', 'utf-8') or 'utf-8'
if terminal_colors is None:
if sys.platform.startswith('win'):
......@@ -388,12 +396,16 @@ def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None):
def print_dir(fs, path, levels=[]):
try:
dir_listing = [(fs.isdir(pathjoin(path,p)), p) for p in fs.listdir(path)]
dir_listing = ( [(True, p) for p in fs.listdir(path, dirs_only=True)] +
[(False, p) for p in fs.listdir(path, files_only=True)] )
except Exception, e:
prefix = ''.join([('| ', ' ')[last] for last in levels]) + ' '
write(wrap_prefix(prefix[:-1] + ' ') + wrap_error("unable to retrieve directory list (%s) ..." % str(e)))
return 0
if hide_dotfiles:
dir_listing = [(isdir, p) for isdir, p in dir_listing if not p.startswith('.')]
dir_listing.sort(key = lambda (isdir, p):(not isdir, p.lower()))
for i, (is_dir, item) in enumerate(dir_listing):
......
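A brief usage sketch connecting the new hide_dotfiles flag to the fstree --all option above (the filesystem opened here is illustrative):

from fs.osfs import OSFS
from fs.utils import print_fs

projects = OSFS('.')                                   # any filesystem object will do
print_fs(projects, max_levels=2, hide_dotfiles=True)   # what `fstree` does without -a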