Commit 9ff3da0c by willmcgugan@gmail.com

Implemented generic validatepath method and optimized normpath

parent eac17257
@@ -252,6 +252,7 @@ class FS(object):
      * *free_space* The free space (in bytes) available on the file system
      * *total_space* The total space (in bytes) available on the file system
      * *virtual* True if the filesystem defers to other filesystems
+     * *invalid_path_chars* A string containing characters that may not be used in paths

     FS implementations may expose non-generic meta data through a self-named namespace. e.g. ``"somefs.some_meta"``
@@ -282,6 +283,38 @@ class FS(object):
             return False
         return True

+    def validatepath(self, path):
+        """Validate an fs path, throwing an :class:`~fs.errors.InvalidPathError` exception if validation fails.
+
+        A path is invalid if it fails to map to a path on the underlying filesystem. The default
+        implementation checks for the presence of any of the characters in the meta value 'invalid_path_chars',
+        but implementations may have other requirements for paths.
+
+        :param path: an fs path to validate
+        :raises `fs.errors.InvalidPathError`: if `path` does not map on to a valid path on this filesystem
+
+        """
+        invalid_chars = self.getmeta('invalid_path_chars', default=None)
+        if invalid_chars:
+            re_invalid_chars = getattr(self, '_re_invalid_chars', None)
+            if re_invalid_chars is None:
+                self._re_invalid_chars = re_invalid_chars = re.compile('|'.join(re.escape(c) for c in invalid_chars), re.UNICODE)
+            if re_invalid_chars.search(path):
+                raise InvalidCharsInPathError(path)
+
+    def isvalidpath(self, path):
+        """Check if a path is valid on this filesystem.
+
+        :param path: an fs path
+        :returns: True if `path` maps to a valid path on this filesystem, False otherwise
+
+        """
+        try:
+            self.validatepath(path)
+        except InvalidPathError:
+            return False
+        else:
+            return True
+
     def getsyspath(self, path, allow_none=False):
         """Returns the system path (a path recognized by the OS) if one is present.
...
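The new methods are easiest to see from the caller's side. A minimal sketch, assuming a TempFS as used in the test suite (the paths are made up for illustration): validatepath() returns None or raises, while isvalidpath() is the non-throwing variant built on top of it.

    from fs.tempfs import TempFS
    from fs.errors import InvalidPathError

    fs = TempFS()
    print fs.isvalidpath('foo/bar.txt')   # True
    print fs.isvalidpath('foo\0bar')      # False: NUL is rejected on every platform

    try:
        fs.validatepath('foo\0bar')       # raises InvalidCharsInPathError
    except InvalidPathError:
        print 'rejected'
    fs.close()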
@@ -11,6 +11,7 @@ catch-all exception.
 __all__ = ['FSError',
            'CreateFailedError',
            'PathError',
+           'InvalidPathError',
            'InvalidCharsInPathError',
            'OperationFailedError',
            'UnsupportedError',
@@ -83,7 +84,13 @@ class PathError(FSError):
         super(PathError,self).__init__(**kwds)


-class InvalidCharsInPathError(PathError):
+class InvalidPathError(PathError):
+    """Base exception for fs paths that can't be mapped on to the underlying filesystem."""
+    default_message = "Path is invalid on this filesystem: %(path)s"
+
+
+class InvalidCharsInPathError(InvalidPathError):
+    """The path contains characters that are invalid on this filesystem"""
     default_message = "Path contains invalid characters: %(path)s"
...
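Because InvalidCharsInPathError now derives from the new InvalidPathError base, callers can catch the general class without caring which specific validation rule failed. A small sketch of that relationship, using nothing beyond the hierarchy defined above:

    from fs.errors import InvalidPathError, InvalidCharsInPathError

    assert issubclass(InvalidCharsInPathError, InvalidPathError)

    try:
        raise InvalidCharsInPathError('bad\0path')
    except InvalidPathError as e:
        print e   # message built from the subclass's default_message above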
@@ -88,10 +88,9 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
             }

    if platform.system() == 'Windows':
-        _invalid_path_chars = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
+        _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
    else:
-        _invalid_path_chars = '\0'
-    _re_invalid_path_chars = re.compile('|'.join(re.escape(c) for c in _invalid_path_chars), re.UNICODE)
+        _meta["invalid_path_chars"] = '\0'

    def __init__(self, root_path, thread_synchronize=_thread_synchronize_default, encoding=None, create=False, dir_mode=0700, use_long_paths=True):
        """
@@ -153,13 +152,8 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
            return p
        return p.decode(self.encoding, 'replace')

-    def _validate_path(self, path):
-        """Raise an error if there are any invalid characters in the path"""
-        if self._re_invalid_path_chars.search(path):
-            raise InvalidCharsInPathError(path)
-
    def getsyspath(self, path, allow_none=False):
-        self._validate_path(path)
+        self.validatepath(path)
        path = relpath(normpath(path)).replace(u"/", os.sep)
        path = os.path.join(self.root_path, path)
        if not path.startswith(self.root_path):
...
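For reference, the check that the generic FS.validatepath now performs with these meta values can be reproduced standalone. The character sets below are copied from the class attributes above; the helper names are illustrative only.

    import re

    windows_chars = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
    posix_chars = '\0'

    def make_checker(invalid_chars):
        # same construction as FS.validatepath: escape each char, join into an alternation
        pattern = re.compile('|'.join(re.escape(c) for c in invalid_chars), re.UNICODE)
        return lambda path: pattern.search(path) is None

    valid_on_windows = make_checker(windows_chars)
    valid_on_posix = make_checker(posix_chars)

    print valid_on_windows('foo/bar.txt')   # True
    print valid_on_windows('foo:bar')       # False - ':' is reserved on Windows
    print valid_on_posix('foo:bar')         # True - only NUL is rejected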
@@ -14,7 +14,9 @@ import re
 import os

-_requires_normalization = re.compile(r'/\.\.|\./|\.|//').search
+#_requires_normalization = re.compile(r'/\.\.|\./|\.|//').search
+# New improved re that avoids normalizing paths that don't need it - WM
+_requires_normalization = re.compile(r'/\.\.|\./|^\.$|\.$|//').search


 def normpath(path):
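The gain from the new pre-check is that ordinary filenames containing a dot no longer force a full normalization pass; the old pattern's bare \. branch matched any dot anywhere in the path. A standalone comparison, with both patterns copied from above:

    import re

    old_check = re.compile(r'/\.\.|\./|\.|//').search
    new_check = re.compile(r'/\.\.|\./|^\.$|\.$|//').search

    for p in ('foo/bar.txt', 'foo/./bar', 'foo//bar', '.', 'foo/..'):
        print p, bool(old_check(p)), bool(new_check(p))

    # 'foo/bar.txt' matched the old pattern (so normpath did unnecessary work)
    # but not the new one; the paths that genuinely need collapsing match both.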
@@ -75,11 +77,6 @@ else:
        return path


-def normospath(path):
-    """Normalizes a path with os separators"""
-    return normpath(ospath(path))
-
-
 def iteratepath(path, numsplits=None):
    """Iterate over the individual components of a path.
@@ -374,7 +371,7 @@ def isprefix(path1, path2):
        bits1.pop()
    if len(bits1) > len(bits2):
        return False
-    for (bit1,bit2) in zip(bits1,bits2):
+    for (bit1, bit2) in zip(bits1, bits2):
        if bit1 != bit2:
            return False
    return True
@@ -434,7 +431,7 @@ class PathMap(object):
    def __init__(self):
        self._map = {}

-    def __getitem__(self,path):
+    def __getitem__(self, path):
        """Get the value stored under the given path."""
        m = self._map
        for name in iteratepath(path):
@@ -447,7 +444,7 @@ class PathMap(object):
            except KeyError:
                raise KeyError(path)

-    def __contains__(self,path):
+    def __contains__(self, path):
        """Check whether the given path has a value stored in the map."""
        try:
            self[path]
@@ -456,22 +453,22 @@ class PathMap(object):
        else:
            return True

-    def __setitem__(self,path,value):
+    def __setitem__(self, path, value):
        """Set the value stored under the given path."""
        m = self._map
        for name in iteratepath(path):
            try:
                m = m[name]
            except KeyError:
-                m = m.setdefault(name,{})
+                m = m.setdefault(name, {})
        m[""] = value

-    def __delitem__(self,path):
+    def __delitem__(self, path):
        """Delete the value stored under the given path."""
-        ms = [[self._map,None]]
+        ms = [[self._map, None]]
        for name in iteratepath(path):
            try:
-                ms.append([ms[-1][0][name],None])
+                ms.append([ms[-1][0][name], None])
            except KeyError:
                raise KeyError(path)
            else:
@@ -485,19 +482,19 @@ class PathMap(object):
            del ms[-1]
        del ms[-1][0][ms[-1][1]]

-    def get(self,path,default=None):
+    def get(self, path, default=None):
        """Get the value stored under the given path, or the given default."""
        try:
            return self[path]
        except KeyError:
            return default

-    def pop(self,path,default=None):
+    def pop(self, path, default=None):
        """Pop the value stored under the given path, or the given default."""
-        ms = [[self._map,None]]
+        ms = [[self._map, None]]
        for name in iteratepath(path):
            try:
-                ms.append([ms[-1][0][name],None])
+                ms.append([ms[-1][0][name], None])
            except KeyError:
                return default
            else:
@@ -512,16 +509,16 @@ class PathMap(object):
            del ms[-1][0][ms[-1][1]]
        return val

-    def setdefault(self,path,value):
+    def setdefault(self, path, value):
        m = self._map
        for name in iteratepath(path):
            try:
                m = m[name]
            except KeyError:
-                m = m.setdefault(name,{})
+                m = m.setdefault(name, {})
-        return m.setdefault("",value)
+        return m.setdefault("", value)

-    def clear(self,root="/"):
+    def clear(self, root="/"):
        """Clear all entries beginning with the given root path."""
        m = self._map
        for name in iteratepath(root):
@@ -531,7 +528,7 @@ class PathMap(object):
                return
        m.clear()

-    def iterkeys(self,root="/",m=None):
+    def iterkeys(self, root="/", m=None):
        """Iterate over all keys beginning with the given root path."""
        if m is None:
            m = self._map
@@ -540,12 +537,12 @@ class PathMap(object):
                m = m[name]
            except KeyError:
                return
-        for (nm,subm) in m.iteritems():
+        for (nm, subm) in m.iteritems():
            if not nm:
                yield abspath(root)
            else:
-                k = pathcombine(root,nm)
+                k = pathcombine(root, nm)
-                for subk in self.iterkeys(k,subm):
+                for subk in self.iterkeys(k, subm):
                    yield subk

    def __iter__(self):
@@ -554,7 +551,7 @@ class PathMap(object):
    def keys(self,root="/"):
        return list(self.iterkeys(root))

-    def itervalues(self,root="/",m=None):
+    def itervalues(self, root="/", m=None):
        """Iterate over all values whose keys begin with the given root path."""
        root = normpath(root)
        if m is None:
@@ -564,18 +561,18 @@ class PathMap(object):
                m = m[name]
            except KeyError:
                return
-        for (nm,subm) in m.iteritems():
+        for (nm, subm) in m.iteritems():
            if not nm:
                yield subm
            else:
-                k = pathcombine(root,nm)
+                k = pathcombine(root, nm)
-                for subv in self.itervalues(k,subm):
+                for subv in self.itervalues(k, subm):
                    yield subv

-    def values(self,root="/"):
+    def values(self, root="/"):
        return list(self.itervalues(root))

-    def iteritems(self,root="/",m=None):
+    def iteritems(self, root="/", m=None):
        """Iterate over all (key,value) pairs beginning with the given root."""
        root = normpath(root)
        if m is None:
@@ -585,18 +582,18 @@ class PathMap(object):
                m = m[name]
            except KeyError:
                return
-        for (nm,subm) in m.iteritems():
+        for (nm, subm) in m.iteritems():
            if not nm:
-                yield (abspath(normpath(root)),subm)
+                yield (abspath(normpath(root)), subm)
            else:
-                k = pathcombine(root,nm)
+                k = pathcombine(root, nm)
-                for (subk,subv) in self.iteritems(k,subm):
+                for (subk, subv) in self.iteritems(k, subm):
-                    yield (subk,subv)
+                    yield (subk, subv)

-    def items(self,root="/"):
+    def items(self, root="/"):
        return list(self.iteritems(root))

-    def iternames(self,root="/"):
+    def iternames(self, root="/"):
        """Iterate over all names beneath the given root path.

        This is basically the equivalent of listdir() for a PathMap - it yields
@@ -608,15 +605,17 @@ class PathMap(object):
                m = m[name]
            except KeyError:
                return
-        for (nm,subm) in m.iteritems():
+        for (nm, subm) in m.iteritems():
            if nm and subm:
                yield nm

-    def names(self,root="/"):
+    def names(self, root="/"):
        return list(self.iternames(root))


 _wild_chars = frozenset('*?[]!{}')


 def iswildcard(path):
    """Check if a path ends with a wildcard
@@ -627,8 +626,7 @@ def iswildcard(path):
    """
    assert path is not None
-    base_chars = frozenset(basename(path))
-    return bool(base_chars.intersection(_wild_chars))
+    return not _wild_chars.isdisjoint(path)


 if __name__ == "__main__":
    print recursepath('a/b/c')
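The rewritten iswildcard() replaces the basename/frozenset-intersection test with a single isdisjoint() call over the whole path. One behavioural consequence, reproduced standalone below, is that wildcard characters in any component now count, not just in the last one:

    _wild_chars = frozenset('*?[]!{}')

    def iswildcard(path):
        assert path is not None
        return not _wild_chars.isdisjoint(path)

    print iswildcard('foo/*.jpg')     # True
    print iswildcard('foo/bar.jpg')   # False
    print iswildcard('foo[1]/bar')    # True - '[' appears in an earlier component;
                                      # the old basename-only check returned False here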
...
@@ -10,6 +10,7 @@ import os
 import os.path
 import time
 import tempfile
+import platform

 from fs.osfs import OSFS
 from fs.errors import *
@@ -20,7 +21,7 @@ class TempFS(OSFS):
    """Create a Filesystem in a temporary directory (with tempfile.mkdtemp),
    and removes it when the TempFS object is cleaned up."""

    _meta = { 'thread_safe' : True,
              'virtual' : False,
              'read_only' : False,
@@ -32,9 +33,14 @@ class TempFS(OSFS):
              'atomic.copy' : True,
              'atomic.makedir' : True,
              'atomic.rename' : True,
              'atomic.setcontents' : False
             }

+    if platform.system() == 'Windows':
+        _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
+    else:
+        _meta["invalid_path_chars"] = '\0'
+
    def __init__(self, identifier=None, temp_dir=None, dir_mode=0700, thread_synchronize=_thread_synchronize_default):
        """Creates a temporary Filesystem
@@ -56,21 +62,21 @@ class TempFS(OSFS):
    def __unicode__(self):
        return u'<TempFS: %s>' % self._temp_dir

    def __getstate__(self):
        # If we are pickling a TempFS, we want to preserve its contents,
        # so we *don't* do the clean
        state = super(TempFS, self).__getstate__()
        self._cleaned = True
        return state

    def __setstate__(self, state):
        state = super(TempFS, self).__setstate__(state)
        self._cleaned = False
        #self._temp_dir = tempfile.mkdtemp(self.identifier or "TempFS", dir=self.temp_dir)
        #super(TempFS, self).__init__(self._temp_dir,
        #                             dir_mode=self.dir_mode,
        #                             thread_synchronize=self.thread_synchronize)

    def close(self):
        """Removes the temporary directory.
...
@@ -41,7 +41,7 @@ class FSTestCases(object):
    To apply the tests to your own FS implementation, simply use FSTestCase
    as a mixin for your own unittest.TestCase subclass and have the setUp
    method set self.fs to an instance of your FS implementation.

    NB. The Filesystem being tested must have a capacity of at least 3MB.

    This class is designed as a mixin so that it's not detected by test
@@ -52,10 +52,19 @@ class FSTestCases(object):
        """Check that a file exists within self.fs"""
        return self.fs.exists(p)

+    def test_invalid_chars(self):
+        """Check paths validate ok"""
+        # Will have to be overridden selectively for custom validatepath methods
+        self.assertEqual(self.fs.validatepath(''), None)
+        self.assertEqual(self.fs.validatepath('.foo'), None)
+        self.assertEqual(self.fs.validatepath('foo'), None)
+        self.assertEqual(self.fs.validatepath('foo/bar'), None)
+        self.assert_(self.fs.isvalidpath('foo/bar'))
+
    def test_meta(self):
        """Checks getmeta / hasmeta are functioning"""
        # getmeta / hasmeta are hard to test, since there is no way to validate
-        # the implementations response
+        # the implementation's response
        meta_names = ["read_only",
                      "network",
                      "unicode_paths"]
@@ -70,7 +79,7 @@ class FSTestCases(object):
                self.assertTrue(self.fs.hasmeta(meta_name))
            except NoMetaError:
                self.assertFalse(self.fs.hasmeta(meta_name))

    def test_root_dir(self):
        self.assertTrue(self.fs.isdir(""))
@@ -108,7 +117,7 @@ class FSTestCases(object):
        else:
            f.close()
            assert False, "ResourceInvalidError was not raised"

    def test_writefile(self):
        self.assertRaises(ResourceNotFoundError,self.fs.open,"test1.txt")
        f = self.fs.open("test1.txt","wb")
@@ -152,7 +161,7 @@ class FSTestCases(object):
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents_async("hello", StringIO(b("to you, good sir!")), chunk_size=2).wait()
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))

    def test_isdir_isfile(self):
        self.assertFalse(self.fs.exists("dir1"))
@@ -236,7 +245,7 @@ class FSTestCases(object):
        for (nm,info) in items:
            self.assertTrue(isinstance(nm,unicode))
        def check_equal(items,target):
            names = [nm for (nm,info) in items]
            self.assertEqual(sorted(names),sorted(target))
        self.fs.setcontents(u"a", b(''))
        self.fs.setcontents("b", b(''))
@@ -318,7 +327,7 @@ class FSTestCases(object):
            if "c" in files:
                found_c = True
            if "a.txt" in files:
                break

        assert found_c, "depth search order was wrong: " + str(list(self.fs.walk(search="depth")))

    def test_walk_wildcard(self):
@@ -730,18 +739,18 @@ class FSTestCases(object):
            f.truncate()
        checkcontents("hello",b("12345"))

    def test_truncate_to_larger_size(self):
        with self.fs.open("hello","wb") as f:
            f.truncate(30)

        self.assertEquals(self.fs.getsize("hello"), 30)

        # Some file systems (FTPFS) don't support both reading and writing
        if self.fs.getmeta('file.read_and_write', True):
            with self.fs.open("hello","rb+") as f:
                f.seek(25)
                f.write(b("123456"))

            with self.fs.open("hello","rb") as f:
                f.seek(25)
                self.assertEquals(f.read(),b("123456"))
@@ -788,10 +797,10 @@ class FSTestCases(object):
        else:
            # Just make sure it doesn't throw an exception
            fs2 = pickle.loads(pickle.dumps(self.fs))

    def test_big_file(self):
        """Test handling of a big file (1MB)"""
        chunk_size = 1024 * 256
        num_chunks = 4

        def chunk_stream():
@@ -821,19 +830,19 @@ class FSTestCases(object):
        finally:
            f.close()

    def test_settimes(self):
        def cmp_datetimes(d1, d2):
            """Test datetime objects are the same to within the timestamp accuracy"""
            dts1 = time.mktime(d1.timetuple())
            dts2 = time.mktime(d2.timetuple())
            return int(dts1) == int(dts2)

        d1 = datetime.datetime(2010, 6, 20, 11, 0, 9, 987699)
        d2 = datetime.datetime(2010, 7, 5, 11, 0, 9, 500000)
        self.fs.setcontents('/dates.txt', b('check dates'))

        # If the implementation supports settimes, check that the times
        # can be set and then retrieved
        try:
            self.fs.settimes('/dates.txt', d1, d2)
        except UnsupportedError:
            pass
        else:
@@ -847,7 +856,7 @@ class FSTestCases(object):
 # May be disabled - see end of file
 class ThreadingTestCases(object):
    """Testcases for thread-safety of FS implementations."""

    # These are either too slow to be worth repeating,
    # or cannot possibly break cross-thread.
    _dont_retest = ("test_pickling","test_multiple_overwrite",)
@@ -1026,7 +1035,7 @@ class ThreadingTestCases(object):
            self.fs.copydir("a","copy of a")
        def copydir_overwrite():
            self._yield()
            self.fs.copydir("a","copy of a",overwrite=True)

        # This should error out since we're not overwriting
        self.assertRaises(DestinationExistsError,self._runThreads,copydir,copydir)
        # This should run to completion and give a valid state, unless
@@ -1059,4 +1068,4 @@ class ThreadingTestCases(object):
 # Uncomment to temporarily disable threading tests
 #class ThreadingTestCases(object):
 #    _dont_retest = ()
@@ -31,7 +31,16 @@ class TestOSFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
        return os.path.exists(os.path.join(self.temp_dir, relpath(p)))

    def test_invalid_chars(self):
+        self.assertEqual(self.fs.validatepath(''), None)
+        self.assertEqual(self.fs.validatepath('.foo'), None)
+        self.assertEqual(self.fs.validatepath('foo'), None)
+        self.assertEqual(self.fs.validatepath('foo/bar'), None)
+        self.assert_(self.fs.isvalidpath('foo/bar'))
+
        self.assertRaises(errors.InvalidCharsInPathError, self.fs.open, 'invalid\0file', 'wb')
+        self.assertFalse(self.fs.isvalidpath('invalid\0file'))
+        self.assert_(self.fs.isvalidpath('validfile'))
+        self.assert_(self.fs.isvalidpath('completely_valid/path/foo.bar'))


 class TestSubFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
...
@@ -138,6 +138,15 @@ class TestPathFunctions(unittest.TestCase):
        for path, test_basename in tests:
            self.assertEqual(basename(path), test_basename)

+    def test_iswildcard(self):
+        self.assert_(iswildcard('*'))
+        self.assert_(iswildcard('*.jpg'))
+        self.assert_(iswildcard('foo/*'))
+        self.assert_(iswildcard('foo/{}'))
+        self.assertFalse(iswildcard('foo'))
+        self.assertFalse(iswildcard('img.jpg'))
+        self.assertFalse(iswildcard('foo/bar'))
+

 class Test_PathMap(unittest.TestCase):
...
@@ -49,7 +49,7 @@ class WrapFS(FS):
    and/or contents of files in an FS. It could be used to implement
    e.g. compression or encryption in a relatively painless manner.

    The following methods can be overridden to control how files are
    accessed in the underlying FS object:

    * _file_wrap(file, mode): called for each file that is opened from
@@ -66,10 +66,10 @@ class WrapFS(FS):
    """

    def __init__(self, fs):
        super(WrapFS, self).__init__()
        try:
            self._lock = fs._lock
        except (AttributeError,FSError):
            self._lock = self._lock = threading.RLock()
        self.wrapped_fs = fs
@@ -116,7 +116,7 @@ class WrapFS(FS):
        transparent file compression - in this case files from the wrapped
        FS cannot be opened in append mode.
        """
-        return (mode,mode)
+        return (mode, mode)

    def __unicode__(self):
        return u"<%s: %s>" % (self.__class__.__name__,self.wrapped_fs,)
@@ -128,18 +128,22 @@ class WrapFS(FS):
    @rewrite_errors
    def getmeta(self, meta_name, default=NoDefaultMeta):
        return self.wrapped_fs.getmeta(meta_name, default)

    @rewrite_errors
    def hasmeta(self, meta_name):
        return self.wrapped_fs.hasmeta(meta_name)

    @rewrite_errors
+    def validatepath(self, path):
+        return self.wrapped_fs.validatepath(self._encode(path))
+
+    @rewrite_errors
    def getsyspath(self, path, allow_none=False):
-        return self.wrapped_fs.getsyspath(self._encode(path),allow_none)
+        return self.wrapped_fs.getsyspath(self._encode(path), allow_none)

    @rewrite_errors
    def getpathurl(self, path, allow_none=False):
-        return self.wrapped_fs.getpathurl(self._encode(path),allow_none)
+        return self.wrapped_fs.getpathurl(self._encode(path), allow_none)

    @rewrite_errors
    def hassyspath(self, path):
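Delegating validatepath through _encode() matters because a wrapper may rewrite paths before they reach the wrapped filesystem, so validation has to run against the path the wrapped FS will actually see. A hypothetical sketch (PrefixFS is not part of this commit or the library):

    from fs.tempfs import TempFS
    from fs.wrapfs import WrapFS

    class PrefixFS(WrapFS):
        """Hypothetical wrapper that stores everything under a fixed sub-directory."""
        def _encode(self, path):
            return 'prefix/' + path.lstrip('/')
        def _decode(self, path):
            return path[len('prefix/'):]

    fs = PrefixFS(TempFS())
    fs.validatepath('foo/bar')         # validated as 'prefix/foo/bar' on the TempFS
    print fs.isvalidpath('foo\0bar')   # False - invalid characters survive the encoding
    fs.close()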
@@ -154,9 +158,9 @@ class WrapFS(FS):
    @rewrite_errors
    def setcontents(self, path, data, chunk_size=64*1024):
        # We can't pass setcontents() through to the wrapped FS if the
        # wrapper has defined a _file_wrap method, as it would bypass
        # the file contents wrapping.
        #if self._file_wrap.im_func is WrapFS._file_wrap.im_func:
        if getattr(self.__class__, '_file_wrap', None) is getattr(WrapFS, '_file_wrap', None):
            return self.wrapped_fs.setcontents(self._encode(path), data, chunk_size=chunk_size)
        else:
@@ -184,7 +188,7 @@ class WrapFS(FS):
                                 full=full,
                                 absolute=absolute,
                                 dirs_only=dirs_only,
                                 files_only=files_only)
        full = kwds.pop("full",False)
        absolute = kwds.pop("absolute",False)
        wildcard = kwds.pop("wildcard",None)
@@ -192,7 +196,7 @@ class WrapFS(FS):
            wildcard = lambda fn:True
        elif not callable(wildcard):
            wildcard_re = re.compile(fnmatch.translate(wildcard))
            wildcard = lambda fn:bool (wildcard_re.match(fn))
        entries = []
        enc_path = self._encode(path)
        for e in self.wrapped_fs.listdir(enc_path,**kwds):
@@ -203,7 +207,7 @@ class WrapFS(FS):
                e = pathcombine(path,e)
            elif absolute:
                e = abspath(pathcombine(path,e))
            entries.append(e)
        return entries

    @rewrite_errors
@@ -212,7 +216,7 @@ class WrapFS(FS):
                                 full=full,
                                 absolute=absolute,
                                 dirs_only=dirs_only,
                                 files_only=files_only)
        full = kwds.pop("full",False)
        absolute = kwds.pop("absolute",False)
        wildcard = kwds.pop("wildcard",None)
@@ -220,7 +224,7 @@ class WrapFS(FS):
            wildcard = lambda fn:True
        elif not callable(wildcard):
            wildcard_re = re.compile(fnmatch.translate(wildcard))
            wildcard = lambda fn:bool (wildcard_re.match(fn))
        enc_path = self._encode(path)
        for e in self.wrapped_fs.ilistdir(enc_path,**kwds):
            e = basename(self._decode(pathcombine(enc_path,e)))
@@ -238,7 +242,7 @@ class WrapFS(FS):
                                 full=full,
                                 absolute=absolute,
                                 dirs_only=dirs_only,
                                 files_only=files_only)
        full = kwds.pop("full",False)
        absolute = kwds.pop("absolute",False)
        wildcard = kwds.pop("wildcard",None)
@@ -246,7 +250,7 @@ class WrapFS(FS):
            wildcard = lambda fn:True
        elif not callable(wildcard):
            wildcard_re = re.compile(fnmatch.translate(wildcard))
            wildcard = lambda fn:bool (wildcard_re.match(fn))
        entries = []
        enc_path = self._encode(path)
        for (nm,info) in self.wrapped_fs.listdirinfo(enc_path,**kwds):
@@ -274,7 +278,7 @@ class WrapFS(FS):
            wildcard = lambda fn:True
        elif not callable(wildcard):
            wildcard_re = re.compile(fnmatch.translate(wildcard))
            wildcard = lambda fn:bool (wildcard_re.match(fn))
        enc_path = self._encode(path)
        for (nm,info) in self.wrapped_fs.ilistdirinfo(enc_path,**kwds):
            nm = basename(self._decode(pathcombine(enc_path,nm)))
@@ -299,7 +303,7 @@ class WrapFS(FS):
        else:
            if wildcard is not None and not callable(wildcard):
                wildcard_re = re.compile(fnmatch.translate(wildcard))
                wildcard = lambda fn:bool (wildcard_re.match(fn))
        for (dirpath,filepaths) in self.wrapped_fs.walk(self._encode(path),search=search,ignore_errors=ignore_errors):
            filepaths = [basename(self._decode(pathcombine(dirpath,p)))
                         for p in filepaths]
@@ -321,7 +325,7 @@ class WrapFS(FS):
        else:
            if wildcard is not None and not callable(wildcard):
                wildcard_re = re.compile(fnmatch.translate(wildcard))
                wildcard = lambda fn:bool (wildcard_re.match(fn))
        for filepath in self.wrapped_fs.walkfiles(self._encode(path),search=search,ignore_errors=ignore_errors):
            filepath = abspath(self._decode(filepath))
            if wildcard is not None:
@@ -469,7 +473,7 @@ def wrap_fs_methods(decorator, cls=None, exclude=[]):
 wrap_fs_methods.method_names = ["open","exists","isdir","isfile","listdir",
    "makedir","remove","setcontents","removedir","rename","getinfo","copy",
    "move","copydir","movedir","close","getxattr","setxattr","delxattr",
-    "listxattrs","getsyspath","createfile", "hasmeta", "getmeta","listdirinfo",
+    "listxattrs","validatepath","getsyspath","createfile", "hasmeta", "getmeta","listdirinfo",
    "ilistdir","ilistdirinfo"]