OpenEdx / pyfs · Commits

Commit 3ea4efe1
Authored Mar 31, 2013 by willmcgugan@gmail.com
Change of api (fs.open, fs.setcontent, fs.getcontents) to support io module in Py2.6+ and Py3
Parent: c6391b6f
Showing 45 changed files with 991 additions and 619 deletions (+991 / -619)
MANIFEST.in                      +0    -1
fs/__init__.py                   +2    -2
fs/appdirfs.py                   +7    -6
fs/base.py                       +103  -35
fs/compatibility.py              +4    -3
fs/contrib/archivefs.py          +2    -2
fs/contrib/davfs/__init__.py     +8    -3
fs/expose/dokan/__init__.py      +50   -50
fs/expose/ftp.py                 +4    -2
fs/expose/fuse/__init__.py       +68   -60
fs/expose/sftp.py                +2    -2
fs/expose/wsgi/wsgi.py           +0    -0
fs/expose/xmlrpc.py              +12   -14
fs/filelike.py                   +5    -3
fs/ftpfs.py                      +10   -6
fs/httpfs.py                     +8    -4
fs/iotools.py                    +91   -8
fs/memoryfs.py                   +27   -8
fs/mountfs.py                    +12   -7
fs/multifs.py                    +3    -3
fs/opener.py                     +3    -4
fs/osfs/__init__.py              +14   -14
fs/remote.py                     +29   -28
fs/rpcfs.py                      +59   -47
fs/s3fs.py                       +9    -4
fs/sftpfs.py                     +16   -13
fs/tempfs.py                     +14   -9
fs/tests/__init__.py             +263  -202
fs/tests/data/__init__.py        +0    -0
fs/tests/test_expose.py          +11   -4
fs/tests/test_importhook.py      +1    -1
fs/tests/test_iotools.py         +56   -0
fs/tests/test_mountfs.py         +7    -6
fs/tests/test_remote.py          +14   -10
fs/tests/test_watch.py           +5    -2
fs/tests/test_zipfs.py           +8    -5
fs/utils.py                      +1    -0
fs/watch.py                      +19   -12
fs/wrapfs/__init__.py            +4    -4
fs/wrapfs/limitsizefs.py         +8    -2
fs/wrapfs/readonlyfs.py          +10   -2
fs/wrapfs/subfs.py               +4    -4
fs/zipfs.py                      +14   -9
setup.py                         +3    -4
tox.ini                          +1    -14
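Before the per-file diffs, a usage sketch of what the API change amounts to for callers. The signatures are taken from the fs/base.py hunks below; whether a given backend honours every keyword depends on its own open() implementation, and TempFS is used here only as a convenient stand-in for any updated filesystem.

# Sketch only -- exercises the io-style keywords added to FS.open/getcontents/setcontents.
from fs.tempfs import TempFS

fs = TempFS()

# Text data goes through an encoder ('wt' under the hood); bytes are written verbatim.
fs.setcontents('hello.txt', u'caf\xe9 latte', encoding='utf-8')

raw = fs.getcontents('hello.txt', mode='rb')                      # bytes
text = fs.getcontents('hello.txt', mode='rt', encoding='utf-8')   # unicode text

# FS.open now mirrors io.open(): buffering, encoding, errors, newline, line_buffering.
with fs.open('hello.txt', mode='rt', encoding='utf-8') as f:
    print(f.read())

fs.close()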
MANIFEST.in  View file @ 3ea4efe1
 include AUTHORS
fs/__init__.py  View file @ 3ea4efe1
...
@@ -19,8 +19,8 @@ __version__ = "0.4.1"
 __author__ = "Will McGugan (will@willmcgugan.com)"

 # provide these by default so people can use 'fs.path.basename' etc.
-import errors
-import path
+from fs import errors
+from fs import path

 _thread_synchronize_default = True

 def set_thread_synchronize_default(sync):
...
fs/appdirfs.py  View file @ 3ea4efe1
...
@@ -33,7 +33,7 @@ class UserDataFS(OSFS):
         """
         app_dirs = AppDirs(appname, appauthor, version, roaming)
-        super(self.__class__, self).__init__(app_dirs.user_data_dir, create=create)
+        super(UserDataFS, self).__init__(app_dirs.user_data_dir, create=create)


 class SiteDataFS(OSFS):
...
@@ -48,7 +48,7 @@ class SiteDataFS(OSFS):
         """
         app_dirs = AppDirs(appname, appauthor, version, roaming)
-        super(self.__class__, self).__init__(app_dirs.site_data_dir, create=create)
+        super(SiteDataFS, self).__init__(app_dirs.site_data_dir, create=create)


 class UserCacheFS(OSFS):
...
@@ -63,7 +63,7 @@ class UserCacheFS(OSFS):
         """
         app_dirs = AppDirs(appname, appauthor, version, roaming)
-        super(self.__class__, self).__init__(app_dirs.user_cache_dir, create=create)
+        super(UserCacheFS, self).__init__(app_dirs.user_cache_dir, create=create)


 class UserLogFS(OSFS):
...
@@ -78,10 +78,11 @@ class UserLogFS(OSFS):
         """
         app_dirs = AppDirs(appname, appauthor, version, roaming)
-        super(self.__class__, self).__init__(app_dirs.user_log_dir, create=create)
+        super(UserLogFS, self).__init__(app_dirs.user_log_dir, create=create)


 if __name__ == "__main__":
-    udfs = UserDataFS('sexytime', appauthor='pyfs')
+    udfs = UserDataFS('exampleapp', appauthor='pyfs')
     print udfs
-    udfs2 = UserDataFS('sexytime2', appauthor='pyfs', create=False)
+    udfs2 = UserDataFS('exampleapp2', appauthor='pyfs', create=False)
     print udfs2
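The four appdirfs hunks above all make the same fix: super(self.__class__, self) is replaced with an explicit class name. The snippet below is not from the commit; it is a minimal illustration of why the original spelling is unsafe once any of these classes is subclassed.

# Illustration only (not commit code): super(self.__class__, ...) recurses when
# the method is inherited, because self.__class__ is then the subclass.
class Base(object):
    def __init__(self):
        print('Base.__init__')

class Middle(Base):
    def __init__(self):
        # Intended: super(Middle, self).__init__()
        super(self.__class__, self).__init__()

class Leaf(Middle):
    pass

Middle()   # works: self.__class__ is Middle, so Base.__init__ runs
try:
    Leaf() # self.__class__ is Leaf, so super() resolves back to Middle.__init__
except RuntimeError as e:
    print('Leaf() failed: %s' % e)   # maximum recursion depth exceeded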
fs/base.py  View file @ 3ea4efe1
...
@@ -37,9 +37,10 @@ from fs.path import *
 from fs.errors import *
 from fs.local_functools import wraps

-import compatibility
+import six
 from six import b


 class DummyLock(object):
     """A dummy lock object that doesn't do anything.
...
@@ -373,7 +374,7 @@ class FS(object):
         """
         return self.getpathurl(path, allow_none=True) is not None

-    def open(self, path, mode="r", **kwargs):
+    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
         """Open a the given path as a file-like object.

         :param path: a path to file that should be opened
...
@@ -394,7 +395,7 @@ class FS(object):
         """
         raise UnsupportedError("open file")

-    def safeopen(self, path, mode="r", **kwargs):
+    def safeopen(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
         """Like :py:meth:`~fs.base.FS.open`, but returns a
         :py:class:`~fs.base.NullFile` if the file could not be opened.
...
@@ -414,7 +415,7 @@ class FS(object):
         """
         try:
-            f = self.open(path, mode, **kwargs)
+            f = self.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
         except ResourceNotFoundError:
             return NullFile()
         return f
...
@@ -457,7 +458,8 @@ class FS(object):
         for f in self.listdir():
             yield f

     def listdir(self, path="./",
                 wildcard=None,
                 full=False,
                 absolute=False,
...
@@ -489,7 +491,8 @@ class FS(object):
         """
         raise UnsupportedError("list directory")

     def listdirinfo(self, path="./",
                     wildcard=None,
                     full=False,
                     absolute=False,
...
@@ -517,6 +520,7 @@ class FS(object):
         """
         path = normpath(path)

         def getinfo(p):
             try:
                 if full or absolute:
...
@@ -534,7 +538,8 @@ class FS(object):
                                      dirs_only=dirs_only,
                                      files_only=files_only)]

     def _listdir_helper(self, path,
                         entries,
                         wildcard=None,
                         full=False,
...
@@ -556,7 +561,7 @@ class FS(object):
         if wildcard is not None:
             if not callable(wildcard):
                 wildcard_re = re.compile(fnmatch.translate(wildcard))
                 wildcard = lambda fn: bool(wildcard_re.match(fn))
             entries = [p for p in entries if wildcard(p)]

         if dirs_only:
...
@@ -574,7 +579,8 @@ class FS(object):
         return entries

     def ilistdir(self, path="./",
                  wildcard=None,
                  full=False,
                  absolute=False,
...
@@ -594,7 +600,8 @@ class FS(object):
                                           dirs_only=dirs_only,
                                           files_only=files_only))

     def ilistdirinfo(self, path="./",
                      wildcard=None,
                      full=False,
                      absolute=False,
...
@@ -748,40 +755,94 @@ class FS(object):
                 return "No description available"
             return sys_path

-    def getcontents(self, path, mode="rb"):
+    def getcontents(self, path, mode='rb', encoding=None, errors=None, newline=None):
         """Returns the contents of a file as a string.

         :param path: A path of file to read
         :rtype: str
         :returns: file contents
         """
+        if 'r' not in mode:
+            raise ValueError("mode must contain 'r' to be readable")
         f = None
         try:
-            f = self.open(path, mode)
+            f = self.open(path, mode=mode, encoding=encoding, errors=errors, newline=newline)
             contents = f.read()
             return contents
         finally:
             if f is not None:
                 f.close()

-    def setcontents(self, path, data, chunk_size=1024 * 64):
+    def _setcontents(self, path, data, encoding=None, errors=None, chunk_size=1024 * 64, progress_callback=None, finished_callback=None):
+        """Does the work of setcontents. Factored out, so that `setcontents_async` can use it"""
+        if progress_callback is None:
+            progress_callback = lambda bytes_written: None
+        if finished_callback is None:
+            finished_callback = lambda: None
+        if not data:
+            progress_callback(0)
+            self.createfile(path)
+            finished_callback()
+            return 0
+        bytes_written = 0
+        progress_callback(0)
+        if hasattr(data, 'read'):
+            read = data.read
+            chunk = read(chunk_size)
+            if isinstance(chunk, six.text_type):
+                f = self.open(path, 'wt', encoding=encoding, errors=errors)
+            else:
+                f = self.open(path, 'wb')
+            write = f.write
+            try:
+                while chunk:
+                    write(chunk)
+                    bytes_written += len(chunk)
+                    progress_callback(bytes_written)
+                    chunk = read(chunk_size)
+            finally:
+                f.close()
+        else:
+            if isinstance(data, six.text_type):
+                with self.open(path, 'wt', encoding=encoding, errors=errors) as f:
+                    f.write(data)
+                    bytes_written += len(data)
+            else:
+                with self.open(path, 'wb') as f:
+                    f.write(data)
+                    bytes_written += len(data)
+        progress_callback(bytes_written)
+        finished_callback()
+        return bytes_written
+
+    def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024 * 64):
         """A convenience method to create a new file from a string or file-like object

         :param path: a path of the file to create
-        :param data: a string or a file-like object containing the contents for the new file
+        :param data: a string or bytes object containing the contents for the new file
+        :param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
+        :param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
         :param chunk_size: Number of bytes to read in a chunk, if the implementation has to resort to a read / copy loop
         """
-        if not data:
-            self.createfile(path)
-        else:
-            compatibility.copy_file_to_fs(data, self, path, chunk_size=chunk_size)
+        return self._setcontents(path, data, encoding=encoding, errors=errors, chunk_size=1024 * 64)

     def setcontents_async(self,
                           path,
                           data,
+                          encoding=None,
+                          errors=None,
                           chunk_size=1024 * 64,
                           progress_callback=None,
                           finished_callback=None,
...
@@ -793,6 +854,8 @@ class FS(object):
         :param path: a path of the file to create
         :param data: a string or a file-like object containing the contents for the new file
+        :param encoding: if `data` is a file open in text mode, or a text string, then use this `encoding` to write to the destination file
+        :param errors: if `data` is a file open in text mode or a text string, then use `errors` when opening the destination file
         :param chunk_size: Number of bytes to read and write in a chunk
         :param progress_callback: A function that is called periodically
             with the number of bytes written.
...
@@ -805,9 +868,16 @@ class FS(object):
         """
         finished_event = threading.Event()

         def do_setcontents():
             try:
-                compatibility.copy_file_to_fs(data, self, path, chunk_size=chunk_size, progress_callback=progress_callback, finished_callback=finished_callback)
+                self._setcontents(path, data, encoding=encoding, errors=errors, chunk_size=1024 * 64, progress_callback=progress_callback, finished_callback=finished_callback)
             except Exception, e:
                 if error_callback is not None:
                     error_callback(e)
...
@@ -817,7 +887,6 @@ class FS(object):
         threading.Thread(target=do_setcontents).start()
         return finished_event

     def createfile(self, path, wipe=False):
         """Creates an empty file if it doesn't exist
...
@@ -835,7 +904,6 @@ class FS(object):
             if f is not None:
                 f.close()

     def opendir(self, path):
         """Opens a directory and returns a FS object representing its contents.
...
@@ -897,19 +965,18 @@ class FS(object):
             return self.listdir(path, *args, **kwargs)

         if wildcard is None:
             wildcard = lambda f: True
         elif not callable(wildcard):
             wildcard_re = re.compile(fnmatch.translate(wildcard))
             wildcard = lambda fn: bool(wildcard_re.match(fn))

         if dir_wildcard is None:
             dir_wildcard = lambda f: True
         elif not callable(dir_wildcard):
             dir_wildcard_re = re.compile(fnmatch.translate(dir_wildcard))
             dir_wildcard = lambda fn: bool(dir_wildcard_re.match(fn))

         if search == "breadth":
             dirs = [path]
             dirs_append = dirs.append
             dirs_pop = dirs.pop
...
@@ -1005,7 +1072,6 @@ class FS(object):
         for p, _files in self.walk(path, dir_wildcard=wildcard, search=search, ignore_errors=ignore_errors):
             yield p

     def getsize(self, path):
         """Returns the size (in bytes) of a resource.
...
@@ -1207,6 +1273,7 @@ class FS(object):
         with self._lock:
             if not self.isdir(src):
                 raise ResourceInvalidError(src, msg="Source is not a directory: %(path)s")

             def copyfile_noerrors(src, dst, **kwargs):
                 try:
                     return self.copy(src, dst, **kwargs)
...
@@ -1227,13 +1294,10 @@ class FS(object):
             self.makedir(dst, allow_recreate=True)

             for dirname, filenames in self.walk(src):
                 dst_dirname = relpath(frombase(src, abspath(dirname)))
                 dst_dirpath = pathjoin(dst, dst_dirname)
                 self.makedir(dst_dirpath, allow_recreate=True, recursive=True)

                 for filename in filenames:
                     src_filename = pathjoin(dirname, filename)
                     dst_filename = pathjoin(dst_dirpath, filename)
                     copyfile(src_filename, dst_filename, overwrite=overwrite, chunk_size=chunk_size)
...
@@ -1248,9 +1312,9 @@ class FS(object):
         """
         with self._lock:
             path = normpath(path)
-            iter_dir = iter(self.listdir(path))
+            iter_dir = iter(self.ilistdir(path))
             try:
-                iter_dir.next()
+                next(iter_dir)
             except StopIteration:
                 return True
             return False
...
@@ -1326,7 +1390,7 @@
     return m


-def flags_to_mode(flags):
+def flags_to_mode(flags, binary=True):
     """Convert an os.O_* flag bitmask into an FS mode string."""
     if flags & os.O_WRONLY:
         if flags & os.O_TRUNC:
...
@@ -1346,6 +1410,10 @@ def flags_to_mode(flags):
         mode = "r"
     if flags & os.O_EXCL:
         mode += "x"
+    if binary:
+        mode += 'b'
+    else:
+        mode += 't'
     return mode
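The refactor above funnels both setcontents and setcontents_async through the new _setcontents helper, so the async variant now gets the same text/bytes handling and can report progress. A small usage sketch, under the assumption that the signatures reconstructed above are accurate and that the backend supports background writes:

# Usage sketch for the refactored write path (assumptions noted above).
from fs.tempfs import TempFS

def on_progress(bytes_written):
    print('%d bytes written so far' % bytes_written)

fs = TempFS()

# setcontents_async runs _setcontents in a worker thread and returns a
# threading.Event the caller can wait on for completion.
finished = fs.setcontents_async('blob.bin', b'\x00' * (256 * 1024),
                                chunk_size=1024 * 64,
                                progress_callback=on_progress)
finished.wait()
print(fs.getsize('blob.bin'))   # expected: 262144
fs.close()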
fs/compatibility.py  View file @ 3ea4efe1
...
@@ -8,10 +8,11 @@ Not for general usage, the functionality in this file is exposed elsewhere
 import six
 from six import PY3


 def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callback=None, finished_callback=None):
     """Copy data from a string or a file-like object to a given fs/path"""
     if progress_callback is None:
         progress_callback = lambda bytes_written: None
     bytes_written = 0
     f = None
     try:
...
@@ -19,7 +20,7 @@ def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callb
         if hasattr(data, "read"):
             read = data.read
             chunk = read(chunk_size)
-            if PY3 and isinstance(chunk, six.text_type):
+            if isinstance(chunk, six.text_type):
                 f = dst_fs.open(dst_path, 'w')
             else:
                 f = dst_fs.open(dst_path, 'wb')
...
@@ -30,7 +31,7 @@ def copy_file_to_fs(data, dst_fs, dst_path, chunk_size=64 * 1024, progress_callb
                 progress_callback(bytes_written)
                 chunk = read(chunk_size)
         else:
-            if PY3 and isinstance(data, six.text_type):
+            if isinstance(data, six.text_type):
                 f = dst_fs.open(dst_path, 'w')
             else:
                 f = dst_fs.open(dst_path, 'wb')
...
fs/contrib/archivefs.py  View file @ 3ea4efe1
...
@@ -112,10 +112,10 @@ class ArchiveFS(FS):
         return SizeUpdater(entry, self.archive.writestream(path))

     @synchronize
-    def getcontents(self, path, mode="rb"):
+    def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
         if not self.exists(path):
             raise ResourceNotFoundError(path)
-        f = self.open(path)
-        return f.read()
+        with self.open(path, mode, encoding=encoding, errors=errors, newline=newline) as f:
+            return f.read()

     def desc(self, path):
...
fs/contrib/davfs/__init__.py  View file @ 3ea4efe1
...
@@ -41,11 +41,13 @@ from fs.base import *
 from fs.path import *
 from fs.errors import *
 from fs.remote import RemoteFileBuffer
+from fs import iotools

 from fs.contrib.davfs.util import *
 from fs.contrib.davfs import xmlobj
 from fs.contrib.davfs.xmlobj import *

+import six
 from six import b

 import errno
...
@@ -343,8 +345,10 @@ class DAVFS(FS):
                 msg = str(e)
             raise RemoteConnectionError("", msg=msg, details=e)

-    def setcontents(self, path, contents, chunk_size=1024 * 64):
-        resp = self._request(path, "PUT", contents)
+    def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024 * 64):
+        if isinstance(data, six.text_type):
+            data = data.encode(encoding=encoding, errors=errors)
+        resp = self._request(path, "PUT", data)
         resp.close()
         if resp.status == 405:
             raise ResourceInvalidError(path)
...
@@ -353,7 +357,8 @@ class DAVFS(FS):
         if resp.status not in (200, 201, 204):
             raise_generic_error(resp, "setcontents", path)

-    def open(self, path, mode="r"):
+    @iotools.filelike_to_stream
+    def open(self, path, mode="r", **kwargs):
         mode = mode.replace("b", "").replace("t", "")
         # Truncate the file if requested
         contents = b("")
...
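Both DAVFS.open above and the FTP expose further down pick up an @iotools.filelike_to_stream decorator. The new fs/iotools.py (+91/-8 in the file list) is not part of this excerpt, so the following is only a guess at the general shape of such an adapter: wrap whatever file-like object the backend returns in io buffered/text streams so the decorated open() honours encoding, errors and newline. Class and function names here are illustrative, not the real fs.iotools API.

import io

class RawWrapper(io.RawIOBase):
    """Minimal adapter (assumption, not the real fs.iotools code): expose a
    plain file-like object (read/write/close) through io.RawIOBase."""
    def __init__(self, f, mode):
        super(RawWrapper, self).__init__()
        self._f = f
        self._mode = mode

    def readable(self):
        return 'r' in self._mode

    def writable(self):
        return 'w' in self._mode or 'a' in self._mode

    def readinto(self, b):
        data = self._f.read(len(b))
        b[:len(data)] = data
        return len(data)

    def write(self, data):
        self._f.write(data)
        return len(data)

    def close(self):
        if not self.closed:
            self._f.close()
        io.RawIOBase.close(self)

def filelike_to_stream(open_method):
    """Sketch of what a decorator named filelike_to_stream plausibly does:
    buffer the backend's file-like object and add a text layer when the
    caller asked for text mode."""
    def wrapper(self, path, mode='r', encoding=None, errors=None,
                newline=None, **kwargs):
        raw = RawWrapper(open_method(self, path, mode, **kwargs), mode)
        buffered = io.BufferedReader(raw) if raw.readable() else io.BufferedWriter(raw)
        if 'b' in mode:
            return buffered
        return io.TextIOWrapper(buffered, encoding=encoding,
                                errors=errors, newline=newline)
    return wrapper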
fs/expose/dokan/__init__.py  View file @ 3ea4efe1
...
@@ -77,9 +77,9 @@ from fs.wrapfs import WrapFS
 try:
     import libdokan
 except (NotImplementedError, EnvironmentError, ImportError, NameError,):
     is_available = False
     sys.modules.pop("fs.expose.dokan.libdokan", None)
     libdokan = None
 else:
     is_available = True
...
@@ -325,9 +325,9 @@ class FSOperations(object):
         """
         self._files_lock.acquire()
         try:
             (f2, path, lock) = self._files_by_handle[fh]
             assert f2.closed
             self._files_by_handle[fh] = (f, path, lock)
             return fh
         finally:
             self._files_lock.release()
...
@@ -336,7 +336,7 @@ class FSOperations(object):
         """Unregister the given file handle."""
         self._files_lock.acquire()
         try:
             (f, path, lock) = self._files_by_handle.pop(fh)
             del self._files_size_written[path][fh]
             if not self._files_size_written[path]:
                 del self._files_size_written[path]
...
@@ -368,7 +368,7 @@ class FSOperations(object):
             locks = self._active_locks[path]
         except KeyError:
             return 0
         for (lh, lstart, lend) in locks:
             if info is not None and info.contents.Context == lh:
                 continue
             if lstart >= offset + length:
...
@@ -423,7 +423,8 @@ class FSOperations(object):
         # Try to open the requested file. It may actually be a directory.
         info.contents.Context = 1
         try:
             f = self.fs.open(path, mode)
+            print path, mode, repr(f)
         except ResourceInvalidError:
             info.contents.IsDirectory = True
         except FSError:
...
@@ -434,7 +435,7 @@ class FSOperations(object):
             else:
                 raise
         else:
             info.contents.Context = self._reg_file(f, path)
         return retcode

     @timeout_protect
...
@@ -468,7 +469,7 @@ class FSOperations(object):
                 self.fs.removedir(path)
                 self._pending_delete.remove(path)
         else:
             (file, _, lock) = self._get_file(info.contents.Context)
             lock.acquire()
             try:
                 file.close()
...
@@ -484,7 +485,7 @@ class FSOperations(object):
     @handle_fs_errors
     def CloseFile(self, path, info):
         if info.contents.Context >= MIN_FH:
             (file, _, lock) = self._get_file(info.contents.Context)
             lock.acquire()
             try:
                 file.close()
...
@@ -497,20 +498,20 @@ class FSOperations(object):
     @handle_fs_errors
     def ReadFile(self, path, buffer, nBytesToRead, nBytesRead, offset, info):
         path = normpath(path)
         (file, _, lock) = self._get_file(info.contents.Context)
         lock.acquire()
         try:
             errno = self._check_lock(path, offset, nBytesToRead, info)
             if errno:
                 return errno
             # This may be called after Cleanup, meaning we
             # need to re-open the file.
             if file.closed:
                 file = self.fs.open(path, file.mode)
                 self._rereg_file(info.contents.Context, file)
             file.seek(offset)
             data = file.read(nBytesToRead)
             ctypes.memmove(buffer, ctypes.create_string_buffer(data), len(data))
             nBytesRead[0] = len(data)
         finally:
             lock.release()
...
@@ -520,23 +521,23 @@ class FSOperations(object):
     def WriteFile(self, path, buffer, nBytesToWrite, nBytesWritten, offset, info):
         path = normpath(path)
         fh = info.contents.Context
         (file, _, lock) = self._get_file(fh)
         lock.acquire()
         try:
             errno = self._check_lock(path, offset, nBytesToWrite, info)
             if errno:
                 return errno
             # This may be called after Cleanup, meaning we
             # need to re-open the file.
             if file.closed:
                 file = self.fs.open(path, file.mode)
                 self._rereg_file(info.contents.Context, file)
             if info.contents.WriteToEndOfFile:
                 file.seek(0, os.SEEK_END)
             else:
                 file.seek(offset)
             data = ctypes.create_string_buffer(nBytesToWrite)
             ctypes.memmove(data, buffer, nBytesToWrite)
             file.write(data.raw)
             nBytesWritten[0] = len(data.raw)
             try:
...
@@ -554,7 +555,7 @@ class FSOperations(object):
     @handle_fs_errors
     def FlushFileBuffers(self, path, info):
         path = normpath(path)
         (file, _, lock) = self._get_file(info.contents.Context)
         lock.acquire()
         try:
             file.flush()
...
@@ -567,7 +568,7 @@ class FSOperations(object):
         path = normpath(path)
         finfo = self.fs.getinfo(path)
         data = buffer.contents
         self._info2finddataw(path, finfo, data, info)
         try:
             written_size = max(self._files_size_written[path].values())
         except KeyError:
...
@@ -583,26 +584,25 @@ class FSOperations(object):
     @handle_fs_errors
     def FindFiles(self, path, fillFindData, info):
         path = normpath(path)
         for (nm, finfo) in self.fs.listdirinfo(path):
             fpath = pathjoin(path, nm)
             if self._is_pending_delete(fpath):
                 continue
             data = self._info2finddataw(fpath, finfo)
             fillFindData(ctypes.byref(data), info)

     @timeout_protect
     @handle_fs_errors
     def FindFilesWithPattern(self, path, pattern, fillFindData, info):
         path = normpath(path)
-        infolist = []
         for (nm, finfo) in self.fs.listdirinfo(path):
             fpath = pathjoin(path, nm)
             if self._is_pending_delete(fpath):
                 continue
             if not libdokan.DokanIsNameInExpression(pattern, nm, True):
                 continue
             data = self._info2finddataw(fpath, finfo, None)
             fillFindData(ctypes.byref(data), info)

     @timeout_protect
     @handle_fs_errors
...
@@ -648,7 +648,7 @@ class FSOperations(object):
     def DeleteDirectory(self, path, info):
         path = normpath(path)
         for nm in self.fs.listdir(path):
             if not self._is_pending_delete(pathjoin(path, nm)):
                 raise DirectoryNotEmptyError(path)
         self._pending_delete.add(path)
         # the actual delete takes place in self.CloseFile()
...
@@ -658,7 +658,7 @@ class FSOperations(object):
     def MoveFile(self, src, dst, overwrite, info):
         # Close the file if we have an open handle to it.
         if info.contents.Context >= MIN_FH:
             (file, _, lock) = self._get_file(info.contents.Context)
             lock.acquire()
             try:
                 file.close()
...
@@ -668,15 +668,15 @@ class FSOperations(object):
         src = normpath(src)
         dst = normpath(dst)
         if info.contents.IsDirectory:
             self.fs.movedir(src, dst, overwrite=overwrite)
         else:
             self.fs.move(src, dst, overwrite=overwrite)

     @timeout_protect
     @handle_fs_errors
     def SetEndOfFile(self, path, length, info):
         path = normpath(path)
         (file, _, lock) = self._get_file(info.contents.Context)
         lock.acquire()
         try:
             pos = file.tell()
...
@@ -684,7 +684,7 @@ class FSOperations(object):
             file.seek(length)
             file.truncate()
             if pos < length:
                 file.seek(min(pos, length))
         finally:
             lock.release()
...
@@ -694,15 +694,15 @@ class FSOperations(object):
         # It's better to pretend an operation is possible and have it fail
         # than to pretend an operation will fail when it's actually possible.
         large_amount = 100 * 1024 * 1024 * 1024
         nBytesFree[0] = self.fs.getmeta("free_space", large_amount)
         nBytesTotal[0] = self.fs.getmeta("total_space", 2 * large_amount)
         nBytesAvail[0] = nBytesFree[0]

     @handle_fs_errors
     def GetVolumeInformation(self, vnmBuf, vnmSz, sNum, maxLen, flags, fnmBuf, fnmSz, info):
         nm = ctypes.create_unicode_buffer(self.volname[:vnmSz - 1])
         sz = (len(nm.value) + 1) * ctypes.sizeof(ctypes.c_wchar)
         ctypes.memmove(vnmBuf, nm, sz)
         if sNum:
             sNum[0] = 0
         if maxLen:
...
@@ -710,8 +710,8 @@ class FSOperations(object):
         if flags:
             flags[0] = 0
         nm = ctypes.create_unicode_buffer(self.fsname[:fnmSz - 1])
         sz = (len(nm.value) + 1) * ctypes.sizeof(ctypes.c_wchar)
         ctypes.memmove(fnmBuf, nm, sz)

     @timeout_protect
     @handle_fs_errors
...
@@ -731,10 +731,10 @@ class FSOperations(object):
         except KeyError:
             locks = self._active_locks[path] = []
         else:
             errno = self._check_lock(path, offset, length, None, locks)
             if errno:
                 return errno
         locks.append((info.contents.Context, offset, end))
         return 0

     @timeout_protect
...
@@ -747,7 +747,7 @@ class FSOperations(object):
         except KeyError:
             return -ERROR_NOT_LOCKED
         todel = []
         for i, (lh, lstart, lend) in enumerate(locks):
             if info.contents.Context == lh:
                 if lstart == offset:
                     if lend == offset + length:
...
@@ -762,10 +762,10 @@ class FSOperations(object):
     def Unmount(self, info):
         pass

     def _info2attrmask(self, path, info, hinfo=None):
         """Convert a file/directory info dict to a win32 file attribute mask."""
         attrs = 0
         st_mode = info.get("st_mode", None)
         if st_mode:
             if statinfo.S_ISDIR(st_mode):
                 attrs |= FILE_ATTRIBUTE_DIRECTORY
...
fs/expose/ftp.py  View file @ 3ea4efe1
...
@@ -26,6 +26,7 @@ from pyftpdlib import ftpserver
 from fs.path import *
 from fs.osfs import OSFS
 from fs.errors import convert_fs_errors
+from fs import iotools


 # Get these once so we can reuse them:
...
@@ -96,8 +97,9 @@ class FTPFS(ftpserver.AbstractedFS):
     @convert_fs_errors
     @decode_args
-    def open(self, path, mode):
-        return self.fs.open(path, mode)
+    @iotools.filelike_to_stream
+    def open(self, path, mode, **kwargs):
+        return self.fs.open(path, mode, **kwargs)

     @convert_fs_errors
     def chdir(self, path):
...
fs/expose/fuse/__init__.py  View file @ 3ea4efe1
...
@@ -70,11 +70,11 @@ from six import PY3
 from six import b

 try:
-    # if PY3:
-    #     import fuse3 as fuse
-    # else:
-    #     import fuse
-    import fuse_ctypes as fuse
+    if PY3:
+        from fs.expose.fuse import fuse_ctypes as fuse
+    else:
+        from fs.expose.fuse import fuse3 as fuse
 except NotImplementedError:
     raise ImportError("FUSE found but not usable")

 try:
...
@@ -116,7 +116,6 @@ def handle_fs_errors(func):
     return wrapper


 class FSOperations(Operations):
     """FUSE Operations interface delegating all activities to an FS object."""
...
@@ -186,13 +185,13 @@ class FSOperations(Operations):
         # I haven't figured out how to distinguish between "w" and "w+".
         # Go with the most permissive option.
         mode = flags_to_mode(fi.flags)
         fh = self._reg_file(self.fs.open(path, mode), path)
         fi.fh = fh
         fi.keep_cache = 0

     @handle_fs_errors
     def flush(self, path, fh):
         (file, _, lock) = self._get_file(fh)
         lock.acquire()
         try:
             file.flush()
...
@@ -209,12 +208,12 @@ class FSOperations(Operations):
         path = path.decode(NATIVE_ENCODING)
         name = name.decode(NATIVE_ENCODING)
         try:
             value = self.fs.getxattr(path, name)
         except AttributeError:
             raise UnsupportedError("getxattr")
         else:
             if value is None:
                 raise OSError(errno.ENODATA, "no attribute '%s'" % (name,))
             return value

     @handle_fs_errors
...
@@ -245,13 +244,13 @@ class FSOperations(Operations):
     def open(self, path, fi):
         path = path.decode(NATIVE_ENCODING)
         mode = flags_to_mode(fi.flags)
         fi.fh = self._reg_file(self.fs.open(path, mode), path)
         fi.keep_cache = 0
         return 0

     @handle_fs_errors
     def read(self, path, size, offset, fh):
         (file, _, lock) = self._get_file(fh)
         lock.acquire()
         try:
             file.seek(offset)
...
@@ -264,9 +263,9 @@ class FSOperations(Operations):
     def readdir(self, path, fh=None):
         path = path.decode(NATIVE_ENCODING)
         entries = ['.', '..']
         for (nm, info) in self.fs.listdirinfo(path):
             self._fill_stat_dict(pathjoin(path, nm), info)
             entries.append((nm.encode(NATIVE_ENCODING), info, 0))
         return entries

     @handle_fs_errors
...
@@ -275,7 +274,7 @@ class FSOperations(Operations):
     @handle_fs_errors
     def release(self, path, fh):
         (file, _, lock) = self._get_file(fh)
         lock.acquire()
         try:
             file.close()
...
@@ -288,7 +287,7 @@ class FSOperations(Operations):
         path = path.decode(NATIVE_ENCODING)
         name = name.decode(NATIVE_ENCODING)
         try:
             return self.fs.delxattr(path, name)
         except AttributeError:
             raise UnsupportedError("removexattr")
...
@@ -297,12 +296,12 @@ class FSOperations(Operations):
         old = old.decode(NATIVE_ENCODING)
         new = new.decode(NATIVE_ENCODING)
         try:
             self.fs.rename(old, new)
         except FSError:
             if self.fs.isdir(old):
                 self.fs.movedir(old, new)
             else:
                 self.fs.move(old, new)

     @handle_fs_errors
     def rmdir(self, path):
...
@@ -314,7 +313,7 @@ class FSOperations(Operations):
         path = path.decode(NATIVE_ENCODING)
         name = name.decode(NATIVE_ENCODING)
         try:
             return self.fs.setxattr(path, name, value)
         except AttributeError:
             raise UnsupportedError("setxattr")
...
@@ -326,18 +325,18 @@ class FSOperations(Operations):
     def truncate(self, path, length, fh=None):
         path = path.decode(NATIVE_ENCODING)
         if fh is None and length == 0:
             self.fs.open(path, "wb").close()
         else:
             if fh is None:
                 f = self.fs.open(path, "rb+")
                 if not hasattr(f, "truncate"):
                     raise UnsupportedError("truncate")
                 f.truncate(length)
             else:
                 (file, _, lock) = self._get_file(fh)
                 lock.acquire()
                 try:
                     if not hasattr(file, "truncate"):
                         raise UnsupportedError("truncate")
                     file.truncate(length)
                 finally:
...
@@ -371,7 +370,7 @@ class FSOperations(Operations):
     @handle_fs_errors
     def write(self, path, data, offset, fh):
         (file, path, lock) = self._get_file(fh)
         lock.acquire()
         try:
             file.seek(offset)
...
@@ -385,7 +384,7 @@ class FSOperations(Operations):
     def _get_stat_dict(self, path):
         """Build a 'stat' dictionary for the given file."""
         info = self.fs.getinfo(path)
         self._fill_stat_dict(path, info)
         return info

     def _fill_stat_dict(self, path, info):
...
@@ -395,13 +394,13 @@ class FSOperations(Operations):
         for k in private_keys:
             del info[k]

         # Basic stuff that is constant for all paths
         info.setdefault("st_ino", 0)
         info.setdefault("st_dev", 0)
         info.setdefault("st_uid", uid)
         info.setdefault("st_gid", gid)
         info.setdefault("st_rdev", 0)
         info.setdefault("st_blksize", 1024)
         info.setdefault("st_blocks", 1)

         # The interesting stuff
         if 'st_mode' not in info:
             if self.fs.isdir(path):
...
@@ -412,11 +411,11 @@ class FSOperations(Operations):
         if not statinfo.S_ISDIR(mode) and not statinfo.S_ISREG(mode):
             if self.fs.isdir(path):
                 info["st_mode"] = mode | statinfo.S_IFDIR
                 info.setdefault("st_nlink", 2)
             else:
                 info["st_mode"] = mode | statinfo.S_IFREG
                 info.setdefault("st_nlink", 1)
-        for (key1, key2) in [("st_atime", "accessed_time"),("st_mtime", "modified_time"),("st_ctime", "created_time")]:
+        for (key1, key2) in [("st_atime", "accessed_time"), ("st_mtime", "modified_time"), ("st_ctime", "created_time")]:
             if key1 not in info:
                 if key2 in info:
                     info[key1] = time.mktime(info[key2].timetuple())
...
@@ -467,6 +466,7 @@ def mount(fs, path, foreground=False, ready_callback=None, unmount_callback=None
         ready_callback()
     if unmount_callback:
         orig_unmount = mp.unmount

         def new_unmount():
             orig_unmount()
             unmount_callback()
...
@@ -492,7 +492,8 @@ def unmount(path):
     args = ["fusermount", "-u", path]
     for num_tries in xrange(3):
         p = subprocess.Popen(args, stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)
         (stdout, stderr) = p.communicate()
         if p.returncode == 0:
...
@@ -544,7 +545,7 @@ class MountProcess(subprocess.Popen):
     def __init__(self, fs, path, fuse_opts={}, nowait=False, **kwds):
         self.path = path
         if nowait or kwds.get("close_fds", False):
             if PY3:
                 cmd = "from pickle import loads;"
             else:
...
@@ -553,11 +554,11 @@ class MountProcess(subprocess.Popen):
             cmd = cmd + 'data = loads(%s); '
             cmd = cmd + 'from fs.expose.fuse import MountProcess; '
             cmd = cmd + 'MountProcess._do_mount_nowait(data)'
             cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts), -1)),)
             cmd = [sys.executable, "-c", cmd]
             super(MountProcess, self).__init__(cmd, **kwds)
         else:
             (r, w) = os.pipe()
             if PY3:
                 cmd = "from pickle import loads;"
             else:
...
@@ -566,15 +567,18 @@ class MountProcess(subprocess.Popen):
             cmd = cmd + 'data = loads(%s); '
             cmd = cmd + 'from fs.expose.fuse import MountProcess; '
             cmd = cmd + 'MountProcess._do_mount_wait(data)'
             cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts, r, w), -1)),)
             cmd = [sys.executable, "-c", cmd]
             super(MountProcess, self).__init__(cmd, **kwds)
             os.close(w)
             byte = os.read(r, 1)
             if byte != b("S"):
+                err_text = os.read(r, 20)
                 self.terminate()
-                raise RuntimeError("FUSE error: " + os.read(r, 20).decode(NATIVE_ENCODING))
+                if hasattr(err_text, 'decode'):
+                    err_text = err_text.decode(NATIVE_ENCODING)
+                raise RuntimeError("FUSE error: " + err_text)

     def unmount(self):
         """Cleanly unmount the FUSE filesystem, terminating this subprocess."""
...
@@ -586,7 +590,7 @@ class MountProcess(subprocess.Popen):
                 unmount(self.path)
             except OSError:
                 pass
         tmr = threading.Timer(self.unmount_timeout, killme)
         tmr.start()
         self.wait()
         tmr.cancel()
...
@@ -594,56 +598,60 @@ class MountProcess(subprocess.Popen):
     if not hasattr(subprocess.Popen, "terminate"):
         def terminate(self):
             """Gracefully terminate the subprocess."""
             os.kill(self.pid, signal.SIGTERM)

     if not hasattr(subprocess.Popen, "kill"):
         def kill(self):
             """Forcibly terminate the subprocess."""
             os.kill(self.pid, signal.SIGKILL)

     @staticmethod
     def _do_mount_nowait(data):
         """Perform the specified mount, return without waiting."""
-        (fs, path, opts) = data
+        fs, path, opts = data
         opts["foreground"] = True

         def unmount_callback():
             fs.close()
         opts["unmount_callback"] = unmount_callback
         mount(fs, path, *opts)

     @staticmethod
     def _do_mount_wait(data):
         """Perform the specified mount, signalling when ready."""
-        (fs, path, opts, r, w) = data
+        fs, path, opts, r, w = data
         os.close(r)
         opts["foreground"] = True
         successful = []

         def ready_callback():
             successful.append(True)
             os.write(w, b("S"))
             os.close(w)
         opts["ready_callback"] = ready_callback

         def unmount_callback():
             fs.close()
         opts["unmount_callback"] = unmount_callback
         try:
             mount(fs, path, **opts)
         except Exception, e:
-            os.write(w, b("E") + b(e))
+            os.write(w, b("E") + unicode(e).encode('ascii', errors='replace'))
             os.close(w)
+        else:
             if not successful:
-                os.write(w, b("E"))
+                os.write(w, b("EMount unsuccessful"))
                 os.close(w)


 if __name__ == "__main__":
-    import os, os.path
+    import os
+    import os.path
     from fs.tempfs import TempFS
     mount_point = os.path.join(os.environ["HOME"], "fs.expose.fuse")
     if not os.path.exists(mount_point
):
if
not
os
.
path
.
exists
(
mount_point
):
os
.
makedirs
(
mount_point
)
os
.
makedirs
(
mount_point
)
def
ready_callback
():
def
ready_callback
():
print
"READY"
print
"READY"
mount
(
TempFS
(),
mount_point
,
foreground
=
True
,
ready_callback
=
ready_callback
)
mount
(
TempFS
(),
mount_point
,
foreground
=
True
,
ready_callback
=
ready_callback
)
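Worth spelling out for the error-reporting change above: the parent process and the mounting child talk over a plain os.pipe(), with a single status byte ("S" for success, "E" followed by a short message on failure) read before the parent decides whether to raise. A minimal, illustrative sketch of that handshake on its own (POSIX-only, uses os.fork rather than the real MountProcess):

import os

r, w = os.pipe()
pid = os.fork()
if pid == 0:
    # child: report success with a single "S", or "E" plus a short message on failure
    os.close(r)
    os.write(w, b"S")
    os.close(w)
    os._exit(0)
# parent: the first byte is the status; anything after it is error text
os.close(w)
status = os.read(r, 1)
if status != b"S":
    print("FUSE-style error: " + os.read(r, 20).decode("utf-8", "replace"))
else:
    print("mounted OK")
os.waitpid(pid, 0)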
fs/expose/sftp.py
View file @
3ea4efe1
...
@@ -221,8 +221,8 @@ class SFTPHandle(paramiko.SFTPHandle):
    """

    def __init__(self, owner, path, flags):
        super(SFTPHandle, self).__init__(flags)
-       mode = flags_to_mode(flags) + "b"
+       mode = flags_to_mode(flags)
        self.owner = owner
        if not isinstance(path, unicode):
            path = path.decode(self.owner.encoding)
...
fs/expose/wsgi/wsgi.py
View file @
3ea4efe1
fs/expose/xmlrpc.py
View file @
3ea4efe1
...
@@ -18,9 +18,11 @@ an FS object, which can then be exposed using whatever server you choose
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
from datetime import datetime
+import base64

import six
-from six import PY3, b
+from six import PY3


class RPCFSInterface(object):
    """Wrapper to expose an FS via a XML-RPC compatible interface.
...
@@ -40,26 +42,23 @@ class RPCFSInterface(object):
        must return something that can be represented in ASCII. The default
        is base64-encoded UTF-8.
        """
-       if PY3:
-           return path
-       return path.encode("utf8").encode("base64")
+       #return path
+       return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')

    def decode_path(self, path):
        """Decode paths arriving over the wire."""
-       if PY3:
-           return path
-       return path.decode("base64").decode("utf8")
+       return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')

    def getmeta(self, meta_name):
        meta = self.fs.getmeta(meta_name)
        if isinstance(meta, basestring):
-           meta = meta.decode('base64')
+           meta = self.decode_path(meta)
        return meta

    def getmeta_default(self, meta_name, default):
        meta = self.fs.getmeta(meta_name, default)
        if isinstance(meta, basestring):
-           meta = meta.decode('base64')
+           meta = self.decode_path(meta)
        return meta

    def hasmeta(self, meta_name):
...
@@ -72,7 +71,7 @@ class RPCFSInterface(object):
    def set_contents(self, path, data):
        path = self.decode_path(path)
        self.fs.setcontents(path, data.data)

    def exists(self, path):
        path = self.decode_path(path)
...
@@ -88,7 +87,7 @@ class RPCFSInterface(object):
    def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
        path = self.decode_path(path)
        entries = self.fs.listdir(path, wildcard, full, absolute, dirs_only, files_only)
        return [self.encode_path(e) for e in entries]

    def makedir(self, path, recursive=False, allow_recreate=False):
...
@@ -149,7 +148,7 @@ class RPCFSInterface(object):
        dst = self.decode_path(dst)
        return self.fs.copy(src, dst, overwrite, chunk_size)

    def move(self, src, dst, overwrite=False, chunk_size=16384):
        src = self.decode_path(src)
        dst = self.decode_path(dst)
        return self.fs.move(src, dst, overwrite, chunk_size)
...
@@ -187,11 +186,10 @@ class RPCFSServer(SimpleXMLRPCServer):
        if logRequests is not None:
            kwds['logRequests'] = logRequests
        self.serve_more_requests = True
        SimpleXMLRPCServer.__init__(self, addr, **kwds)
        self.register_instance(RPCFSInterface(fs))

    def serve_forever(self):
        """Override serve_forever to allow graceful shutdown."""
        while self.serve_more_requests:
            self.handle_request()
...
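The encode_path/decode_path rewrite above swaps the Python 2-only str.encode("base64") codec for the base64 module, which behaves the same on Python 2 and 3. A standalone sketch of the round trip, using the same calls as the diff (no XML-RPC server required):

import base64
import six

def encode_path(path):
    # unicode path -> UTF-8 bytes -> base64 -> ASCII text, safe inside an XML-RPC string
    return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')

def decode_path(path):
    return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')

assert decode_path(encode_path(u"caf\xe9/readme.txt")) == u"caf\xe9/readme.txt"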
fs/filelike.py
View file @
3ea4efe1
...
@@ -531,7 +531,7 @@ class FileLikeBase(object):
            self._assert_mode("w-")
        # If we were previously reading, ensure position is correct
        if self._rbuffer is not None:
            self.seek(0, 1)
        # If we're actually behind the apparent position, we must also
        # write the data in the gap.
        if self._sbuffer:
...
@@ -544,14 +544,16 @@ class FileLikeBase(object):
                string = self._do_read(s) + string
            except NotReadableError:
                raise NotSeekableError("File not readable, could not complete simulation of seek")
            self.seek(0, 0)
        if self._wbuffer:
            string = self._wbuffer + string
        leftover = self._write(string)
        if leftover is None or isinstance(leftover, int):
            self._wbuffer = b("")
+           return len(string) - (leftover or 0)
        else:
            self._wbuffer = leftover
+           return len(string) - len(leftover)

    def writelines(self, seq):
        """Write a sequence of lines to the file."""
...
@@ -660,7 +662,7 @@ class FileWrapper(FileLikeBase):
        return data

    def _write(self, string, flushing=False):
-       return self.wrapped_file.write(string)
+       self.wrapped_file.write(string)

    def _seek(self, offset, whence):
        self.wrapped_file.seek(offset, whence)
...
fs/ftpfs.py
View file @
3ea4efe1
...
@@ -14,6 +14,7 @@ import fs
from fs.base import *
from fs.errors import *
from fs.path import pathsplit, abspath, dirname, recursepath, normpath, pathjoin, isbase
+from fs import iotools

from ftplib import FTP, error_perm, error_temp, error_proto, error_reply
...
@@ -1152,8 +1153,9 @@ class FTPFS(FS):
        url = 'ftp://%s@%s%s' % (credentials, self.host.rstrip('/'), abspath(path))
        return url

+   @iotools.filelike_to_stream
    @ftperrors
-   def open(self, path, mode='r'):
+   def open(self, path, mode, buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        path = normpath(path)
        mode = mode.lower()
        if self.isdir(path):
...
@@ -1168,19 +1170,21 @@ class FTPFS(FS):
        return f

    @ftperrors
-   def setcontents(self, path, data, chunk_size=1024*64):
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024*64):
        path = normpath(path)
-       if isinstance(data, basestring):
-           data = StringIO(data)
+       data = iotools.make_bytes_io(data, encoding=encoding, errors=errors)
        self.refresh_dircache(dirname(path))
        self.ftp.storbinary('STOR %s' % _encode(path), data, blocksize=chunk_size)

    @ftperrors
-   def getcontents(self, path, mode="rb"):
+   def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
        path = normpath(path)
        contents = StringIO()
        self.ftp.retrbinary('RETR %s' % _encode(path), contents.write, blocksize=1024*64)
-       return contents.getvalue()
+       data = contents.getvalue()
+       if 'b' in data:
+           return data
+       return iotools.decode_binary(data, encoding=encoding, errors=errors)

    @ftperrors
    def exists(self, path):
...
fs/httpfs.py
View file @
3ea4efe1
...
@@ -8,9 +8,12 @@ fs.httpfs
from fs.base import FS
from fs.path import normpath
from fs.errors import ResourceNotFoundError, UnsupportedError
+from fs.filelike import FileWrapper
+from fs import iotools

from urllib2 import urlopen, URLError
from datetime import datetime
-from fs.filelike import FileWrapper


class HTTPFS(FS):
...
@@ -22,8 +25,8 @@ class HTTPFS(FS):
    """

    _meta = {'read_only': True,
-            'network': True,
-            }
+            'network': True}

    def __init__(self, url):
        """
...
@@ -38,7 +41,8 @@ class HTTPFS(FS):
        url = '%s/%s' % (self.root_url.rstrip('/'), path.lstrip('/'))
        return url

-   def open(self, path, mode="r"):
+   @iotools.filelike_to_stream
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        if '+' in mode or 'w' in mode or 'a' in mode:
            raise UnsupportedError('write')
...
fs/iotools.py
View file @
3ea4efe1
from __future__ import unicode_literals
from __future__ import print_function

import io
+from functools import wraps

+import six


class RawWrapper(object):
    """Convert a Python 2 style file-like object in to a IO object"""
    def __init__(self, f, mode=None, name=None):
        self._f = f
+       self.is_io = isinstance(f, io.IOBase)
        if mode is None and hasattr(f, 'mode'):
            mode = f.mode
        self.mode = mode
        self.name = name
        self.closed = False
        super(RawWrapper, self).__init__()

    def __repr__(self):
...
@@ -35,12 +39,18 @@ class RawWrapper(object):
        return self._f.seek(offset, whence)

    def readable(self):
+       if hasattr(self._f, 'readable'):
+           return self._f.readable()
        return 'r' in self.mode

    def writable(self):
+       if hasattr(self._f, 'writeable'):
+           return self._fs.writeable()
        return 'w' in self.mode

    def seekable(self):
+       if hasattr(self._f, 'seekable'):
+           return self._f.seekable()
        try:
            self.seek(0, io.SEEK_CUR)
        except IOError:
...
@@ -51,11 +61,14 @@ class RawWrapper(object):
    def tell(self):
        return self._f.tell()

-   def truncate(self, size):
+   def truncate(self, size=None):
        return self._f.truncate(size)

    def write(self, data):
-       return self._f.write(data)
+       if self.is_io:
+           return self._f.write(data)
+       self._f.write(data)
+       return len(data)

    def read(self, n=-1):
        if n == -1:
...
@@ -63,21 +76,21 @@ class RawWrapper(object):
        return self._f.read(n)

    def read1(self, n=-1):
+       if self.is_io:
+           return self.read1(n)
        return self.read(n)

    def readall(self):
        return self._f.read()

    def readinto(self, b):
+       if self.is_io:
+           return self._f.readinto(b)
        data = self._f.read(len(b))
        bytes_read = len(data)
        b[:len(data)] = data
        return bytes_read

-   def write(self, b):
-       bytes_written = self._f.write(b)
-       return bytes_written

    def writelines(self, sequence):
        return self._f.writelines(sequence)
...
@@ -87,6 +100,32 @@ class RawWrapper(object):
    def __exit__(self, *args, **kwargs):
        self.close()

+   def __iter__(self):
+       return iter(self._f)
+
+
+def filelike_to_stream(f):
+   @wraps(f)
+   def wrapper(self, path, mode='rt', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
+       file_like = f(self, path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
+       return make_stream(path, file_like, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering)
+   return wrapper
+
+
def make_stream(name,
                f,
...
@@ -95,9 +134,8 @@ def make_stream(name,
                encoding=None,
                errors=None,
                newline=None,
-               closefd=True,
                line_buffering=False,
-               **params):
+               **kwargs):
    """Take a Python 2.x binary file and returns an IO Stream"""
    r, w, a, binary = 'r' in mode, 'w' in mode, 'a' in mode, 'b' in mode
    if '+' in mode:
...
@@ -122,6 +160,51 @@ def make_stream(name,
    return io_object


+def decode_binary(data, encoding=None, errors=None, newline=None):
+   """Decode bytes as though read from a text file"""
+   return io.TextIOWrapper(io.BytesIO(data), encoding=encoding, errors=errors, newline=newline).read()
+
+
+def make_bytes_io(data, encoding=None, errors=None):
+   """Make a bytes IO object from either a string or an open file"""
+   if hasattr(data, 'mode') and 'b' in data.mode:
+       # It's already a binary file
+       return data
+   if not isinstance(data, basestring):
+       # It's a file, but we don't know if its binary
+       # TODO: Is there a better way than reading the entire file?
+       data = data.read() or b''
+   if isinstance(data, six.text_type):
+       # If its text, encoding in to bytes
+       data = data.encode(encoding=encoding, errors=errors)
+   return io.BytesIO(data)
+
+
+def copy_file_to_fs(f, fs, path, encoding=None, errors=None, progress_callback=None, chunk_size=64*1024):
+   """Copy an open file to a path on an FS"""
+   if progress_callback is None:
+       progress_callback = lambda bytes_written: None
+   read = f.read
+   chunk = read(chunk_size)
+   if isinstance(chunk, six.text_type):
+       f = fs.open(path, 'wt', encoding=encoding, errors=errors)
+   else:
+       f = fs.open(path, 'wb')
+   write = f.write
+   bytes_written = 0
+   try:
+       while chunk:
+           write(chunk)
+           bytes_written += len(chunk)
+           progress_callback(bytes_written)
+           chunk = read(chunk_size)
+   finally:
+       f.close()
+   return bytes_written
+
+
if __name__ == "__main__":
    print("Reading a binary file")
    bin_file = open('tests/data/UTF-8-demo.txt', 'rb')
...
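decode_binary and make_bytes_io added above are the helpers the rest of this commit leans on for the new encoding/errors arguments, while filelike_to_stream wraps a legacy open() so callers get an io-style stream. A small usage sketch of the two helpers, run under Python 2 (make_bytes_io still checks basestring):

from fs import iotools

# decode_binary: decode raw bytes the way a text-mode file read would
print(repr(iotools.decode_binary(b'caf\xc3\xa9', encoding='utf-8')))

# make_bytes_io: normalise text into a binary file object ready to upload or store
bio = iotools.make_bytes_io(u'caf\xe9', encoding='utf-8')
print(repr(bio.read()))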
fs/memoryfs.py
View file @
3ea4efe1
...
@@ -17,6 +17,7 @@ from fs.base import *
from fs.errors import *
from fs import _thread_synchronize_default
from fs.filelike import StringIO
+from fs import iotools

from os import SEEK_END
import threading
...
@@ -408,8 +409,10 @@ class MemoryFS(FS):
    #     for f in file_dir_entry.open_files[:]:
    #         f.close()

    @synchronize
-   def open(self, path, mode="r", **kwargs):
+   @iotools.filelike_to_stream
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        path = normpath(path)
        filepath, filename = pathsplit(path)
        parent_dir_entry = self._get_dir_entry(filepath)
...
@@ -455,7 +458,7 @@ class MemoryFS(FS):
            raise ResourceNotFoundError(path)
        if dir_entry.isdir():
            raise ResourceInvalidError(path, msg="That's a directory, not a file: %(path)s")
        pathname, dirname = pathsplit(path)
        parent_dir = self._get_dir_entry(pathname)
...
@@ -628,27 +631,43 @@ class MemoryFS(FS):
            dst_dir_entry.xattrs.update(src_xattrs)

    @synchronize
-   def getcontents(self, path, mode="rb"):
+   def getcontents(self, path, mode="rb", encoding=None, errors=None, newline=None):
        dir_entry = self._get_dir_entry(path)
        if dir_entry is None:
            raise ResourceNotFoundError(path)
        if not dir_entry.isfile():
            raise ResourceInvalidError(path, msg="not a file: %(path)s")
-       return dir_entry.data or b('')
+       data = dir_entry.data or b('')
+       if 'b' not in mode:
+           return iotools.decode_binary(data, encoding=encoding, errors=errors, newline=newline)
+       return data

    @synchronize
-   def setcontents(self, path, data, chunk_size=1024*64):
-       if not isinstance(data, six.binary_type):
-           return super(MemoryFS, self).setcontents(path, data, chunk_size)
-       if not self.exists(path):
-           self.open(path, 'wb').close()
-       dir_entry = self._get_dir_entry(path)
-       if not dir_entry.isfile():
-           raise ResourceInvalidError('Not a directory %(path)s', path)
-       new_mem_file = StringIO()
-       new_mem_file.write(data)
-       dir_entry.mem_file = new_mem_file
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=1024*64):
+       if isinstance(data, six.binary_type):
+           if not self.exists(path):
+               self.open(path, 'wb').close()
+           dir_entry = self._get_dir_entry(path)
+           if not dir_entry.isfile():
+               raise ResourceInvalidError('Not a directory %(path)s', path)
+           new_mem_file = StringIO()
+           new_mem_file.write(data)
+           dir_entry.mem_file = new_mem_file
+           return len(data)
+       return super(MemoryFS, self).setcontents(path, data=data, encoding=encoding, errors=errors, chunk_size=chunk_size)
+       # if isinstance(data, six.text_type):
+       #     return super(MemoryFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
+       # if not self.exists(path):
+       #     self.open(path, 'wb').close()
+       # dir_entry = self._get_dir_entry(path)
+       # if not dir_entry.isfile():
+       #     raise ResourceInvalidError('Not a directory %(path)s', path)
+       # new_mem_file = StringIO()
+       # new_mem_file.write(data)
+       # dir_entry.mem_file = new_mem_file

    @synchronize
    def setxattr(self, path, key, value):
...
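With the signatures above, MemoryFS keeps a fast path for bytes and hands text (plus the new encoding/errors arguments) to the base-class setcontents, which this commit also updates in fs/base.py. A short usage sketch, assuming that base-class behaviour:

from fs.memoryfs import MemoryFS

mem = MemoryFS()

mem.setcontents('data.bin', b'\x00\x01\x02')                 # binary fast path shown above
print(repr(mem.getcontents('data.bin', mode='rb')))           # raw bytes back out

mem.setcontents('hello.txt', u'caf\xe9', encoding='utf-8')    # text goes via the base class
print(repr(mem.getcontents('hello.txt', mode='rt', encoding='utf-8')))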
fs/mountfs.py
View file @
3ea4efe1
...
@@ -46,6 +46,7 @@ from fs.base import *
from fs.errors import *
from fs.path import *
from fs import _thread_synchronize_default
+from fs import iotools


class DirMount(object):
...
@@ -286,7 +287,7 @@ class MountFS(FS):
    def makedir(self, path, recursive=False, allow_recreate=False):
        fs, _mount_path, delegate_path = self._delegate(path)
        if fs is self or fs is None:
            raise UnsupportedError("make directory", msg="Can only makedir for mounted paths")
        if not delegate_path:
            if allow_recreate:
                return
...
@@ -295,7 +296,7 @@ class MountFS(FS):
        return fs.makedir(delegate_path, recursive=recursive, allow_recreate=allow_recreate)

    @synchronize
-   def open(self, path, mode="r", **kwargs):
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        obj = self.mount_tree.get(path, None)
        if type(obj) is MountFS.FileMount:
            callable = obj.open_callable
...
@@ -309,20 +310,24 @@ class MountFS(FS):
        return fs.open(delegate_path, mode, **kwargs)

    @synchronize
-   def setcontents(self, path, data, chunk_size=64*1024):
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
        obj = self.mount_tree.get(path, None)
        if type(obj) is MountFS.FileMount:
-           return super(MountFS, self).setcontents(path, data, chunk_size=chunk_size)
+           return super(MountFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)
        fs, _mount_path, delegate_path = self._delegate(path)
        if fs is self or fs is None:
            raise ParentDirectoryMissingError(path)
-       return fs.setcontents(delegate_path, data, chunk_size)
+       return fs.setcontents(delegate_path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)

    @synchronize
    def createfile(self, path, wipe=False):
        obj = self.mount_tree.get(path, None)
        if type(obj) is MountFS.FileMount:
            return super(MountFS, self).createfile(path, wipe=wipe)
        fs, _mount_path, delegate_path = self._delegate(path)
        if fs is self or fs is None:
            raise ParentDirectoryMissingError(path)
...
@@ -430,7 +435,7 @@ class MountFS(FS):
        """Unmounts a path.

        :param path: Path to unmount
-       :return: True if a dir was unmounted, False if the path was already unmounted
+       :return: True if a path was unmounted, False if the path was already unmounted
        :rtype: bool

        """
...
fs/multifs.py
View file @
3ea4efe1
...
@@ -238,14 +238,14 @@ class MultiFS(FS):
        return "%s, on %s (%s)" % (fs.desc(path), name, fs)

    @synchronize
-   def open(self, path, mode="r", **kwargs):
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        if 'w' in mode or '+' in mode or 'a' in mode:
            if self.writefs is None:
                raise OperationFailedError('open', path=path, msg="No writeable FS set")
-           return self.writefs.open(path, mode)
+           return self.writefs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)

        for fs in self:
            if fs.exists(path):
-               fs_file = fs.open(path, mode, **kwargs)
+               fs_file = fs.open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
                return fs_file

        raise ResourceNotFoundError(path)
...
fs/opener.py
View file @
3ea4efe1
...
@@ -250,7 +250,7 @@ class OpenerRegistry(object):
        return fs, fs_path or ''

-   def open(self, fs_url, mode='rb'):
+   def open(self, fs_url, mode='r', **kwargs):
        """Opens a file from a given FS url

        If you intend to do a lot of file manipulation, it would likely be more
...
@@ -271,15 +271,14 @@ class OpenerRegistry(object):
        file_object.fs = fs
        return file_object

-   def getcontents(self, fs_url, mode="rb"):
+   def getcontents(self, fs_url, node='rb', encoding=None, errors=None, newline=None):
        """Gets the contents from a given FS url (if it references a file)

        :param fs_url: a FS URL e.g. ftp://ftp.mozilla.org/README

        """
        fs, path = self.parse(fs_url)
-       return fs.getcontents(path, mode)
+       return fs.getcontents(path, mode, encoding=encoding, errors=errors, newline=newline)

    def opendir(self, fs_url, writeable=True, create_dir=False):
        """Opens an FS object from an FS URL
...
fs/osfs/__init__.py
View file @
3ea4efe1
...
@@ -20,6 +20,7 @@ import sys
import errno
import datetime
import platform
+import io

from fs.base import *
from fs.path import *
...
@@ -76,16 +77,15 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
    methods in the os and os.path modules.
    """

    _meta = {'thread_safe': True,
             'network': False,
             'virtual': False,
             'read_only': False,
             'unicode_paths': os.path.supports_unicode_filenames,
             'case_insensitive_paths': os.path.normcase('Aa') == 'aa',
             'atomic.makedir': True,
             'atomic.rename': True,
-            'atomic.setcontents': False,
-            }
+            'atomic.setcontents': False}

    if platform.system() == 'Windows':
        _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
...
@@ -215,11 +215,11 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
        return super(OSFS, self).getmeta(meta_name, default)

    @convert_os_errors
-   def open(self, path, mode="r", **kwargs):
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        mode = ''.join(c for c in mode if c in 'rwabt+')
        sys_path = self.getsyspath(path)
        try:
-           return open(sys_path, mode, kwargs.get("buffering", -1))
+           return io.open(sys_path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline)
        except EnvironmentError, e:
            # Win32 gives EACCES when opening a directory.
            if sys.platform == "win32" and e.errno in (errno.EACCES,):
...
@@ -228,8 +228,8 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
                raise

    @convert_os_errors
-   def setcontents(self, path, contents, chunk_size=64*1024):
-       return super(OSFS, self).setcontents(path, contents, chunk_size)
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
+       return super(OSFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)

    @convert_os_errors
    def exists(self, path):
...
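Since OSFS.open now delegates to io.open, text-mode opens get real encoding and newline handling on Python 2 as well, and binary mode returns an io stream. A brief sketch (the temporary directory and file name are illustrative):

import tempfile
from fs.osfs import OSFS

root = OSFS(tempfile.mkdtemp())
with root.open('example.txt', 'wt', encoding='utf-8') as f:
    f.write(u'caf\xe9\n')
with root.open('example.txt', 'rb') as f:
    print(repr(f.read()))    # UTF-8 encoded bytes, via the io module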
fs/remote.py
View file @
3ea4efe1
...
@@ -41,6 +41,7 @@ _SENTINAL = object()
from six import PY3, b


class RemoteFileBuffer(FileWrapper):
    """File-like object providing buffer for local file operations.
...
@@ -82,7 +83,7 @@ class RemoteFileBuffer(FileWrapper):
        self._readlen = 0   # How many bytes already loaded from rfile
        self._rfile = None  # Reference to remote file object
        self._eof = False   # Reached end of rfile?
        if getattr(fs, "_lock", None) is not None:
            self._lock = fs._lock.__class__()
        else:
            self._lock = threading.RLock()
...
@@ -315,8 +316,8 @@ class ConnectionManagerFS(LazyFS):
        self._poll_sleeper = threading.Event()
        self.connected = connected

-   def setcontents(self, path, data, chunk_size=64*1024):
-       return self.wrapped_fs.setcontents(path, data, chunk_size=chunk_size)
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
+       return self.wrapped_fs.setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)

    def __getstate__(self):
        state = super(ConnectionManagerFS, self).__getstate__()
...
@@ -536,12 +537,12 @@ class CacheFSMixin(FS):
        except KeyError:
            pass

-   def open(self, path, mode="r", **kwds):
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        # Try to validate the entry using the cached info
        try:
            ci = self.__get_cached_info(path)
        except KeyError:
            if path in ("", "/"):
                raise ResourceInvalidError(path)
            try:
                ppath = dirname(path)
...
@@ -549,38 +550,38 @@ class CacheFSMixin(FS):
            except KeyError:
                pass
            else:
                if not fs.utils.isdir(super(CacheFSMixin, self), ppath, pci.info):
                    raise ResourceInvalidError(path)
                if pci.has_full_children:
                    raise ResourceNotFoundError(path)
        else:
            if not fs.utils.isfile(super(CacheFSMixin, self), path, ci.info):
                raise ResourceInvalidError(path)
-       f = super(CacheFSMixin, self).open(path, mode, **kwds)
+       f = super(CacheFSMixin, self).open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
        if "w" in mode or "a" in mode or "+" in mode:
            with self.__cache_lock:
                self.__cache.clear(path)
        f = self._CacheInvalidatingFile(self, path, f, mode)
        return f

    class _CacheInvalidatingFile(FileWrapper):
        def __init__(self, owner, path, wrapped_file, mode=None):
            self.path = path
            sup = super(CacheFSMixin._CacheInvalidatingFile, self)
            sup.__init__(wrapped_file, mode)
            self.owner = owner
        def _write(self, string, flushing=False):
            with self.owner._CacheFSMixin__cache_lock:
                self.owner._CacheFSMixin__cache.clear(self.path)
            sup = super(CacheFSMixin._CacheInvalidatingFile, self)
            return sup._write(string, flushing=flushing)
        def _truncate(self, size):
            with self.owner._CacheFSMixin__cache_lock:
                self.owner._CacheFSMixin__cache.clear(self.path)
            sup = super(CacheFSMixin._CacheInvalidatingFile, self)
            return sup._truncate(size)

    def exists(self, path):
        try:
            self.getinfo(path)
        except ResourceNotFoundError:
...
@@ -588,7 +589,7 @@ class CacheFSMixin(FS):
        else:
            return True

    def isdir(self, path):
        try:
            self.__cache.iternames(path).next()
            return True
...
@@ -601,9 +602,9 @@ class CacheFSMixin(FS):
        except ResourceNotFoundError:
            return False
        else:
            return fs.utils.isdir(super(CacheFSMixin, self), path, info)

    def isfile(self, path):
        try:
            self.__cache.iternames(path).next()
            return False
...
@@ -616,17 +617,17 @@ class CacheFSMixin(FS):
        except ResourceNotFoundError:
            return False
        else:
            return fs.utils.isfile(super(CacheFSMixin, self), path, info)

    def getinfo(self, path):
        try:
            ci = self.__get_cached_info(path)
            if not ci.has_full_info:
                raise KeyError
            info = ci.info
        except KeyError:
            info = super(CacheFSMixin, self).getinfo(path)
            self.__set_cached_info(path, CachedInfo(info))
        return info

    def listdir(self, path="", *args, **kwds):
...
@@ -670,9 +671,9 @@ class CacheFSMixin(FS):
    def getsize(self, path):
        return self.getinfo(path)["size"]

-   def setcontents(self, path, contents=b(""), chunk_size=64*1024):
+   def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
        supsc = super(CacheFSMixin, self).setcontents
-       res = supsc(path, contents, chunk_size=chunk_size)
+       res = supsc(path, data, encoding=None, errors=None, chunk_size=chunk_size)
        with self.__cache_lock:
            self.__cache.clear(path)
            self.__cache[path] = CachedInfo.new_file_stub()
...
fs/rpcfs.py
View file @
3ea4efe1
...
@@ -10,36 +10,42 @@ class from the :mod:`fs.expose.xmlrpc` module.
import xmlrpclib
import socket
import threading
+import base64

from fs.base import *
from fs.errors import *
from fs.path import *
+from fs import iotools

from fs.filelike import StringIO

import six
from six import PY3, b


def re_raise_faults(func):
    """Decorator to re-raise XML-RPC faults as proper exceptions."""
    def wrapper(*args, **kwds):
        try:
            return func(*args, **kwds)
        except (xmlrpclib.Fault), f:
+           #raise
            # Make sure it's in a form we can handle
+           print f.faultString
            bits = f.faultString.split(" ")
            if bits[0] not in ["<type", "<class"]:
                raise f
            # Find the class/type object
            bits = " ".join(bits[1:]).split(">:")
            cls = bits[0]
            msg = ">:".join(bits[1:])
            cls = cls.strip('\'')
+           print "-" + cls
            cls = _object_by_name(cls)
            # Re-raise using the remainder of the fault code as message
            if cls:
                if issubclass(cls, FSError):
                    raise cls('', msg=msg)
                else:
                    raise cls(msg)
...
@@ -49,7 +55,7 @@ def re_raise_faults(func):
    return wrapper


def _object_by_name(name, root=None):
    """Look up an object by dotted-name notation."""
    bits = name.split(".")
    if root is None:
...
@@ -59,11 +65,11 @@ def _object_by_name(name,root=None):
        try:
            obj = __builtins__[bits[0]]
        except KeyError:
            obj = __import__(bits[0], globals())
    else:
        obj = getattr(root, bits[0])
    if len(bits) > 1:
        return _object_by_name(".".join(bits[1:]), obj)
    else:
        return obj
...
@@ -71,11 +77,11 @@ def _object_by_name(name,root=None):
class ReRaiseFaults:
    """XML-RPC proxy wrapper that re-raises Faults as proper Exceptions."""

    def __init__(self, obj):
        self._obj = obj

    def __getattr__(self, attr):
        val = getattr(self._obj, attr)
        if callable(val):
            val = re_raise_faults(val)
            self.__dict__[attr] = val
...
@@ -120,9 +126,9 @@ class RPCFS(FS):
        kwds = dict(allow_none=True, use_datetime=True)
        if self._transport is not None:
            proxy = xmlrpclib.ServerProxy(self.uri, self._transport, **kwds)
        else:
            proxy = xmlrpclib.ServerProxy(self.uri, **kwds)
        return ReRaiseFaults(proxy)
...
@@ -134,7 +140,7 @@ class RPCFS(FS):
    @synchronize
    def __getstate__(self):
        state = super(RPCFS, self).__getstate__()
        try:
            del state['proxy']
        except KeyError:
...
@@ -152,15 +158,11 @@ class RPCFS(FS):
        must return something that can be represented in ASCII. The default
        is base64-encoded UTF8.
        """
-       if PY3:
-           return path
-       return path.encode("utf8").encode("base64")
+       return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')

    def decode_path(self, path):
        """Decode paths arriving over the wire."""
-       if PY3:
-           return path
-       return path.decode("base64").decode("utf8")
+       return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')

    @synchronize
    def getmeta(self, meta_name, default=NoDefaultMeta):
...
@@ -170,7 +172,7 @@ class RPCFS(FS):
            meta = self.proxy.getmeta_default(meta_name, default)
        if isinstance(meta, basestring):
            # To allow transport of meta with invalid xml chars (like null)
-           meta = meta.encode('base64')
+           meta = self.encode_path(meta)
        return meta

    @synchronize
...
@@ -178,37 +180,40 @@ class RPCFS(FS):
        return self.proxy.hasmeta(meta_name)

    @synchronize
-   def open(self, path, mode="r"):
+   @iotools.filelike_to_stream
+   def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        # TODO: chunked transport of large files
-       path = self.encode_path(path)
+       epath = self.encode_path(path)
        if "w" in mode:
-           self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
+           self.proxy.set_contents(epath, xmlrpclib.Binary(b("")))
        if "r" in mode or "a" in mode or "+" in mode:
            try:
-               data = self.proxy.get_contents(path, "rb").data
+               data = self.proxy.get_contents(epath, "rb").data
            except IOError:
                if "w" not in mode and "a" not in mode:
                    raise ResourceNotFoundError(path)
                if not self.isdir(dirname(path)):
                    raise ParentDirectoryMissingError(path)
                self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
        else:
            data = b("")
        f = StringIO(data)
        if "a" not in mode:
            f.seek(0, 0)
        else:
            f.seek(0, 2)
        oldflush = f.flush
        oldclose = f.close
        oldtruncate = f.truncate

        def newflush():
            self._lock.acquire()
            try:
                oldflush()
-               self.proxy.set_contents(path, xmlrpclib.Binary(f.getvalue()))
+               self.proxy.set_contents(epath, xmlrpclib.Binary(f.getvalue()))
            finally:
                self._lock.release()

        def newclose():
            self._lock.acquire()
            try:
...
@@ -216,6 +221,7 @@ class RPCFS(FS):
                oldclose()
            finally:
                self._lock.release()

        def newtruncate(size=None):
            self._lock.acquire()
            try:
...
@@ -248,24 +254,32 @@ class RPCFS(FS):
    def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
        enc_path = self.encode_path(path)
        if not callable(wildcard):
-           entries = self.proxy.listdir(enc_path, wildcard, full, absolute,
-                                        dirs_only, files_only)
+           entries = self.proxy.listdir(enc_path,
+                                        wildcard,
+                                        full,
+                                        absolute,
+                                        dirs_only,
+                                        files_only)
            entries = [self.decode_path(e) for e in entries]
        else:
-           entries = self.proxy.listdir(enc_path, None, False, False,
-                                        dirs_only, files_only)
+           entries = self.proxy.listdir(enc_path,
+                                        None,
+                                        False,
+                                        False,
+                                        dirs_only,
+                                        files_only)
            entries = [self.decode_path(e) for e in entries]
            entries = [e for e in entries if wildcard(e)]
        if full:
            entries = [relpath(pathjoin(path, e)) for e in entries]
        elif absolute:
            entries = [abspath(pathjoin(path, e)) for e in entries]
        return entries

    @synchronize
    def makedir(self, path, recursive=False, allow_recreate=False):
        path = self.encode_path(path)
        return self.proxy.makedir(path, recursive, allow_recreate)

    @synchronize
    def remove(self, path):
...
@@ -275,13 +289,13 @@ class RPCFS(FS):
    @synchronize
    def removedir(self, path, recursive=False, force=False):
        path = self.encode_path(path)
        return self.proxy.removedir(path, recursive, force)

    @synchronize
    def rename(self, src, dst):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.rename(src, dst)

    @synchronize
    def settimes(self, path, accessed_time, modified_time):
...
@@ -302,19 +316,19 @@ class RPCFS(FS):
    def getxattr(self, path, attr, default=None):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.fs.getxattr(path, attr, default)

    @synchronize
    def setxattr(self, path, attr, value):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.fs.setxattr(path, attr, value)

    @synchronize
    def delxattr(self, path, attr):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.fs.delxattr(path, attr)

    @synchronize
    def listxattrs(self, path):
...
@@ -325,13 +339,13 @@ class RPCFS(FS):
    def copy(self, src, dst, overwrite=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.copy(src, dst, overwrite, chunk_size)

    @synchronize
    def move(self, src, dst, overwrite=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.move(src, dst, overwrite, chunk_size)

    @synchronize
    def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
...
@@ -343,6 +357,4 @@ class RPCFS(FS):
    def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.copydir(
src
,
dst
,
overwrite
,
ignore_errors
,
chunk_size
)
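The listdir() hunk above keeps callable wildcards on the client: XML-RPC can marshal plain strings but not functions, so a predicate has to be applied after the proxy returns its entries. A minimal standalone illustration of that split (not part of the pyfs API; fnmatch stands in for the server-side pattern matching):

import fnmatch

def filter_entries(entries, wildcard):
    # Sketch only: strings travel over XML-RPC, functions do not.
    if wildcard is None:
        return list(entries)
    if callable(wildcard):
        # A predicate can only run locally, after the full listing arrives.
        return [e for e in entries if wildcard(e)]
    # A glob pattern is just a string and could be evaluated remotely.
    return [e for e in entries if fnmatch.fnmatch(e, wildcard)]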
fs/s3fs.py View file @ 3ea4efe1
...
@@ -26,7 +26,9 @@ from fs.path import *
from fs.errors import *
from fs.remote import *
from fs.filelike import LimitBytesFile
+from fs import iotools

+import six

# Boto is not thread-safe, so we need to use a per-thread S3 connection.
if hasattr(threading, "local"):
...
@@ -253,9 +255,9 @@ class S3FS(FS):
        k = self._s3bukt.get_key(s3path)
        # Is there AllUsers group with READ permissions?
        is_public = True in [grant.permission == 'READ' and
                             grant.uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
                             for grant in k.get_acl().acl.grants]
        url = k.generate_url(expires, force_http=is_public)
...
@@ -270,11 +272,14 @@ class S3FS(FS):
        return url

-    def setcontents(self, path, data, chunk_size=64 * 1024):
+    def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64 * 1024):
        s3path = self._s3path(path)
+       if isinstance(data, six.text_type):
+           data = data.encode(encoding=encoding, errors=errors)
        self._sync_set_contents(s3path, data)

-    def open(self, path, mode="r"):
+    @iotools.filelike_to_stream
+    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        """Open the named file in the given mode.

        This method downloads the file contents into a local temporary file
...
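The reworked setcontents()/open() signatures above are what carry the new io-style text handling. A small usage sketch, assuming a MemoryFS instance (any FS object with the new signatures would behave the same way):

from fs.memoryfs import MemoryFS

mem = MemoryFS()
# Unicode text is encoded before storage when an encoding is given.
mem.setcontents("hello.txt", u"caf\xe9", encoding="utf-8")
# Text mode returns an io-style stream that decodes on read.
with mem.open("hello.txt", "r", encoding="utf-8") as f:
    print(f.read())
# Binary mode still returns raw bytes.
with mem.open("hello.txt", "rb") as f:
    print(f.read())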
fs/sftpfs.py View file @ 3ea4efe1
...
@@ -19,6 +19,8 @@ from fs.base import *
from fs.path import *
from fs.errors import *
from fs.utils import isdir, isfile
+from fs import iotools


class WrongHostKeyError(RemoteConnectionError):
    pass
...
@@ -108,7 +110,6 @@ class SFTPFS(FS):
        if other authentication is not succesful
        """
        credentials = dict(username=username,
                           password=password,
                           pkey=pkey)
...
@@ -300,12 +301,12 @@ class SFTPFS(FS):
            self._transport.close()
        self.closed = True

    def _normpath(self, path):
        if not isinstance(path, unicode):
            path = path.decode(self.encoding)
        npath = pathjoin(self.root_path, relpath(normpath(path)))
        if not isprefix(self.root_path, npath):
            raise PathError(path, msg="Path is outside root: %(path)s")
        return npath

    def getpathurl(self, path, allow_none=False):
...
@@ -325,17 +326,19 @@ class SFTPFS(FS):

    @synchronize
    @convert_os_errors
-    def open(self, path, mode="rb", bufsize=-1):
+    @iotools.filelike_to_stream
+    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, bufsize=-1, **kwargs):
        npath = self._normpath(path)
        if self.isdir(path):
            msg = "that's a directory: %(path)s"
            raise ResourceInvalidError(path, msg=msg)
        # paramiko implements its own buffering and write-back logic,
        # so we don't need to use a RemoteFileBuffer here.
        f = self.client.open(npath, mode, bufsize)
        # Unfortunately it has a broken truncate() method.
        # TODO: implement this as a wrapper
        old_truncate = f.truncate

        def new_truncate(size=None):
            if size is None:
                size = f.tell()
...
@@ -354,7 +357,7 @@ class SFTPFS(FS):

    @synchronize
    @convert_os_errors
    def exists(self, path):
        if path in ('', '/'):
            return True
        npath = self._normpath(path)
...
@@ -369,7 +372,7 @@ class SFTPFS(FS):

    @synchronize
    @convert_os_errors
    def isdir(self, path):
-       if path in ('', '/'):
+       if normpath(path) in ('', '/'):
            return True
        npath = self._normpath(path)
        try:
...
@@ -378,7 +381,7 @@ class SFTPFS(FS):
            if getattr(e, "errno", None) == 2:
                return False
            raise
-       return statinfo.S_ISDIR(stat.st_mode)
+       return statinfo.S_ISDIR(stat.st_mode) != 0

    @synchronize
    @convert_os_errors
...
@@ -390,7 +393,7 @@ class SFTPFS(FS):
            if getattr(e, "errno", None) == 2:
                return False
            raise
-       return statinfo.S_ISREG(stat.st_mode)
+       return statinfo.S_ISREG(stat.st_mode) != 0

    @synchronize
    @convert_os_errors
...
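Both S3FS.open() and SFTPFS.open() above gain an @iotools.filelike_to_stream decorator instead of re-implementing text handling themselves. As a rough idea of what that kind of wrapper does (an illustrative sketch only, not the library's actual implementation), a raw binary file-like object can be re-exposed through the io module, with text modes handled by io.TextIOWrapper:

import io

def as_stream(fileobj, mode='r', encoding=None, errors=None, newline=None):
    # Sketch: binary modes pass the object through untouched,
    # text modes are decoded/encoded by TextIOWrapper.
    if 'b' in mode:
        return fileobj
    return io.TextIOWrapper(fileobj, encoding=encoding, errors=errors, newline=newline)

text = as_stream(io.BytesIO(b"hello"), 'r', encoding='utf-8')
print(text.read())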
fs/tempfs.py View file @ 3ea4efe1
...
@@ -10,13 +10,14 @@ import os
import os.path
import time
import tempfile
+import platform

+from fs.base import synchronize
from fs.osfs import OSFS
from fs.errors import *
from fs import _thread_synchronize_default


class TempFS(OSFS):
    """Create a Filesystem in a temporary directory (with tempfile.mkdtemp),
...
@@ -38,7 +39,7 @@ class TempFS(OSFS):
        self.identifier = identifier
        self.temp_dir = temp_dir
        self.dir_mode = dir_mode
        self._temp_dir = tempfile.mkdtemp(identifier or "TempFS", dir=temp_dir)
        self._cleaned = False
        super(TempFS, self).__init__(self._temp_dir, dir_mode=dir_mode, thread_synchronize=thread_synchronize)
...
@@ -65,6 +66,7 @@ class TempFS(OSFS):
        # dir_mode=self.dir_mode,
        # thread_synchronize=self.thread_synchronize)

+   @synchronize
    def close(self):
        """Removes the temporary directory.
...
@@ -73,13 +75,13 @@ class TempFS(OSFS):
        Note that once this method has been called, the FS object may
        no longer be used.
        """
        super(TempFS, self).close()
        # Depending on how resources are freed by the OS, there could
        # be some transient errors when freeing a TempFS soon after it
        # was used. If they occur, do a small sleep and try again.
        try:
            self._close()
        except (ResourceLockedError, ResourceInvalidError):
            time.sleep(0.5)
            self._close()
...
@@ -97,20 +99,23 @@ class TempFS(OSFS):
            try:
                # shutil.rmtree doesn't handle long paths on win32,
                # so we walk the tree by hand.
                entries = os.walk(self.root_path, topdown=False)
                for (dir, dirnames, filenames) in entries:
                    for filename in filenames:
                        try:
                            os_remove(os.path.join(dir, filename))
                        except ResourceNotFoundError:
                            pass
                    for dirname in dirnames:
                        try:
                            os_rmdir(os.path.join(dir, dirname))
                        except ResourceNotFoundError:
                            pass
+               try:
                    os.rmdir(self.root_path)
+               except OSError:
+                   pass
                self._cleaned = True
            finally:
                self._lock.release()
        super(TempFS, self).close()
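The close() logic above retries once after a short sleep because errors such as ResourceLockedError can be transient right after the temporary directory was used. The same idea as a tiny standalone helper (a hypothetical name, shown only to make the retry pattern explicit):

import time

def retry_once(func, exceptions, delay=0.5):
    # Call func, and if it fails with one of the given transient
    # exceptions, wait briefly and try exactly one more time.
    try:
        return func()
    except exceptions:
        time.sleep(delay)
        return func()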
fs/tests/__init__.py View file @ 3ea4efe1
...
@@ -11,7 +11,7 @@ from __future__ import with_statement
#  be captured by nose and reported appropriately
import sys
import logging
-#logging.basicConfig(level=logging.ERROR, stream=sys.stdout)
+logging.basicConfig(level=logging.ERROR, stream=sys.stdout)

from fs.base import *
from fs.path import *
...
@@ -20,7 +20,8 @@ from fs.filelike import StringIO
import datetime
import unittest
-import os, os.path
+import os
+import os.path
import pickle
import random
import copy
...
@@ -34,6 +35,7 @@ except ImportError:
import six
from six import PY3, b


class FSTestCases(object):
    """Base suite of testcases for filesystem implementations.
...
@@ -80,7 +82,6 @@ class FSTestCases(object):
        except NoMetaError:
            self.assertFalse(self.fs.hasmeta(meta_name))

    def test_root_dir(self):
        self.assertTrue(self.fs.isdir(""))
        self.assertTrue(self.fs.isdir("/"))
...
@@ -94,10 +95,10 @@ class FSTestCases(object):
        except NoSysPathError:
            pass
        else:
            self.assertTrue(isinstance(syspath, unicode))
        syspath = self.fs.getsyspath("/", allow_none=True)
        if syspath is not None:
            self.assertTrue(isinstance(syspath, unicode))

    def test_debug(self):
        str(self.fs)
...
@@ -119,49 +120,54 @@ class FSTestCases(object):
        assert False, "ResourceInvalidError was not raised"

    def test_writefile(self):
        self.assertRaises(ResourceNotFoundError, self.fs.open, "test1.txt")
        f = self.fs.open("test1.txt", "wb")
        f.write(b("testing"))
        f.close()
        self.assertTrue(self.check("test1.txt"))
        f = self.fs.open("test1.txt", "rb")
        self.assertEquals(f.read(), b("testing"))
        f.close()
        f = self.fs.open("test1.txt", "wb")
        f.write(b("test file overwrite"))
        f.close()
        self.assertTrue(self.check("test1.txt"))
        f = self.fs.open("test1.txt", "rb")
        self.assertEquals(f.read(), b("test file overwrite"))
        f.close()
    def test_setcontents(self):
        # setcontents() should accept both a string...
        self.fs.setcontents("hello", b("world"))
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents("hello", StringIO(b("to you, good sir!")))
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))
        # setcontents() should accept both a string...
        self.fs.setcontents("hello", b("world"), chunk_size=2)
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents("hello", StringIO(b("to you, good sir!")), chunk_size=2)
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))

    def test_setcontents_async(self):
        # setcontents() should accept both a string...
        self.fs.setcontents_async("hello", b("world")).wait()
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents_async("hello", StringIO(b("to you, good sir!"))).wait()
        self.assertEquals(self.fs.getcontents("hello"), b("to you, good sir!"))
        self.fs.setcontents_async("hello", b("world"), chunk_size=2).wait()
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents_async("hello", StringIO(b("to you, good sir!")), chunk_size=2).wait()
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("to you, good sir!"))

    def test_isdir_isfile(self):
        self.assertFalse(self.fs.exists("dir1"))
...
@@ -182,7 +188,7 @@ class FSTestCases(object):

    def test_listdir(self):
        def check_unicode(items):
            for item in items:
                self.assertTrue(isinstance(item, unicode))
        self.fs.setcontents(u"a", b(''))
        self.fs.setcontents("b", b(''))
        self.fs.setcontents("foo", b(''))
...
@@ -206,7 +212,7 @@ class FSTestCases(object):
        check_unicode(d2)
        # Create some deeper subdirectories, to make sure their
        # contents are not inadvertantly included
        self.fs.makedir("p/1/2/3", recursive=True)
        self.fs.setcontents("p/1/2/3/a", b(''))
        self.fs.setcontents("p/1/2/3/b", b(''))
        self.fs.setcontents("p/1/2/3/foo", b(''))
...
@@ -218,7 +224,7 @@ class FSTestCases(object):
        contains_a = self.fs.listdir(wildcard="*a*")
        self.assertEqual(sorted(dirs_only), [u"p", u"q"])
        self.assertEqual(sorted(files_only), [u"a", u"b", u"bar", u"foo"])
        self.assertEqual(sorted(contains_a), [u"a", u"bar"])
        check_unicode(dirs_only)
        check_unicode(files_only)
        check_unicode(contains_a)
...
@@ -237,16 +243,17 @@ class FSTestCases(object):
        self.assertEqual(sorted(d4), [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
        check_unicode(d4)
        # Test that appropriate errors are raised
        self.assertRaises(ResourceNotFoundError, self.fs.listdir, "zebra")
        self.assertRaises(ResourceInvalidError, self.fs.listdir, "foo")
    def test_listdirinfo(self):
        def check_unicode(items):
            for (nm, info) in items:
                self.assertTrue(isinstance(nm, unicode))

        def check_equal(items, target):
            names = [nm for (nm, info) in items]
            self.assertEqual(sorted(names), sorted(target))
        self.fs.setcontents(u"a", b(''))
        self.fs.setcontents("b", b(''))
        self.fs.setcontents("foo", b(''))
...
@@ -271,7 +278,7 @@ class FSTestCases(object):
        check_unicode(d2)
        # Create some deeper subdirectories, to make sure their
        # contents are not inadvertantly included
        self.fs.makedir("p/1/2/3", recursive=True)
        self.fs.setcontents("p/1/2/3/a", b(''))
        self.fs.setcontents("p/1/2/3/b", b(''))
        self.fs.setcontents("p/1/2/3/foo", b(''))
...
@@ -283,7 +290,7 @@ class FSTestCases(object):
        contains_a = self.fs.listdirinfo(wildcard="*a*")
        check_equal(dirs_only, [u"p", u"q"])
        check_equal(files_only, [u"a", u"b", u"bar", u"foo"])
        check_equal(contains_a, [u"a", u"bar"])
        check_unicode(dirs_only)
        check_unicode(files_only)
        check_unicode(contains_a)
...
@@ -302,20 +309,20 @@ class FSTestCases(object):
        check_equal(d4, [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
        check_unicode(d4)
        # Test that appropriate errors are raised
        self.assertRaises(ResourceNotFoundError, self.fs.listdirinfo, "zebra")
        self.assertRaises(ResourceInvalidError, self.fs.listdirinfo, "foo")

    def test_walk(self):
        self.fs.setcontents('a.txt', b('hello'))
        self.fs.setcontents('b.txt', b('world'))
        self.fs.makeopendir('foo').setcontents('c', b('123'))
        sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
        self.assertEquals(sorted_walk,
                          [("/", ["a.txt", "b.txt"]),
                           ("/foo", ["c"])])
        # When searching breadth-first, shallow entries come first
        found_a = False
        for _, files in self.fs.walk(search="breadth"):
            if "a.txt" in files:
                found_a = True
            if "c" in files:
...
@@ -323,12 +330,13 @@ class FSTestCases(object):
        assert found_a, "breadth search order was wrong"
        # When searching depth-first, deep entries come first
        found_c = False
        for _, files in self.fs.walk(search="depth"):
            if "c" in files:
                found_c = True
            if "a.txt" in files:
                break
        assert found_c, "depth search order was wrong: " + str(list(self.fs.walk(search="depth")))

    def test_walk_wildcard(self):
        self.fs.setcontents('a.txt', b('hello'))
...
@@ -338,7 +346,7 @@ class FSTestCases(object):
        for dir_path, paths in self.fs.walk(wildcard='*.txt'):
            for path in paths:
                self.assert_(path.endswith('.txt'))
        for dir_path, paths in self.fs.walk(wildcard=lambda fn: fn.endswith('.txt')):
            for path in paths:
                self.assert_(path.endswith('.txt'))
...
@@ -347,22 +355,28 @@ class FSTestCases(object):
        self.fs.setcontents('b.txt', b('world'))
        self.fs.makeopendir('foo').setcontents('c', b('123'))
        self.fs.makeopendir('.svn').setcontents('ignored', b(''))
        for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn: not fn.endswith('.svn')):
            for path in paths:
                self.assert_('.svn' not in path)
    def test_walkfiles(self):
        self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
        self.fs.makeopendir('foo').setcontents('b', b('123'))
        self.assertEquals(sorted(self.fs.walkfiles()), ["/bar/a.txt", "/foo/b"])
        self.assertEquals(sorted(self.fs.walkfiles(dir_wildcard="*foo*")), ["/foo/b"])
        self.assertEquals(sorted(self.fs.walkfiles(wildcard="*.txt")), ["/bar/a.txt"])

    def test_walkdirs(self):
        self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
        self.fs.makeopendir('foo').makeopendir("baz").setcontents('b', b('123'))
        self.assertEquals(sorted(self.fs.walkdirs()), ["/", "/bar", "/foo", "/foo/baz"])
        self.assertEquals(sorted(self.fs.walkdirs(wildcard="*foo*")), ["/", "/foo", "/foo/baz"])

    def test_unicode(self):
        alpha = u"\N{GREEK SMALL LETTER ALPHA}"
...
@@ -371,32 +385,33 @@ class FSTestCases(object):
        self.fs.setcontents(alpha + "/a", b(''))
        self.fs.setcontents(alpha + "/" + beta, b(''))
        self.assertTrue(self.check(alpha))
        self.assertEquals(sorted(self.fs.listdir(alpha)), ["a", beta])

    def test_makedir(self):
        check = self.check
        self.fs.makedir("a")
        self.assertTrue(check("a"))
        self.assertRaises(ParentDirectoryMissingError, self.fs.makedir, "a/b/c")
        self.fs.makedir("a/b/c", recursive=True)
        self.assert_(check("a/b/c"))
        self.fs.makedir("foo/bar/baz", recursive=True)
        self.assert_(check("foo/bar/baz"))
        self.fs.makedir("a/b/child")
        self.assert_(check("a/b/child"))
        self.assertRaises(DestinationExistsError, self.fs.makedir, "/a/b")
        self.fs.makedir("/a/b", allow_recreate=True)
        self.fs.setcontents("/a/file", b(''))
        self.assertRaises(ResourceInvalidError, self.fs.makedir, "a/file")

    def test_remove(self):
        self.fs.setcontents("a.txt", b(''))
        self.assertTrue(self.check("a.txt"))
        self.fs.remove("a.txt")
        self.assertFalse(self.check("a.txt"))
        self.assertRaises(ResourceNotFoundError, self.fs.remove, "a.txt")
        self.fs.makedir("dir1")
        self.assertRaises(ResourceInvalidError, self.fs.remove, "dir1")
        self.fs.setcontents("/dir1/a.txt", b(''))
        self.assertTrue(self.check("dir1/a.txt"))
        self.fs.remove("dir1/a.txt")
...
@@ -431,10 +446,11 @@ class FSTestCases(object):
        self.assert_(check("foo/file.txt"))
        # Ensure that force=True works as expected
        self.fs.makedir("frollic/waggle", recursive=True)
        self.fs.setcontents("frollic/waddle.txt", b("waddlewaddlewaddle"))
        self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "frollic")
        self.assertRaises(ResourceInvalidError, self.fs.removedir, "frollic/waddle.txt")
        self.fs.removedir("frollic", force=True)
        self.assert_(not check("frollic"))
        # Test removing unicode dirs
        kappa = u"\N{GREEK CAPITAL LETTER KAPPA}"
...
@@ -443,59 +459,64 @@ class FSTestCases(object):
        self.fs.removedir(kappa)
        self.assertRaises(ResourceNotFoundError, self.fs.removedir, kappa)
        self.assert_(not self.fs.isdir(kappa))
        self.fs.makedir(pathjoin("test", kappa), recursive=True)
        self.assert_(check(pathjoin("test", kappa)))
        self.fs.removedir("test", force=True)
        self.assert_(not check("test"))

    def test_rename(self):
        check = self.check
        # test renaming a file in the same directory
        self.fs.setcontents("foo.txt", b("Hello, World!"))
        self.assert_(check("foo.txt"))
        self.fs.rename("foo.txt", "bar.txt")
        self.assert_(check("bar.txt"))
        self.assert_(not check("foo.txt"))
        # test renaming a directory in the same directory
        self.fs.makedir("dir_a")
        self.fs.setcontents("dir_a/test.txt", b("testerific"))
        self.assert_(check("dir_a"))
        self.fs.rename("dir_a", "dir_b")
        self.assert_(check("dir_b"))
        self.assert_(check("dir_b/test.txt"))
        self.assert_(not check("dir_a/test.txt"))
        self.assert_(not check("dir_a"))
        # test renaming a file into a different directory
        self.fs.makedir("dir_a")
        self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
        self.assert_(not check("dir_b/test.txt"))
        self.assert_(check("dir_a/test.txt"))
        # test renaming a file into a non-existent directory
        self.assertRaises(ParentDirectoryMissingError, self.fs.rename, "dir_a/test.txt", "nonexistent/test.txt")
    def test_info(self):
        test_str = b("Hello, World!")
        self.fs.setcontents("info.txt", test_str)
        info = self.fs.getinfo("info.txt")
        self.assertEqual(info['size'], len(test_str))
        self.fs.desc("info.txt")
        self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "notafile")
        self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "info.txt/inval")

    def test_getsize(self):
        test_str = b("*") * 23
        self.fs.setcontents("info.txt", test_str)
        size = self.fs.getsize("info.txt")
        self.assertEqual(size, len(test_str))

    def test_movefile(self):
        check = self.check
        contents = b("If the implementation is hard to explain, it's a bad idea.")

        def makefile(path):
            self.fs.setcontents(path, contents)

        def checkcontents(path):
            check_contents = self.fs.getcontents(path, "rb")
            self.assertEqual(check_contents, contents)
            return contents == check_contents
        self.fs.makedir("foo/bar", recursive=True)
...
@@ -513,21 +534,23 @@ class FSTestCases(object):
        self.assert_(checkcontents("/c.txt"))
        makefile("foo/bar/a.txt")
        self.assertRaises(DestinationExistsError, self.fs.move, "foo/bar/a.txt", "/c.txt")
        self.assert_(check("foo/bar/a.txt"))
        self.assert_(check("/c.txt"))
        self.fs.move("foo/bar/a.txt", "/c.txt", overwrite=True)
        self.assert_(not check("foo/bar/a.txt"))
        self.assert_(check("/c.txt"))

    def test_movedir(self):
        check = self.check
        contents = b("If the implementation is hard to explain, it's a bad idea.")

        def makefile(path):
            self.fs.setcontents(path, contents)
        self.assertRaises(ResourceNotFoundError, self.fs.movedir, "a", "b")
        self.fs.makedir("a")
        self.fs.makedir("b")
        makefile("a/1.txt")
...
@@ -553,34 +576,37 @@ class FSTestCases(object):
        self.assert_(not check("a"))
        self.fs.makedir("a")
        self.assertRaises(DestinationExistsError, self.fs.movedir, "copy of a", "a")
        self.fs.movedir("copy of a", "a", overwrite=True)
        self.assert_(not check("copy of a"))
        self.assert_(check("a/1.txt"))
        self.assert_(check("a/2.txt"))
        self.assert_(check("a/3.txt"))
        self.assert_(check("a/foo/bar/baz.txt"))

    def test_cant_copy_from_os(self):
        sys_executable = os.path.abspath(os.path.realpath(sys.executable))
        self.assertRaises(FSError, self.fs.copy, sys_executable, "py.exe")

    def test_copyfile(self):
        check = self.check
        contents = b("If the implementation is hard to explain, it's a bad idea.")

        def makefile(path, contents=contents):
            self.fs.setcontents(path, contents)

        def checkcontents(path, contents=contents):
            check_contents = self.fs.getcontents(path, "rb")
            self.assertEqual(check_contents, contents)
            return contents == check_contents
        self.fs.makedir("foo/bar", recursive=True)
        makefile("foo/bar/a.txt")
        self.assert_(check("foo/bar/a.txt"))
        self.assert_(checkcontents("foo/bar/a.txt"))
        # import rpdb2; rpdb2.start_embedded_debugger('password');
        self.fs.copy("foo/bar/a.txt", "foo/b.txt")
        self.assert_(check("foo/bar/a.txt"))
        self.assert_(check("foo/b.txt"))
...
@@ -592,23 +618,26 @@ class FSTestCases(object):
        self.assert_(check("/c.txt"))
        self.assert_(checkcontents("/c.txt"))
        makefile("foo/bar/a.txt", b("different contents"))
        self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
        self.assertRaises(DestinationExistsError, self.fs.copy, "foo/bar/a.txt", "/c.txt")
        self.assert_(checkcontents("/c.txt"))
        self.fs.copy("foo/bar/a.txt", "/c.txt", overwrite=True)
        self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
        self.assert_(checkcontents("/c.txt", b("different contents")))

    def test_copydir(self):
        check = self.check
        contents = b("If the implementation is hard to explain, it's a bad idea.")

        def makefile(path):
            self.fs.setcontents(path, contents)

        def checkcontents(path):
            check_contents = self.fs.getcontents(path)
            self.assertEqual(check_contents, contents)
            return contents == check_contents
        self.fs.makedir("a")
...
@@ -632,8 +661,8 @@ class FSTestCases(object):
        self.assert_(check("a/foo/bar/baz.txt"))
        checkcontents("a/1.txt")
        self.assertRaises(DestinationExistsError, self.fs.copydir, "a", "b")
        self.fs.copydir("a", "b", overwrite=True)
        self.assert_(check("b/1.txt"))
        self.assert_(check("b/2.txt"))
        self.assert_(check("b/3.txt"))
...
@@ -642,9 +671,11 @@ class FSTestCases(object):

    def test_copydir_with_dotfile(self):
        check = self.check
        contents = b("If the implementation is hard to explain, it's a bad idea.")

        def makefile(path):
            self.fs.setcontents(path, contents)
        self.fs.makedir("a")
        makefile("a/1.txt")
...
@@ -663,7 +694,7 @@ class FSTestCases(object):

    def test_readwriteappendseek(self):
        def checkcontents(path, check_contents):
            read_contents = self.fs.getcontents(path, "rb")
            self.assertEqual(read_contents, check_contents)
            return read_contents == check_contents
        test_strings = [b("Beautiful is better than ugly."),
                        b("Explicit is better than implicit."),
...
@@ -688,11 +719,11 @@ class FSTestCases(object):
        self.assert_(checkcontents("b.txt", test_strings[0]))
        f3 = self.fs.open("b.txt", "ab")
        # On win32, tell() gives zero until you actually write to the file
        # self.assertEquals(f3.tell(),len(test_strings[0]))
        f3.write(test_strings[1])
        self.assertEquals(f3.tell(), len(test_strings[0]) + len(test_strings[1]))
        f3.write(test_strings[2])
        self.assertEquals(f3.tell(), len(all_strings))
        f3.close()
        self.assert_(checkcontents("b.txt", all_strings))
        f4 = self.fs.open("b.txt", "wb")
...
@@ -723,46 +754,45 @@ class FSTestCases(object):
def
test_truncate
(
self
):
def
test_truncate
(
self
):
def
checkcontents
(
path
,
check_contents
):
def
checkcontents
(
path
,
check_contents
):
read_contents
=
self
.
fs
.
getcontents
(
path
,
"rb"
)
read_contents
=
self
.
fs
.
getcontents
(
path
,
"rb"
)
self
.
assertEqual
(
read_contents
,
check_contents
)
self
.
assertEqual
(
read_contents
,
check_contents
)
return
read_contents
==
check_contents
return
read_contents
==
check_contents
self
.
fs
.
setcontents
(
"hello"
,
b
(
"world"
))
self
.
fs
.
setcontents
(
"hello"
,
b
(
"world"
))
checkcontents
(
"hello"
,
b
(
"world"
))
checkcontents
(
"hello"
,
b
(
"world"
))
self
.
fs
.
setcontents
(
"hello"
,
b
(
"hi"
))
self
.
fs
.
setcontents
(
"hello"
,
b
(
"hi"
))
checkcontents
(
"hello"
,
b
(
"hi"
))
checkcontents
(
"hello"
,
b
(
"hi"
))
self
.
fs
.
setcontents
(
"hello"
,
b
(
"1234567890"
))
self
.
fs
.
setcontents
(
"hello"
,
b
(
"1234567890"
))
checkcontents
(
"hello"
,
b
(
"1234567890"
))
checkcontents
(
"hello"
,
b
(
"1234567890"
))
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
f
.
truncate
(
7
)
f
.
truncate
(
7
)
checkcontents
(
"hello"
,
b
(
"1234567"
))
checkcontents
(
"hello"
,
b
(
"1234567"
))
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
f
.
seek
(
5
)
f
.
seek
(
5
)
f
.
truncate
()
f
.
truncate
()
checkcontents
(
"hello"
,
b
(
"12345"
))
checkcontents
(
"hello"
,
b
(
"12345"
))
def
test_truncate_to_larger_size
(
self
):
def
test_truncate_to_larger_size
(
self
):
with
self
.
fs
.
open
(
"hello"
,
"wb"
)
as
f
:
with
self
.
fs
.
open
(
"hello"
,
"wb"
)
as
f
:
f
.
truncate
(
30
)
f
.
truncate
(
30
)
self
.
assertEquals
(
self
.
fs
.
getsize
(
"hello"
),
30
)
self
.
assertEquals
(
self
.
fs
.
getsize
(
"hello"
),
30
)
# Some file systems (FTPFS) don't support both reading and writing
# Some file systems (FTPFS) don't support both reading and writing
if
self
.
fs
.
getmeta
(
'file.read_and_write'
,
True
):
if
self
.
fs
.
getmeta
(
'file.read_and_write'
,
True
):
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
with
self
.
fs
.
open
(
"hello"
,
"rb+"
)
as
f
:
f
.
seek
(
25
)
f
.
seek
(
25
)
f
.
write
(
b
(
"123456"
))
f
.
write
(
b
(
"123456"
))
with
self
.
fs
.
open
(
"hello"
,
"rb"
)
as
f
:
with
self
.
fs
.
open
(
"hello"
,
"rb"
)
as
f
:
f
.
seek
(
25
)
f
.
seek
(
25
)
self
.
assertEquals
(
f
.
read
(),
b
(
"123456"
))
self
.
assertEquals
(
f
.
read
(),
b
(
"123456"
))
def
test_write_past_end_of_file
(
self
):
def
test_write_past_end_of_file
(
self
):
if
self
.
fs
.
getmeta
(
'file.read_and_write'
,
True
):
if
self
.
fs
.
getmeta
(
'file.read_and_write'
,
True
):
with
self
.
fs
.
open
(
"write_at_end"
,
"wb"
)
as
f
:
with
self
.
fs
.
open
(
"write_at_end"
,
"wb"
)
as
f
:
f
.
seek
(
25
)
f
.
seek
(
25
)
f
.
write
(
b
(
"EOF"
))
f
.
write
(
b
(
"EOF"
))
with
self
.
fs
.
open
(
"write_at_end"
,
"rb"
)
as
f
:
with
self
.
fs
.
open
(
"write_at_end"
,
"rb"
)
as
f
:
self
.
assertEquals
(
f
.
read
(),
b
(
"
\x00
"
)
*
25
+
b
(
"EOF"
))
self
.
assertEquals
(
f
.
read
(),
b
(
"
\x00
"
)
*
25
+
b
(
"EOF"
))
def
test_with_statement
(
self
):
def
test_with_statement
(
self
):
# This is a little tricky since 'with' is actually new syntax.
# This is a little tricky since 'with' is actually new syntax.
...
@@ -775,7 +805,7 @@ class FSTestCases(object):
...
@@ -775,7 +805,7 @@ class FSTestCases(object):
code
+=
"with self.fs.open('f.txt','wb-') as testfile:
\n
"
code
+=
"with self.fs.open('f.txt','wb-') as testfile:
\n
"
code
+=
" testfile.write(contents)
\n
"
code
+=
" testfile.write(contents)
\n
"
code
+=
"self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)"
code
+=
"self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)"
code
=
compile
(
code
,
"<string>"
,
'exec'
)
code
=
compile
(
code
,
"<string>"
,
'exec'
)
eval
(
code
)
eval
(
code
)
# A 'with' statement raising an error
# A 'with' statement raising an error
contents
=
"testing the with statement"
contents
=
"testing the with statement"
...
@@ -783,42 +813,43 @@ class FSTestCases(object):
...
@@ -783,42 +813,43 @@ class FSTestCases(object):
code
+=
"with self.fs.open('f.txt','wb-') as testfile:
\n
"
code
+=
"with self.fs.open('f.txt','wb-') as testfile:
\n
"
code
+=
" testfile.write(contents)
\n
"
code
+=
" testfile.write(contents)
\n
"
code
+=
" raise ValueError
\n
"
code
+=
" raise ValueError
\n
"
code
=
compile
(
code
,
"<string>"
,
'exec'
)
code
=
compile
(
code
,
"<string>"
,
'exec'
)
self
.
assertRaises
(
ValueError
,
eval
,
code
,
globals
(),
locals
())
self
.
assertRaises
(
ValueError
,
eval
,
code
,
globals
(),
locals
())
self
.
assertEquals
(
self
.
fs
.
getcontents
(
'f.txt'
,
'rb'
),
contents
)
self
.
assertEquals
(
self
.
fs
.
getcontents
(
'f.txt'
,
'rb'
),
contents
)
    def test_pickling(self):
        if self.fs.getmeta('pickle_contents', True):
            self.fs.setcontents("test1", b("hello world"))
            fs2 = pickle.loads(pickle.dumps(self.fs))
            self.assert_(fs2.isfile("test1"))
            fs3 = pickle.loads(pickle.dumps(self.fs, -1))
            self.assert_(fs3.isfile("test1"))
        else:
            # Just make sure it doesn't throw an exception
            fs2 = pickle.loads(pickle.dumps(self.fs))

    def test_big_file(self):
        """Test handling of a big file (1MB)"""
        chunk_size = 1024 * 256
        num_chunks = 4

        def chunk_stream():
            """Generate predictable-but-randomy binary content."""
            r = random.Random(0)
            randint = r.randint
            int2byte = six.int2byte
            for _i in xrange(num_chunks):
                c = b("").join(int2byte(randint(0, 255)) for _j in xrange(chunk_size // 8))
                yield c * 8

        f = self.fs.open("bigfile", "wb")
        try:
            for chunk in chunk_stream():
                f.write(chunk)
        finally:
            f.close()
        chunks = chunk_stream()
        f = self.fs.open("bigfile", "rb")
        try:
            try:
                while True:
    ...
    @@ -854,17 +885,19 @@ class FSTestCases(object):
        self.assertRaises(RemoveRootError, self.fs.removedir, "/")


# May be disabled - see end of file
class ThreadingTestCases(object):
    """Testcases for thread-safety of FS implementations."""
    # These are either too slow to be worth repeating,
    # or cannot possibly break cross-thread.
    _dont_retest = ("test_pickling", "test_multiple_overwrite",)

    __lock = threading.RLock()

    def _yield(self):
        # time.sleep(0.001)
        # Yields without a delay
        time.sleep(0)
    ...
    @@ -874,7 +907,7 @@ class ThreadingTestCases(object):
    def _unlock(self):
        self.__lock.release()

    def _makeThread(self, func, errors):
        def runThread():
            try:
                func()
    ...
    @@ -884,74 +917,79 @@ class ThreadingTestCases(object):
        thread.daemon = True
        return thread

    def _runThreads(self, *funcs):
        check_interval = sys.getcheckinterval()
        sys.setcheckinterval(1)
        try:
            errors = []
            threads = [self._makeThread(f, errors) for f in funcs]
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            for (c, e, t) in errors:
-               raise c, e, t
+               raise e, None, t
        finally:
            sys.setcheckinterval(check_interval)
    def test_setcontents_threaded(self):
        def setcontents(name, contents):
            f = self.fs.open(name, "wb")
            self._yield()
            try:
                f.write(contents)
                self._yield()
            finally:
                f.close()

        def thread1():
            c = b("thread1 was 'ere")
            setcontents("thread1.txt", c)
            self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)

        def thread2():
            c = b("thread2 was 'ere")
            setcontents("thread2.txt", c)
            self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
        self._runThreads(thread1, thread2)

    def test_setcontents_threaded_samefile(self):
        def setcontents(name, contents):
            f = self.fs.open(name, "wb")
            self._yield()
            try:
                f.write(contents)
                self._yield()
            finally:
                f.close()

        def thread1():
            c = b("thread1 was 'ere")
            setcontents("threads.txt", c)
            self._yield()
            self.assertEquals(self.fs.listdir("/"), ["threads.txt"])

        def thread2():
            c = b("thread2 was 'ere")
            setcontents("threads.txt", c)
            self._yield()
            self.assertEquals(self.fs.listdir("/"), ["threads.txt"])

        def thread3():
            c = b("thread3 was 'ere")
            setcontents("threads.txt", c)
            self._yield()
            self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
        try:
            self._runThreads(thread1, thread2, thread3)
        except ResourceLockedError:
            # that's ok, some implementations don't support concurrent writes
            pass
    def test_cases_in_separate_dirs(self):
        class TestCases_in_subdir(self.__class__, unittest.TestCase):
            """Run all testcases against a subdir of self.fs"""
            def __init__(this, subdir):
                super(TestCases_in_subdir, this).__init__("test_listdir")
                this.subdir = subdir
                for meth in dir(this):
    ...
    @@ -959,113 +997,136 @@ class ThreadingTestCases(object):
                        continue
                    if meth in self._dont_retest:
                        continue
                    if not hasattr(FSTestCases, meth):
                        continue
                    if self.fs.exists(subdir):
                        self.fs.removedir(subdir, force=True)
                    self.assertFalse(self.fs.isdir(subdir))
                    self.assertTrue(self.fs.isdir("/"))
                    self.fs.makedir(subdir)
                    self._yield()
                    getattr(this, meth)()

            @property
            def fs(this):
                return self.fs.opendir(this.subdir)

            def check(this, p):
                return self.check(pathjoin(this.subdir, relpath(p)))

        def thread1():
            TestCases_in_subdir("thread1")

        def thread2():
            TestCases_in_subdir("thread2")

        def thread3():
            TestCases_in_subdir("thread3")
        self._runThreads(thread1, thread2, thread3)
    def test_makedir_winner(self):
        errors = []

        def makedir():
            try:
                self.fs.makedir("testdir")
            except DestinationExistsError, e:
                errors.append(e)

        def makedir_noerror():
            try:
                self.fs.makedir("testdir", allow_recreate=True)
            except DestinationExistsError, e:
                errors.append(e)

        def removedir():
            try:
                self.fs.removedir("testdir")
            except (ResourceNotFoundError, ResourceLockedError), e:
                errors.append(e)

        # One thread should succeed, one should error
        self._runThreads(makedir, makedir)
        self.assertEquals(len(errors), 1)
        self.fs.removedir("testdir")

        # One thread should succeed, two should error
        errors = []
        self._runThreads(makedir, makedir, makedir)
        if len(errors) != 2:
            raise AssertionError(errors)
        self.fs.removedir("testdir")

        # All threads should succeed
        errors = []
        self._runThreads(makedir_noerror, makedir_noerror, makedir_noerror)
        self.assertEquals(len(errors), 0)
        self.assertTrue(self.fs.isdir("testdir"))
        self.fs.removedir("testdir")

        # makedir() can beat removedir() and vice-versa
        errors = []
        self._runThreads(makedir, removedir)
        if self.fs.isdir("testdir"):
            self.assertEquals(len(errors), 1)
            self.assertFalse(isinstance(errors[0], DestinationExistsError))
            self.fs.removedir("testdir")
        else:
            self.assertEquals(len(errors), 0)
    def test_concurrent_copydir(self):
        self.fs.makedir("a")
        self.fs.makedir("a/b")
        self.fs.setcontents("a/hello.txt", b("hello world"))
        self.fs.setcontents("a/guido.txt", b("is a space alien"))
        self.fs.setcontents("a/b/parrot.txt", b("pining for the fiords"))

        def copydir():
            self._yield()
            self.fs.copydir("a", "copy of a")

        def copydir_overwrite():
            self._yield()
            self.fs.copydir("a", "copy of a", overwrite=True)

        # This should error out since we're not overwriting
        self.assertRaises(DestinationExistsError, self._runThreads, copydir, copydir)
        self.assert_(self.fs.isdir('a'))
+       copydir_overwrite()
+       self.assert_(self.fs.isdir('a'))
        # This should run to completion and give a valid state, unless
        # files get locked when written to.
        try:
            self._runThreads(copydir_overwrite, copydir_overwrite)
        except ResourceLockedError:
            pass
        self.assertTrue(self.fs.isdir("copy of a"))
        self.assertTrue(self.fs.isdir("copy of a/b"))
        self.assertEqual(self.fs.getcontents("copy of a/b/parrot.txt", 'rb'), b("pining for the fiords"))
        self.assertEqual(self.fs.getcontents("copy of a/hello.txt", 'rb'), b("hello world"))
        self.assertEqual(self.fs.getcontents("copy of a/guido.txt", 'rb'), b("is a space alien"))
    def test_multiple_overwrite(self):
        contents = [b("contents one"), b("contents the second"), b("number three")]

        def thread1():
            for i in xrange(30):
                for c in contents:
                    self.fs.setcontents("thread1.txt", c)
                    self.assertEquals(self.fs.getsize("thread1.txt"), len(c))
                    self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)

        def thread2():
            for i in xrange(30):
                for c in contents:
                    self.fs.setcontents("thread2.txt", c)
                    self.assertEquals(self.fs.getsize("thread2.txt"), len(c))
                    self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
        self._runThreads(thread1, thread2)


# Uncomment to temporarily disable threading tests
#class ThreadingTestCases(object):
#    _dont_retest = ()
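
The pattern running through all of the test changes above is that byte-oriented calls now say so explicitly: getcontents() takes an explicit 'rb', contents are built with six's b(), and text access goes through the io layer. Below is a minimal sketch of the reworked API from the caller's side; it assumes MemoryFS has picked up the new keyword arguments that the hunks elsewhere in this commit add to the other FS classes, so treat the text-mode defaults as an assumption rather than documented behaviour.

# Illustrative sketch only -- not part of this commit.
# Assumes MemoryFS implements the new open()/setcontents()/getcontents()
# signatures shown in the other hunks of this diff.
from fs.memoryfs import MemoryFS
from six import b

mem = MemoryFS()

# Binary round-trip: bytes in, bytes out.
mem.setcontents("hello.txt", b("hello world"))
assert mem.getcontents("hello.txt", 'rb') == b("hello world")

# Text mode goes through the io machinery and yields unicode.
with mem.open("hello.txt", 'rt', encoding='utf-8') as f:
    text = f.read()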
fs/tests/data/__init__.py  0 → 100644  View file @ 3ea4efe1  (new, empty file)
fs/tests/test_expose.py  View file @ 3ea4efe1
...
@@ -6,7 +6,8 @@
import unittest
import sys
-import os, os.path
+import os
+import os.path
import socket
import threading
import time
...
@@ -32,6 +33,12 @@ try:
except ImportError:
    if not PY3:
        raise
+import logging
+logging.getLogger('paramiko').setLevel(logging.ERROR)
+logging.getLogger('paramiko.transport').setLevel(logging.ERROR)

class TestSFTPFS(TestRPCFS):
    __test__ = not PY3
...
@@ -55,7 +62,7 @@ except ImportError:
    pass
else:
    from fs.osfs import OSFS

    class TestFUSE(unittest.TestCase, FSTestCases, ThreadingTestCases):

        def setUp(self):
            self.temp_fs = TempFS()
...
@@ -64,7 +71,7 @@ else:
            self.mounted_fs = self.temp_fs.opendir("root")
            self.mount_point = self.temp_fs.getsyspath("mount")
            self.fs = OSFS(self.temp_fs.getsyspath("mount"))
            self.mount_proc = fuse.mount(self.mounted_fs, self.mount_point)

        def tearDown(self):
            self.mount_proc.unmount()
...
@@ -76,7 +83,7 @@ else:
                fuse.unmount(self.mount_point)
            self.temp_fs.close()

        def check(self, p):
            return self.mounted_fs.exists(p)
...
fs/tests/test_importhook.py  View file @ 3ea4efe1
...
@@ -12,6 +12,7 @@ from fs.zipfs import ZipFS
from six import b


class TestFSImportHook(unittest.TestCase):

    def setUp(self):
...
@@ -140,4 +141,3 @@ class TestFSImportHook(unittest.TestCase):
        sys.path_hooks.remove(FSImportHook)
        sys.path.pop()
        t.close()
fs/tests/test_iotools.py  0 → 100644  View file @ 3ea4efe1
from __future__ import unicode_literals

from fs import iotools

import io
import unittest
from os.path import dirname, join, abspath

try:
    unicode
except NameError:
    unicode = str


class OpenFilelike(object):

    def __init__(self, make_f):
        self.make_f = make_f

    @iotools.filelike_to_stream
    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None,
             newline=None, line_buffering=False, **kwargs):
        return self.make_f()

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self.f.close()


class TestIOTools(unittest.TestCase):

    def get_bin_file(self):
        path = join(dirname(abspath(__file__)), 'data/UTF-8-demo.txt')
        return io.open(path, 'rb')

    def test_make_stream(self):
        """Test make_stream"""
        with self.get_bin_file() as f:
            text = f.read()
            self.assert_(isinstance(text, bytes))

        with self.get_bin_file() as f:
            with iotools.make_stream("data/UTF-8-demo.txt", f, 'rt') as f2:
                text = f2.read()
                self.assert_(isinstance(text, unicode))

    def test_decorator(self):
        """Test filelike_to_stream decorator"""
        o = OpenFilelike(self.get_bin_file)
        with o.open('file', 'rb') as f:
            text = f.read()
            self.assert_(isinstance(text, bytes))

        with o.open('file', 'rt') as f:
            text = f.read()
            self.assert_(isinstance(text, unicode))
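
fs/tests/test_iotools.py checks two things: iotools.make_stream turns a raw binary file-like object into a text stream when opened with 'rt', and the filelike_to_stream decorator gives an FS-style open() the full io signature. Conceptually this is the same job the standard io module does; the stdlib-only sketch below mirrors the behaviour the test asserts, without claiming to be the fs.iotools implementation.

# Stdlib-only illustration of the behaviour tested above: wrapping a
# binary stream so that text-mode reads return unicode. This mirrors
# what make_stream is asserted to do; it is not the fs.iotools code.
from __future__ import unicode_literals
import io

data = "caf\xe9 au lait".encode('utf-8')         # bytes on Py2 and Py3
text_stream = io.TextIOWrapper(io.BytesIO(data), encoding='utf-8')
assert text_stream.read() == "caf\xe9 au lait"    # unicode text
text_stream.close()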
fs/tests/test_mountfs.py  View file @ 3ea4efe1
...
@@ -2,10 +2,11 @@ from fs.mountfs import MountFS
from fs.memoryfs import MemoryFS
import unittest

-class TestMultiFS(unittest.TestCase):
+class TestMountFS(unittest.TestCase):

    def test_auto_close(self):
-        """Test MultiFS auto close is working"""
+        """Test MountFS auto close is working"""
        multi_fs = MountFS()
        m1 = MemoryFS()
        m2 = MemoryFS()
...
@@ -18,7 +19,7 @@ class TestMultiFS(unittest.TestCase):
        self.assert_(m2.closed)

    def test_no_auto_close(self):
-        """Test MultiFS auto close can be disabled"""
+        """Test MountFS auto close can be disabled"""
        multi_fs = MountFS(auto_close=False)
        m1 = MemoryFS()
        m2 = MemoryFS()
...
@@ -32,7 +33,7 @@ class TestMultiFS(unittest.TestCase):
    def test_mountfile(self):
        """Test mounting a file"""
-        quote = """If you wish to make an apple pie from scratch, you must first invent the universe."""
+        quote = b"""If you wish to make an apple pie from scratch, you must first invent the universe."""
        mem_fs = MemoryFS()
        mem_fs.makedir('foo')
        mem_fs.setcontents('foo/bar.txt', quote)
...
@@ -58,11 +59,11 @@ class TestMultiFS(unittest.TestCase):
        # Check changes are written back
        mem_fs.setcontents('foo/bar.txt', 'baz')
-        self.assertEqual(mount_fs.getcontents('bar.txt'), 'baz')
+        self.assertEqual(mount_fs.getcontents('bar.txt'), b'baz')
        self.assertEqual(mount_fs.getsize('bar.txt'), len('baz'))
        # Check changes are written to the original fs
-        self.assertEqual(mem_fs.getcontents('foo/bar.txt'), 'baz')
+        self.assertEqual(mem_fs.getcontents('foo/bar.txt'), b'baz')
        self.assertEqual(mem_fs.getsize('foo/bar.txt'), len('baz'))
        # Check unmount
...
fs/tests/test_remote.py  View file @ 3ea4efe1
...
@@ -24,23 +24,27 @@ from fs.local_functools import wraps
from six import PY3, b


class RemoteTempFS(TempFS):
    """
    Simple filesystem implementing setfilecontents
    for RemoteFileBuffer tests
    """
-    def open(self, path, mode='rb', write_on_flush=True):
+    def open(self, path, mode='rb', write_on_flush=True, **kwargs):
        if 'a' in mode or 'r' in mode or '+' in mode:
-            f = super(RemoteTempFS, self).open(path, 'rb')
+            f = super(RemoteTempFS, self).open(path, mode='rb', **kwargs)
            f = TellAfterCloseFile(f)
        else:
            f = None
        return RemoteFileBuffer(self, path, mode, f,
                                write_on_flush=write_on_flush)

-    def setcontents(self, path, data, chunk_size=64*1024):
-        f = super(RemoteTempFS, self).open(path, 'wb')
+    def setcontents(self, path, data, encoding=None, errors=None, chunk_size=64*1024):
+        f = super(RemoteTempFS, self).open(path, 'wb', encoding=encoding, errors=errors, chunk_size=chunk_size)
        if getattr(data, 'read', False):
            f.write(data.read())
        else:
...
@@ -51,7 +55,7 @@ class RemoteTempFS(TempFS):
class TellAfterCloseFile(object):
    """File-like object that allows calling tell() after it's been closed."""

    def __init__(self, file):
        self._finalpos = None
        self.file = file
...
@@ -65,8 +69,8 @@ class TellAfterCloseFile(object):
            return self._finalpos
        return self.file.tell()

    def __getattr__(self, attr):
        return getattr(self.file, attr)


class TestRemoteFileBuffer(unittest.TestCase, FSTestCases, ThreadingTestCases):
...
@@ -315,8 +319,8 @@ class DisconnectingFS(WrapFS):
            time.sleep(random.random() * 0.1)
            self._connected = not self._connected

-    def setcontents(self, path, contents=b(''), chunk_size=64*1024):
-        return self.wrapped_fs.setcontents(path, contents)
+    def setcontents(self, path, data=b(''), encoding=None, errors=None, chunk_size=64*1024):
+        return self.wrapped_fs.setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)

    def close(self):
        if not self.closed:
...
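
The DisconnectingFS change above is the template for every setcontents() override touched by this commit: accept the new encoding/errors keywords and pass them straight through to the wrapped filesystem. A stripped-down version follows, using a hypothetical LoggingFS that is not part of pyfs.

# Hypothetical wrapper, for illustration only; the signature mirrors the
# setcontents() overrides in this diff.
from fs.wrapfs import WrapFS


class LoggingFS(WrapFS):

    def setcontents(self, path, data=b'', encoding=None, errors=None,
                    chunk_size=64 * 1024):
        print("writing %s" % path)
        # Forward the io-related keywords so that text-vs-bytes handling
        # is decided by the wrapped filesystem, not by the wrapper.
        return self.wrapped_fs.setcontents(path, data,
                                           encoding=encoding,
                                           errors=errors,
                                           chunk_size=chunk_size)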
fs/tests/test_watch.py  View file @ 3ea4efe1
...
@@ -29,6 +29,10 @@ if sys.platform == "win32":
else:
    watch_win32 = None

+import logging
+logging.getLogger('pyinotify').setLevel(logging.ERROR)

import six
from six import PY3, b
...
@@ -53,7 +57,7 @@ class WatcherTestCases:
            self.watchfs._poll_cond.wait()
            self.watchfs._poll_cond.release()
        else:
-            time.sleep(2)#0.5)
+            time.sleep(2)

    def assertEventOccurred(self, cls, path=None, event_list=None, **attrs):
        if not self.checkEventOccurred(cls, path, event_list, **attrs):
...
@@ -222,4 +226,3 @@ class TestWatchers_MemoryFS_polling(TestWatchers_MemoryFS):
    def setUp(self):
        self.fs = memoryfs.MemoryFS()
        self.watchfs = ensure_watchable(self.fs, poll_interval=0.1)
fs/tests/test_zipfs.py  View file @ 3ea4efe1
...
@@ -17,6 +17,7 @@ from fs import zipfs
from six import PY3, b


class TestReadZipFS(unittest.TestCase):

    def setUp(self):
...
@@ -46,20 +47,22 @@ class TestReadZipFS(unittest.TestCase):
    def test_reads(self):
        def read_contents(path):
-            f = self.fs.open(path)
+            f = self.fs.open(path, 'rb')
            contents = f.read()
            return contents

        def check_contents(path, expected):
            self.assert_(read_contents(path) == expected)
        check_contents("a.txt", b("Hello, World!"))
        check_contents("1.txt", b("1"))
        check_contents("foo/bar/baz.txt", b("baz"))

    def test_getcontents(self):
        def read_contents(path):
-            return self.fs.getcontents(path)
+            return self.fs.getcontents(path, 'rb')

        def check_contents(path, expected):
            self.assert_(read_contents(path) == expected)
        check_contents("a.txt", b("Hello, World!"))
        check_contents("1.txt", b("1"))
        check_contents("foo/bar/baz.txt", b("baz"))
...
@@ -82,7 +85,7 @@ class TestReadZipFS(unittest.TestCase):
            dir_list = self.fs.listdir(path)
            self.assert_(sorted(dir_list) == sorted(expected))
            for item in dir_list:
                self.assert_(isinstance(item, unicode))
        check_listing('/', ['a.txt', '1.txt', 'foo', 'b.txt'])
        check_listing('foo', ['second.txt', 'bar'])
        check_listing('foo/bar', ['baz.txt'])
...
fs/utils.py  View file @ 3ea4efe1
...
@@ -72,6 +72,7 @@ def copyfile(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1
    if src_lock is not None:
        src_lock.release()


def copyfile_non_atomic(src_fs, src_path, dst_fs, dst_path, overwrite=True, chunk_size=64*1024):
    """A non atomic version of copyfile (will not block other threads using src_fs or dst_fst)
...
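
The copyfile() signature visible in the hunk header above is not changed by this commit; for context, a minimal usage sketch (the file name and the two MemoryFS instances are placeholders):

# Placeholder example of fs.utils.copyfile between two in-memory filesystems.
from fs.memoryfs import MemoryFS
from fs.utils import copyfile
from six import b

src_fs, dst_fs = MemoryFS(), MemoryFS()
src_fs.setcontents("hello.txt", b("hello world"))
copyfile(src_fs, "hello.txt", dst_fs, "hello.txt")
assert dst_fs.getcontents("hello.txt", 'rb') == b("hello world")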
fs/watch.py  View file @ 3ea4efe1
...
@@ -291,29 +291,36 @@ class WatchableFS(WatchableFSMixin,WrapFS):
    that might be made through other interfaces to the same filesystem.
    """

    def __init__(self, *args, **kwds):
        super(WatchableFS, self).__init__(*args, **kwds)

    def close(self):
        super(WatchableFS, self).close()
        self.notify_watchers(CLOSED)

-    def open(self, path, mode="r", **kwargs):
+    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        existed = self.wrapped_fs.isfile(path)
-        f = super(WatchableFS, self).open(path, mode, **kwargs)
+        f = super(WatchableFS, self).open(path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
        if not existed:
            self.notify_watchers(CREATED, path)
        self.notify_watchers(ACCESSED, path)
        return WatchedFile(f, self, path, mode)

-    def setcontents(self, path, data=b(''), chunk_size=64*1024):
+    def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
        existed = self.wrapped_fs.isfile(path)
        ret = super(WatchableFS, self).setcontents(path, data, chunk_size=chunk_size)
        if not existed:
            self.notify_watchers(CREATED, path)
        self.notify_watchers(ACCESSED, path)
        if data:
            self.notify_watchers(MODIFIED, path, True)
        return ret

    def createfile(self, path):
...
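
WatchableFS.open() above spells out the full io-style signature that FS.open() now takes (mode, buffering, encoding, errors, newline, line_buffering) and hands every keyword on to the wrapped filesystem. The same forwarding pattern appears again in LimitSizeFS and ReadOnlyFS below; a bare-bones version for a hypothetical wrapper looks like this:

# Hypothetical pass-through wrapper, shown only to isolate the forwarding
# pattern used by WatchableFS/LimitSizeFS/ReadOnlyFS in this diff.
from fs.wrapfs import WrapFS


class PassThroughFS(WrapFS):

    def open(self, path, mode='r', buffering=-1, encoding=None,
             errors=None, newline=None, line_buffering=False, **kwargs):
        # Hand every io-style keyword on to the wrapped FS so that text
        # and binary behaviour is identical with or without the wrapper.
        return super(PassThroughFS, self).open(path,
                                               mode=mode,
                                               buffering=buffering,
                                               encoding=encoding,
                                               errors=errors,
                                               newline=newline,
                                               line_buffering=line_buffering,
                                               **kwargs)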
fs/wrapfs/__init__.py  View file @ 3ea4efe1
...
@@ -150,21 +150,21 @@ class WrapFS(FS):
        return self.wrapped_fs.hassyspath(self._encode(path))

    @rewrite_errors
-    def open(self, path, mode="r", **kwargs):
+    def open(self, path, mode='r', **kwargs):
        (mode, wmode) = self._adjust_mode(mode)
        f = self.wrapped_fs.open(self._encode(path), wmode, **kwargs)
        return self._file_wrap(f, mode)

    @rewrite_errors
-    def setcontents(self, path, data, chunk_size=64*1024):
+    def setcontents(self, path, data, encoding=None, errors=None, chunk_size=64*1024):
        # We can't pass setcontents() through to the wrapped FS if the
        # wrapper has defined a _file_wrap method, as it would bypass
        # the file contents wrapping.
        #if self._file_wrap.im_func is WrapFS._file_wrap.im_func:
        if getattr(self.__class__, '_file_wrap', None) is getattr(WrapFS, '_file_wrap', None):
-            return self.wrapped_fs.setcontents(self._encode(path), data, chunk_size=chunk_size)
+            return self.wrapped_fs.setcontents(self._encode(path), data, encoding=encoding, errors=errors, chunk_size=chunk_size)
        else:
-            return super(WrapFS, self).setcontents(path, data, chunk_size=chunk_size)
+            return super(WrapFS, self).setcontents(path, data, encoding=encoding, errors=errors, chunk_size=chunk_size)

    @rewrite_errors
    def createfile(self, path):
...
fs/wrapfs/limitsizefs.py  View file @ 3ea4efe1
...
@@ -58,14 +58,20 @@ class LimitSizeFS(WrapFS):
            raise NoSysPathError(path)
        return None

-    def open(self, path, mode="r"):
+    def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
        path = relpath(normpath(path))
        with self._size_lock:
            try:
                size = self.getsize(path)
            except ResourceNotFoundError:
                size = 0
-            f = super(LimitSizeFS, self).open(path, mode)
+            f = super(LimitSizeFS, self).open(path, mode=mode, buffering=buffering, errors=errors, newline=newline, line_buffering=line_buffering, **kwargs)
            if "w" not in mode:
                self._set_file_size(path, None, 1)
            else:
...
View file @
3ea4efe1
...
@@ -10,6 +10,7 @@ from fs.base import NoDefaultMeta
...
@@ -10,6 +10,7 @@ from fs.base import NoDefaultMeta
from
fs.wrapfs
import
WrapFS
from
fs.wrapfs
import
WrapFS
from
fs.errors
import
UnsupportedError
,
NoSysPathError
from
fs.errors
import
UnsupportedError
,
NoSysPathError
class
ReadOnlyFS
(
WrapFS
):
class
ReadOnlyFS
(
WrapFS
):
""" Makes a FS object read only. Any operation that could potentially modify
""" Makes a FS object read only. Any operation that could potentially modify
the underlying file system will throw an UnsupportedError
the underlying file system will throw an UnsupportedError
...
@@ -38,11 +39,18 @@ class ReadOnlyFS(WrapFS):
...
@@ -38,11 +39,18 @@ class ReadOnlyFS(WrapFS):
return
None
return
None
raise
NoSysPathError
(
path
)
raise
NoSysPathError
(
path
)
def
open
(
self
,
path
,
mode
=
'r'
,
**
kwargs
):
def
open
(
self
,
path
,
mode
=
'r'
,
buffering
=-
1
,
encoding
=
None
,
errors
=
None
,
newline
=
None
,
line_buffering
=
False
,
**
kwargs
):
""" Only permit read access """
""" Only permit read access """
if
'w'
in
mode
or
'a'
in
mode
or
'+'
in
mode
:
if
'w'
in
mode
or
'a'
in
mode
or
'+'
in
mode
:
raise
UnsupportedError
(
'write'
)
raise
UnsupportedError
(
'write'
)
return
super
(
ReadOnlyFS
,
self
)
.
open
(
path
,
mode
,
**
kwargs
)
return
super
(
ReadOnlyFS
,
self
)
.
open
(
path
,
mode
=
mode
,
buffering
=
buffering
,
encoding
=
encoding
,
errors
=
errors
,
newline
=
newline
,
line_buffering
=
line_buffering
,
**
kwargs
)
def
_no_can_do
(
self
,
*
args
,
**
kwargs
):
def
_no_can_do
(
self
,
*
args
,
**
kwargs
):
""" Replacement method for methods that can modify the file system """
""" Replacement method for methods that can modify the file system """
...
...
fs/wrapfs/subfs.py
View file @
3ea4efe1
...
@@ -21,7 +21,7 @@ class SubFS(WrapFS):
...
@@ -21,7 +21,7 @@ class SubFS(WrapFS):
def
__init__
(
self
,
wrapped_fs
,
sub_dir
):
def
__init__
(
self
,
wrapped_fs
,
sub_dir
):
self
.
sub_dir
=
abspath
(
normpath
(
sub_dir
))
self
.
sub_dir
=
abspath
(
normpath
(
sub_dir
))
super
(
SubFS
,
self
)
.
__init__
(
wrapped_fs
)
super
(
SubFS
,
self
)
.
__init__
(
wrapped_fs
)
def
_encode
(
self
,
path
):
def
_encode
(
self
,
path
):
return
pathjoin
(
self
.
sub_dir
,
relpath
(
normpath
(
path
)))
return
pathjoin
(
self
.
sub_dir
,
relpath
(
normpath
(
path
)))
...
@@ -44,7 +44,7 @@ class SubFS(WrapFS):
...
@@ -44,7 +44,7 @@ class SubFS(WrapFS):
return
self
.
wrapped_fs
.
desc
(
self
.
sub_dir
)
return
self
.
wrapped_fs
.
desc
(
self
.
sub_dir
)
return
'
%
s!
%
s'
%
(
self
.
wrapped_fs
.
desc
(
self
.
sub_dir
),
path
)
return
'
%
s!
%
s'
%
(
self
.
wrapped_fs
.
desc
(
self
.
sub_dir
),
path
)
def
setcontents
(
self
,
path
,
data
,
chunk_size
=
64
*
1024
):
def
setcontents
(
self
,
path
,
data
,
encoding
=
None
,
errors
=
None
,
chunk_size
=
64
*
1024
):
path
=
self
.
_encode
(
path
)
path
=
self
.
_encode
(
path
)
return
self
.
wrapped_fs
.
setcontents
(
path
,
data
,
chunk_size
=
chunk_size
)
return
self
.
wrapped_fs
.
setcontents
(
path
,
data
,
chunk_size
=
chunk_size
)
...
@@ -62,11 +62,11 @@ class SubFS(WrapFS):
...
@@ -62,11 +62,11 @@ class SubFS(WrapFS):
path
=
normpath
(
path
)
path
=
normpath
(
path
)
if
path
in
(
''
,
'/'
):
if
path
in
(
''
,
'/'
):
raise
RemoveRootError
(
path
)
raise
RemoveRootError
(
path
)
super
(
SubFS
,
self
)
.
removedir
(
path
,
force
=
force
)
super
(
SubFS
,
self
)
.
removedir
(
path
,
force
=
force
)
if
recursive
:
if
recursive
:
try
:
try
:
if
dirname
(
path
)
not
in
(
''
,
'/'
):
if
dirname
(
path
)
not
in
(
''
,
'/'
):
self
.
removedir
(
dirname
(
path
),
recursive
=
True
)
self
.
removedir
(
dirname
(
path
),
recursive
=
True
)
except
DirectoryNotEmptyError
:
except
DirectoryNotEmptyError
:
pass
pass
...
...
fs/zipfs.py
View file @
3ea4efe1
...
@@ -13,6 +13,7 @@ from fs.base import *
...
@@ -13,6 +13,7 @@ from fs.base import *
from
fs.path
import
*
from
fs.path
import
*
from
fs.errors
import
*
from
fs.errors
import
*
from
fs.filelike
import
StringIO
from
fs.filelike
import
StringIO
from
fs
import
iotools
from
zipfile
import
ZipFile
,
ZIP_DEFLATED
,
ZIP_STORED
,
BadZipfile
,
LargeZipFile
from
zipfile
import
ZipFile
,
ZIP_DEFLATED
,
ZIP_STORED
,
BadZipfile
,
LargeZipFile
from
memoryfs
import
MemoryFS
from
memoryfs
import
MemoryFS
...
@@ -21,6 +22,7 @@ import tempfs
...
@@ -21,6 +22,7 @@ import tempfs
from
six
import
PY3
from
six
import
PY3
class
ZipOpenError
(
CreateFailedError
):
class
ZipOpenError
(
CreateFailedError
):
"""Thrown when the zip file could not be opened"""
"""Thrown when the zip file could not be opened"""
pass
pass
...
@@ -76,13 +78,13 @@ class _ExceptionProxy(object):
...
@@ -76,13 +78,13 @@ class _ExceptionProxy(object):
class
ZipFS
(
FS
):
class
ZipFS
(
FS
):
"""A FileSystem that represents a zip file."""
"""A FileSystem that represents a zip file."""
_meta
=
{
'thread_safe'
:
True
,
_meta
=
{
'thread_safe'
:
True
,
'virtual'
:
False
,
'virtual'
:
False
,
'read_only'
:
False
,
'read_only'
:
False
,
'unicode_paths'
:
True
,
'unicode_paths'
:
True
,
'case_insensitive_paths'
:
False
,
'case_insensitive_paths'
:
False
,
'network'
:
False
,
'network'
:
False
,
'atomic.setcontents'
:
False
'atomic.setcontents'
:
False
}
}
def
__init__
(
self
,
zip_file
,
mode
=
"r"
,
compression
=
"deflated"
,
allow_zip_64
=
False
,
encoding
=
"CP437"
,
thread_synchronize
=
True
):
def
__init__
(
self
,
zip_file
,
mode
=
"r"
,
compression
=
"deflated"
,
allow_zip_64
=
False
,
encoding
=
"CP437"
,
thread_synchronize
=
True
):
...
@@ -189,7 +191,8 @@ class ZipFS(FS):
...
@@ -189,7 +191,8 @@ class ZipFS(FS):
self
.
zf
=
_ExceptionProxy
()
self
.
zf
=
_ExceptionProxy
()
@synchronize
@synchronize
def
open
(
self
,
path
,
mode
=
"r"
,
**
kwargs
):
@iotools.filelike_to_stream
def
open
(
self
,
path
,
mode
=
'r'
,
buffering
=-
1
,
encoding
=
None
,
errors
=
None
,
newline
=
None
,
line_buffering
=
False
,
**
kwargs
):
path
=
normpath
(
relpath
(
path
))
path
=
normpath
(
relpath
(
path
))
if
'r'
in
mode
:
if
'r'
in
mode
:
...
@@ -222,7 +225,7 @@ class ZipFS(FS):
...
@@ -222,7 +225,7 @@ class ZipFS(FS):
raise
ValueError
(
"Mode must contain be 'r' or 'w'"
)
raise
ValueError
(
"Mode must contain be 'r' or 'w'"
)
@synchronize
@synchronize
def
getcontents
(
self
,
path
,
mode
=
"rb"
):
def
getcontents
(
self
,
path
,
mode
=
"rb"
,
encoding
=
None
,
errors
=
None
,
newline
=
None
):
if
not
self
.
exists
(
path
):
if
not
self
.
exists
(
path
):
raise
ResourceNotFoundError
(
path
)
raise
ResourceNotFoundError
(
path
)
path
=
normpath
(
relpath
(
path
))
path
=
normpath
(
relpath
(
path
))
...
@@ -232,7 +235,9 @@ class ZipFS(FS):
...
@@ -232,7 +235,9 @@ class ZipFS(FS):
raise
ResourceNotFoundError
(
path
)
raise
ResourceNotFoundError
(
path
)
except
RuntimeError
:
except
RuntimeError
:
raise
OperationFailedError
(
"read file"
,
path
=
path
,
msg
=
"3 Zip file must be opened with 'r' or 'a' to read"
)
raise
OperationFailedError
(
"read file"
,
path
=
path
,
msg
=
"3 Zip file must be opened with 'r' or 'a' to read"
)
if
'b'
in
mode
:
return
contents
return
contents
return
iotools
.
decode_binary
(
contents
,
encoding
=
encoding
,
errors
=
errors
,
newline
=
newline
)
@synchronize
@synchronize
def
_on_write_close
(
self
,
filename
):
def
_on_write_close
(
self
,
filename
):
...
...
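
With filelike_to_stream on ZipFS.open() and the new mode/encoding parameters on ZipFS.getcontents(), a zip entry can now be read either as bytes or as decoded text. A short sketch; the archive and entry names are placeholders, and it relies only on the ZipFS signatures shown above.

# Placeholder names; illustrative only.
from fs.zipfs import ZipFS

zfs = ZipFS('archive.zip', mode='r')

raw = zfs.getcontents('docs/readme.txt', 'rb')                     # bytes
text = zfs.getcontents('docs/readme.txt', 'r', encoding='utf-8')   # unicode

with zfs.open('docs/readme.txt', 'rt', encoding='utf-8') as f:
    first_line = f.readline()

zfs.close()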
setup.py  View file @ 3ea4efe1
...
@@ -28,7 +28,6 @@ classifiers = [
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
-    'Programming Language :: Python :: 2.5',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
...
@@ -49,12 +48,12 @@ setup(install_requires=['distribute', 'six'],
      version=VERSION,
      description="Filesystem abstraction",
      long_description=long_desc,
      license="BSD",
      author="Will McGugan",
      author_email="will@willmcgugan.com",
      url="http://code.google.com/p/pyfilesystem/",
      download_url="http://code.google.com/p/pyfilesystem/downloads/list",
      platforms=['any'],
      packages=['fs',
                'fs.expose',
                'fs.expose.dokan',
...
@@ -68,8 +67,8 @@ setup(install_requires=['distribute', 'six'],
                'fs.contrib.davfs',
                'fs.contrib.tahoelafs',
                'fs.commands'],
+      package_data={'fs': ['tests/data/*.txt']},
      scripts=['fs/commands/%s' % command for command in COMMANDS],
      classifiers=classifiers,
      **extra
      )
tox.ini  View file @ 3ea4efe1
[tox]
-envlist = py25,py26,py27,py31,py32,pypy
+envlist = py26,py27,py31,py32,pypy
sitepackages = False

[testenv]
...
@@ -10,24 +10,11 @@ deps = distribute
    boto
    nose
    mako
-    python-libarchive
    pyftpdlib
changedir = .tox
commands = nosetests fs.tests -v \
    []

-[testenv:py25]
-deps = distribute
-    six
-    dexml
-    paramiko
-    boto
-    nose
-    mako
-    python-libarchive
-    pyftpdlib
-    simplejson

[testenv:py32]
commands = nosetests fs.tests -v \
    []
...