Commit cc1a3c79 by Steven Bird

Support the new Distribute setup; remove the old setup files.

Copy the project metadata from nltk/__init__.py instead of importing nltk
into setup.py (which would require yaml to be installed already).

Cleaned up some yaml imports.
parent f943efed
#!/usr/bin/env python
#
# Distutils setup script for the Natural Language Toolkit
#
# Copyright (C) 2001-2011 NLTK Project
# Author: Steven Bird <sb@csse.unimelb.edu.au>
# Edward Loper <edloper@gradient.cis.upenn.edu>
# Ewan Klein <ewan@inf.ed.ac.uk>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from distutils.core import setup
import nltk
setup(
    #############################################
    ## Distribution Metadata
    # All project metadata is pulled from the nltk package itself so it is
    # defined in exactly one place (nltk/__init__.py).
    name = "nltk",
    description = "Natural Language Toolkit",
    version = nltk.__version__,
    url = nltk.__url__,
    long_description = nltk.__longdescr__,
    license = nltk.__license__,
    keywords = nltk.__keywords__,
    maintainer = nltk.__maintainer__,
    maintainer_email = nltk.__maintainer_email__,
    author = nltk.__author__,
    # BUG FIX: the original passed nltk.__author__ (a display name, not an
    # address) as author_email. Use the maintainer's address instead, which
    # the module is known to provide (see maintainer_email above).
    author_email = nltk.__maintainer_email__,
    classifiers = nltk.__classifiers__,
    # platforms = <platforms>,
    #############################################
    ## Package Data
    # Non-Python files shipped inside the nltk package directory.
    package_data = {'nltk': ['nltk.jar', 'test/*.doctest']},
    #############################################
    ## Package List
    packages = ['nltk',
                'nltk.app',
                'nltk.chat',
                'nltk.chunk',
                'nltk.ccg',
                'nltk.classify',
                'nltk.corpus',
                'nltk.corpus.reader',
                'nltk.cluster',
                'nltk.draw',
                'nltk.examples',
                'nltk.inference',
                'nltk.metrics',
                'nltk.misc',
                'nltk.model',
                'nltk.parse',
                'nltk.sem',
                'nltk.stem',
                'nltk.tag',
                # FIX: nltk.test was missing, so the test modules referenced
                # by package_data ('test/*.doctest') were never installed.
                'nltk.test',
                'nltk.tokenize',
                'nltk.toolbox',
                'nltk.etree',
                ],
)
[Reconstructed side-by-side diff of setup.py — the two columns were merged per
line during extraction. Left column = old file, right column = new file.]

Old setup.py (before this commit):

    #!/usr/bin/env python
    #
    # Distutils setup script for the Natural Language Toolkit
    #
    # Copyright (C) 2001-2011 NLTK Project
    # Author: Steven Bird <sb@csse.unimelb.edu.au>
    ...@@ -9,7 +9,10 @@
    # URL: <http://nltk.org/>
    # For license information, see LICENSE.TXT
    from setuptools import setup
    import nltk
    #
    ...@@ -20,11 +23,8 @@ from setuptools.command import sdist
    del sdist.finders[:]
    setup(
        #############################################
        ## Distribution Metadata
        name = "nltk",
        description = "Natural Language Toolkit",
        version = nltk.__version__,
        url = nltk.__url__,
        long_description = nltk.__longdescr__,
        ...@@ -35,41 +35,9 @@ setup(
        author = nltk.__author__,
        author_email = nltk.__author__,
        classifiers = nltk.__classifiers__,
        # platforms = <platforms>,
        #############################################
        ## Package Data
        package_data = {'nltk': ['nltk.jar', 'test/*.doctest']},
        #############################################
        ## Package List
        packages = ['nltk',
                    'nltk.app',
                    'nltk.chat',
                    'nltk.chunk',
                    'nltk.ccg',
                    'nltk.classify',
                    'nltk.corpus',
                    'nltk.corpus.reader',
                    'nltk.cluster',
                    'nltk.draw',
                    'nltk.examples',
                    'nltk.inference',
                    'nltk.metrics',
                    'nltk.misc',
                    'nltk.model',
                    'nltk.parse',
                    'nltk.sem',
                    'nltk.stem',
                    'nltk.tag',
                    'nltk.test',
                    'nltk.tokenize',
                    'nltk.toolbox',
                    'nltk.etree'
                    ],
        zip_safe=False, # since normal files will be present too?
        install_requires=['setuptools',
                          'PyYAML==3.09',
                          ],
        test_suite = 'nltk.test.simple',
    )

New setup.py (after this commit):

    #!/usr/bin/env python
    #
    # Distribute setup script for the Natural Language Toolkit
    #
    # Copyright (C) 2001-2011 NLTK Project
    # Author: Steven Bird <sb@csse.unimelb.edu.au>
    ...@@ -9,7 +9,10 @@
    # URL: <http://nltk.org/>
    # For license information, see LICENSE.TXT
    import distribute_setup
    distribute_setup.use_setuptools()
    from setuptools import setup, find_packages
    import nltk
    #
    ...@@ -20,11 +23,8 @@ from setuptools.command import sdist
    del sdist.finders[:]
    setup(
        name = "nltk",
        description = "Natural Language Toolkit",
        version = nltk.__version__,
        url = nltk.__url__,
        long_description = nltk.__longdescr__,
        ...@@ -35,41 +35,9 @@ setup(
        author = nltk.__author__,
        author_email = nltk.__author__,
        classifiers = nltk.__classifiers__,
        package_data = {'nltk': ['nltk.jar', 'test/*.doctest']},
        packages = find_packages(),
        zip_safe=False, # since normal files will be present too?
        install_requires=['PyYAML==3.09'],
        test_suite = 'nltk.test.simple',
    )
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment