Skip to content

Commit

Permalink
Merge pull request #6 from amir-zeldes/dev
Browse files Browse the repository at this point in the history
Fix relative imports
  • Loading branch information
amir-zeldes authored May 26, 2020
2 parents 8a26fc8 + 8d35903 commit 89c8e4b
Show file tree
Hide file tree
Showing 7 changed files with 32 additions and 11 deletions.
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ Either install from PyPI using pip:

`pip install hebpipe`

And run as a module:

`python -m hebpipe example_in.txt`

Or install manually:

* Clone this repository into the directory that the script should run in (git clone https://github.com/amir-zeldes/HebPipe)
Expand Down
2 changes: 2 additions & 0 deletions hebpipe/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Package initializer: expose the pipeline's CLI entry point at package level.
from .heb_pipe import run_hebpipe
# NOTE(review): invoking run_hebpipe() here means a plain `import hebpipe`
# executes the entire CLI pipeline (argument parsing, file processing) as an
# import-time side effect. Presumably this exists to make
# `python -m hebpipe ...` work, but a dedicated __main__.py with an
# `if __name__ == "__main__":` guard would achieve that without the side
# effect — confirm before relying on importing this package as a library.
run_hebpipe()
File renamed without changes.
31 changes: 23 additions & 8 deletions hebpipe/heb_pipe.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,21 @@
from glob import glob

from rftokenizer import RFTokenizer
from lib.xrenner import Xrenner
from lib._version import __version__
from lib.tt2conll import conllize
from lib.append_column import inject_col
from lib.sent_split import toks_to_sents
from lib.whitespace_tokenize import tokenize as whitespace_tokenize
try: # Module usage
from .lib.xrenner import Xrenner
from .lib._version import __version__
from .lib.tt2conll import conllize
from .lib.append_column import inject_col
from .lib.sent_split import toks_to_sents
from .lib.whitespace_tokenize import tokenize as whitespace_tokenize
except ImportError: # direct script usage
from lib.xrenner import Xrenner
from lib._version import __version__
from lib.tt2conll import conllize
from lib.append_column import inject_col
from lib.sent_split import toks_to_sents
from lib.whitespace_tokenize import tokenize as whitespace_tokenize


PY3 = sys.version_info[0] > 2

Expand Down Expand Up @@ -553,7 +562,7 @@ def nlp(input_data, do_whitespace=True, do_tok=True, do_tag=True, do_lemma=True,
return tagged


if __name__ == "__main__":
def run_hebpipe():


if sys.version_info[0] == 2 and sys.version_info[1] < 7:
Expand Down Expand Up @@ -617,7 +626,10 @@ def nlp(input_data, do_whitespace=True, do_tok=True, do_tag=True, do_lemma=True,
dotok = opts.tokenize

if not opts.quiet:
from lib import timing
try:
from .lib import timing
except ImportError: # direct script usage
from lib import timing

files = glob(opts.files)

Expand Down Expand Up @@ -683,3 +695,6 @@ def nlp(input_data, do_whitespace=True, do_tok=True, do_tag=True, do_lemma=True,

fileword = " files\n\n" if len(files) > 1 else " file\n\n"
sys.stderr.write("\nFinished processing " + str(len(files)) + fileword)

if __name__ == "__main__":
run_hebpipe()
2 changes: 1 addition & 1 deletion hebpipe/lib/_version.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

__version__ = "1.0.0.0"
__version__ = "1.0.0.2"
__author__ = "Amir Zeldes"
__copyright__ = "Copyright 2018-2020, Amir Zeldes"
__license__ = "Apache 2.0 License"
File renamed without changes.
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@
setup(
name = 'hebpipe',
packages = find_packages(),
version = '1.0.0.0',
version = '1.0.0.2',
description = 'A pipeline for Hebrew NLP',
author = 'Amir Zeldes',
author_email = '[email protected]',
package_data = {'':['README.md','LICENSE.md','requirements.txt'],'hebpipe':['lib/*','data/*','bin/*','models/*']},
install_requires=['numpy','pandas','scipy','joblib','xgboost==0.81','rftokenizer','depedit','xmltodict'],
url = 'https://github.com/amir-zeldes/HebPipe',
license='Apache License, Version 2.0',
download_url = 'https://github.com/amir-zeldes/HebPipe/releases/tag/v1.0.0.0',
download_url = 'https://github.com/amir-zeldes/HebPipe/releases/tag/v1.0.0.2',
keywords = ['NLP', 'Hebrew', 'segmentation', 'tokenization', 'tagging', 'parsing','morphology','POS'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 2',
Expand Down

0 comments on commit 89c8e4b

Please sign in to comment.