# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree, and in the LICENSE file in the
# top-level folder for each specific model found within the models/ directory.
from setuptools import setup
def read_requirements():
    """Parse requirements.txt into a list of requirement specifier strings.

    Blank lines and comment lines (including indented comments) are skipped;
    each remaining line is returned stripped of surrounding whitespace.

    Returns:
        list[str]: non-empty, non-comment lines from requirements.txt.
    """
    with open("requirements.txt", encoding="utf-8") as fp:
        raw_lines = fp.readlines()
    requirements = []
    for raw in raw_lines:
        # Strip first so that blank lines become "" (and are dropped) and
        # indented comments like "  # note" are recognized as comments.
        line = raw.strip()
        if line and not line.startswith("#"):
            requirements.append(line)
    return requirements
# Read the long description eagerly under a context manager so the file
# handle is closed deterministically (the original inline open() relied on
# the GC and emitted a ResourceWarning), with an explicit encoding.
with open("README.md", encoding="utf-8") as _readme:
    _long_description = _readme.read()

setup(
    name="llama_models",
    version="0.0.55",
    author="Meta Llama",
    author_email="[email protected]",
    description="Llama models",
    # Console entry points for the example/demo scripts shipped in
    # llama_models.scripts; each maps a CLI command to that module's main().
    entry_points={
        "console_scripts": [
            "multimodal_example_chat_completion = llama_models.scripts.multimodal_example_chat_completion:main",
            "multimodal_example_text_completion = llama_models.scripts.multimodal_example_text_completion:main",
            "example_chat_completion = llama_models.scripts.example_chat_completion:main",
            "example_text_completion = llama_models.scripts.example_text_completion:main",
        ]
    },
    long_description=_long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/meta-llama/llama-models",
    package_dir={"llama_models": "llama_models"},
    classifiers=[],
    python_requires=">=3.10",
    # Runtime dependencies are kept in requirements.txt (single source of truth).
    install_requires=read_requirements(),
    include_package_data=True,
)