git: cd0465f5d2c1 - main - misc/py-llama-cpp-python: New port: Python bindings for the llama.cpp library
- Go to: [ bottom of page ] [ top of archives ] [ this month ]
Date: Sun, 28 Jul 2024 23:57:52 UTC
The branch main has been updated by yuri: URL: https://cgit.FreeBSD.org/ports/commit/?id=cd0465f5d2c19aecf6c863c5ac5565890cd7b554 commit cd0465f5d2c19aecf6c863c5ac5565890cd7b554 Author: Yuri Victorovich <yuri@FreeBSD.org> AuthorDate: 2024-07-28 23:47:17 +0000 Commit: Yuri Victorovich <yuri@FreeBSD.org> CommitDate: 2024-07-28 23:57:43 +0000 misc/py-llama-cpp-python: New port: Python bindings for the llama.cpp library --- misc/Makefile | 1 + misc/py-llama-cpp-python/Makefile | 38 ++++++++++++++++++++++ misc/py-llama-cpp-python/distinfo | 5 +++ .../files/patch-llama__cpp_llama__cpp.py | 11 +++++++ misc/py-llama-cpp-python/pkg-descr | 5 +++ 5 files changed, 60 insertions(+) diff --git a/misc/Makefile b/misc/Makefile index e6e9bfdede5b..f99169f04911 100644 --- a/misc/Makefile +++ b/misc/Makefile @@ -445,6 +445,7 @@ SUBDIR += py-lightgbm SUBDIR += py-lightning-utilities SUBDIR += py-litellm + SUBDIR += py-llama-cpp-python SUBDIR += py-llm SUBDIR += py-llm-claude-3 SUBDIR += py-log_symbols diff --git a/misc/py-llama-cpp-python/Makefile b/misc/py-llama-cpp-python/Makefile new file mode 100644 index 000000000000..1a8d2ad7f44b --- /dev/null +++ b/misc/py-llama-cpp-python/Makefile @@ -0,0 +1,38 @@ +PORTNAME= llama-cpp-python +DISTVERSIONPREFIX= v +DISTVERSION= 0.2.84 +CATEGORIES= misc # machine-learning +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} + +MAINTAINER= yuri@FreeBSD.org +COMMENT= Python bindings for the llama.cpp library +WWW= https://llama-cpp-python.readthedocs.io/en/latest/ + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE.md + +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}scikit-build-core>0:devel/py-scikit-build-core@${PY_FLAVOR} \ + cmake:devel/cmake-core +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}diskcache>=5.6.1:devel/py-diskcache@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}Jinja2>=2.11.3:devel/py-Jinja2@${PY_FLAVOR} \ + ${PYNUMPY} \ + ${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.5.0:devel/py-typing-extensions@${PY_FLAVOR} +RUN_DEPENDS+= 
${PYTHON_PKGNAMEPREFIX}fastapi>=0.100.0:www/py-fastapi@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pydantic-settings>=2.0.1:devel/py-pydantic-settings@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}sse-starlette>=1.6.1:www/py-sse-starlette@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}starlette-context>=0.3.6:www/py-starlette-context@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}uvicorn>=0.22.0:www/py-uvicorn@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pyyaml>=5.1:devel/py-pyyaml@${PY_FLAVOR} +TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}httpx>=0.24.1:www/py-httpx@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}scipy>=1.10:science/py-scipy@${PY_FLAVOR} + +USES= python shebangfix +USE_PYTHON= pep517 autoplist pytest + +USE_GITHUB= yes +GH_ACCOUNT= abetlen +GH_TUPLE= ggerganov:llama.cpp:4730fac:cpp/vendor/llama.cpp + +SHEBANG_GLOB= *.py + +.include <bsd.port.mk> diff --git a/misc/py-llama-cpp-python/distinfo b/misc/py-llama-cpp-python/distinfo new file mode 100644 index 000000000000..79307b9535ed --- /dev/null +++ b/misc/py-llama-cpp-python/distinfo @@ -0,0 +1,5 @@ +TIMESTAMP = 1722209930 +SHA256 (abetlen-llama-cpp-python-v0.2.84_GH0.tar.gz) = 5b030c3ee8aeefa2f3b49c0788f34b7da3fef2502dd856c68d9ede6b93dd8e53 +SIZE (abetlen-llama-cpp-python-v0.2.84_GH0.tar.gz) = 274756 +SHA256 (ggerganov-llama.cpp-4730fac_GH0.tar.gz) = 8b54c2f6c0560e48cf2af91840c33d81f688edfecbd11043d4bf7c098a125497 +SIZE (ggerganov-llama.cpp-4730fac_GH0.tar.gz) = 19005559 diff --git a/misc/py-llama-cpp-python/files/patch-llama__cpp_llama__cpp.py b/misc/py-llama-cpp-python/files/patch-llama__cpp_llama__cpp.py new file mode 100644 index 000000000000..64e43300b493 --- /dev/null +++ b/misc/py-llama-cpp-python/files/patch-llama__cpp_llama__cpp.py @@ -0,0 +1,11 @@ +--- llama_cpp/llama_cpp.py.orig 2024-07-28 03:36:26 UTC ++++ llama_cpp/llama_cpp.py +@@ -28,7 +28,7 @@ def _load_shared_library(lib_base_name: str): + # for llamacpp) and "llama" (default name for this repo) + _lib_paths: List[pathlib.Path] = [] + # Determine the file extension based on 
the platform +- if sys.platform.startswith("linux"): ++ if sys.platform.startswith("linux") or sys.platform.startswith("freebsd"): + _lib_paths += [ + _base_path / f"lib{lib_base_name}.so", + ] diff --git a/misc/py-llama-cpp-python/pkg-descr b/misc/py-llama-cpp-python/pkg-descr new file mode 100644 index 000000000000..6c5bf2aa148e --- /dev/null +++ b/misc/py-llama-cpp-python/pkg-descr @@ -0,0 +1,5 @@ +llama-cpp-python provides Python bindings for the llama.cpp library. + +The main goal of llama.cpp is to enable LLM inference with minimal setup and +state-of-the-art performance on a wide variety of hardware - locally and in +the cloud.