path: root/misc/py-peft
Diffstat (limited to 'misc/py-peft')
-rw-r--r--  misc/py-peft/Makefile    43
-rw-r--r--  misc/py-peft/distinfo     3
-rw-r--r--  misc/py-peft/pkg-descr   14
3 files changed, 60 insertions, 0 deletions
diff --git a/misc/py-peft/Makefile b/misc/py-peft/Makefile
new file mode 100644
index 000000000000..56b09528d0cb
--- /dev/null
+++ b/misc/py-peft/Makefile
@@ -0,0 +1,43 @@
+PORTNAME= peft
+DISTVERSION= 0.17.1
+CATEGORIES= misc python # machine-learning
+MASTER_SITES= PYPI
+PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX}
+
+MAINTAINER= yuri@FreeBSD.org
+COMMENT= Parameter-Efficient Fine-Tuning (PEFT)
+WWW= https://huggingface.co/docs/peft/index \
+ https://github.com/huggingface/peft
+
+LICENSE= APACHE20
+LICENSE_FILE= ${WRKSRC}/LICENSE
+
+RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}accelerate>=0.21.0:misc/py-accelerate@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}huggingface-hub>=0.25.0:misc/py-huggingface-hub@${PY_FLAVOR} \
+ ${PYNUMPY} \
+ ${PYTHON_PKGNAMEPREFIX}packaging>=20.0:devel/py-packaging@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}psutil>0:sysutils/py-psutil@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}pytorch>=1.13.0:misc/py-pytorch@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}pyyaml>0:devel/py-pyyaml@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}safetensors>0:misc/py-safetensors@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}tqdm>0:misc/py-tqdm@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}transformers>0:misc/py-transformers@${PY_FLAVOR}
+TEST_DEPENDS= ${PYTHON_PKGNAMEPREFIX}datasets>0:misc/py-datasets@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}diffusers>0:misc/py-diffusers@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}parameterized>0:devel/py-parameterized@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}protobuf>0:devel/py-protobuf@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}pytest-cov>0:devel/py-pytest-cov@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}pytest-xdist>0:devel/py-pytest-xdist@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}scipy>0:science/py-scipy@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}sentencepiece>0:textproc/py-sentencepiece@${PY_FLAVOR}
+
+USES= python
+USE_PYTHON= distutils autoplist pytest
+
+TEST_ENV= ${MAKE_ENV} PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR}
+
+NO_ARCH= yes
+
+# tests fail to run, see https://github.com/huggingface/peft/issues/2789
+
+.include <bsd.port.mk>
diff --git a/misc/py-peft/distinfo b/misc/py-peft/distinfo
new file mode 100644
index 000000000000..b4a8bb7b5348
--- /dev/null
+++ b/misc/py-peft/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1758189811
+SHA256 (peft-0.17.1.tar.gz) = e6002b42517976c290b3b8bbb9829a33dd5d470676b2dec7cb4df8501b77eb9f
+SIZE (peft-0.17.1.tar.gz) = 568192
diff --git a/misc/py-peft/pkg-descr b/misc/py-peft/pkg-descr
new file mode 100644
index 000000000000..c7205201b5f5
--- /dev/null
+++ b/misc/py-peft/pkg-descr
@@ -0,0 +1,14 @@
+The peft module contains state-of-the-art Parameter-Efficient Fine-Tuning
+(PEFT) methods.
+
+Fine-tuning large pretrained models is often prohibitively costly due to their
+scale. Parameter-Efficient Fine-Tuning (PEFT) methods enable efficient
+adaptation of large pretrained models to various downstream applications by only
+fine-tuning a small number of (extra) model parameters instead of all the
+model's parameters. This significantly decreases the computational and storage
+costs. Recent state-of-the-art PEFT techniques achieve performance comparable to
+fully fine-tuned models.
+
+PEFT is integrated with Transformers for easy model training and inference,
+Diffusers for conveniently managing different adapters, and Accelerate for
+distributed training and inference for really big models.
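
The following is a minimal, illustrative Python sketch (not part of the port's
files) of the workflow peft enables, as described in pkg-descr above: wrapping
a pretrained Transformers model with a LoRA adapter so that only a small number
of extra parameters is trained. The model name and LoRA hyperparameters are
assumptions chosen for brevity.

from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# Load a small, illustrative pretrained causal language model.
base = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")

# Configure a LoRA adapter: low-rank update matrices of rank r are injected
# (by default into the model's attention projections); only these are trained.
config = LoraConfig(
    r=8,               # rank of the low-rank update
    lora_alpha=16,     # scaling factor applied to the update
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
)

# Wrap the base model; it is trained and saved like any Transformers model,
# but only the adapter weights (a small fraction of the total) are updated.
model = get_peft_model(base, config)
model.print_trainable_parameters()

A saved adapter can later be loaded back onto the same base model with
peft's PeftModel.from_pretrained, keeping the full pretrained weights untouched.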