Index: misc/Makefile =================================================================== --- misc/Makefile +++ misc/Makefile @@ -454,6 +454,7 @@ SUBDIR += py-lightgbm SUBDIR += py-lightning-utilities SUBDIR += py-litellm + SUBDIR += py-litellm1447 SUBDIR += py-llama-cpp-python SUBDIR += py-llm SUBDIR += py-llm-claude-3 Index: misc/py-aider-chat/Makefile =================================================================== --- misc/py-aider-chat/Makefile +++ misc/py-aider-chat/Makefile @@ -32,7 +32,7 @@ ${PYTHON_PKGNAMEPREFIX}importlib-resources>=0:devel/py-importlib-resources@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}json5>=0.9.11:devel/py-json5@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}jsonschema>=4.23.0:devel/py-jsonschema@${PY_FLAVOR} \ - ${PYTHON_PKGNAMEPREFIX}litellm-1447==1.44.7:misc/py-litellm-1447@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}litellm1447==1.44.7:misc/py-litellm1447@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}networkx>=3.3:math/py-networkx@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}numpy<2.0,1:math/py-numpy@${PY_FLAVOR} \ ${PYTHON_PKGNAMEPREFIX}openai>=1.43.1:misc/py-openai@${PY_FLAVOR} \ Index: misc/py-litellm1447/Makefile =================================================================== --- /dev/null +++ misc/py-litellm1447/Makefile @@ -0,0 +1,50 @@ +PORTNAME= litellm +DISTVERSION= 1.44.7 +DISTNAME= litellm-${DISTVERSION} +CATEGORIES= misc python +MASTER_SITES= PYPI +PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX} +PKGNAMESUFFIX= 1447 + +MAINTAINER= dch@FreeBSD.org +COMMENT= Call all LLM APIs using the OpenAI format +WWW= https://github.com/BerriAI/litellm + +LICENSE= MIT +LICENSE_FILE= ${WRKSRC}/LICENSE + +BUILD_DEPENDS= ${PYTHON_PKGNAMEPREFIX}poetry-core>0:devel/py-poetry-core@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}wheel>0:devel/py-wheel@${PY_FLAVOR} +RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}openai>=1.51.0:misc/py-openai@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}python-dotenv>=0.2.0:www/py-python-dotenv@${PY_FLAVOR} \ + 
${PYTHON_PKGNAMEPREFIX}tiktoken>=0.7.0:textproc/py-tiktoken@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}importlib-metadata>=6.8.0:devel/py-importlib-metadata@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}tokenizers>0:textproc/py-tokenizers@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}click>0:devel/py-click@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}Jinja2>=3.1.2<4.0.0:devel/py-Jinja2@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}aiohttp>0:www/py-aiohttp@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}requests>=2.31.0<3.0.0:www/py-requests@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}pydantic2>=2.0.0<3.0.0:devel/py-pydantic2@${PY_FLAVOR} \ + ${PYTHON_PKGNAMEPREFIX}jsonschema>=4.22.0<5.0.0:devel/py-jsonschema@${PY_FLAVOR} + +USES= python shebangfix +USE_PYTHON= autoplist concurrent pep517 + +REINPLACE_ARGS= -i '' +NO_ARCH= yes + +PORTDOCS= README.md + +OPTIONS_DEFINE= DOCS + +post-patch: + @${REINPLACE_CMD} -e 's|%%PYTHON_CMD%%|${PYTHON_CMD}|' \ + ${WRKSRC}/litellm/proxy/start.sh + @${FIND} ${WRKSRC}/litellm/proxy -type f \ + \( -name '*.orig' -o -name '*.bak' \) -delete + +post-install-DOCS-on: + @${MKDIR} ${STAGEDIR}${DOCSDIR} + ${INSTALL_MAN} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR} + +.include <bsd.port.mk> Index: misc/py-litellm1447/distinfo =================================================================== --- /dev/null +++ misc/py-litellm1447/distinfo @@ -0,0 +1,3 @@ +TIMESTAMP = 1729940808 +SHA256 (litellm-1.44.7.tar.gz) = c8f8f9d80065be81580258177f3a006de86d2c4af1f9a732ac37bd317a13f042 +SIZE (litellm-1.44.7.tar.gz) = 8274968 Index: misc/py-litellm1447/files/patch-litellm_proxy_start.sh =================================================================== --- /dev/null +++ misc/py-litellm1447/files/patch-litellm_proxy_start.sh @@ -0,0 +1,8 @@ +--- litellm/proxy/start.sh.orig 2024-02-11 03:13:21 UTC ++++ litellm/proxy/start.sh +@@ -1,2 +1,2 @@ +-#!/bin/bash +-python3 proxy_cli.py +\ No newline at end of file ++#!/bin/sh ++%%PYTHON_CMD%% proxy_cli.py Index: misc/py-litellm1447/pkg-descr
=================================================================== --- /dev/null +++ misc/py-litellm1447/pkg-descr @@ -0,0 +1,14 @@ +Call all LLM APIs using the OpenAI format [Bedrock, Huggingface, +VertexAI, TogetherAI, Azure, OpenAI, etc.] + +LiteLLM manages: +- Translate inputs to provider's completion, embedding, and + image_generation endpoints +- Consistent output, text responses will always be available at + ['choices'][0]['message']['content'] +- Retry/fallback logic across multiple deployments (e.g. Azure/OpenAI) + - Router +- Track spend & set budgets per project OpenAI Proxy Server + +Pinned port to support misc/py-aider-chat that cannot run with the newer +LiteLLM version yet.