From 2215da3d2850aeeead8fd02ceabd79e2ab8c7964 Mon Sep 17 00:00:00 2001
From: ryanDing26
Date: Sun, 19 Oct 2025 11:02:09 -0700
Subject: [PATCH 1/3] Add HuggingFace support

---
 biomni/llm.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/biomni/llm.py b/biomni/llm.py
index 61747b685..1b3c67132 100644
--- a/biomni/llm.py
+++ b/biomni/llm.py
@@ -6,7 +6,7 @@
 if TYPE_CHECKING:
     from biomni.config import BiomniConfig
 
-SourceType = Literal["OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", "Groq", "Custom"]
+SourceType = Literal["OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", "Groq", "HuggingFace", "Custom"]
 
 ALLOWED_SOURCES: set[str] = set(SourceType.__args__)
 
@@ -26,7 +26,7 @@ def get_llm(
         model (str): The model name to use
         temperature (float): Temperature setting for generation
         stop_sequences (list): Sequences that will stop generation
-        source (str): Source provider: "OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", or "Custom"
+        source (str): Source provider: "OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", "HuggingFace", or "Custom"
             If None, will attempt to auto-detect from model name
         base_url (str): The base URL for custom model serving (e.g., "http://localhost:8000/v1"), default is None
         api_key (str): The API key for the custom llm
@@ -196,7 +196,21 @@ def get_llm(
             stop_sequences=stop_sequences,
             region_name=os.getenv("AWS_REGION", "us-east-1"),
         )
-
+    elif source == "HuggingFace":
+        try:
+            from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint 
+        except ImportError:
+            raise ImportError(  # noqa: B904
+                "langchain-huggingface package is required for HuggingFace models. Install with: pip install langchain-huggingface"
+            )
+        return ChatHuggingFace(
+            llm = HuggingFaceEndpoint(
+                repo_id=model,
+                temperature=temperature,
+                stop_sequences=stop_sequences,
+                huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_KEY")
+            )
+        )
     elif source == "Custom":
         try:
             from langchain_openai import ChatOpenAI

From e6cc6ccbfba9416dd0b7fa54c8bfef84ba8779cf Mon Sep 17 00:00:00 2001
From: ryanDing26
Date: Sun, 19 Oct 2025 11:07:22 -0700
Subject: [PATCH 2/3] Add pip installation to new software script

---
 biomni_env/new_software_v007.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/biomni_env/new_software_v007.sh b/biomni_env/new_software_v007.sh
index 4cdcb4ae0..6bdae5c2c 100644
--- a/biomni_env/new_software_v007.sh
+++ b/biomni_env/new_software_v007.sh
@@ -8,4 +8,5 @@ pip install fair-esm
 pip install nnunet nibabel nilearn
 pip install mi-googlesearch-python
 pip install git+https://github.com/pylabrobot/pylabrobot.git
+pip install langchain-huggingface
 conda install weasyprint

From e1a9cc3372eb6ebb798a550ba6944521481c4fc5 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sun, 19 Oct 2025 18:25:34 +0000
Subject: [PATCH 3/3] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 biomni/llm.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/biomni/llm.py b/biomni/llm.py
index 1b3c67132..d28e6d657 100644
--- a/biomni/llm.py
+++ b/biomni/llm.py
@@ -6,7 +6,9 @@
 if TYPE_CHECKING:
     from biomni.config import BiomniConfig
 
-SourceType = Literal["OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", "Groq", "HuggingFace", "Custom"]
+SourceType = Literal[
+    "OpenAI", "AzureOpenAI", "Anthropic", "Ollama", "Gemini", "Bedrock", "Groq", "HuggingFace", "Custom"
+]
 
 ALLOWED_SOURCES: set[str] = set(SourceType.__args__)
 
@@ -198,17 +200,17 @@ def get_llm(
         )
     elif source == "HuggingFace":
         try:
-            from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint 
+            from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
         except ImportError:
             raise ImportError(  # noqa: B904
                 "langchain-huggingface package is required for HuggingFace models. Install with: pip install langchain-huggingface"
             )
         return ChatHuggingFace(
-            llm = HuggingFaceEndpoint(
+            llm=HuggingFaceEndpoint(
                 repo_id=model,
                 temperature=temperature,
                 stop_sequences=stop_sequences,
-                huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_KEY")
+                huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_KEY"),
            )
         )
     elif source == "Custom":