ci: update langchain version

CaptainB 2025-03-20 18:42:56 +08:00 committed by 刘瑞斌
parent 18e4647211
commit 72398408c5
8 changed files with 32 additions and 26 deletions


@@ -11,7 +11,7 @@ from enum import Enum
 from functools import reduce
 from typing import Dict, Iterator, Type, List
-from pydantic.v1 import BaseModel
+from pydantic import BaseModel
 from common.exception.app_exception import AppApiException
 from django.utils.translation import gettext_lazy as _
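Note: langchain 0.3 is built on pydantic v2, so the pydantic.v1 compatibility shim (and the langchain_core.pydantic_v1 shim replaced in later hunks of this commit) gives way to plain pydantic imports. A minimal sketch of the before/after; ModelInfo is an illustrative name, not from this repo:

# Before (langchain 0.2 era, pydantic v1 shim):
#   from pydantic.v1 import BaseModel
# After (langchain 0.3, pydantic v2):
from pydantic import BaseModel

class ModelInfo(BaseModel):
    name: str
    provider: str = "openai"

info = ModelInfo(name="gpt-4o-mini")
print(info.model_dump())  # v2 spelling of v1's .dict()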


@@ -10,7 +10,6 @@ from langchain_core.outputs import ChatGenerationChunk, ChatGeneration
 from langchain_core.runnables import RunnableConfig, ensure_config
 from langchain_core.utils.pydantic import is_basemodel_subclass
 from langchain_openai import ChatOpenAI
-from langchain_openai.chat_models.base import _convert_chunk_to_generation_chunk
 from common.config.tokenizer_manage_config import TokenizerManage
@@ -98,7 +97,7 @@ class BaseChatOpenAI(ChatOpenAI):
             if not isinstance(chunk, dict):
                 chunk = chunk.model_dump()
-            generation_chunk = _convert_chunk_to_generation_chunk(
+            generation_chunk = super()._convert_chunk_to_generation_chunk(
                 chunk,
                 default_chunk_class,
                 base_generation_info if is_first_chunk else {},
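Note: in langchain-openai 0.3 the chunk-conversion helper is no longer importable from langchain_openai.chat_models.base; the diff resolves it through super() instead, which implies it now lives as a method on the chat model class. A minimal sketch of that override pattern; PatchedChatOpenAI and the simplified signature are illustrative:

from langchain_openai import ChatOpenAI

class PatchedChatOpenAI(ChatOpenAI):
    def _convert_chunk_to_generation_chunk(self, chunk, default_chunk_class, base_generation_info):
        # Normalize raw payloads to plain dicts, then delegate to the
        # parent method that the 0.3 release defines on the class.
        if not isinstance(chunk, dict):
            chunk = chunk.model_dump()
        return super()._convert_chunk_to_generation_chunk(
            chunk, default_chunk_class, base_generation_info
        )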


@@ -9,16 +9,19 @@
 from typing import List, Dict, Optional, Sequence, Union, Any, Iterator, cast
 from google.ai.generativelanguage_v1 import GenerateContentResponse
-from google.generativeai.responder import ToolDict
-from google.generativeai.types import FunctionDeclarationType, SafetySettingDict
-from google.generativeai.types import Tool as GoogleTool
+from google.ai.generativelanguage_v1beta.types import (
+    Tool as GoogleTool,
+)
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.messages import BaseMessage
 from langchain_core.outputs import ChatGenerationChunk
 from langchain_google_genai import ChatGoogleGenerativeAI
-from langchain_google_genai._function_utils import _ToolConfigDict
-from langchain_google_genai.chat_models import _chat_with_retry, _response_to_result
+from langchain_google_genai._function_utils import _ToolConfigDict, _ToolDict
+from langchain_google_genai.chat_models import _chat_with_retry, _response_to_result, \
+    _FunctionDeclarationType
+from langchain_google_genai._common import (
+    SafetySettingDict,
+)
 from setting.models_provider.base_model_provider import MaxKBBaseModel
@@ -54,8 +57,8 @@ class GeminiChatModel(MaxKBBaseModel, ChatGoogleGenerativeAI):
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        *,
-       tools: Optional[Sequence[Union[ToolDict, GoogleTool]]] = None,
-       functions: Optional[Sequence[FunctionDeclarationType]] = None,
+       tools: Optional[Sequence[Union[_ToolDict, GoogleTool]]] = None,
+       functions: Optional[Sequence[_FunctionDeclarationType]] = None,
        safety_settings: Optional[SafetySettingDict] = None,
        tool_config: Optional[Union[Dict, _ToolConfigDict]] = None,
        generation_config: Optional[Dict[str, Any]] = None,
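Note: langchain-google-genai 2.x no longer leans on the google.generativeai helper types; the equivalents are the private aliases shown above. A hedged compatibility shim, taking the 1.x and 2.x import locations exactly as the two sides of this diff give them; the try/except structure itself is an assumption:

try:  # langchain-google-genai >= 2.0 (new side of the diff)
    from langchain_google_genai._function_utils import _ToolConfigDict, _ToolDict
    from langchain_google_genai.chat_models import _FunctionDeclarationType
    from langchain_google_genai._common import SafetySettingDict
except ImportError:  # 1.x fallback (old side of the diff)
    from google.generativeai.responder import ToolDict as _ToolDict
    from google.generativeai.types import (
        FunctionDeclarationType as _FunctionDeclarationType,
        SafetySettingDict,
    )
    from langchain_google_genai._function_utils import _ToolConfigDict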


@@ -10,7 +10,7 @@ from typing import Dict, List
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 from langchain_huggingface import HuggingFaceEmbeddings
 from setting.models_provider.base_model_provider import MaxKBBaseModel


@@ -6,7 +6,7 @@
 @date2024/9/2 16:42
 @desc:
 """
-from typing import Sequence, Optional, Dict, Any
+from typing import Sequence, Optional, Dict, Any, ClassVar
 import requests
 import torch
@@ -69,7 +69,7 @@ class LocalBaseReranker(MaxKBBaseModel, BaseDocumentCompressor):
     tokenizer: Any = None
     model: Optional[str] = None
     cache_dir: Optional[str] = None
-    model_kwargs = {}
+    model_kwargs: ClassVar = {}

     def __init__(self, model_name, cache_dir=None, **model_kwargs):
         super().__init__()
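Note: the ClassVar annotation matters because pydantic v2 raises a PydanticUserError for un-annotated attributes on model classes, where v1 silently treated them as class-level defaults. A minimal sketch; Reranker is an illustrative stand-in for LocalBaseReranker:

from typing import ClassVar, Optional
from pydantic import BaseModel

class Reranker(BaseModel):
    model: Optional[str] = None          # a regular pydantic field
    model_kwargs: ClassVar[dict] = {}    # class-level value, not a field

    # Without the ClassVar annotation, pydantic v2 rejects the class:
    #   model_kwargs = {}  ->  PydanticUserError: A non-annotated attribute ...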


@@ -1,10 +1,11 @@
 from typing import Sequence, Optional, Any, Dict
-from langchain_community.embeddings import OllamaEmbeddings
 from langchain_core.callbacks import Callbacks
 from langchain_core.documents import Document
+from langchain_community.embeddings import OllamaEmbeddings
 from setting.models_provider.base_model_provider import MaxKBBaseModel
 from sklearn.metrics.pairwise import cosine_similarity
-from pydantic.v1 import BaseModel, Field
+from pydantic import BaseModel, Field

 class OllamaReranker(MaxKBBaseModel, OllamaEmbeddings, BaseModel):
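Note: the class above combines OllamaEmbeddings with sklearn's cosine_similarity, which points to embedding-based reranking. A hedged sketch of that idea; the rerank function and its scoring are assumptions for illustration, not this class's actual body:

from langchain_community.embeddings import OllamaEmbeddings
from sklearn.metrics.pairwise import cosine_similarity

def rerank(embedder: OllamaEmbeddings, query: str, docs: list) -> list:
    # Embed the query and candidates, then order candidates by cosine score.
    query_vec = [embedder.embed_query(query)]
    doc_vecs = embedder.embed_documents(docs)
    scores = cosine_similarity(query_vec, doc_vecs)[0]
    ranked = sorted(zip(scores, docs), key=lambda pair: pair[0], reverse=True)
    return [doc for _, doc in ranked]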


@@ -18,7 +18,7 @@ from langchain_core.messages import (
     HumanMessageChunk, SystemMessage,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
+from pydantic import Field, SecretStr, root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
@@ -137,7 +137,7 @@ class ChatHunyuan(BaseChatModel):
     class Config:
         """Configuration for this pydantic object."""

-        allow_population_by_field_name = True
+        validate_by_name = True

     @root_validator(pre=True)
     def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
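Note: allow_population_by_field_name is the pydantic v1 Config key; its long-standing v2 name is populate_by_name, and validate_by_name was added in later v2 releases as the successor this diff opts for. A minimal sketch of the behavior being preserved; ChatParams and its alias are illustrative:

from pydantic import BaseModel, ConfigDict, Field

class ChatParams(BaseModel):
    # ConfigDict is the v2 replacement for the inner Config class;
    # populate_by_name keeps both the alias and the field name usable.
    model_config = ConfigDict(populate_by_name=True)

    top_p: float = Field(default=1.0, alias="TopP")

ChatParams(TopP=0.9)   # by alias (always accepted)
ChatParams(top_p=0.9)  # by field name (needs populate_by_name / validate_by_name)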


@@ -12,9 +12,16 @@ django = "4.2.18"
 djangorestframework = "^3.15.2"
 drf-yasg = "1.21.7"
 django-filter = "23.2"
-langchain = "0.2.16"
-langchain_community = "0.2.17"
-langchain-huggingface = "^0.0.3"
+langchain-openai = "^0.3.0"
+langchain-anthropic = "^0.3.0"
+langchain-community = "^0.3.0"
+langchain-deepseek = "^0.1.0"
+langchain-google-genai = "^2.0.9"
+langchain-mcp-adapters = "^0.0.5"
+langchain-huggingface = "^0.1.2"
+langchain-ollama = "^0.2.3"
+langgraph = "^0.3.0"
+mcp = "^1.4.1"
 psycopg2-binary = "2.9.10"
 jieba = "^0.42.1"
 diskcache = "^5.6.3"
@@ -28,8 +35,6 @@ qianfan = "^0.3.6.1"
 pycryptodome = "^3.19.0"
 beautifulsoup4 = "^4.12.2"
 html2text = "^2024.2.26"
-langchain-openai = "^0.1.8"
-langchain-ollama = "0.1.3"
 django-ipware = "^6.0.4"
 django-apscheduler = "^0.6.2"
 pymupdf = "1.24.9"
@@ -42,8 +47,6 @@ zhipuai = "^2.0.1"
 httpx = "^0.27.0"
 httpx-sse = "^0.4.0"
 websockets = "^13.0"
-langchain-google-genai = "^1.0.3"
-langchain-anthropic= "^0.1.0"
 openpyxl = "^3.1.2"
 xlrd = "^2.0.1"
 gunicorn = "^22.0.0"
@@ -55,7 +58,7 @@ psutil = "^6.0.0"
 celery = { extras = ["sqlalchemy"], version = "^5.4.0" }
 django-celery-beat = "^2.6.0"
 celery-once = "^3.0.1"
-anthropic = "^0.34.2"
+anthropic = "^0.39.0"
 pylint = "3.1.0"
 pydub = "^0.25.1"
 cffi = "^1.17.1"
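Note: the caret constraints above pin major/minor lines (under Poetry, ^0.3.0 resolves to >=0.3.0,<0.4.0). A quick post-upgrade sanity check, assuming Poetry has re-locked and installed the environment:

from importlib.metadata import version

# Print the resolved versions of the newly pinned langchain packages.
for pkg in ("langchain-openai", "langchain-community",
            "langchain-google-genai", "langchain-ollama", "langgraph"):
    print(pkg, version(pkg))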