diff --git a/scrapegraphai/graphs/abstract_graph.py b/scrapegraphai/graphs/abstract_graph.py
index 63ccda45..229d7bb8 100644
--- a/scrapegraphai/graphs/abstract_graph.py
+++ b/scrapegraphai/graphs/abstract_graph.py
@@ -3,11 +3,12 @@
 """
 from abc import ABC, abstractmethod
 from typing import Optional
+from langchain_aws import BedrockEmbeddings
 from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
-from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings, BedrockEmbeddings
+from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings
 from langchain_google_genai import GoogleGenerativeAIEmbeddings
 from ..helpers import models_tokens
-from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic, Claude, DeepSeek
+from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic, DeepSeek
 
 
 class AbstractGraph(ABC):
@@ -153,7 +154,7 @@ def _create_llm(self, llm_config: dict, chat=False) -> object:
                 self.model_token = models_tokens["claude"][llm_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return Claude(llm_params)
+            return Anthropic(llm_params)
 
         elif "ollama" in llm_params["model"]:
             llm_params["model"] = llm_params["model"].split("/")[-1]
diff --git a/scrapegraphai/models/__init__.py b/scrapegraphai/models/__init__.py
index 9c049fcd..7e7d5e18 100644
--- a/scrapegraphai/models/__init__.py
+++ b/scrapegraphai/models/__init__.py
@@ -12,5 +12,4 @@
 from .groq import Groq
 from .bedrock import Bedrock
 from .anthropic import Anthropic
-from .claude import Claude
 from .deepseek import DeepSeek
diff --git a/scrapegraphai/models/claude.py b/scrapegraphai/models/claude.py
deleted file mode 100644
index b0030fc4..00000000
--- a/scrapegraphai/models/claude.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""
-Claude model
-"""
-from langchain_anthropic import ChatAnthropic
-
-
-class Claude(ChatAnthropic):
-    """Class for wrapping bedrock module"""
-
-    def __init__(self, llm_config: dict):
-        """
-        A wrapper for the Claude class that provides default configuration
-        and could be extended with additional methods if needed.
-
-        Args:
-            llm_config (dict): Configuration parameters for the language model.
-        """
-        # Initialize the superclass (ChatAnthropic) with provided config parameters
-        super().__init__(**llm_config)