Langchain

zenml.integrations.langchain

Initialization of the langchain integration.

Attributes

LANGCHAIN = 'langchain' module-attribute

logger = get_logger(__name__) module-attribute

Classes

Integration

Base class for integration in ZenML.

Functions
activate() -> None classmethod

Abstract method to activate the integration.

Source code in src/zenml/integrations/integration.py
@classmethod
def activate(cls) -> None:
    """Abstract method to activate the integration."""
check_installation() -> bool classmethod

Method to check whether the required packages are installed.

Returns:

bool: True if all required packages are installed, False otherwise.

Source code in src/zenml/integrations/integration.py
@classmethod
def check_installation(cls) -> bool:
    """Method to check whether the required packages are installed.

    Returns:
        True if all required packages are installed, False otherwise.
    """
    for requirement in cls.get_requirements():
        parsed_requirement = Requirement(requirement)

        if not requirement_installed(parsed_requirement):
            logger.debug(
                "Requirement '%s' for integration '%s' is not installed "
                "or installed with the wrong version.",
                requirement,
                cls.NAME,
            )
            return False

        dependencies = get_dependencies(parsed_requirement)

        for dependency in dependencies:
            if not requirement_installed(dependency):
                logger.debug(
                    "Requirement '%s' for integration '%s' is not "
                    "installed or installed with the wrong version.",
                    dependency,
                    cls.NAME,
                )
                return False

    logger.debug(
        f"Integration '{cls.NAME}' is installed correctly with "
        f"requirements {cls.get_requirements()}."
    )
    return True
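
Example (illustrative sketch only, not part of the generated reference), using the LangchainIntegration class documented further down this page:

from zenml.integrations.langchain import LangchainIntegration

# Returns True only if every declared requirement (and its dependencies)
# is importable with a matching version.
if not LangchainIntegration.check_installation():
    print("The langchain integration requirements are missing or mismatched.")
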
flavors() -> List[Type[Flavor]] classmethod

Abstract method to declare new stack component flavors.

Returns:

List[Type[Flavor]]: A list of new stack component flavors.

Source code in src/zenml/integrations/integration.py
@classmethod
def flavors(cls) -> List[Type[Flavor]]:
    """Abstract method to declare new stack component flavors.

    Returns:
        A list of new stack component flavors.
    """
    return []
get_requirements(target_os: Optional[str] = None, python_version: Optional[str] = None) -> List[str] classmethod

Method to get the requirements for the integration.

Parameters:

target_os (Optional[str]): The target operating system to get the requirements for. Defaults to None.
python_version (Optional[str]): The Python version to use for the requirements. Defaults to None.

Returns:

List[str]: A list of requirements.

Source code in src/zenml/integrations/integration.py
@classmethod
def get_requirements(
    cls,
    target_os: Optional[str] = None,
    python_version: Optional[str] = None,
) -> List[str]:
    """Method to get the requirements for the integration.

    Args:
        target_os: The target operating system to get the requirements for.
        python_version: The Python version to use for the requirements.

    Returns:
        A list of requirements.
    """
    return cls.REQUIREMENTS
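
A hedged usage sketch: the declared requirements can be listed directly from the integration class. Note that the base implementation shown above simply returns cls.REQUIREMENTS regardless of the target_os and python_version arguments.

from zenml.integrations.langchain import LangchainIntegration

for requirement in LangchainIntegration.get_requirements(target_os="linux"):
    print(requirement)  # e.g. a pinned langchain version specifier
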
get_uninstall_requirements(target_os: Optional[str] = None) -> List[str] classmethod

Method to get the uninstall requirements for the integration.

Parameters:

target_os (Optional[str]): The target operating system to get the requirements for. Defaults to None.

Returns:

List[str]: A list of requirements.

Source code in src/zenml/integrations/integration.py
@classmethod
def get_uninstall_requirements(
    cls, target_os: Optional[str] = None
) -> List[str]:
    """Method to get the uninstall requirements for the integration.

    Args:
        target_os: The target operating system to get the requirements for.

    Returns:
        A list of requirements.
    """
    ret = []
    for each in cls.get_requirements(target_os=target_os):
        is_ignored = False
        for ignored in cls.REQUIREMENTS_IGNORED_ON_UNINSTALL:
            if each.startswith(ignored):
                is_ignored = True
                break
        if not is_ignored:
            ret.append(each)
    return ret
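
A small sketch of the filtering behaviour implemented above: any requirement whose specifier starts with an entry in REQUIREMENTS_IGNORED_ON_UNINSTALL is installed with the integration but skipped when it is uninstalled.

from zenml.integrations.langchain import LangchainIntegration

install_reqs = set(LangchainIntegration.get_requirements())
uninstall_reqs = set(LangchainIntegration.get_uninstall_requirements())

# Requirements that are left on the system when the integration is uninstalled.
print(install_reqs - uninstall_reqs)
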
plugin_flavors() -> List[Type[BasePluginFlavor]] classmethod

Abstract method to declare new plugin flavors.

Returns:

List[Type[BasePluginFlavor]]: A list of new plugin flavors.

Source code in src/zenml/integrations/integration.py
@classmethod
def plugin_flavors(cls) -> List[Type["BasePluginFlavor"]]:
    """Abstract method to declare new plugin flavors.

    Returns:
        A list of new plugin flavors.
    """
    return []

LangchainIntegration

Bases: Integration

Definition of langchain integration for ZenML.

Functions
activate() -> None classmethod

Activates the integration.

Source code in src/zenml/integrations/langchain/__init__.py
@classmethod
def activate(cls) -> None:
    """Activates the integration."""
    from zenml.integrations.langchain import materializers  # noqa
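
A hedged sketch of how the two hooks are typically combined: the installation is verified first, and activate() then imports the materializers sub-module as a side effect so that its materializer classes become known to ZenML.

from zenml.integrations.langchain import LangchainIntegration

if LangchainIntegration.check_installation():
    # Imports zenml.integrations.langchain.materializers under the hood.
    LangchainIntegration.activate()
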

Functions

get_logger(logger_name: str) -> logging.Logger

Gets a configured logger with the given name.

Parameters:

logger_name (str): Name of the logger to initialize. Required.

Returns:

Logger: A logger object.

Source code in src/zenml/logger.py
def get_logger(logger_name: str) -> logging.Logger:
    """Main function to get logger name,.

    Args:
        logger_name: Name of logger to initialize.

    Returns:
        A logger object.
    """
    logger = logging.getLogger(logger_name)
    logger.setLevel(get_logging_level().value)
    logger.addHandler(get_console_handler())

    logger.propagate = False
    return logger
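
A minimal usage sketch, mirroring the module-level logger attribute listed at the top of this page:

from zenml.logger import get_logger

logger = get_logger(__name__)
logger.debug("Checking the langchain integration requirements.")
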

Modules

materializers

Initialization of the langchain materializer.

Classes
Modules
document_materializer

Implementation of ZenML's Langchain Document materializer.

Classes
LangchainDocumentMaterializer(uri: str, artifact_store: Optional[BaseArtifactStore] = None)

Bases: BaseMaterializer

Handle Langchain Document objects.

Source code in src/zenml/materializers/base_materializer.py
def __init__(
    self, uri: str, artifact_store: Optional[BaseArtifactStore] = None
):
    """Initializes a materializer with the given URI.

    Args:
        uri: The URI where the artifact data will be stored.
        artifact_store: The artifact store used to store this artifact.
    """
    self.uri = uri
    self._artifact_store = artifact_store
Functions
extract_metadata(data: Document) -> Dict[str, MetadataType]

Extract metadata from the given BaseModel object.

Parameters:

data (Document): The BaseModel object to extract metadata from. Required.

Returns:

Dict[str, MetadataType]: The extracted metadata as a dictionary.

Source code in src/zenml/integrations/langchain/materializers/document_materializer.py
def extract_metadata(self, data: Document) -> Dict[str, "MetadataType"]:
    """Extract metadata from the given BaseModel object.

    Args:
        data: The BaseModel object to extract metadata from.

    Returns:
        The extracted metadata as a dictionary.
    """
    return {"schema": data.schema()}
load(data_type: Type[Document]) -> Any

Reads BaseModel from JSON.

Parameters:

data_type (Type[Document]): The type of the data to read. Required.

Returns:

Any: The data read.

Source code in src/zenml/integrations/langchain/materializers/document_materializer.py
def load(self, data_type: Type["Document"]) -> Any:
    """Reads BaseModel from JSON.

    Args:
        data_type: The type of the data to read.

    Returns:
        The data read.
    """
    data_path = os.path.join(self.uri, DEFAULT_FILENAME)
    contents = yaml_utils.read_json(data_path)
    return data_type.parse_raw(contents)
save(data: Document) -> None

Serialize a BaseModel to JSON.

Parameters:

data (Document): The data to store. Required.

Source code in src/zenml/integrations/langchain/materializers/document_materializer.py
def save(self, data: "Document") -> None:
    """Serialize a BaseModel to JSON.

    Args:
        data: The data to store.
    """
    data_path = os.path.join(self.uri, DEFAULT_FILENAME)
    yaml_utils.write_json(data_path, data.json())
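
For illustration only: in normal use this materializer is never instantiated by hand; ZenML selects it for step outputs annotated as a langchain Document. The step below and the Document import path are assumptions, not part of this module.

from langchain_core.documents import Document  # assumed import path
from zenml import step


@step
def make_document() -> Document:
    # Persisted as JSON by LangchainDocumentMaterializer.
    return Document(page_content="ZenML and Langchain", metadata={"source": "docs"})
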
Modules
openai_embedding_materializer

Implementation of the Langchain OpenAI embedding materializer.

Classes
LangchainOpenaiEmbeddingMaterializer(uri: str, artifact_store: Optional[BaseArtifactStore] = None)

Bases: CloudpickleMaterializer

Materializer for Langchain OpenAI Embeddings.

Source code in src/zenml/materializers/base_materializer.py
def __init__(
    self, uri: str, artifact_store: Optional[BaseArtifactStore] = None
):
    """Initializes a materializer with the given URI.

    Args:
        uri: The URI where the artifact data will be stored.
        artifact_store: The artifact store used to store this artifact.
    """
    self.uri = uri
    self._artifact_store = artifact_store
Functions
load(data_type: Type[Any]) -> Any

Loads the embeddings model and lets it recreate clients when needed.

Parameters:

data_type (Type[Any]): The type of the data to load. Required.

Returns:

Any: The loaded embeddings model.

Source code in src/zenml/integrations/langchain/materializers/openai_embedding_materializer.py
def load(self, data_type: Type[Any]) -> Any:
    """Loads the embeddings model and lets it recreate clients when needed.

    Args:
        data_type: The type of the data to load.

    Returns:
        The loaded embeddings model.
    """
    return super().load(data_type)
save(embeddings: Any) -> None

Saves the embeddings model after clearing non-picklable clients.

Parameters:

embeddings (Any): The embeddings model to save. Required.

Source code in src/zenml/integrations/langchain/materializers/openai_embedding_materializer.py
def save(self, embeddings: Any) -> None:
    """Saves the embeddings model after clearing non-picklable clients.

    Args:
        embeddings: The embeddings model to save.
    """
    # Clear the clients which will be recreated on load
    embeddings.client = None
    embeddings.async_client = None

    # Use the parent class's save implementation which uses cloudpickle
    super().save(embeddings)
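
An illustrative sketch, assuming the separate langchain-openai package: a step can return the embeddings object directly, and the materializer above drops the non-picklable OpenAI clients before cloudpickling so they can be recreated lazily after load().

from langchain_openai import OpenAIEmbeddings  # assumed provider package
from zenml import step


@step
def build_embeddings() -> OpenAIEmbeddings:
    # client / async_client are cleared at save time and rebuilt on first use
    # after the artifact is loaded.
    return OpenAIEmbeddings(model="text-embedding-3-small")
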
vector_store_materializer

Implementation of the langchain vector store materializer.

Classes
LangchainVectorStoreMaterializer(uri: str, artifact_store: Optional[BaseArtifactStore] = None)

Bases: CloudpickleMaterializer

Handle langchain vector store objects.

Source code in src/zenml/materializers/base_materializer.py
def __init__(
    self, uri: str, artifact_store: Optional[BaseArtifactStore] = None
):
    """Initializes a materializer with the given URI.

    Args:
        uri: The URI where the artifact data will be stored.
        artifact_store: The artifact store used to store this artifact.
    """
    self.uri = uri
    self._artifact_store = artifact_store