Langchain

zenml.integrations.langchain

Initialization of the Langchain integration.

LangchainIntegration (Integration)

Definition of the Langchain integration for ZenML.

Source code in zenml/integrations/langchain/__init__.py
class LangchainIntegration(Integration):
    """Definition of langchain integration for ZenML."""

    NAME = LANGCHAIN
    REQUIREMENTS = [
        "langchain==0.0.325",
        "pyyaml>=6.0.1",
        "tenacity!=8.4.0",  # https://github.com/jd/tenacity/issues/471
    ]
    REQUIREMENTS_IGNORED_ON_UNINSTALL = ["pyyaml", "tenacity"]

    @classmethod
    def activate(cls) -> None:
        """Activates the integration."""
        from zenml.integrations.langchain import materializers  # noqa
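ZenML runs this activation hook automatically when the integration is loaded, but it can also be exercised by hand. A minimal sketch, assuming the `check_installation()` helper inherited from ZenML's base `Integration` class:

```python
from zenml.integrations.langchain import LangchainIntegration

# check_installation() verifies that the pinned requirements above
# (e.g. langchain==0.0.325) are importable in the current environment.
if LangchainIntegration.check_installation():
    # activate() imports the materializers module, registering the
    # Document, OpenAIEmbeddings and VectorStore materializers.
    LangchainIntegration.activate()
```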
activate() (classmethod)

Activates the integration.

Source code in zenml/integrations/langchain/__init__.py
@classmethod
def activate(cls) -> None:
    """Activates the integration."""
    from zenml.integrations.langchain import materializers  # noqa
materializers

Initialization of the Langchain materializers.

document_materializer

Implementation of ZenML's Langchain Document materializer.

LangchainDocumentMaterializer (BaseMaterializer)

Handles Langchain Document objects.

Source code in zenml/integrations/langchain/materializers/document_materializer.py
class LangchainDocumentMaterializer(BaseMaterializer):
    """Handle Langchain Document objects."""

    ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.DATA
    ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (Document,)

    def load(self, data_type: Type["Document"]) -> Any:
        """Reads BaseModel from JSON.

        Args:
            data_type: The type of the data to read.

        Returns:
            The data read.
        """
        data_path = os.path.join(self.uri, DEFAULT_FILENAME)
        contents = yaml_utils.read_json(data_path)
        return data_type.parse_raw(contents)

    def save(self, data: "Document") -> None:
        """Serialize a BaseModel to JSON.

        Args:
            data: The data to store.
        """
        data_path = os.path.join(self.uri, DEFAULT_FILENAME)
        yaml_utils.write_json(data_path, data.json())

    def extract_metadata(self, data: Document) -> Dict[str, "MetadataType"]:
        """Extract metadata from the given BaseModel object.

        Args:
            data: The BaseModel object to extract metadata from.

        Returns:
            The extracted metadata as a dictionary.
        """
        return {"schema": data.schema()}
extract_metadata(self, data)

Extract metadata from the given BaseModel object.
Parameters:
| Name | Type | Description | Default | 
|---|---|---|---|
| data | langchain.docstore.document.Document | The BaseModel object to extract metadata from. | required | 
Returns:
| Type | Description | 
|---|---|
| Dict[str, MetadataType] | The extracted metadata as a dictionary. | 
Source code in zenml/integrations/langchain/materializers/document_materializer.py
def extract_metadata(self, data: Document) -> Dict[str, "MetadataType"]:
    """Extract metadata from the given BaseModel object.

    Args:
        data: The BaseModel object to extract metadata from.

    Returns:
        The extracted metadata as a dictionary.
    """
    return {"schema": data.schema()}
load(self, data_type)

Reads BaseModel from JSON.
Parameters:
| Name | Type | Description | Default | 
|---|---|---|---|
| data_type | Type[Document] | The type of the data to read. | required | 
Returns:
| Type | Description | 
|---|---|
| Any | The data read. | 
Source code in zenml/integrations/langchain/materializers/document_materializer.py
def load(self, data_type: Type["Document"]) -> Any:
    """Reads BaseModel from JSON.

    Args:
        data_type: The type of the data to read.

    Returns:
        The data read.
    """
    data_path = os.path.join(self.uri, DEFAULT_FILENAME)
    contents = yaml_utils.read_json(data_path)
    return data_type.parse_raw(contents)
save(self, data)

Serialize a BaseModel to JSON.
Parameters:
| Name | Type | Description | Default | 
|---|---|---|---|
| data | Document | The data to store. | required | 
Source code in zenml/integrations/langchain/materializers/document_materializer.py
def save(self, data: "Document") -> None:
    """Serialize a BaseModel to JSON.

    Args:
        data: The data to store.
    """
    data_path = os.path.join(self.uri, DEFAULT_FILENAME)
    yaml_utils.write_json(data_path, data.json())
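`save()` and `load()` can also be exercised directly for a quick round trip. A sketch, assuming the materializer can be constructed from a bare local URI (inside a pipeline run, ZenML supplies an artifact-store path for you):

```python
import os

from langchain.docstore.document import Document

from zenml.integrations.langchain.materializers.document_materializer import (
    LangchainDocumentMaterializer,
)

uri = "/tmp/zenml-langchain-demo"  # hypothetical local artifact location
os.makedirs(uri, exist_ok=True)

materializer = LangchainDocumentMaterializer(uri)
materializer.save(Document(page_content="round trip"))
restored = materializer.load(Document)
assert restored.page_content == "round trip"
```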
openai_embedding_materializer

Implementation of the Langchain OpenAI embedding materializer.

LangchainOpenaiEmbeddingMaterializer (CloudpickleMaterializer)

Handles Langchain OpenAI embedding objects.

Source code in zenml/integrations/langchain/materializers/openai_embedding_materializer.py
class LangchainOpenaiEmbeddingMaterializer(CloudpickleMaterializer):
    """Handle langchain OpenAI embedding objects."""

    ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.MODEL
    ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (OpenAIEmbeddings,)
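Because `CloudpickleMaterializer` handles serialization, a step can return the embeddings object directly and it is stored as a MODEL artifact. A minimal sketch, assuming `OPENAI_API_KEY` is set in the environment:

```python
from langchain.embeddings import OpenAIEmbeddings

from zenml import step


@step
def build_embedder() -> OpenAIEmbeddings:
    # Persisted via cloudpickle by LangchainOpenaiEmbeddingMaterializer.
    return OpenAIEmbeddings()
```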
vector_store_materializer

Implementation of the Langchain vector store materializer.

LangchainVectorStoreMaterializer (CloudpickleMaterializer)

Handles Langchain vector store objects.

Source code in zenml/integrations/langchain/materializers/vector_store_materializer.py
class LangchainVectorStoreMaterializer(CloudpickleMaterializer):
    """Handle langchain vector store objects."""

    ASSOCIATED_ARTIFACT_TYPE: ClassVar[ArtifactType] = ArtifactType.DATA
    ASSOCIATED_TYPES: ClassVar[Tuple[Type[Any], ...]] = (VectorStore,)
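Any `VectorStore` subclass is covered by this materializer. A sketch using FAISS, assuming the `faiss-cpu` package is installed and an OpenAI API key is available:

```python
from langchain.docstore.document import Document
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

from zenml import step


@step
def index_documents() -> FAISS:
    docs = [
        Document(page_content="ZenML orchestrates ML pipelines."),
        Document(page_content="Langchain builds LLM applications."),
    ]
    # FAISS is a VectorStore subclass, so the return value is pickled
    # by LangchainVectorStoreMaterializer.
    return FAISS.from_documents(docs, OpenAIEmbeddings())
```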