Can Langflow use workflow components to store chat messages in a vector store and search them? #2748
Replies: 2 comments 1 reply
-
Hey @jack197906! I'm a bot here to help you with any questions or issues you have. Feel free to ask me anything while you wait for a human maintainer to assist you. Yes, Langflow can utilize workflow components to store all chat messages into a vector store and then search for related chat messages using a query word. Here is how you can set this up in your application:
Here is a JSON configuration that demonstrates this setup: {
"data": {
"edges": [
{
"data": {
"sourceHandle": {
"dataType": "ChatInput",
"id": "ChatInput-c4xn9",
"name": "message",
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "message",
"id": "StoreMessageComponent-1",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "edge-ChatInput-StoreMessage",
"source": "ChatInput-c4xn9",
"sourceHandle": "message",
"target": "StoreMessageComponent-1",
"targetHandle": "message"
},
{
"data": {
"sourceHandle": {
"dataType": "StoreMessageComponent",
"id": "StoreMessageComponent-1",
"name": "message",
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "OpenAIEmbeddings-BKXc6",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "edge-StoreMessage-OpenAIEmbeddings",
"source": "StoreMessageComponent-1",
"sourceHandle": "message",
"target": "OpenAIEmbeddings-BKXc6",
"targetHandle": "input_value"
},
{
"data": {
"sourceHandle": {
"dataType": "OpenAIEmbeddings",
"id": "OpenAIEmbeddings-BKXc6",
"name": "embeddings",
"output_types": [
"Embeddings"
]
},
"targetHandle": {
"fieldName": "embedding",
"id": "VectorStore-1",
"inputTypes": [
"Embeddings"
],
"type": "other"
}
},
"id": "edge-OpenAIEmbeddings-VectorStore",
"source": "OpenAIEmbeddings-BKXc6",
"sourceHandle": "embeddings",
"target": "VectorStore-1",
"targetHandle": "embedding"
},
{
"data": {
"sourceHandle": {
"dataType": "VectorStore",
"id": "VectorStore-1",
"name": "search_results",
"output_types": [
"Data"
]
},
"targetHandle": {
"fieldName": "data",
"id": "SearchComponent-1",
"inputTypes": [
"Data"
],
"type": "other"
}
},
"id": "edge-VectorStore-SearchComponent",
"source": "VectorStore-1",
"sourceHandle": "search_results",
"target": "SearchComponent-1",
"targetHandle": "data"
}
],
"nodes": [
{
"data": {
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"id": "ChatInput-c4xn9",
"node": {
"base_classes": [
"Message"
],
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"icon": "ChatInput",
"outputs": [
{
"display_name": "Message",
"method": "message_response",
"name": "message",
"types": [
"Message"
]
}
],
"template": {
"_type": "Component",
"code": {
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n inputs = [\n MessageTextInput(name=\"input_value\", display_name=\"Text\", info=\"Message to be passed as input.\"),\n DropdownInput(name=\"sender\", display_name=\"Sender Type\", options=[\"Machine\", \"User\"], value=\"User\", info=\"Type of sender.\"),\n MessageTextInput(name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"User\"),\n MessageTextInput(name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\"),\n MessageTextInput(name=\"files\", display_name=\"Files\", info=\"Files to be sent with the message.\", is_list=True)\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\")\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n self.store_message(message)\n self.message.value = message\n return message\n"
}
}
},
"type": "ChatInput"
},
"position": {
"x": 100,
"y": 100
}
},
{
"data": {
"id": "StoreMessageComponent-1",
"node": {
"base_classes": [
"Message"
],
"description": "Stores a chat message.",
"display_name": "Store Message",
"icon": "StoreMessage",
"outputs": [
{
"display_name": "Message",
"method": "build",
"name": "message",
"types": [
"Message"
]
}
],
"template": {
"_type": "Component",
"code": {
"value": "from langflow.custom import CustomComponent\nfrom langflow.memory import get_messages, store_message\nfrom langflow.schema.message import Message\n\nclass StoreMessageComponent(CustomComponent):\n display_name = \"Store Message\"\n description = \"Stores a chat message.\"\n\n def build_config(self):\n return {\n \"message\": {\"display_name\": \"Message\"},\n }\n\n def build(\n self,\n message: Message,\n ) -> Message:\n store_message(message, flow_id=self.graph.flow_id)\n self.status = get_messages()\n\n return message\n"
}
}
}
},
"position": {
"x": 300,
"y": 100
}
},
{
"data": {
"id": "OpenAIEmbeddings-BKXc6",
"node": {
"base_classes": [
"Embeddings"
],
"description": "Generate embeddings using OpenAI models.",
"display_name": "OpenAI Embeddings",
"icon": "OpenAI",
"outputs": [
{
"display_name": "Embeddings",
"method": "build_embeddings",
"name": "embeddings",
"types": [
"Embeddings"
]
}
],
"template": {
"_type": "Component",
"code": {
"value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nclass OpenAIEmbeddingsComponent:\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(model=\"text-embedding-ada-002\")\n"
}
}
}
},
"position": {
"x": 500,
"y": 100
}
},
{
"data": {
"id": "VectorStore-1",
"node": {
"base_classes": [
"VectorStore"
],
"description": "Stores embeddings for search.",
"display_name": "Vector Store",
"icon": "VectorStore",
"outputs": [
{
"display_name": "Search Results",
"method": "search",
"name": "search_results",
"types": [
"Data"
]
}
],
"template": {
"_type": "Component",
"code": {
"value": "from langflow.vector_store import VectorStore\n\nclass VectorStoreComponent(VectorStore):\n display_name = \"Vector Store\"\n description = \"Stores embeddings for search.\"\n icon = \"VectorStore\"\n\n def search(self, query: str) -> Data:\n return self.vector_store.search(query)\n"
}
}
}
},
"position": {
"x": 700,
"y": 100
}
},
{
"data": {
"id": "SearchComponent-1",
"node": {
"base_classes": [
"Search"
],
"description": "Searches for related chat messages.",
"display_name": "Search",
"icon": "Search",
"outputs": [
{
"display_name": "Search Results",
"method": "search_results",
"name": "search_results",
"types": [
"Data"
]
}
],
"template": {
"_type": "Component",
"code": {
"value": "from langflow.search import SearchComponent\n\nclass SearchComponent(SearchComponent):\n display_name = \"Search\"\n description = \"Searches for related chat messages.\"\n icon = \"Search\"\n\n def search_results(self, query: str) -> Data:\n return self.search(query)\n"
}
}
}
},
"position": {
"x": 900,
"y": 100
}
}
]
}
} This configuration captures chat messages, stores them, generates embeddings, stores the embeddings in a vector store, and allows for searching related messages using a query word [[1]](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/initial_setup/starter_projects/Vector%20Store%20RAG.json)[[2]](https://github.com/langflow-ai/langflow/blob/main/src/backend/base/langflow/initial_setup/starter_projects/Vector%20Store%20RAG.json)[3][4][5].
Beta Was this translation helpful? Give feedback.
-
|
Beta Was this translation helpful? Give feedback.
-
In my current application use case, I want to store all of the chat messages in the vector store, and then search with a question word to retrieve all related chat messages from the vector store. Could you please help with this case?
Beta Was this translation helpful? Give feedback.
All reactions