Skip to content

Commit

Permalink
Merge pull request #518 from FlowiseAI/feature/LogComponent
Browse files Browse the repository at this point in the history
Feature/Log Component Level
  • Loading branch information
HenryHengZJ authored Jul 11, 2023
2 parents aee0a51 + 8ad870b commit 2edb631
Show file tree
Hide file tree
Showing 25 changed files with 458 additions and 203 deletions.
22 changes: 22 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,28 @@ FLOWISE_USERNAME=user
FLOWISE_PASSWORD=1234
```
## 🌱 Env Variables
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside the `packages/server` folder.
| Variable | Description | Type | Default |
| ---------------- | ---------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
| PORT | The HTTP port Flowise runs on | Number | 3000 |
| FLOWISE_USERNAME | Username to login | String | |
| FLOWISE_PASSWORD | Password to login | String | |
| DEBUG | Print logs from components | Boolean | |
| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
| DATABASE_PATH | Location where database is saved | String | `your-home-dir/.flowise` |
| APIKEY_PATH | Location where api keys are saved | String | `your-path/Flowise/packages/server` |
| EXECUTION_MODE | Whether predictions run in their own process or the main process | Enum String: `child`, `main` | `main` |
You can also specify the env variables when using `npx`. For example:
```
npx flowise start --PORT=3000 --DEBUG=true
```
## 📖 Documentation
[Flowise Docs](https://docs.flowiseai.com/)
Expand Down
1 change: 0 additions & 1 deletion packages/components/nodes/agents/AutoGPT/AutoGPT.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,6 @@ class AutoGPT_Agents implements INode {
const res = await executor.run([input])
return res || 'I have completed all my tasks.'
} catch (e) {
console.error(e)
throw new Error(e)
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { flatten } from 'lodash'
import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class OpenAIFunctionAgent_Agents implements INode {
label: string
Expand Down Expand Up @@ -93,12 +94,14 @@ class OpenAIFunctionAgent_Agents implements INode {
executor.memory = memory
}

const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const result = await executor.run(input, [handler])
const result = await executor.run(input, [loggerHandler, handler])
return result
} else {
const result = await executor.run(input)
const result = await executor.run(input, [loggerHandler])
return result
}
}
Expand Down
9 changes: 6 additions & 3 deletions packages/components/nodes/chains/ApiChain/GETApiChain.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:
{api_docs}
Expand Down Expand Up @@ -95,12 +96,14 @@ class GETApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string

const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}
Expand Down
9 changes: 6 additions & 3 deletions packages/components/nodes/chains/ApiChain/OpenAPIChain.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain, createOpenAPIChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ChatOpenAI } from 'langchain/chat_models/openai'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class OpenApiChain_Chains implements INode {
label: string
Expand Down Expand Up @@ -57,12 +58,14 @@ class OpenApiChain_Chains implements INode {

async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
const chain = await initChain(nodeData)
const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}
Expand Down
9 changes: 6 additions & 3 deletions packages/components/nodes/chains/ApiChain/POSTApiChain.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
import { API_RESPONSE_RAW_PROMPT_TEMPLATE, API_URL_RAW_PROMPT_TEMPLATE, APIChain } from './postCore'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class POSTApiChain_Chains implements INode {
label: string
Expand Down Expand Up @@ -84,12 +85,14 @@ class POSTApiChain_Chains implements INode {
const ansPrompt = nodeData.inputs?.ansPrompt as string

const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ConversationChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts'
import { BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { BaseChatModel } from 'langchain/chat_models/base'
import { AIMessage, HumanMessage } from 'langchain/schema'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.`

Expand Down Expand Up @@ -90,12 +91,14 @@ class ConversationChain_Chains implements INode {
chain.memory = memory
}

const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
const res = await chain.call({ input }, [handler])
const res = await chain.call({ input }, [loggerHandler, handler])
return res?.response
} else {
const res = await chain.call({ input })
const res = await chain.call({ input }, [loggerHandler])
return res?.response
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { ConversationalRetrievalQAChain } from 'langchain/chains'
import { AIMessage, BaseRetriever, HumanMessage } from 'langchain/schema'
import { BaseChatMemory, BufferMemory, ChatMessageHistory } from 'langchain/memory'
import { PromptTemplate } from 'langchain/prompts'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

const default_qa_template = `Use the following pieces of context to answer the question at the end, in its original language. If you don't know the answer, just say that you don't know in its original language, don't try to make up an answer.
Expand Down Expand Up @@ -175,13 +176,15 @@ class ConversationalRetrievalQAChain_Chains implements INode {
chain.memory = memory
}

const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, undefined, returnSourceDocuments)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}
Expand Down
40 changes: 20 additions & 20 deletions packages/components/nodes/chains/LLMChain/LLMChain.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class LLMChain_Chains implements INode {
label: string
Expand Down Expand Up @@ -55,7 +56,7 @@ class LLMChain_Chains implements INode {
]
}

async init(nodeData: INodeData, input: string): Promise<any> {
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
const model = nodeData.inputs?.model as BaseLanguageModel
const prompt = nodeData.inputs?.prompt
const output = nodeData.outputs?.output as string
Expand All @@ -67,7 +68,7 @@ class LLMChain_Chains implements INode {
} else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false })
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
const res = await runPrediction(inputVariables, chain, input, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
Expand All @@ -81,9 +82,7 @@ class LLMChain_Chains implements INode {
const chain = nodeData.instance as LLMChain
const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject

const res = options.socketIO
? await runPrediction(inputVariables, chain, input, promptValues, true, options.socketIO, options.socketIOClientId)
: await runPrediction(inputVariables, chain, input, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
// eslint-disable-next-line no-console
Expand All @@ -97,17 +96,20 @@ const runPrediction = async (
chain: LLMChain,
input: string,
promptValues: ICommonObject,
isStreaming?: boolean,
socketIO?: any,
socketIOClientId = ''
options: ICommonObject
) => {
const loggerHandler = new ConsoleCallbackHandler(options.logger)
const isStreaming = options.socketIO && options.socketIOClientId
const socketIO = isStreaming ? options.socketIO : undefined
const socketIOClientId = isStreaming ? options.socketIOClientId : ''

if (inputVariables.length === 1) {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
} else if (inputVariables.length > 1) {
Expand All @@ -122,15 +124,13 @@ const runPrediction = async (

if (seen.length === 0) {
// All inputVariables have fixed values specified
const options = {
...promptValues
}
const options = { ...promptValues }
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [handler])
const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(options)
const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else if (seen.length === 1) {
Expand All @@ -143,10 +143,10 @@ const runPrediction = async (
}
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [handler])
const res = await chain.call(options, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(options)
const res = await chain.call(options, [loggerHandler])
return res?.text
}
} else {
Expand All @@ -155,10 +155,10 @@ const runPrediction = async (
} else {
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [handler])
const res = await chain.run(input, [loggerHandler, handler])
return res
} else {
const res = await chain.run(input)
const res = await chain.run(input, [loggerHandler])
return res
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { MultiPromptChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class MultiPromptChain_Chains implements INode {
label: string
Expand Down Expand Up @@ -63,12 +64,14 @@ class MultiPromptChain_Chains implements INode {
const chain = nodeData.instance as MultiPromptChain
const obj = { input }

const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
return res?.text
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject, INode, INodeData, INodeParams, VectorStoreRetriever } from '../../../src/Interface'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { getBaseClasses } from '../../../src/utils'
import { MultiRetrievalQAChain } from 'langchain/chains'
import { ConsoleCallbackHandler, CustomChainHandler } from '../../../src/handler'

class MultiRetrievalQAChain_Chains implements INode {
label: string
Expand Down Expand Up @@ -71,14 +72,15 @@ class MultiRetrievalQAChain_Chains implements INode {
const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean

const obj = { input }
const loggerHandler = new ConsoleCallbackHandler(options.logger)

if (options.socketIO && options.socketIOClientId) {
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2, returnSourceDocuments)
const res = await chain.call(obj, [handler])
const res = await chain.call(obj, [loggerHandler, handler])
if (res.text && res.sourceDocuments) return res
return res?.text
} else {
const res = await chain.call(obj)
const res = await chain.call(obj, [loggerHandler])
if (res.text && res.sourceDocuments) return res
return res?.text
}
Expand Down
Loading

0 comments on commit 2edb631

Please sign in to comment.