import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { APIChain } from 'langchain/chains'
import { CustomChainHandler, getBaseClasses } from '../../../src/utils'
import { BaseLanguageModel } from 'langchain/base_language'
import { PromptTemplate } from 'langchain/prompts'
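
// Default prompt used to ask the LLM to build the API URL from the provided documentation and the user question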
export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation:
{api_docs}
Using this documentation, generate the full API url to call for answering the user question.
You should build the API url in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call.
Question:{question}
API url:`
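
// Default prompt used to ask the LLM to answer the user question from the raw API response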
export const API_RESPONSE_RAW_PROMPT_TEMPLATE =
    'Given this {api_response} response for {api_url}, use the given response to answer this {question}'
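
// Flowise node that wraps LangChain's APIChain so a flow can answer questions by issuing GET requests against a documented API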
class GETApiChain_Chains implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    description: string
    inputs: INodeParams[]
    constructor() {
        this.label = 'GET API Chain'
        this.name = 'getApiChain'
        this.type = 'GETApiChain'
        this.icon = 'apichain.svg'
        this.category = 'Chains'
        this.description = 'Chain to run queries against a GET API'
        this.baseClasses = [this.type, ...getBaseClasses(APIChain)]
        this.inputs = [
            {
                label: 'Language Model',
                name: 'model',
                type: 'BaseLanguageModel'
            },
            {
                label: 'API Documentation',
                name: 'apiDocs',
                type: 'string',
                description:
                    'Description of how the API works. Please refer to more <a target="_blank" href="https://github.com/hwchase17/langchain/blob/master/langchain/chains/api/open_meteo_docs.py">examples</a>',
                rows: 4
            },
            {
                label: 'Headers',
                name: 'headers',
                type: 'json',
                additionalParams: true,
                optional: true
            },
            {
                label: 'URL Prompt',
                name: 'urlPrompt',
                type: 'string',
                description: 'Prompt used to tell LLMs how to construct the URL. Must contain {api_docs} and {question}',
                default: API_URL_RAW_PROMPT_TEMPLATE,
                rows: 4,
                additionalParams: true
            },
            {
                label: 'Answer Prompt',
                name: 'ansPrompt',
                type: 'string',
                description:
                    'Prompt used to tell LLMs how to return the API response. Must contain {api_response}, {api_url}, and {question}',
                default: API_RESPONSE_RAW_PROMPT_TEMPLATE,
                rows: 4,
                additionalParams: true
            }
        ]
    }
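
    // Builds and returns the APIChain from the node's configured inputs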
    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        const apiDocs = nodeData.inputs?.apiDocs as string
        const headers = nodeData.inputs?.headers as string
        const urlPrompt = nodeData.inputs?.urlPrompt as string
        const ansPrompt = nodeData.inputs?.ansPrompt as string
        const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
        return chain
    }
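
    // Executes the chain against the user input; when a socket.io client is attached, a CustomChainHandler is passed in (presumably to stream intermediate output back to the UI)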
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        const apiDocs = nodeData.inputs?.apiDocs as string
        const headers = nodeData.inputs?.headers as string
        const urlPrompt = nodeData.inputs?.urlPrompt as string
        const ansPrompt = nodeData.inputs?.ansPrompt as string
        const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt)
        if (options.socketIO && options.socketIOClientId) {
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2)
            const res = await chain.run(input, [handler])
            return res
        } else {
            const res = await chain.run(input)
            return res
        }
    }
}
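
// Assembles the APIChain: wires up the URL and answer prompts (falling back to the defaults above), enables verbose logging when DEBUG=true, and parses the optional headers JSON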
const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: string, urlPrompt: string, ansPrompt: string) => {
    const apiUrlPrompt = new PromptTemplate({
        inputVariables: ['api_docs', 'question'],
        template: urlPrompt ? urlPrompt : API_URL_RAW_PROMPT_TEMPLATE
    })
    const apiResponsePrompt = new PromptTemplate({
        inputVariables: ['api_docs', 'question', 'api_url', 'api_response'],
        template: ansPrompt ? ansPrompt : API_RESPONSE_RAW_PROMPT_TEMPLATE
    })
    const chain = APIChain.fromLLMAndAPIDocs(llm, documents, {
        apiUrlPrompt,
        apiResponsePrompt,
        verbose: process.env.DEBUG === 'true' ? true : false,
        headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {}
    })
    return chain
}
module.exports = { nodeClass: GETApiChain_Chains }
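
/*
 * Usage sketch (illustrative only, not part of the Flowise runtime). The model
 * instance and the nodeData fields below are assumptions based on what init() reads.
 *
 * const node = new GETApiChain_Chains()
 * const chain = await node.init({ inputs: { model: someChatModel, apiDocs: openMeteoDocs } } as any)
 * const answer = await chain.run('What is the weather like today in Singapore?')
 */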