@@ -1,8 +1,7 @@
 import { useCallback, useEffect, useRef, useState } from 'react'
 import produce from 'immer'
-import useVarList from '../_base/hooks/use-var-list'
-import { VarType } from '../../types'
-import type { Memory, ValueSelector, Var } from '../../types'
+import { EditionType, VarType } from '../../types'
+import type { Memory, PromptItem, ValueSelector, Var, Variable } from '../../types'
 import { useStore } from '../../store'
 import {
   useIsChatMode,
@@ -18,7 +17,6 @@ import {
 } from '@/app/components/header/account-setting/model-provider-page/declarations'
 import useNodeCrud from '@/app/components/workflow/nodes/_base/hooks/use-node-crud'
 import useOneStepRun from '@/app/components/workflow/nodes/_base/hooks/use-one-step-run'
-import type { PromptItem } from '@/models/debug'
 import { RETRIEVAL_OUTPUT_STRUCT } from '@/app/components/workflow/constants'
 import { checkHasContextBlock, checkHasHistoryBlock, checkHasQueryBlock } from '@/app/components/base/prompt-editor/constants'

@@ -29,20 +27,21 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   const defaultConfig = useStore(s => s.nodesDefaultConfigs)[payload.type]
   const [defaultRolePrefix, setDefaultRolePrefix] = useState<{ user: string; assistant: string }>({ user: '', assistant: '' })
   const { inputs, setInputs: doSetInputs } = useNodeCrud<LLMNodeType>(id, payload)
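+  // Keep the latest inputs in a ref so the callbacks below can read fresh state
+  // without listing `inputs` in their dependency arrays.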
+  const inputRef = useRef(inputs)
+
   const setInputs = useCallback((newInputs: LLMNodeType) => {
     if (newInputs.memory && !newInputs.memory.role_prefix) {
       const newPayload = produce(newInputs, (draft) => {
         draft.memory!.role_prefix = defaultRolePrefix
       })
       doSetInputs(newPayload)
+      inputRef.current = newPayload
       return
     }
     doSetInputs(newInputs)
+    inputRef.current = newInputs
   }, [doSetInputs, defaultRolePrefix])
-  const inputRef = useRef(inputs)
-  useEffect(() => {
-    inputRef.current = inputs
-  }, [inputs])
+
   // model
   const model = inputs.model
   const modelMode = inputs.model?.mode
@@ -178,11 +177,80 @@ const useConfig = (id: string, payload: LLMNodeType) => {
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [isShowVisionConfig, modelChanged])
+
   // variables
-  const { handleVarListChange, handleAddVariable } = useVarList<LLMNodeType>({
-    inputs,
-    setInputs,
-  })
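+  // The Jinja2 variable list is only relevant when some part of the prompt uses the
+  // Jinja2 edition type; chat prompts are an array of PromptItems, completion prompts a single item.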
+  const isShowVars = (() => {
+    if (isChatModel)
+      return (inputs.prompt_template as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)
+
+    return (inputs.prompt_template as PromptItem).edition_type === EditionType.jinja2
+  })()
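+  // Appends a blank Jinja2 variable row, creating prompt_config.jinja2_variables on first use.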
+  const handleAddEmptyVariable = useCallback(() => {
+    const newInputs = produce(inputRef.current, (draft) => {
+      if (!draft.prompt_config) {
+        draft.prompt_config = {
+          jinja2_variables: [],
+        }
+      }
+      if (!draft.prompt_config.jinja2_variables)
+        draft.prompt_config.jinja2_variables = []
+
+      draft.prompt_config.jinja2_variables.push({
+        variable: '',
+        value_selector: [],
+      })
+    })
+    setInputs(newInputs)
+  }, [setInputs])
+
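+  // Adds an already-populated Jinja2 variable (name plus value selector) to prompt_config.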
+  const handleAddVariable = useCallback((payload: Variable) => {
+    const newInputs = produce(inputRef.current, (draft) => {
+      if (!draft.prompt_config) {
+        draft.prompt_config = {
+          jinja2_variables: [],
+        }
+      }
+      if (!draft.prompt_config.jinja2_variables)
+        draft.prompt_config.jinja2_variables = []
+
+      draft.prompt_config.jinja2_variables.push(payload)
+    })
+    setInputs(newInputs)
+  }, [setInputs])
+
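+  // Replaces the whole Jinja2 variable list, e.g. after an edit or removal.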
+  const handleVarListChange = useCallback((newList: Variable[]) => {
+    const newInputs = produce(inputRef.current, (draft) => {
+      if (!draft.prompt_config) {
+        draft.prompt_config = {
+          jinja2_variables: [],
+        }
+      }
+      if (!draft.prompt_config.jinja2_variables)
+        draft.prompt_config.jinja2_variables = []
+
+      draft.prompt_config.jinja2_variables = newList
+    })
+    setInputs(newInputs)
+  }, [setInputs])
+
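+  // Renaming a variable rewrites every `{{ oldName }}` reference in the Jinja2 prompt text.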
+  const handleVarNameChange = useCallback((oldName: string, newName: string) => {
+    const newInputs = produce(inputRef.current, (draft) => {
+      if (isChatModel) {
+        const promptTemplate = draft.prompt_template as PromptItem[]
+        promptTemplate.filter(item => item.edition_type === EditionType.jinja2).forEach((item) => {
+          item.jinja2_text = (item.jinja2_text || '').replaceAll(`{{ ${oldName} }}`, `{{ ${newName} }}`)
+        })
+      }
+      else {
+        if ((draft.prompt_template as PromptItem).edition_type !== EditionType.jinja2)
+          return
+
+        const promptTemplate = draft.prompt_template as PromptItem
+        promptTemplate.jinja2_text = (promptTemplate.jinja2_text || '').replaceAll(`{{ ${oldName} }}`, `{{ ${newName} }}`)
+      }
+    })
+    setInputs(newInputs)
+  }, [isChatModel, setInputs])

   // context
   const handleContextVarChange = useCallback((newVar: ValueSelector | string) => {
@@ -194,11 +262,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [inputs, setInputs])

   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.prompt_template = newPrompt
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])

   const handleMemoryChange = useCallback((newMemory?: Memory) => {
     const newInputs = produce(inputs, (draft) => {
@@ -286,6 +354,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
     runInputData,
     setRunInputData,
     runResult,
+    toVarInputs,
   } = useOneStepRun<LLMNodeType>({
     id,
     data: inputs,
@@ -295,23 +364,6 @@ const useConfig = (id: string, payload: LLMNodeType) => {
     },
   })

-  // const handleRun = (submitData: Record<string, any>) => {
-  //   console.log(submitData)
-  //   const res = produce(submitData, (draft) => {
-  //     debugger
-  //     if (draft.contexts) {
-  //       draft['#context#'] = draft.contexts
-  //       delete draft.contexts
-  //     }
-  //     if (draft.visionFiles) {
-  //       draft['#files#'] = draft.visionFiles
-  //       delete draft.visionFiles
-  //     }
-  //   })
-
-  //   doHandleRun(res)
-  // }
-
   const inputVarValues = (() => {
     const vars: Record<string, any> = {}
     Object.keys(runInputData)
@@ -348,7 +400,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [runInputData, setRunInputData])

   const allVarStrArr = (() => {
-    const arr = isChatModel ? (inputs.prompt_template as PromptItem[]).map(item => item.text) : [(inputs.prompt_template as PromptItem).text]
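+    // Prompt items edited as Jinja2 templates are skipped here; their variables come from
+    // prompt_config.jinja2_variables instead of being parsed out of the prompt text.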
+    const arr = isChatModel ? (inputs.prompt_template as PromptItem[]).filter(item => item.edition_type !== EditionType.jinja2).map(item => item.text) : [(inputs.prompt_template as PromptItem).text]
     if (isChatMode && isChatModel && !!inputs.memory) {
       arr.push('{{#sys.query#}}')
       arr.push(inputs.memory.query_prompt_template)
@@ -357,7 +409,13 @@ const useConfig = (id: string, payload: LLMNodeType) => {
     return arr
   })()

-  const varInputs = getInputVars(allVarStrArr)
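+  // When Jinja2 editing is active, the declared Jinja2 variables are merged into the
+  // single-run input form alongside the variables parsed from the prompt text.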
+  const varInputs = (() => {
+    const vars = getInputVars(allVarStrArr)
+    if (isShowVars)
+      return [...vars, ...toVarInputs(inputs.prompt_config?.jinja2_variables || [])]
+
+    return vars
+  })()

   return {
     readOnly,
@@ -370,8 +428,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
     isShowVisionConfig,
     handleModelChanged,
     handleCompletionParamsChange,
+    isShowVars,
     handleVarListChange,
+    handleVarNameChange,
     handleAddVariable,
+    handleAddEmptyVariable,
     handleContextVarChange,
     filterInputVar,
     filterVar,