// default.ts
  1. import { BlockEnum, EditionType } from '../../types'
  2. import { type NodeDefault, type PromptItem, PromptRole } from '../../types'
  3. import type { LLMNodeType } from './types'
  4. import { ALL_CHAT_AVAILABLE_BLOCKS, ALL_COMPLETION_AVAILABLE_BLOCKS } from '@/app/components/workflow/constants'
  5. const i18nPrefix = 'workflow.errorMsg'
  6. const nodeDefault: NodeDefault<LLMNodeType> = {
  7. defaultValue: {
  8. model: {
  9. provider: '',
  10. name: '',
  11. mode: 'chat',
  12. completion_params: {
  13. temperature: 0.7,
  14. },
  15. },
  16. prompt_template: [{
  17. role: PromptRole.system,
  18. text: '',
  19. }],
  20. context: {
  21. enabled: false,
  22. variable_selector: [],
  23. },
  24. vision: {
  25. enabled: false,
  26. },
  27. },
  28. getAvailablePrevNodes(isChatMode: boolean) {
  29. const nodes = isChatMode
  30. ? ALL_CHAT_AVAILABLE_BLOCKS
  31. : ALL_COMPLETION_AVAILABLE_BLOCKS.filter(type => type !== BlockEnum.End)
  32. return nodes
  33. },
  34. getAvailableNextNodes(isChatMode: boolean) {
  35. const nodes = isChatMode ? ALL_CHAT_AVAILABLE_BLOCKS : ALL_COMPLETION_AVAILABLE_BLOCKS
  36. return nodes
  37. },
  38. checkValid(payload: LLMNodeType, t: any) {
  39. let errorMessages = ''
  40. if (!errorMessages && !payload.model.provider)
  41. errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.model`) })
  42. if (!errorMessages && !payload.memory) {
  43. const isChatModel = payload.model.mode === 'chat'
  44. const isPromptyEmpty = isChatModel ? !(payload.prompt_template as PromptItem[]).some(t => t.text !== '') : (payload.prompt_template as PromptItem).text === ''
  45. if (isPromptyEmpty)
  46. errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t('workflow.nodes.llm.prompt') })
  47. }
  48. if (!errorMessages && !!payload.memory) {
  49. const isChatModel = payload.model.mode === 'chat'
  50. // payload.memory.query_prompt_template not pass is default: {{#sys.query#}}
  51. if (isChatModel && !!payload.memory.query_prompt_template && !payload.memory.query_prompt_template.includes('{{#sys.query#}}'))
  52. errorMessages = t('workflow.nodes.llm.sysQueryInUser')
  53. }
  54. if (!errorMessages) {
  55. const isChatModel = payload.model.mode === 'chat'
  56. const isShowVars = (() => {
  57. if (isChatModel)
  58. return (payload.prompt_template as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)
  59. return (payload.prompt_template as PromptItem).edition_type === EditionType.jinja2
  60. })()
  61. if (isShowVars && payload.prompt_config?.jinja2_variables) {
  62. payload.prompt_config?.jinja2_variables.forEach((i) => {
  63. if (!errorMessages && !i.variable)
  64. errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.variable`) })
  65. if (!errorMessages && !i.value_selector.length)
  66. errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.variableValue`) })
  67. })
  68. }
  69. }
  70. return {
  71. isValid: !errorMessages,
  72. errorMessage: errorMessages,
  73. }
  74. },
  75. }
  76. export default nodeDefault