Просмотр исходного кода

feat(runtime): condition else branch (#813)

Louis Young 4 месяца назад
Родитель
Commit
f3d2da2b89

+ 1 - 1
apps/demo-free-layout/src/initial-data.ts

@@ -572,7 +572,7 @@ export const initialData: FlowDocumentJSON = {
     {
       sourceNodeID: 'condition_0',
       targetNodeID: 'llm_8--A3',
-      sourcePortID: 'default',
+      sourcePortID: 'else',
     },
     {
       sourceNodeID: 'llm_vTyMa',

+ 1 - 1
apps/demo-free-layout/src/nodes/condition/condition-inputs/index.tsx

@@ -63,7 +63,7 @@ export function ConditionInputs() {
             </Field>
           ))}
           <FormItem name="else" type="boolean" required={true} labelWidth={100}>
-            <ConditionPort data-port-id="default" data-port-type="output" />
+            <ConditionPort data-port-id="else" data-port-type="output" />
           </FormItem>
           {!readonly && (
             <div>

+ 75 - 9
packages/runtime/js-core/src/domain/__tests__/schemas/branch.test.ts

@@ -99,6 +99,7 @@ describe('WorkflowRuntime branch schema', () => {
     expect(report.reports.llm_1.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_2).toBeUndefined();
+    expect(report.reports.llm_3).toBeUndefined();
   });
 
   it('should execute a workflow with branch 2', async () => {
@@ -187,30 +188,95 @@ describe('WorkflowRuntime branch schema', () => {
     expect(report.reports.llm_2.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_1).toBeUndefined();
+    expect(report.reports.llm_3).toBeUndefined();
   });
 
-  it('should execute a workflow with branch not exist', async () => {
+  it('should execute a workflow with branch else', async () => {
     const engine = container.get<IEngine>(IEngine);
     const { context, processing } = engine.invoke({
       schema: TestSchemas.branchSchema,
       inputs: {
         model_id: 3,
-        prompt: 'Not Exist',
+        prompt: 'Tell me a movie',
       },
     });
     expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Processing);
     const result = await processing;
-    expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Failed);
-    expect(result).toStrictEqual({});
+    expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Succeeded);
+    expect(result).toStrictEqual({
+      m3_res: `Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I'm Model 3.", prompt is "Tell me a movie"`,
+    });
+    const snapshots = snapshotsToVOData(context.snapshotCenter.exportAll());
+    expect(snapshots).toStrictEqual([
+      {
+        nodeID: 'start_0',
+        inputs: {},
+        outputs: { model_id: 3, prompt: 'Tell me a movie' },
+        data: {},
+      },
+      {
+        nodeID: 'condition_0',
+        inputs: {},
+        outputs: {},
+        data: {
+          conditions: [
+            {
+              value: {
+                left: { type: 'ref', content: ['start_0', 'model_id'] },
+                operator: 'eq',
+                right: { type: 'constant', content: 1 },
+              },
+              key: 'if_1',
+            },
+            {
+              value: {
+                left: { type: 'ref', content: ['start_0', 'model_id'] },
+                operator: 'eq',
+                right: { type: 'constant', content: 2 },
+              },
+              key: 'if_2',
+            },
+          ],
+        },
+        branch: 'else',
+      },
+      {
+        nodeID: 'llm_3',
+        inputs: {
+          modelName: 'AI_MODEL_3',
+          apiKey: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+          apiHost: 'https://mock-ai-url/api/v3',
+          temperature: 0.7,
+          systemPrompt: "I'm Model 3.",
+          prompt: 'Tell me a movie',
+        },
+        outputs: {
+          result:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        data: {},
+      },
+      {
+        nodeID: 'end_0',
+        inputs: {
+          m3_res:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        outputs: {
+          m3_res:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        data: {},
+      },
+    ]);
 
     const report = context.reporter.export();
-    expect(report.messages.error.length).toBe(1);
-    expect(report.messages.error[0].nodeID).toBe('condition_0');
-    expect(report.workflowStatus.status).toBe(WorkflowStatus.Failed);
+    expect(report.workflowStatus.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.start_0.status).toBe(WorkflowStatus.Succeeded);
-    expect(report.reports.condition_0.status).toBe(WorkflowStatus.Failed);
+    expect(report.reports.condition_0.status).toBe(WorkflowStatus.Succeeded);
+    expect(report.reports.llm_3.status).toBe(WorkflowStatus.Succeeded);
+    expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_1).toBeUndefined();
     expect(report.reports.llm_2).toBeUndefined();
-    expect(report.reports.end_0).toBeUndefined();
   });
 });

+ 109 - 4
packages/runtime/js-core/src/domain/__tests__/schemas/branch.ts

@@ -13,7 +13,7 @@ export const branchSchema: WorkflowSchema = {
       meta: {
         position: {
           x: 180,
-          y: 368.3,
+          y: 614.7,
         },
       },
       data: {
@@ -45,7 +45,7 @@ export const branchSchema: WorkflowSchema = {
       meta: {
         position: {
           x: 1560,
-          y: 368.3,
+          y: 614.7,
         },
       },
       data: {
@@ -59,16 +59,32 @@ export const branchSchema: WorkflowSchema = {
             m2_res: {
               type: 'string',
             },
+            m3_res: {
+              type: 'string',
+            },
           },
         },
         inputsValues: {
           m1_res: {
             type: 'ref',
             content: ['llm_1', 'result'],
+            extra: {
+              index: 0,
+            },
           },
           m2_res: {
             type: 'ref',
             content: ['llm_2', 'result'],
+            extra: {
+              index: 1,
+            },
+          },
+          m3_res: {
+            type: 'ref',
+            content: ['llm_3', 'result'],
+            extra: {
+              index: 2,
+            },
           },
         },
       },
@@ -79,7 +95,7 @@ export const branchSchema: WorkflowSchema = {
       meta: {
         position: {
           x: 640,
-          y: 304.8,
+          y: 526.7,
         },
       },
       data: {
@@ -199,7 +215,7 @@ export const branchSchema: WorkflowSchema = {
       meta: {
         position: {
           x: 1100,
-          y: 459.8,
+          y: 467.8,
         },
       },
       data: {
@@ -270,6 +286,86 @@ export const branchSchema: WorkflowSchema = {
         },
       },
     },
+    {
+      id: 'llm_3',
+      type: 'llm',
+      meta: {
+        position: {
+          x: 1100,
+          y: 935.6,
+        },
+      },
+      data: {
+        title: 'LLM_3',
+        inputsValues: {
+          modelName: {
+            type: 'constant',
+            content: 'AI_MODEL_3',
+            schema: {
+              type: 'string',
+            },
+          },
+          apiKey: {
+            type: 'constant',
+            content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+          },
+          apiHost: {
+            type: 'constant',
+            content: 'https://mock-ai-url/api/v3',
+          },
+          temperature: {
+            type: 'constant',
+            content: 0.7,
+          },
+          systemPrompt: {
+            type: 'template',
+            content: "I'm Model 3.",
+          },
+          prompt: {
+            type: 'template',
+            content: '{{start_0.prompt}}',
+          },
+        },
+        inputs: {
+          type: 'object',
+          required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
+          properties: {
+            modelName: {
+              type: 'string',
+            },
+            apiKey: {
+              type: 'string',
+            },
+            apiHost: {
+              type: 'string',
+            },
+            temperature: {
+              type: 'number',
+            },
+            systemPrompt: {
+              type: 'string',
+              extra: {
+                formComponent: 'prompt-editor',
+              },
+            },
+            prompt: {
+              type: 'string',
+              extra: {
+                formComponent: 'prompt-editor',
+              },
+            },
+          },
+        },
+        outputs: {
+          type: 'object',
+          properties: {
+            result: {
+              type: 'string',
+            },
+          },
+        },
+      },
+    },
   ],
   edges: [
     {
@@ -284,6 +380,10 @@ export const branchSchema: WorkflowSchema = {
       sourceNodeID: 'llm_2',
       targetNodeID: 'end_0',
     },
+    {
+      sourceNodeID: 'llm_3',
+      targetNodeID: 'end_0',
+    },
     {
       sourceNodeID: 'condition_0',
       targetNodeID: 'llm_1',
@@ -294,5 +394,10 @@ export const branchSchema: WorkflowSchema = {
       targetNodeID: 'llm_2',
       sourcePortID: 'if_2',
     },
+    {
+      sourceNodeID: 'condition_0',
+      targetNodeID: 'llm_3',
+      sourcePortID: 'else',
+    },
   ],
 };

+ 2 - 2
packages/runtime/js-core/src/domain/engine/index.ts

@@ -142,7 +142,7 @@ export class WorkflowRuntimeEngine implements IEngine {
     }
     const targetPort = node.ports.outputs.find((port) => port.id === branch);
     if (!targetPort) {
-      throw new Error(`Engine branch ${branch} not found`);
+      throw new Error(`Branch "${branch}" not found`);
     }
     const nextNodeIDs: Set<string> = new Set(targetPort.edges.map((edge) => edge.to.id));
     const nextNodes = allNextNodes.filter((nextNode) => nextNodeIDs.has(nextNode.id));
@@ -168,7 +168,7 @@ export class WorkflowRuntimeEngine implements IEngine {
       return;
     }
     if (nextNodes.length === 0) {
-      throw new Error(`Node ${node.id} has no next nodes`); // inside loop node may have no next nodes
+      throw new Error(`Node "${node.id}" has no next nodes`); // inside loop node may have no next nodes
     }
     await Promise.all(
       nextNodes.map((nextNode) =>

+ 4 - 1
packages/runtime/js-core/src/nodes/condition/index.ts

@@ -33,7 +33,10 @@ export class ConditionExecutor implements INodeExecutor {
       .filter((item) => this.checkCondition(item));
     const activatedCondition = parsedConditions.find((item) => this.handleCondition(item));
     if (!activatedCondition) {
-      throw new Error('No condition is activated');
+      return {
+        outputs: {},
+        branch: 'else',
+      };
     }
     return {
       outputs: {},