@@ -99,6 +99,7 @@ describe('WorkflowRuntime branch schema', () => {
     expect(report.reports.llm_1.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_2).toBeUndefined();
+    expect(report.reports.llm_3).toBeUndefined();
   });
 
   it('should execute a workflow with branch 2', async () => {
@@ -187,30 +188,95 @@ describe('WorkflowRuntime branch schema', () => {
     expect(report.reports.llm_2.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_1).toBeUndefined();
+    expect(report.reports.llm_3).toBeUndefined();
   });
 
-  it('should execute a workflow with branch not exist', async () => {
+  it('should execute a workflow with branch else', async () => {
     const engine = container.get<IEngine>(IEngine);
     const { context, processing } = engine.invoke({
       schema: TestSchemas.branchSchema,
       inputs: {
         model_id: 3,
-        prompt: 'Not Exist',
+        prompt: 'Tell me a movie',
       },
     });
     expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Processing);
     const result = await processing;
-    expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Failed);
-    expect(result).toStrictEqual({});
+    expect(context.statusCenter.workflow.status).toBe(WorkflowStatus.Succeeded);
+    expect(result).toStrictEqual({
+      m3_res: `Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I'm Model 3.", prompt is "Tell me a movie"`,
+    });
+    const snapshots = snapshotsToVOData(context.snapshotCenter.exportAll());
+    expect(snapshots).toStrictEqual([
+      {
+        nodeID: 'start_0',
+        inputs: {},
+        outputs: { model_id: 3, prompt: 'Tell me a movie' },
+        data: {},
+      },
+      {
+        nodeID: 'condition_0',
+        inputs: {},
+        outputs: {},
+        data: {
+          conditions: [
+            {
+              value: {
+                left: { type: 'ref', content: ['start_0', 'model_id'] },
+                operator: 'eq',
+                right: { type: 'constant', content: 1 },
+              },
+              key: 'if_1',
+            },
+            {
+              value: {
+                left: { type: 'ref', content: ['start_0', 'model_id'] },
+                operator: 'eq',
+                right: { type: 'constant', content: 2 },
+              },
+              key: 'if_2',
+            },
+          ],
+        },
+        branch: 'else',
+      },
+      {
+        nodeID: 'llm_3',
+        inputs: {
+          modelName: 'AI_MODEL_3',
+          apiKey: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+          apiHost: 'https://mock-ai-url/api/v3',
+          temperature: 0.7,
+          systemPrompt: "I'm Model 3.",
+          prompt: 'Tell me a movie',
+        },
+        outputs: {
+          result:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        data: {},
+      },
+      {
+        nodeID: 'end_0',
+        inputs: {
+          m3_res:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        outputs: {
+          m3_res:
+            'Hi, I am an AI model, my name is AI_MODEL_3, temperature is 0.7, system prompt is "I\'m Model 3.", prompt is "Tell me a movie"',
+        },
+        data: {},
+      },
+    ]);
 
     const report = context.reporter.export();
-    expect(report.messages.error.length).toBe(1);
-    expect(report.messages.error[0].nodeID).toBe('condition_0');
-    expect(report.workflowStatus.status).toBe(WorkflowStatus.Failed);
+    expect(report.workflowStatus.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.start_0.status).toBe(WorkflowStatus.Succeeded);
-    expect(report.reports.condition_0.status).toBe(WorkflowStatus.Failed);
+    expect(report.reports.condition_0.status).toBe(WorkflowStatus.Succeeded);
+    expect(report.reports.llm_3.status).toBe(WorkflowStatus.Succeeded);
+    expect(report.reports.end_0.status).toBe(WorkflowStatus.Succeeded);
     expect(report.reports.llm_1).toBeUndefined();
     expect(report.reports.llm_2).toBeUndefined();
-    expect(report.reports.end_0).toBeUndefined();
   });
 });