@@ -7,8 +7,8 @@ export const initialData: FlowDocumentJSON = {
       type: 'start',
       meta: {
         position: {
-          x: 181,
-          y: 249.5,
+          x: 180,
+          y: 313.25,
         },
       },
       data: {
@@ -29,8 +29,8 @@ export const initialData: FlowDocumentJSON = {
       type: 'condition',
       meta: {
         position: {
-          x: 643,
-          y: 213,
+          x: 640,
+          y: 298.75,
         },
       },
       data: {
@@ -39,11 +39,17 @@ export const initialData: FlowDocumentJSON = {
         conditions: [
           {
             key: 'if_0',
-            value: { type: 'expression', content: '' },
+            value: {
+              type: 'expression',
+              content: '',
+            },
           },
           {
-            key: 'if_1',
-            value: { type: 'expression', content: '' },
+            key: 'if_f0rOAt',
+            value: {
+              type: 'expression',
+              content: '',
+            },
           },
         ],
       },
@@ -71,10 +77,9 @@ export const initialData: FlowDocumentJSON = {
     {
       id: 'llm_0',
       type: 'llm',
-      blocks: [],
       meta: {
         position: {
-          x: 1105,
+          x: 1430,
           y: 0,
         },
       },
@@ -107,66 +112,55 @@ export const initialData: FlowDocumentJSON = {
         outputs: {
           type: 'object',
           properties: {
-            result: { type: 'string' },
+            result: {
+              type: 'string',
+            },
           },
         },
       },
     },
     {
-      id: 'llm_1',
-      type: 'llm',
-      blocks: [],
+      id: 'end_0',
+      type: 'end',
       meta: {
         position: {
-          x: 1105,
-          y: 405,
+          x: 2220,
+          y: 313.25,
         },
       },
       data: {
-        title: 'LLM_1',
-        inputsValues: {
-          modelType: 'gpt-3.5-turbo',
-          temperature: 0.5,
-          systemPrompt: 'You are an AI assistant.',
-          prompt: 'Hello.',
-        },
-        inputs: {
+        title: 'End',
+        outputs: {
           type: 'object',
-          required: ['modelType', 'temperature', 'prompt'],
           properties: {
-            modelType: {
-              type: 'string',
-            },
-            temperature: {
-              type: 'number',
-            },
-            systemPrompt: {
-              type: 'string',
-            },
-            prompt: {
+            result: {
               type: 'string',
             },
           },
         },
-        outputs: {
-          type: 'object',
-          properties: {
-            result: { type: 'string' },
-          },
-        },
       },
     },
     {
-      id: 'end_0',
-      type: 'end',
+      id: 'loop_H8M3U',
+      type: 'loop',
       meta: {
         position: {
-          x: 1567,
-          y: 249.5,
+          x: 1020,
+          y: 532.5,
         },
       },
       data: {
-        title: 'End',
+        title: 'Loop_2',
+        inputsValues: {},
+        inputs: {
+          type: 'object',
+          required: ['loopTimes'],
+          properties: {
+            loopTimes: {
+              type: 'number',
+            },
+          },
+        },
         outputs: {
           type: 'object',
           properties: {
@@ -176,13 +170,118 @@ export const initialData: FlowDocumentJSON = {
           },
         },
       },
+      blocks: [
+        {
+          id: 'llm_CBdCg',
+          type: 'llm',
+          meta: {
+            position: {
+              x: 180,
+              y: 0,
+            },
+          },
+          data: {
+            title: 'LLM_4',
+            inputsValues: {},
+            inputs: {
+              type: 'object',
+              required: ['modelType', 'temperature', 'prompt'],
+              properties: {
+                modelType: {
+                  type: 'string',
+                },
+                temperature: {
+                  type: 'number',
+                },
+                systemPrompt: {
+                  type: 'string',
+                },
+                prompt: {
+                  type: 'string',
+                },
+              },
+            },
+            outputs: {
+              type: 'object',
+              properties: {
+                result: {
+                  type: 'string',
+                },
+              },
+            },
+          },
+        },
+        {
+          id: 'llm_gZafu',
+          type: 'llm',
+          meta: {
+            position: {
+              x: 640,
+              y: 0,
+            },
+          },
+          data: {
+            title: 'LLM_5',
+            inputsValues: {},
+            inputs: {
+              type: 'object',
+              required: ['modelType', 'temperature', 'prompt'],
+              properties: {
+                modelType: {
+                  type: 'string',
+                },
+                temperature: {
+                  type: 'number',
+                },
+                systemPrompt: {
+                  type: 'string',
+                },
+                prompt: {
+                  type: 'string',
+                },
+              },
+            },
+            outputs: {
+              type: 'object',
+              properties: {
+                result: {
+                  type: 'string',
+                },
+              },
+            },
+          },
+        },
+      ],
+      edges: [
+        {
+          sourceNodeID: 'llm_CBdCg',
+          targetNodeID: 'llm_gZafu',
+        },
+      ],
     },
   ],
   edges: [
-    { sourceNodeID: 'start_0', targetNodeID: 'condition_0' },
-    { sourceNodeID: 'condition_0', sourcePortID: 'if_0', targetNodeID: 'llm_0' },
-    { sourceNodeID: 'condition_0', sourcePortID: 'if_1', targetNodeID: 'llm_1' },
-    { sourceNodeID: 'llm_0', targetNodeID: 'end_0' },
-    { sourceNodeID: 'llm_1', targetNodeID: 'end_0' },
+    {
+      sourceNodeID: 'start_0',
+      targetNodeID: 'condition_0',
+    },
+    {
+      sourceNodeID: 'condition_0',
+      targetNodeID: 'llm_0',
+      sourcePortID: 'if_0',
+    },
+    {
+      sourceNodeID: 'condition_0',
+      targetNodeID: 'loop_H8M3U',
+      sourcePortID: 'if_f0rOAt',
+    },
+    {
+      sourceNodeID: 'llm_0',
+      targetNodeID: 'end_0',
+    },
+    {
+      sourceNodeID: 'loop_H8M3U',
+      targetNodeID: 'end_0',
+    },
   ],
 };