@@ -48,7 +48,7 @@ export const initialData: FlowDocumentJSON = {
       meta: {
         position: {
           x: 640,
-          y: 363.25,
+          y: 318.25,
         },
       },
       data: {
@@ -86,13 +86,13 @@ export const initialData: FlowDocumentJSON = {
       type: 'end',
       meta: {
         position: {
-          x: 2220,
+          x: 2202.9953917050693,
           y: 381.75,
         },
       },
       data: {
         title: 'End',
-        outputs: {
+        inputs: {
           type: 'object',
           properties: {
             result: {
@@ -103,46 +103,59 @@ export const initialData: FlowDocumentJSON = {
       },
     },
     {
-      id: 'loop_H8M3U',
-      type: 'loop',
+      id: '159623',
+      type: 'comment',
       meta: {
         position: {
-          x: 1020,
-          y: 547.96875,
+          x: 640,
+          y: 573.96875,
         },
       },
       data: {
-        title: 'Loop_2',
-        batchFor: {
-          type: 'ref',
-          content: ['start_0', 'array_obj'],
+        size: {
+          width: 240,
+          height: 150,
         },
-        outputs: {
-          type: 'object',
-          properties: {
-            result: {
-              type: 'string',
-            },
-          },
+        note: 'hi ~\n\nthis is a comment node\n\n- flowgram.ai',
+      },
+    },
+    {
+      id: 'loop_sGybT',
+      type: 'loop',
+      meta: {
+        position: {
+          x: 1373.5714285714287,
+          y: 394.9758064516129,
         },
       },
+      data: {
+        title: 'Loop_1',
+      },
       blocks: [
         {
-          id: 'llm_CBdCg',
+          id: 'llm_6aSyo',
           type: 'llm',
           meta: {
             position: {
-              x: 180,
-              y: 0,
+              x: -196.8663594470046,
+              y: 142.0046082949309,
             },
           },
           data: {
-            title: 'LLM_4',
+            title: 'LLM_3',
             inputsValues: {
-              modelType: {
+              modelName: {
                 type: 'constant',
                 content: 'gpt-3.5-turbo',
               },
+              apiKey: {
+                type: 'constant',
+                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+              },
+              apiHost: {
+                type: 'constant',
+                content: 'https://mock-ai-url/api/v3',
+              },
               temperature: {
                 type: 'constant',
                 content: 0.5,
@@ -158,9 +171,15 @@ export const initialData: FlowDocumentJSON = {
             },
             inputs: {
               type: 'object',
-              required: ['modelType', 'temperature', 'prompt'],
+              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
               properties: {
-                modelType: {
+                modelName: {
+                  type: 'string',
+                },
+                apiKey: {
+                  type: 'string',
+                },
+                apiHost: {
                   type: 'string',
                 },
                 temperature: {
@@ -185,21 +204,29 @@ export const initialData: FlowDocumentJSON = {
           },
         },
         {
-          id: 'llm_gZafu',
+          id: 'llm_ZqKlP',
           type: 'llm',
           meta: {
             position: {
-              x: 640,
-              y: 0,
+              x: 253.1797235023041,
+              y: 142.00460829493088,
             },
           },
           data: {
-            title: 'LLM_5',
+            title: 'LLM_4',
            inputsValues: {
-              modelType: {
+              modelName: {
                 type: 'constant',
                 content: 'gpt-3.5-turbo',
               },
+              apiKey: {
+                type: 'constant',
+                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+              },
+              apiHost: {
+                type: 'constant',
+                content: 'https://mock-ai-url/api/v3',
+              },
               temperature: {
                 type: 'constant',
                 content: 0.5,
@@ -215,9 +242,15 @@ export const initialData: FlowDocumentJSON = {
             },
             inputs: {
               type: 'object',
-              required: ['modelType', 'temperature', 'prompt'],
+              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
               properties: {
-                modelType: {
+                modelName: {
+                  type: 'string',
+                },
+                apiKey: {
+                  type: 'string',
+                },
+                apiHost: {
                   type: 'string',
                 },
                 temperature: {
@@ -244,58 +277,46 @@ export const initialData: FlowDocumentJSON = {
       ],
       edges: [
         {
-          sourceNodeID: 'llm_CBdCg',
-          targetNodeID: 'llm_gZafu',
+          sourceNodeID: 'llm_6aSyo',
+          targetNodeID: 'llm_ZqKlP',
         },
       ],
     },
     {
-      id: '159623',
-      type: 'comment',
-      meta: {
-        position: {
-          x: 640,
-          y: 522.46875,
-        },
-      },
-      data: {
-        size: {
-          width: 240,
-          height: 150,
-        },
-        note: 'hi ~\n\nthis is a comment node\n\n- flowgram.ai',
-      },
-    },
-    {
-      id: 'group_V-_st',
+      id: 'group_5ci0o',
       type: 'group',
       meta: {
         position: {
-          x: 1020,
-          y: 96.25,
+          x: 0,
+          y: 0,
         },
       },
-      data: {
-        title: 'LLM_Group',
-        color: 'Violet',
-      },
+      data: {},
       blocks: [
         {
-          id: 'llm_0',
+          id: 'llm_8--A3',
           type: 'llm',
           meta: {
             position: {
-              x: 640,
-              y: 0,
+              x: 1177.8341013824886,
+              y: 19.25,
             },
           },
           data: {
-            title: 'LLM_0',
+            title: 'LLM_1',
            inputsValues: {
-              modelType: {
+              modelName: {
                 type: 'constant',
                 content: 'gpt-3.5-turbo',
               },
+              apiKey: {
+                type: 'constant',
+                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+              },
+              apiHost: {
+                type: 'constant',
+                content: 'https://mock-ai-url/api/v3',
+              },
               temperature: {
                 type: 'constant',
                 content: 0.5,
@@ -311,9 +332,15 @@ export const initialData: FlowDocumentJSON = {
             },
             inputs: {
               type: 'object',
-              required: ['modelType', 'temperature', 'prompt'],
+              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
               properties: {
-                modelType: {
+                modelName: {
+                  type: 'string',
+                },
+                apiKey: {
+                  type: 'string',
+                },
+                apiHost: {
                   type: 'string',
                 },
                 temperature: {
@@ -338,21 +365,29 @@ export const initialData: FlowDocumentJSON = {
           },
         },
         {
-          id: 'llm_l_TcE',
+          id: 'llm_vTyMa',
           type: 'llm',
           meta: {
             position: {
-              x: 180,
-              y: 0,
+              x: 1625.6221198156682,
+              y: 19.25,
             },
           },
           data: {
-            title: 'LLM_1',
+            title: 'LLM_2',
            inputsValues: {
-              modelType: {
+              modelName: {
                 type: 'constant',
                 content: 'gpt-3.5-turbo',
               },
+              apiKey: {
+                type: 'constant',
+                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+              },
+              apiHost: {
+                type: 'constant',
+                content: 'https://mock-ai-url/api/v3',
+              },
               temperature: {
                 type: 'constant',
                 content: 0.5,
@@ -368,9 +403,15 @@ export const initialData: FlowDocumentJSON = {
             },
             inputs: {
               type: 'object',
-              required: ['modelType', 'temperature', 'prompt'],
+              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
               properties: {
-                modelType: {
+                modelName: {
+                  type: 'string',
+                },
+                apiKey: {
+                  type: 'string',
+                },
+                apiHost: {
                   type: 'string',
                 },
                 temperature: {
@@ -397,17 +438,17 @@ export const initialData: FlowDocumentJSON = {
       ],
       edges: [
         {
-          sourceNodeID: 'llm_l_TcE',
-          targetNodeID: 'llm_0',
+          sourceNodeID: 'condition_0',
+          targetNodeID: 'llm_8--A3',
+          sourcePortID: 'if_0',
         },
         {
-          sourceNodeID: 'llm_0',
-          targetNodeID: 'end_0',
+          sourceNodeID: 'llm_8--A3',
+          targetNodeID: 'llm_vTyMa',
         },
         {
-          sourceNodeID: 'condition_0',
-          targetNodeID: 'llm_l_TcE',
-          sourcePortID: 'if_0',
+          sourceNodeID: 'llm_vTyMa',
+          targetNodeID: 'end_0',
         },
       ],
     },
@@ -419,20 +460,20 @@ export const initialData: FlowDocumentJSON = {
     },
     {
       sourceNodeID: 'condition_0',
-      targetNodeID: 'llm_l_TcE',
+      targetNodeID: 'llm_8--A3',
       sourcePortID: 'if_0',
     },
     {
       sourceNodeID: 'condition_0',
-      targetNodeID: 'loop_H8M3U',
+      targetNodeID: 'loop_sGybT',
       sourcePortID: 'if_f0rOAt',
     },
     {
-      sourceNodeID: 'llm_0',
+      sourceNodeID: 'llm_vTyMa',
      targetNodeID: 'end_0',
    },
    {
-      sourceNodeID: 'loop_H8M3U',
+      sourceNodeID: 'loop_sGybT',
      targetNodeID: 'end_0',
    },
  ],