initial-data.ts

/**
 * Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
 * SPDX-License-Identifier: MIT
 */
import { FlowDocumentJSON } from './typings';

export const initialData: FlowDocumentJSON = {
  nodes: [
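    // Start node: declares the workflow inputs (query, enable, array_obj) as its output schema.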
    {
      id: 'start_0',
      type: 'start',
      meta: {
        position: {
          x: 180,
          y: 601.2,
        },
      },
      data: {
        title: 'Start',
        outputs: {
          type: 'object',
          properties: {
            query: {
              type: 'string',
              default: 'Hello Flow.',
            },
            enable: {
              type: 'boolean',
              default: true,
            },
            array_obj: {
              type: 'array',
              items: {
                type: 'object',
                properties: {
                  int: {
                    type: 'number',
                  },
                  str: {
                    type: 'string',
                  },
                },
              },
            },
          },
        },
      },
    },
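    // Condition node: branches on whether start_0.query contains the constant 'Hello Flow.'.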
    {
      id: 'condition_0',
      type: 'condition',
      meta: {
        position: {
          x: 1100,
          y: 546.2,
        },
      },
      data: {
        title: 'Condition',
        conditions: [
          {
            key: 'if_0',
            value: {
              left: {
                type: 'ref',
                content: ['start_0', 'query'],
              },
              operator: 'contains',
              right: {
                type: 'constant',
                content: 'Hello Flow.',
              },
            },
          },
        ],
      },
    },
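    // End node: collects the final outputs, pairing a constant success flag with a reference back to start_0.query.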
    {
      id: 'end_0',
      type: 'end',
      meta: {
        position: {
          x: 2968,
          y: 601.2,
        },
      },
      data: {
        title: 'End',
        inputsValues: {
          success: {
            type: 'constant',
            content: true,
            schema: {
              type: 'boolean',
            },
          },
          query: {
            type: 'ref',
            content: ['start_0', 'query'],
          },
        },
        inputs: {
          type: 'object',
          properties: {
            success: {
              type: 'boolean',
            },
            query: {
              type: 'string',
            },
          },
        },
      },
    },
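    // Free-floating comment node shown on the canvas.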
    {
      id: '159623',
      type: 'comment',
      meta: {
        position: {
          x: 180,
          y: 775.2,
        },
      },
      data: {
        size: {
          width: 240,
          height: 150,
        },
        note: 'hi ~\n\nthis is a comment node\n\n- flowgram.ai',
      },
    },
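    // HTTP node: a GET request (URL template left empty) exposing body, headers and statusCode outputs.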
    {
      id: 'http_rDGIH',
      type: 'http',
      meta: {
        position: {
          x: 640,
          y: 421.35,
        },
      },
      data: {
        title: 'HTTP_1',
        outputs: {
          type: 'object',
          properties: {
            body: {
              type: 'string',
            },
            headers: {
              type: 'object',
            },
            statusCode: {
              type: 'integer',
            },
          },
        },
        api: {
          method: 'GET',
          url: {
            type: 'template',
            content: '',
          },
        },
        body: {
          bodyType: 'JSON',
        },
        timeout: {
          timeout: 10000,
          retryTimes: 1,
        },
      },
    },
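    // Loop node: iterates over start_0.array_obj and embeds its own blocks/edges sub-canvas;
    // loopOutputs maps 'acm' to llm_6aSyo.result, surfaced as the string[] output 'acm'.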
    {
      id: 'loop_Ycnsk',
      type: 'loop',
      meta: {
        position: {
          x: 1460,
          y: 0,
        },
      },
      data: {
        title: 'Loop_1',
        loopFor: {
          type: 'ref',
          content: ['start_0', 'array_obj'],
        },
        loopOutputs: {
          acm: {
            type: 'ref',
            content: ['llm_6aSyo', 'result'],
          },
        },
        outputs: {
          type: 'object',
          required: [],
          properties: {
            acm: {
              type: 'array',
              items: {
                type: 'string',
              },
            },
          },
        },
      },
      blocks: [
        {
          id: 'llm_6aSyo',
          type: 'llm',
          meta: {
            position: {
              x: 344,
              y: 0,
            },
          },
          data: {
            title: 'LLM_3',
            inputsValues: {
              modelName: {
                type: 'constant',
                content: 'gpt-3.5-turbo',
              },
              apiKey: {
                type: 'constant',
                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
              },
              apiHost: {
                type: 'constant',
                content: 'https://mock-ai-url/api/v3',
              },
              temperature: {
                type: 'constant',
                content: 0.5,
              },
              systemPrompt: {
                type: 'template',
                content: '# Role\nYou are an AI assistant.\n',
              },
              prompt: {
                type: 'template',
                content: '',
              },
            },
            inputs: {
              type: 'object',
              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
              properties: {
                modelName: {
                  type: 'string',
                },
                apiKey: {
                  type: 'string',
                },
                apiHost: {
                  type: 'string',
                },
                temperature: {
                  type: 'number',
                },
                systemPrompt: {
                  type: 'string',
                  extra: {
                    formComponent: 'prompt-editor',
                  },
                },
                prompt: {
                  type: 'string',
                  extra: {
                    formComponent: 'prompt-editor',
                  },
                },
              },
            },
            outputs: {
              type: 'object',
              properties: {
                result: {
                  type: 'string',
                },
              },
            },
          },
        },
        {
          id: 'llm_ZqKlP',
          type: 'llm',
          meta: {
            position: {
              x: 804,
              y: 0,
            },
          },
          data: {
            title: 'LLM_4',
            inputsValues: {
              modelName: {
                type: 'constant',
                content: 'gpt-3.5-turbo',
              },
              apiKey: {
                type: 'constant',
                content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
              },
              apiHost: {
                type: 'constant',
                content: 'https://mock-ai-url/api/v3',
              },
              temperature: {
                type: 'constant',
                content: 0.5,
              },
              systemPrompt: {
                type: 'template',
                content: '# Role\nYou are an AI assistant.\n',
              },
              prompt: {
                type: 'template',
                content: '',
              },
            },
            inputs: {
              type: 'object',
              required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
              properties: {
                modelName: {
                  type: 'string',
                },
                apiKey: {
                  type: 'string',
                },
                apiHost: {
                  type: 'string',
                },
                temperature: {
                  type: 'number',
                },
                systemPrompt: {
                  type: 'string',
                  extra: {
                    formComponent: 'prompt-editor',
                  },
                },
                prompt: {
                  type: 'string',
                  extra: {
                    formComponent: 'prompt-editor',
                  },
                },
              },
            },
            outputs: {
              type: 'object',
              properties: {
                result: {
                  type: 'string',
                },
              },
            },
          },
        },
        {
          id: 'block_start_PUDtS',
          type: 'block-start',
          meta: {
            position: {
              x: 32,
              y: 167.1,
            },
          },
          data: {},
        },
        {
          id: 'block_end_leBbs',
          type: 'block-end',
          meta: {
            position: {
              x: 1116,
              y: 167.1,
            },
          },
          data: {},
        },
      ],
      edges: [
        {
          sourceNodeID: 'block_start_PUDtS',
          targetNodeID: 'llm_6aSyo',
        },
        {
          sourceNodeID: 'llm_6aSyo',
          targetNodeID: 'llm_ZqKlP',
        },
        {
          sourceNodeID: 'llm_ZqKlP',
          targetNodeID: 'block_end_leBbs',
        },
      ],
    },
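    // Group node: visually groups the two standalone LLM nodes defined below.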
    {
      id: 'group_nYl6D',
      type: 'group',
      meta: {
        position: {
          x: 1624,
          y: 698.2,
        },
      },
      data: {
        parentID: 'root',
        blockIDs: ['llm_8--A3', 'llm_vTyMa'],
      },
    },
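    // LLM_1: entry of the 'else' branch; its prompt template references start_0.query and start_0.enable.
    // The apiKey/apiHost values are mock placeholders.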
    {
      id: 'llm_8--A3',
      type: 'llm',
      meta: {
        position: {
          x: 180,
          y: 0,
        },
      },
      data: {
        title: 'LLM_1',
        inputsValues: {
          modelName: {
            type: 'constant',
            content: 'gpt-3.5-turbo',
          },
          apiKey: {
            type: 'constant',
            content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
          },
          apiHost: {
            type: 'constant',
            content: 'https://mock-ai-url/api/v3',
          },
          temperature: {
            type: 'constant',
            content: 0.5,
          },
          systemPrompt: {
            type: 'template',
            content: '# Role\nYou are an AI assistant.\n',
          },
          prompt: {
            type: 'template',
            content: '# User Input\nquery:{{start_0.query}}\nenable:{{start_0.enable}}',
          },
        },
        inputs: {
          type: 'object',
          required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
          properties: {
            modelName: {
              type: 'string',
            },
            apiKey: {
              type: 'string',
            },
            apiHost: {
              type: 'string',
            },
            temperature: {
              type: 'number',
            },
            systemPrompt: {
              type: 'string',
              extra: {
                formComponent: 'prompt-editor',
              },
            },
            prompt: {
              type: 'string',
              extra: {
                formComponent: 'prompt-editor',
              },
            },
          },
        },
        outputs: {
          type: 'object',
          properties: {
            result: {
              type: 'string',
            },
          },
        },
      },
    },
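    // LLM_2: consumes LLM_1's result via the {{llm_8--A3.result}} template reference.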
    {
      id: 'llm_vTyMa',
      type: 'llm',
      meta: {
        position: {
          x: 640,
          y: 10,
        },
      },
      data: {
        title: 'LLM_2',
        inputsValues: {
          modelName: {
            type: 'constant',
            content: 'gpt-3.5-turbo',
          },
          apiKey: {
            type: 'constant',
            content: 'sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
          },
          apiHost: {
            type: 'constant',
            content: 'https://mock-ai-url/api/v3',
          },
          temperature: {
            type: 'constant',
            content: 0.5,
          },
          systemPrompt: {
            type: 'template',
            content: '# Role\nYou are an AI assistant.\n',
          },
          prompt: {
            type: 'template',
            content: '# LLM Input\nresult:{{llm_8--A3.result}}',
          },
        },
        inputs: {
          type: 'object',
          required: ['modelName', 'apiKey', 'apiHost', 'temperature', 'prompt'],
          properties: {
            modelName: {
              type: 'string',
            },
            apiKey: {
              type: 'string',
            },
            apiHost: {
              type: 'string',
            },
            temperature: {
              type: 'number',
            },
            systemPrompt: {
              type: 'string',
              extra: {
                formComponent: 'prompt-editor',
              },
            },
            prompt: {
              type: 'string',
              extra: {
                formComponent: 'prompt-editor',
              },
            },
          },
        },
        outputs: {
          type: 'object',
          properties: {
            result: {
              type: 'string',
            },
          },
        },
      },
    },
  ],
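  // Top-level edges: start -> http -> condition, which routes to the loop (port 'if_0') or to the
  // LLM_1 -> LLM_2 chain (port 'else'); both branches converge on end_0.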
  edges: [
    {
      sourceNodeID: 'start_0',
      targetNodeID: 'http_rDGIH',
    },
    {
      sourceNodeID: 'http_rDGIH',
      targetNodeID: 'condition_0',
    },
    {
      sourceNodeID: 'condition_0',
      targetNodeID: 'loop_Ycnsk',
      sourcePortID: 'if_0',
    },
    {
      sourceNodeID: 'condition_0',
      targetNodeID: 'llm_8--A3',
      sourcePortID: 'else',
    },
    {
      sourceNodeID: 'llm_vTyMa',
      targetNodeID: 'end_0',
    },
    {
      sourceNodeID: 'loop_Ycnsk',
      targetNodeID: 'end_0',
    },
    {
      sourceNodeID: 'llm_8--A3',
      targetNodeID: 'llm_vTyMa',
    },
  ],
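  // Schema for workflow-level global variables.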
  globalVariable: {
    type: 'object',
    required: [],
    properties: {
      userId: {
        type: 'string',
      },
    },
  },
};