# run_thread.gd — executes the node-graph "thread" by driving CDP command-line tools.
extends Node
# Runs a node-graph "thread": topologically sorts the GraphEdit processing
# nodes and executes the corresponding CDP command-line programs, streaming
# their output to a console window and reporting progress to the UI.

var control_script # main scene node (supplies cdpprogs_location, outfile, etc.)
var progress_label # Label inside the progress window
var progress_bar # ProgressBar inside the progress window
var graph_edit # GraphEdit holding the processing nodes
var console_output # RichTextLabel receiving command/console text
var progress_window # popup shown while a thread runs
var console_window # popup holding console_output
var process_successful #tracks if the last run process was successful
var process_info = {} #tracks the data of the currently running process
var process_running := false #tracks if a process is currently running
var process_cancelled = false #checks if the currently running process has been cancelled


# Called when the node enters the scene tree for the first time.
func _ready() -> void:
	pass
  16. func init(main_node: Node, progresswindow: Window, progresslabel: Label, progressbar: ProgressBar, graphedit: GraphEdit, consolewindow: Window, consoleoutput: RichTextLabel) -> void:
  17. control_script = main_node
  18. progress_window = progresswindow
  19. progress_label = progresslabel
  20. progress_bar = progressbar
  21. graph_edit = graphedit
  22. console_window = consolewindow
  23. console_output = consoleoutput
func run_thread_with_branches():
	# Executes the whole node graph as one "thread": validates the graph,
	# topologically sorts the processing nodes, runs each node's CDP command
	# (merging/splitting channels as needed), then deletes intermediate files
	# and renames the final output.
	process_cancelled = false
	process_successful = true
	# Detect platform: Determine if the OS is Windows
	var is_windows := OS.get_name() == "Windows"
	# Choose appropriate shell commands based on OS
	var delete_cmd = "del" if is_windows else "rm"
	var rename_cmd = "ren" if is_windows else "mv"
	var path_sep := "/" # Always use forward slash for paths (NOTE(review): unused in this function)
	# Get all node connections in the GraphEdit
	var connections = graph_edit.get_connection_list()
	# Prepare data structures for graph traversal
	var graph = {} # forward adjacency list
	var reverse_graph = {} # reverse adjacency list (for input lookup)
	var indegree = {} # used for topological sort
	var all_nodes = {} # map of node name -> GraphNode reference
	log_console("Mapping thread.", true)
	await get_tree().process_frame # Let UI update
	# Step 0: check thread is valid (input -> ... -> output through >= 1 processing node)
	var is_valid = path_exists_through_all_nodes()
	if is_valid == false:
		log_console("[color=#9c2828][b]Error: Valid Thread not found[/b][/color]", true)
		log_console("Threads must contain at least one processing node and a valid path from the Input File to the Output File.", true)
		await get_tree().process_frame # Let UI update
		return
	else:
		log_console("[color=#638382][b]Valid Thread found[/b][/color]", true)
		await get_tree().process_frame # Let UI update
	# Step 1: Gather nodes from the GraphEdit (utility nodes are excluded from the graph)
	for child in graph_edit.get_children():
		if child is GraphNode:
			var name = str(child.name)
			all_nodes[name] = child
			if not child.has_meta("utility"):
				graph[name] = []
				reverse_graph[name] = []
				indegree[name] = 0 # Start with zero incoming edges
	# Work out the progress-bar increment per stage.
	# NOTE(review): 100 / (...) is integer division in GDScript, so progress_step
	# truncates and the bar may undershoot 100 until the final assignment below.
	var progress_step
	if Global.trim_infile == true:
		progress_step = 100 / (graph.size() + 4)
	else:
		progress_step = 100 / (graph.size() + 3)
	# Step 2: Build graph relationships from connections
	if process_cancelled:
		progress_label.text = "Thread Stopped"
		log_console("[b]Thread Stopped[/b]", true)
		return
	else:
		progress_label.text = "Building Thread"
	for conn in connections:
		var from = str(conn["from_node"])
		var to = str(conn["to_node"])
		if graph.has(from) and graph.has(to):
			graph[from].append(to)
			reverse_graph[to].append(from)
			indegree[to] += 1 # Count incoming edges
	# Step 3: Topological sort (Kahn's algorithm) to get execution order
	var sorted = [] # Sorted list of node names
	var queue = [] # Queue of nodes with 0 indegree
	for node in graph.keys():
		if indegree[node] == 0:
			queue.append(node)
	while not queue.is_empty():
		var current = queue.pop_front()
		sorted.append(current)
		for neighbor in graph[current]:
			indegree[neighbor] -= 1
			if indegree[neighbor] == 0:
				queue.append(neighbor)
	# If not all nodes were processed, there's a cycle
	if sorted.size() != graph.size():
		log_console("[color=#9c2828][b]Error: Thread not valid[/b][/color]", true)
		log_console("Threads cannot contain loops.", true)
		return
	progress_bar.value = progress_step
	# Step 4: Start processing audio
	var batch_lines = [] # Holds all batch file commands (NOTE(review): unused in this function)
	var intermediate_files = [] # Files to delete later
	var breakfiles = [] # breakpoint text files to delete later
	# Dictionary to keep track of each node's output file
	var output_files = {}
	var process_count = 0
	# Start with the original input file
	var starting_infile = Global.infile
	# If trim is enabled trim input audio first
	if Global.trim_infile == true:
		if process_cancelled:
			progress_label.text = "Thread Stopped"
			log_console("[b]Thread Stopped[/b]", true)
			return
		else:
			progress_label.text = "Trimming input audio"
		await run_command(control_script.cdpprogs_location + "/sfedit", ["cut", "1", starting_infile, "%s_trimmed.wav" % Global.outfile, str(Global.infile_start), str(Global.infile_stop)])
		starting_infile = Global.outfile + "_trimmed.wav"
		# Mark trimmed file for cleanup if needed
		if control_script.delete_intermediate_outputs:
			intermediate_files.append(Global.outfile + "_trimmed.wav")
		progress_bar.value += progress_step
	var current_infile = starting_infile
	# Iterate over the processing nodes in topological order
	for node_name in sorted:
		var node = all_nodes[node_name]
		if process_cancelled:
			progress_label.text = "Thread Stopped"
			log_console("[b]Thread Stopped[/b]", true)
			break
		else:
			progress_label.text = "Running process: " + node.get_title()
		# Find upstream nodes connected to the current node
		var inputs = reverse_graph[node_name]
		var input_files = []
		for input_node in inputs:
			input_files.append(output_files[input_node])
		# Merge inputs if this node has more than one input
		if input_files.size() > 1:
			# Prepare final merge output file name
			var runmerge = await merge_many_files(process_count, input_files)
			var merge_output = runmerge[0]
			var converted_files = runmerge[1]
			# Track the output and intermediate files
			current_infile = merge_output
			if control_script.delete_intermediate_outputs:
				intermediate_files.append(merge_output)
				for f in converted_files:
					intermediate_files.append(f)
		# If only one input, use that
		elif input_files.size() == 1:
			current_infile = input_files[0]
		## If no input, use the original input file
		else:
			current_infile = starting_infile
		# Build the command for the current node's audio processing
		var slider_data = _get_slider_values_ordered(node)
		if node.get_slot_type_right(0) == 1: #detect if process outputs pvoc (.ana) data
			if typeof(current_infile) == TYPE_ARRAY:
				#infile is an array meaning that the last pvoc process was run in dual mono mode
				# Process left and right separately
				var pvoc_stereo_files = []
				for infile in current_infile:
					var makeprocess = await make_process(node, process_count, infile, slider_data)
					# run the command
					await run_command(makeprocess[0], makeprocess[3])
					await get_tree().process_frame
					var output_file = makeprocess[1]
					pvoc_stereo_files.append(output_file)
					# Mark file for cleanup if needed
					if control_script.delete_intermediate_outputs:
						for file in makeprocess[2]:
							breakfiles.append(file)
						intermediate_files.append(output_file)
					process_count += 1
				output_files[node_name] = pvoc_stereo_files
			else:
				var input_stereo = await is_stereo(current_infile)
				if input_stereo == true:
					#audio file is stereo and needs to be split for pvoc processing
					var pvoc_stereo_files = []
					##Split stereo to c1/c2
					await run_command(control_script.cdpprogs_location + "/housekeep", ["chans", "2", current_infile])
					# Process left and right separately
					for channel in ["c1", "c2"]:
						var dual_mono_file = current_infile.get_basename() + "_%s.wav" % channel
						var makeprocess = await make_process(node, process_count, dual_mono_file, slider_data)
						# run the command
						await run_command(makeprocess[0], makeprocess[3])
						await get_tree().process_frame
						var output_file = makeprocess[1]
						pvoc_stereo_files.append(output_file)
						# Mark file for cleanup if needed
						if control_script.delete_intermediate_outputs:
							for file in makeprocess[2]:
								breakfiles.append(file)
							intermediate_files.append(output_file)
						#Delete c1 and c2 because they can be in the wrong folder and if the same infile is used more than once
						#with this stereo process CDP will throw errors in the console even though its fine
						if is_windows:
							dual_mono_file = dual_mono_file.replace("/", "\\")
						await run_command(delete_cmd, [dual_mono_file])
						process_count += 1
					# Store output file paths for this node
					output_files[node_name] = pvoc_stereo_files
				else:
					#input file is mono run through process
					var makeprocess = await make_process(node, process_count, current_infile, slider_data)
					# run the command
					await run_command(makeprocess[0], makeprocess[3])
					await get_tree().process_frame
					var output_file = makeprocess[1]
					# Store output file path for this node
					output_files[node_name] = output_file
					# Mark file for cleanup if needed
					if control_script.delete_intermediate_outputs:
						for file in makeprocess[2]:
							breakfiles.append(file)
						intermediate_files.append(output_file)
					# Increase the process step count
					process_count += 1
		else:
			#Process outputs audio
			#check if this is the last pvoc process in a stereo processing chain
			if node.get_meta("command") == "pvoc_synth" and typeof(current_infile) == TYPE_ARRAY:
				#infile is an array meaning that the last pvoc process was run in dual mono mode
				# Process left and right separately
				var pvoc_stereo_files = []
				for infile in current_infile:
					var makeprocess = await make_process(node, process_count, infile, slider_data)
					# run the command
					await run_command(makeprocess[0], makeprocess[3])
					await get_tree().process_frame
					var output_file = makeprocess[1]
					pvoc_stereo_files.append(output_file)
					# Mark file for cleanup if needed
					if control_script.delete_intermediate_outputs:
						for file in makeprocess[2]:
							breakfiles.append(file)
						intermediate_files.append(output_file)
					process_count += 1
				#interleave left and right back into a stereo file
				var output_file = Global.outfile.get_basename() + str(process_count) + "_interleaved.wav"
				await run_command(control_script.cdpprogs_location + "/submix", ["interleave", pvoc_stereo_files[0], pvoc_stereo_files[1], output_file])
				# Store output file path for this node
				output_files[node_name] = output_file
				# Mark file for cleanup if needed
				if control_script.delete_intermediate_outputs:
					intermediate_files.append(output_file)
			else:
				#Detect if input file is mono or stereo
				var input_stereo = await is_stereo(current_infile)
				if input_stereo == true:
					if node.get_meta("stereo_input") == true: #audio file is stereo and process is stereo, run file through process
						var makeprocess = await make_process(node, process_count, current_infile, slider_data)
						# run the command
						await run_command(makeprocess[0], makeprocess[3])
						await get_tree().process_frame
						var output_file = makeprocess[1]
						# Store output file path for this node
						output_files[node_name] = output_file
						# Mark file for cleanup if needed
						if control_script.delete_intermediate_outputs:
							for file in makeprocess[2]:
								breakfiles.append(file)
							intermediate_files.append(output_file)
					else: #audio file is stereo and process is mono, split stereo, process and recombine
						##Split stereo to c1/c2
						await run_command(control_script.cdpprogs_location + "/housekeep", ["chans", "2", current_infile])
						# Process left and right separately
						var dual_mono_output = []
						for channel in ["c1", "c2"]:
							var dual_mono_file = current_infile.get_basename() + "_%s.wav" % channel
							var makeprocess = await make_process(node, process_count, dual_mono_file, slider_data)
							# run the command
							await run_command(makeprocess[0], makeprocess[3])
							await get_tree().process_frame
							var output_file = makeprocess[1]
							dual_mono_output.append(output_file)
							# Mark file for cleanup if needed
							if control_script.delete_intermediate_outputs:
								for file in makeprocess[2]:
									breakfiles.append(file)
								intermediate_files.append(output_file)
							#Delete c1 and c2 because they can be in the wrong folder and if the same infile is used more than once
							#with this stereo process CDP will throw errors in the console even though its fine
							if is_windows:
								dual_mono_file = dual_mono_file.replace("/", "\\")
							await run_command(delete_cmd, [dual_mono_file])
							process_count += 1
						var output_file = Global.outfile.get_basename() + str(process_count) + "_interleaved.wav"
						await run_command(control_script.cdpprogs_location + "/submix", ["interleave", dual_mono_output[0], dual_mono_output[1], output_file])
						# Store output file path for this node
						output_files[node_name] = output_file
						# Mark file for cleanup if needed
						if control_script.delete_intermediate_outputs:
							intermediate_files.append(output_file)
				else: #audio file is mono, run through the process
					var makeprocess = await make_process(node, process_count, current_infile, slider_data)
					# run the command
					await run_command(makeprocess[0], makeprocess[3])
					await get_tree().process_frame
					var output_file = makeprocess[1]
					# Store output file path for this node
					output_files[node_name] = output_file
					# Mark file for cleanup if needed
					if control_script.delete_intermediate_outputs:
						for file in makeprocess[2]:
							breakfiles.append(file)
						intermediate_files.append(output_file)
			# Increase the process step count
			process_count += 1
		progress_bar.value += progress_step
	# FINAL OUTPUT STAGE
	# Collect all nodes that are connected to the outputfile node
	if process_cancelled:
		progress_label.text = "Thread Stopped"
		log_console("[b]Thread Stopped[/b]", true)
		return
	else:
		progress_label.text = "Finalising output"
	var output_inputs := []
	for conn in connections:
		var to_node = str(conn["to_node"])
		if all_nodes.has(to_node) and all_nodes[to_node].get_meta("command") == "outputfile":
			output_inputs.append(str(conn["from_node"]))
	# List to hold the final output files to be merged (if needed)
	var final_outputs := []
	for node_name in output_inputs:
		if output_files.has(node_name):
			final_outputs.append(output_files[node_name])
	# If multiple outputs go to the outputfile node, merge them
	if final_outputs.size() > 1:
		var runmerge = await merge_many_files(process_count, final_outputs)
		control_script.final_output_dir = runmerge[0]
		var converted_files = runmerge[1]
		if control_script.delete_intermediate_outputs:
			for f in converted_files:
				intermediate_files.append(f)
	# Only one output, no merge needed
	elif final_outputs.size() == 1:
		var single_output = final_outputs[0]
		control_script.final_output_dir = single_output
		intermediate_files.erase(single_output) # make sure the final file is not deleted below
	progress_bar.value += progress_step
	# CLEANUP: Delete intermediate files after processing and rename final output
	if process_cancelled:
		progress_label.text = "Thread Stopped"
		log_console("[b]Thread Stopped[/b]", true)
		return
	else:
		log_console("Cleaning up intermediate files.", true)
		progress_label.text = "Cleaning up"
	for file_path in intermediate_files:
		# Adjust file path format for Windows if needed
		var fixed_path = file_path
		if is_windows:
			fixed_path = fixed_path.replace("/", "\\")
		await run_command(delete_cmd, [fixed_path])
		await get_tree().process_frame
	#delete breakpoint files
	for file_path in breakfiles:
		# Adjust file path format for Windows if needed
		var fixed_path = file_path
		if is_windows:
			fixed_path = fixed_path.replace("/", "\\")
		await run_command(delete_cmd, [fixed_path])
		await get_tree().process_frame
	var final_filename = "%s.wav" % Global.outfile
	var final_output_dir_fixed_path = control_script.final_output_dir
	if is_windows:
		final_output_dir_fixed_path = final_output_dir_fixed_path.replace("/", "\\")
		# Windows "ren" expects the new name without a path component
		await run_command(rename_cmd, [final_output_dir_fixed_path, final_filename.get_file()])
	else:
		await run_command(rename_cmd, [final_output_dir_fixed_path, "%s.wav" % Global.outfile])
	control_script.final_output_dir = Global.outfile + ".wav"
	control_script.output_audio_player.play_outfile(control_script.final_output_dir)
	control_script.outfile = control_script.final_output_dir
	progress_bar.value = 100.0
	var interface_settings = ConfigHandler.load_interface_settings() #checks if close console is enabled and closes console on a success
	progress_window.hide()
	if interface_settings.auto_close_console and process_successful == true:
		console_window.hide()
  384. func is_stereo(file: String) -> bool:
  385. var output = await run_command(control_script.cdpprogs_location + "/sfprops", ["-c", file])
  386. output = int(output.strip_edges()) #convert output from cmd to clean int
  387. if output == 1:
  388. return false
  389. elif output == 2:
  390. return true
  391. elif output == 1026: #ignore pvoc .ana files
  392. return false
  393. else:
  394. log_console("[color=#9c2828]Error: Only mono and stereo files are supported[/color]", true)
  395. return false
  396. func merge_many_files(process_count: int, input_files: Array) -> Array:
  397. var merge_output = "%s_merge_%d.wav" % [Global.outfile.get_basename(), process_count]
  398. var converted_files := [] # Track any mono->stereo converted files
  399. var inputs_to_merge := [] # Files to be used in the final merge
  400. var mono_files := []
  401. var stereo_files := []
  402. # STEP 1: Check each file's channel count
  403. for f in input_files:
  404. var stereo = await is_stereo(f)
  405. if stereo == false:
  406. mono_files.append(f)
  407. elif stereo == true:
  408. stereo_files.append(f)
  409. # STEP 2: Convert mono to stereo if there is a mix
  410. if mono_files.size() > 0 and stereo_files.size() > 0:
  411. for mono_file in mono_files:
  412. var stereo_file = "%s_stereo.wav" % mono_file.get_basename()
  413. await run_command(control_script.cdpprogs_location + "/submix", ["interleave", mono_file, mono_file, stereo_file])
  414. if process_successful == false:
  415. log_console("Failed to interleave mono file: %s" % mono_file, true)
  416. else:
  417. converted_files.append(stereo_file)
  418. inputs_to_merge.append(stereo_file)
  419. # Add existing stereo files
  420. inputs_to_merge += stereo_files
  421. else:
  422. # All mono or all stereo — use input_files directly
  423. inputs_to_merge = input_files.duplicate()
  424. # STEP 3: Merge all input files (converted or original)
  425. var quoted_inputs := []
  426. for f in inputs_to_merge:
  427. quoted_inputs.append(f)
  428. quoted_inputs.insert(0, "mergemany")
  429. quoted_inputs.append(merge_output)
  430. await run_command(control_script.cdpprogs_location + "/submix", quoted_inputs)
  431. if process_successful == false:
  432. log_console("Failed to to merge files to" + merge_output, true)
  433. return [merge_output, converted_files]
  434. func _get_slider_values_ordered(node: Node) -> Array:
  435. var results := []
  436. for child in node.get_children():
  437. if child is Range:
  438. var flag = child.get_meta("flag") if child.has_meta("flag") else ""
  439. var time
  440. var brk_data = []
  441. var min_slider = child.min_value
  442. var max_slider = child.max_value
  443. if child.has_meta("time"):
  444. time = child.get_meta("time")
  445. else:
  446. time = false
  447. if child.has_meta("brk_data"):
  448. brk_data = child.get_meta("brk_data")
  449. results.append([flag, child.value, time, brk_data, min_slider, max_slider])
  450. elif child.get_child_count() > 0:
  451. var nested := _get_slider_values_ordered(child)
  452. results.append_array(nested)
  453. return results
func make_process(node: Node, process_count: int, current_infile: String, slider_data: Array) -> Array:
	# Builds the CDP invocation for one graph node.
	# Returns [command_path, output_file, cleanup_files, args]:
	#   command_path  - program path under cdpprogs_location
	#   output_file   - file this step writes (.wav or .ana)
	#   cleanup_files - breakpoint .txt files created here (deleted later)
	#   args          - argument array to pass to run_command()
	# Determine output extension: .wav or .ana based on the node's slot type
	var extension = ".wav" if node.get_slot_type_right(0) == 0 else ".ana"
	# Construct output filename for this step
	var output_file = "%s_%d%s" % [Global.outfile.get_basename(), process_count, extension]
	# Get the command name from metadata or default to node name
	var command_name = str(node.get_meta("command"))
	#command_name = command_name.replace("_", " ")
	# "program_mode..." -> ["program", "mode..."]
	command_name = command_name.split("_", true, 1)
	print(command_name) # NOTE(review): leftover debug print
	var command = "%s/%s" %[control_script.cdpprogs_location, command_name[0]]
	print(command) # NOTE(review): leftover debug print
	var args = command_name[1].split("_", true, 1)
	print(args) # NOTE(review): leftover debug print
	args.append(current_infile)
	args.append(output_file)
	print(args) # NOTE(review): leftover debug print
	# Start building the command line windows
	# NOTE(review): "line" is dead except for the commented-out return below.
	var line = "%s/%s \"%s\" \"%s\" " % [control_script.cdpprogs_location, command_name, current_infile, output_file]
	#mac
	var cleanup = []
	# Append parameter values from the sliders, include flags if present
	var slider_count = 0
	for entry in slider_data:
		var flag = entry[0]
		var value = entry[1]
		var time = entry[2] #checks if slider is a time percentage slider
		var brk_data = entry[3]
		var min_slider = entry[4]
		var max_slider = entry[5]
		if brk_data.size() > 0: #if breakpoint (automation) data is present on slider
			#Sort all points by time
			var sorted_brk_data = []
			sorted_brk_data = brk_data.duplicate()
			sorted_brk_data.sort_custom(sort_points)
			var calculated_brk = []
			#get length of input file in seconds
			var infile_length = await run_command(control_script.cdpprogs_location + "/sfprops", ["-d", current_infile])
			infile_length = float(infile_length.strip_edges())
			#scale values from automation window to the right length for file and correct slider values
			#need to check how time is handled in all files that accept it, zigzag is x = outfile position, y = infile position
			#if time == true:
			#for point in sorted_brk_data:
			#var new_x = infile_length * (point.x / 700) #time
			#var new_y = infile_length * (remap(point.y, 255, 0, min_slider, max_slider) / 100) #slider value scaled as a percentage of infile time
			#calculated_brk.append(Vector2(new_x, new_y))
			#else:
			for i in range(sorted_brk_data.size()):
				var point = sorted_brk_data[i]
				# assumes the automation canvas is 700 px wide and 255 px tall — TODO confirm
				var new_x = infile_length * (point.x / 700) #time
				if i == sorted_brk_data.size() - 1: #check if this is last automation point
					new_x = infile_length + 0.1 # force last point's x to infile_length + 100ms to make sure the file is defo over
				var new_y = remap(point.y, 255, 0, min_slider, max_slider) #slider value
				calculated_brk.append(Vector2(new_x, new_y))
			#make text file
			var brk_file_path = output_file.get_basename() + "_" + str(slider_count) + ".txt"
			write_breakfile(calculated_brk, brk_file_path)
			#append text file in place of value
			line += ("\"%s\" " % brk_file_path)
			args.append(brk_file_path)
			cleanup.append(brk_file_path)
		else:
			if time == true:
				var infile_length = await run_command(control_script.cdpprogs_location + "/sfprops", ["-d", current_infile])
				infile_length = float(infile_length.strip_edges())
				value = infile_length * (value / 100) #calculate percentage time of the input file
			line += ("%s%.2f " % [flag, value]) if flag.begins_with("-") else ("%.2f " % value)
			args.append(("%s%.2f " % [flag, value]) if flag.begins_with("-") else ("%.2f " % value))
		slider_count += 1
	return [command, output_file, cleanup, args]
	#return [line.strip_edges(), output_file, cleanup]
# Comparator for Array.sort_custom(): orders automation points by ascending x (time).
func sort_points(a, b):
	return a.x < b.x
  527. func write_breakfile(points: Array, path: String):
  528. var file = FileAccess.open(path, FileAccess.WRITE)
  529. if file:
  530. for point in points:
  531. var line = str(point.x) + " " + str(point.y) + "\n"
  532. file.store_string(line)
  533. file.close()
  534. else:
  535. print("Failed to open file for writing.")
  536. func run_command(command: String, args: Array) -> String:
  537. var is_windows = OS.get_name() == "Windows"
  538. console_output.append_text(command + " " + " ".join(args) + "\n")
  539. console_output.scroll_to_line(console_output.get_line_count() - 1)
  540. await get_tree().process_frame
  541. if is_windows:
  542. #exit_code = OS.execute("cmd.exe", ["/C", command], output, true, false)
  543. args.insert(0, command)
  544. args.insert(0, "/C")
  545. process_info = OS.execute_with_pipe("cmd.exe", args, false)
  546. else:
  547. process_info = OS.execute_with_pipe(command, args, false)
  548. # Check if the process was successfully started
  549. if !process_info.has("pid"):
  550. print("Failed to start process.")
  551. return ""
  552. process_running = true
  553. # Start monitoring the process output and status
  554. return await monitor_process(process_info["pid"], process_info["stdio"], process_info["stderr"])
  555. func monitor_process(pid: int, stdout: FileAccess, stderr: FileAccess) -> String:
  556. var output := ""
  557. while OS.is_process_running(pid):
  558. await get_tree().process_frame
  559. while stdout.get_position() < stdout.get_length():
  560. var line = stdout.get_line()
  561. output += line
  562. console_output.append_text(line + "\n")
  563. console_output.scroll_to_line(console_output.get_line_count() - 1)
  564. while stderr.get_position() < stderr.get_length():
  565. var line = stderr.get_line()
  566. output += line
  567. console_output.append_text(line + "\n")
  568. console_output.scroll_to_line(console_output.get_line_count() - 1)
  569. var exit_code = OS.get_process_exit_code(pid)
  570. if exit_code == 0:
  571. if output.contains("ERROR:"): #checks if CDP reported an error but passed exit code 0 anyway
  572. console_output.append_text("[color=#9c2828][b]Processes failed[/b][/color]\n\n")
  573. console_output.scroll_to_line(console_output.get_line_count() - 1)
  574. process_successful = false
  575. if process_cancelled == false:
  576. progress_window.hide()
  577. if !console_window.visible:
  578. console_window.popup_centered()
  579. else:
  580. console_output.append_text("[color=#638382]Processes ran successfully[/color]\n\n")
  581. console_output.scroll_to_line(console_output.get_line_count() - 1)
  582. else:
  583. console_output.append_text("[color=#9c2828][b]Processes failed with exit code: %d[/b][/color]\n" % exit_code + "\n")
  584. console_output.scroll_to_line(console_output.get_line_count() - 1)
  585. process_successful = false
  586. if process_cancelled == false:
  587. progress_window.hide()
  588. if !console_window.visible:
  589. console_window.popup_centered()
  590. if output.contains("as an internal or external command"): #check for cdprogs location error on windows
  591. console_output.append_text("[color=#9c2828][b]Please make sure your cdprogs folder is set to the correct location in the Settings menu. The default location is C:\\CDPR8\\_cdp\\_cdprogs[/b][/color]\n\n")
  592. console_output.scroll_to_line(console_output.get_line_count() - 1)
  593. if output.contains("command not found"): #check for cdprogs location error on unix systems
  594. console_output.append_text("[color=#9c2828][b]Please make sure your cdprogs folder is set to the correct location in the Settings menu. The default location is ~/cdpr8/_cdp/_cdprogs[/b][/color]\n\n")
  595. console_output.scroll_to_line(console_output.get_line_count() - 1)
  596. process_running = false
  597. return output
  598. func _on_kill_process_button_down() -> void:
  599. if process_running and process_info.has("pid"):
  600. progress_window.hide()
  601. # Terminate the process by PID
  602. OS.kill(process_info["pid"])
  603. process_running = false
  604. print("Process cancelled.")
  605. process_cancelled = true
  606. func path_exists_through_all_nodes() -> bool:
  607. var all_nodes = {}
  608. var graph = {}
  609. var input_node_name = ""
  610. var output_node_name = ""
  611. # Gather all relevant nodes
  612. for child in graph_edit.get_children():
  613. if child is GraphNode:
  614. var name = str(child.name)
  615. all_nodes[name] = child
  616. var command = child.get_meta("command")
  617. if command == "inputfile":
  618. input_node_name = name
  619. elif command == "outputfile":
  620. output_node_name = name
  621. # Skip utility nodes, include others
  622. if command in ["inputfile", "outputfile"] or not child.has_meta("utility"):
  623. graph[name] = []
  624. # Ensure both input and output were found
  625. if input_node_name == "" or output_node_name == "":
  626. print("Input or output node not found!")
  627. return false
  628. # Add edges to graph from the connection list
  629. var connection_list = graph_edit.get_connection_list()
  630. for conn in connection_list:
  631. var from = str(conn["from_node"])
  632. var to = str(conn["to_node"])
  633. if graph.has(from):
  634. graph[from].append(to)
  635. # BFS traversal to check path and depth
  636. var visited = {}
  637. var queue = [ { "node": input_node_name, "depth": 0 } ]
  638. var has_intermediate = false
  639. while queue.size() > 0:
  640. var current = queue.pop_front()
  641. var current_node = current["node"]
  642. var depth = current["depth"]
  643. if current_node in visited:
  644. continue
  645. visited[current_node] = true
  646. if current_node == output_node_name and depth >= 2:
  647. has_intermediate = true
  648. if graph.has(current_node):
  649. for neighbor in graph[current_node]:
  650. queue.append({ "node": neighbor, "depth": depth + 1 })
  651. return has_intermediate
  652. func log_console(text: String, update: bool) -> void:
  653. console_output.append_text(text + "\n \n")
  654. console_output.scroll_to_line(console_output.get_line_count() - 1)
  655. if update == true:
  656. await get_tree().process_frame # Optional: ensure UI updates