Author: Kevin Schoon [me@kevinschoon.com]
Hash: 750db5c4aa37bd0e0d512d7d668853527661b4d0
Timestamp: Tue, 11 Nov 2025 20:13:19 +0000 (3 weeks ago)
+360 -272 +/-20 browse
| 1 | diff --git a/.sqlx/query-2df4f3fbc3dc99705195458641f2f9407ec8543bfe8b16fd8723f6614e029f02.json b/.sqlx/query-2df4f3fbc3dc99705195458641f2f9407ec8543bfe8b16fd8723f6614e029f02.json |
| 2 | new file mode 100644 |
| 3 | index 0000000..3a4adf8 |
| 4 | --- /dev/null |
| 5 | +++ b/.sqlx/query-2df4f3fbc3dc99705195458641f2f9407ec8543bfe8b16fd8723f6614e029f02.json |
| 6 | @@ -0,0 +1,12 @@ |
| 7 | + { |
| 8 | + "db_name": "SQLite", |
| 9 | + "query": "INSERT INTO logs\n (step_id, runtime, stream, line)\nVALUES\n (?, ?, ?, ?)\n", |
| 10 | + "describe": { |
| 11 | + "columns": [], |
| 12 | + "parameters": { |
| 13 | + "Right": 4 |
| 14 | + }, |
| 15 | + "nullable": [] |
| 16 | + }, |
| 17 | + "hash": "2df4f3fbc3dc99705195458641f2f9407ec8543bfe8b16fd8723f6614e029f02" |
| 18 | + } |
| 19 | diff --git a/.sqlx/query-57ccc0ec055b6174a5aa9aaaac69c79c6bb2f967da4c2be38df3c743ad802904.json b/.sqlx/query-57ccc0ec055b6174a5aa9aaaac69c79c6bb2f967da4c2be38df3c743ad802904.json |
| 20 | deleted file mode 100644 |
| 21 | index 813ed5b..0000000 |
| 22 | --- a/.sqlx/query-57ccc0ec055b6174a5aa9aaaac69c79c6bb2f967da4c2be38df3c743ad802904.json |
| 23 | +++ /dev/null |
| 24 | @@ -1,68 +0,0 @@ |
| 25 | - { |
| 26 | - "db_name": "SQLite", |
| 27 | - "query": "SELECT\n id,\n name,\n shell,\n input,\n stdout,\n stderr,\n started_at,\n finished_at,\n exit_code\nFROM steps WHERE id = ?\n", |
| 28 | - "describe": { |
| 29 | - "columns": [ |
| 30 | - { |
| 31 | - "name": "id", |
| 32 | - "ordinal": 0, |
| 33 | - "type_info": "Integer" |
| 34 | - }, |
| 35 | - { |
| 36 | - "name": "name", |
| 37 | - "ordinal": 1, |
| 38 | - "type_info": "Text" |
| 39 | - }, |
| 40 | - { |
| 41 | - "name": "shell", |
| 42 | - "ordinal": 2, |
| 43 | - "type_info": "Text" |
| 44 | - }, |
| 45 | - { |
| 46 | - "name": "input", |
| 47 | - "ordinal": 3, |
| 48 | - "type_info": "Text" |
| 49 | - }, |
| 50 | - { |
| 51 | - "name": "stdout", |
| 52 | - "ordinal": 4, |
| 53 | - "type_info": "Text" |
| 54 | - }, |
| 55 | - { |
| 56 | - "name": "stderr", |
| 57 | - "ordinal": 5, |
| 58 | - "type_info": "Text" |
| 59 | - }, |
| 60 | - { |
| 61 | - "name": "started_at", |
| 62 | - "ordinal": 6, |
| 63 | - "type_info": "Integer" |
| 64 | - }, |
| 65 | - { |
| 66 | - "name": "finished_at", |
| 67 | - "ordinal": 7, |
| 68 | - "type_info": "Integer" |
| 69 | - }, |
| 70 | - { |
| 71 | - "name": "exit_code", |
| 72 | - "ordinal": 8, |
| 73 | - "type_info": "Integer" |
| 74 | - } |
| 75 | - ], |
| 76 | - "parameters": { |
| 77 | - "Right": 1 |
| 78 | - }, |
| 79 | - "nullable": [ |
| 80 | - false, |
| 81 | - false, |
| 82 | - false, |
| 83 | - false, |
| 84 | - true, |
| 85 | - true, |
| 86 | - true, |
| 87 | - true, |
| 88 | - true |
| 89 | - ] |
| 90 | - }, |
| 91 | - "hash": "57ccc0ec055b6174a5aa9aaaac69c79c6bb2f967da4c2be38df3c743ad802904" |
| 92 | - } |
| 93 | diff --git a/.sqlx/query-8b01bf6d72dd1129616e2d1ac3d099bee865e2127cf4d995b15e54a1a9d41f96.json b/.sqlx/query-8b01bf6d72dd1129616e2d1ac3d099bee865e2127cf4d995b15e54a1a9d41f96.json |
| 94 | deleted file mode 100644 |
| 95 | index 5abb285..0000000 |
| 96 | --- a/.sqlx/query-8b01bf6d72dd1129616e2d1ac3d099bee865e2127cf4d995b15e54a1a9d41f96.json |
| 97 | +++ /dev/null |
| 98 | @@ -1,12 +0,0 @@ |
| 99 | - { |
| 100 | - "db_name": "SQLite", |
| 101 | - "query": "UPDATE steps\n\tSET\n\t\tfinished_at = UNIXEPOCH(),\n\t\tstdout = ?,\n\t\tstderr = ?,\n\t\texit_code = ?\nWHERE\n\tid = ?\n", |
| 102 | - "describe": { |
| 103 | - "columns": [], |
| 104 | - "parameters": { |
| 105 | - "Right": 4 |
| 106 | - }, |
| 107 | - "nullable": [] |
| 108 | - }, |
| 109 | - "hash": "8b01bf6d72dd1129616e2d1ac3d099bee865e2127cf4d995b15e54a1a9d41f96" |
| 110 | - } |
| 111 | diff --git a/.sqlx/query-96c7f348c8f391d5d6284cd939f320ea15afee96f02af8063150b3bd795fbe41.json b/.sqlx/query-96c7f348c8f391d5d6284cd939f320ea15afee96f02af8063150b3bd795fbe41.json |
| 112 | new file mode 100644 |
| 113 | index 0000000..3f12582 |
| 114 | --- /dev/null |
| 115 | +++ b/.sqlx/query-96c7f348c8f391d5d6284cd939f320ea15afee96f02af8063150b3bd795fbe41.json |
| 116 | @@ -0,0 +1,12 @@ |
| 117 | + { |
| 118 | + "db_name": "SQLite", |
| 119 | + "query": "UPDATE steps\n\tSET\n\t\tfinished_at = UNIXEPOCH(),\n\t\texit_code = ?\nWHERE\n\tid = ?\n", |
| 120 | + "describe": { |
| 121 | + "columns": [], |
| 122 | + "parameters": { |
| 123 | + "Right": 2 |
| 124 | + }, |
| 125 | + "nullable": [] |
| 126 | + }, |
| 127 | + "hash": "96c7f348c8f391d5d6284cd939f320ea15afee96f02af8063150b3bd795fbe41" |
| 128 | + } |
| 129 | diff --git a/.sqlx/query-9ee9022b8b67bded0bac9d476cd72c9336d738d2f2beeaddc624918a76542eff.json b/.sqlx/query-9ee9022b8b67bded0bac9d476cd72c9336d738d2f2beeaddc624918a76542eff.json |
| 130 | new file mode 100644 |
| 131 | index 0000000..d25fa72 |
| 132 | --- /dev/null |
| 133 | +++ b/.sqlx/query-9ee9022b8b67bded0bac9d476cd72c9336d738d2f2beeaddc624918a76542eff.json |
| 134 | @@ -0,0 +1,56 @@ |
| 135 | + { |
| 136 | + "db_name": "SQLite", |
| 137 | + "query": "SELECT id,\n name,\n shell,\n input,\n started_at,\n finished_at,\n exit_code\nFROM steps\nWHERE workflow_id = ?\n", |
| 138 | + "describe": { |
| 139 | + "columns": [ |
| 140 | + { |
| 141 | + "name": "id", |
| 142 | + "ordinal": 0, |
| 143 | + "type_info": "Integer" |
| 144 | + }, |
| 145 | + { |
| 146 | + "name": "name", |
| 147 | + "ordinal": 1, |
| 148 | + "type_info": "Text" |
| 149 | + }, |
| 150 | + { |
| 151 | + "name": "shell", |
| 152 | + "ordinal": 2, |
| 153 | + "type_info": "Text" |
| 154 | + }, |
| 155 | + { |
| 156 | + "name": "input", |
| 157 | + "ordinal": 3, |
| 158 | + "type_info": "Text" |
| 159 | + }, |
| 160 | + { |
| 161 | + "name": "started_at", |
| 162 | + "ordinal": 4, |
| 163 | + "type_info": "Integer" |
| 164 | + }, |
| 165 | + { |
| 166 | + "name": "finished_at", |
| 167 | + "ordinal": 5, |
| 168 | + "type_info": "Integer" |
| 169 | + }, |
| 170 | + { |
| 171 | + "name": "exit_code", |
| 172 | + "ordinal": 6, |
| 173 | + "type_info": "Integer" |
| 174 | + } |
| 175 | + ], |
| 176 | + "parameters": { |
| 177 | + "Right": 1 |
| 178 | + }, |
| 179 | + "nullable": [ |
| 180 | + false, |
| 181 | + false, |
| 182 | + false, |
| 183 | + false, |
| 184 | + true, |
| 185 | + true, |
| 186 | + true |
| 187 | + ] |
| 188 | + }, |
| 189 | + "hash": "9ee9022b8b67bded0bac9d476cd72c9336d738d2f2beeaddc624918a76542eff" |
| 190 | + } |
| 191 | diff --git a/.sqlx/query-b585f9c061d1033d20ca4857915e6f72265d6b8e4dec8f75782dbe7d57d59ab4.json b/.sqlx/query-b585f9c061d1033d20ca4857915e6f72265d6b8e4dec8f75782dbe7d57d59ab4.json |
| 192 | deleted file mode 100644 |
| 193 | index 7b99e39..0000000 |
| 194 | --- a/.sqlx/query-b585f9c061d1033d20ca4857915e6f72265d6b8e4dec8f75782dbe7d57d59ab4.json |
| 195 | +++ /dev/null |
| 196 | @@ -1,68 +0,0 @@ |
| 197 | - { |
| 198 | - "db_name": "SQLite", |
| 199 | - "query": "SELECT id,\n name,\n shell,\n input,\n stdout,\n stderr,\n started_at,\n finished_at,\n exit_code\nFROM steps\nWHERE workflow_id = ?\n", |
| 200 | - "describe": { |
| 201 | - "columns": [ |
| 202 | - { |
| 203 | - "name": "id", |
| 204 | - "ordinal": 0, |
| 205 | - "type_info": "Integer" |
| 206 | - }, |
| 207 | - { |
| 208 | - "name": "name", |
| 209 | - "ordinal": 1, |
| 210 | - "type_info": "Text" |
| 211 | - }, |
| 212 | - { |
| 213 | - "name": "shell", |
| 214 | - "ordinal": 2, |
| 215 | - "type_info": "Text" |
| 216 | - }, |
| 217 | - { |
| 218 | - "name": "input", |
| 219 | - "ordinal": 3, |
| 220 | - "type_info": "Text" |
| 221 | - }, |
| 222 | - { |
| 223 | - "name": "stdout", |
| 224 | - "ordinal": 4, |
| 225 | - "type_info": "Text" |
| 226 | - }, |
| 227 | - { |
| 228 | - "name": "stderr", |
| 229 | - "ordinal": 5, |
| 230 | - "type_info": "Text" |
| 231 | - }, |
| 232 | - { |
| 233 | - "name": "started_at", |
| 234 | - "ordinal": 6, |
| 235 | - "type_info": "Integer" |
| 236 | - }, |
| 237 | - { |
| 238 | - "name": "finished_at", |
| 239 | - "ordinal": 7, |
| 240 | - "type_info": "Integer" |
| 241 | - }, |
| 242 | - { |
| 243 | - "name": "exit_code", |
| 244 | - "ordinal": 8, |
| 245 | - "type_info": "Integer" |
| 246 | - } |
| 247 | - ], |
| 248 | - "parameters": { |
| 249 | - "Right": 1 |
| 250 | - }, |
| 251 | - "nullable": [ |
| 252 | - false, |
| 253 | - false, |
| 254 | - false, |
| 255 | - false, |
| 256 | - true, |
| 257 | - true, |
| 258 | - true, |
| 259 | - true, |
| 260 | - true |
| 261 | - ] |
| 262 | - }, |
| 263 | - "hash": "b585f9c061d1033d20ca4857915e6f72265d6b8e4dec8f75782dbe7d57d59ab4" |
| 264 | - } |
| 265 | diff --git a/.sqlx/query-b87fcd1d46f97c2885e2d5762731c652e0702747750099eb3fc9eb27967f4407.json b/.sqlx/query-b87fcd1d46f97c2885e2d5762731c652e0702747750099eb3fc9eb27967f4407.json |
| 266 | new file mode 100644 |
| 267 | index 0000000..22c6050 |
| 268 | --- /dev/null |
| 269 | +++ b/.sqlx/query-b87fcd1d46f97c2885e2d5762731c652e0702747750099eb3fc9eb27967f4407.json |
| 270 | @@ -0,0 +1,32 @@ |
| 271 | + { |
| 272 | + "db_name": "SQLite", |
| 273 | + "query": "SELECT runtime, stream, line FROM logs WHERE step_id = ?\n", |
| 274 | + "describe": { |
| 275 | + "columns": [ |
| 276 | + { |
| 277 | + "name": "runtime", |
| 278 | + "ordinal": 0, |
| 279 | + "type_info": "Integer" |
| 280 | + }, |
| 281 | + { |
| 282 | + "name": "stream", |
| 283 | + "ordinal": 1, |
| 284 | + "type_info": "Text" |
| 285 | + }, |
| 286 | + { |
| 287 | + "name": "line", |
| 288 | + "ordinal": 2, |
| 289 | + "type_info": "Text" |
| 290 | + } |
| 291 | + ], |
| 292 | + "parameters": { |
| 293 | + "Right": 1 |
| 294 | + }, |
| 295 | + "nullable": [ |
| 296 | + false, |
| 297 | + false, |
| 298 | + true |
| 299 | + ] |
| 300 | + }, |
| 301 | + "hash": "b87fcd1d46f97c2885e2d5762731c652e0702747750099eb3fc9eb27967f4407" |
| 302 | + } |
| 303 | diff --git a/.sqlx/query-dd4c78e9da9a646928ca768eeca2c8ef0feb35770f9c5aea1ac629e95fb0cc6e.json b/.sqlx/query-dd4c78e9da9a646928ca768eeca2c8ef0feb35770f9c5aea1ac629e95fb0cc6e.json |
| 304 | new file mode 100644 |
| 305 | index 0000000..0636ff6 |
| 306 | --- /dev/null |
| 307 | +++ b/.sqlx/query-dd4c78e9da9a646928ca768eeca2c8ef0feb35770f9c5aea1ac629e95fb0cc6e.json |
| 308 | @@ -0,0 +1,56 @@ |
| 309 | + { |
| 310 | + "db_name": "SQLite", |
| 311 | + "query": "SELECT\n id,\n name,\n shell,\n input,\n started_at,\n finished_at,\n exit_code\nFROM steps WHERE id = ?\n", |
| 312 | + "describe": { |
| 313 | + "columns": [ |
| 314 | + { |
| 315 | + "name": "id", |
| 316 | + "ordinal": 0, |
| 317 | + "type_info": "Integer" |
| 318 | + }, |
| 319 | + { |
| 320 | + "name": "name", |
| 321 | + "ordinal": 1, |
| 322 | + "type_info": "Text" |
| 323 | + }, |
| 324 | + { |
| 325 | + "name": "shell", |
| 326 | + "ordinal": 2, |
| 327 | + "type_info": "Text" |
| 328 | + }, |
| 329 | + { |
| 330 | + "name": "input", |
| 331 | + "ordinal": 3, |
| 332 | + "type_info": "Text" |
| 333 | + }, |
| 334 | + { |
| 335 | + "name": "started_at", |
| 336 | + "ordinal": 4, |
| 337 | + "type_info": "Integer" |
| 338 | + }, |
| 339 | + { |
| 340 | + "name": "finished_at", |
| 341 | + "ordinal": 5, |
| 342 | + "type_info": "Integer" |
| 343 | + }, |
| 344 | + { |
| 345 | + "name": "exit_code", |
| 346 | + "ordinal": 6, |
| 347 | + "type_info": "Integer" |
| 348 | + } |
| 349 | + ], |
| 350 | + "parameters": { |
| 351 | + "Right": 1 |
| 352 | + }, |
| 353 | + "nullable": [ |
| 354 | + false, |
| 355 | + false, |
| 356 | + false, |
| 357 | + false, |
| 358 | + true, |
| 359 | + true, |
| 360 | + true |
| 361 | + ] |
| 362 | + }, |
| 363 | + "hash": "dd4c78e9da9a646928ca768eeca2c8ef0feb35770f9c5aea1ac629e95fb0cc6e" |
| 364 | + } |
| 365 | diff --git a/ayllu-build/src/evaluate.rs b/ayllu-build/src/evaluate.rs |
| 366 | index cb21c0c..cfda07a 100644 |
| 367 | --- a/ayllu-build/src/evaluate.rs |
| 368 | +++ b/ayllu-build/src/evaluate.rs |
| 369 | @@ -283,7 +283,7 @@ impl Runtime { |
| 370 | ..Default::default() |
| 371 | }; |
| 372 | |
| 373 | - let (stdout, stderr, exit_code) = executor.execute(&next_step, ctx)?; |
| 374 | + let (lines, exit_code) = executor.execute(&next_step, ctx)?; |
| 375 | if !exit_code.success() { |
| 376 | tracing::warn!("step {} has failed: {:?}", next_step.name, exit_code); |
| 377 | } |
| 378 | @@ -291,8 +291,7 @@ impl Runtime { |
| 379 | self.db |
| 380 | .update_step_finish( |
| 381 | *next_step_id, |
| 382 | - &stdout, |
| 383 | - &stderr, |
| 384 | + lines.as_slice(), |
| 385 | exit_code.code().unwrap() as i8, |
| 386 | ) |
| 387 | .await?; |
| 388 | diff --git a/ayllu-build/src/executor.rs b/ayllu-build/src/executor.rs |
| 389 | index 2609182..7b84627 100644 |
| 390 | --- a/ayllu-build/src/executor.rs |
| 391 | +++ b/ayllu-build/src/executor.rs |
| 392 | @@ -1,39 +1,20 @@ |
| 393 | use std::collections::HashMap; |
| 394 | use std::env; |
| 395 | - use std::fs::{File, read_to_string}; |
| 396 | + use std::fs::OpenOptions; |
| 397 | + use std::io::{BufRead, BufReader}; |
| 398 | + use std::io::{Read, Write}; |
| 399 | use std::mem::take; |
| 400 | use std::path::{Path, PathBuf}; |
| 401 | use std::process::{Command, ExitStatus, Stdio}; |
| 402 | use std::thread; |
| 403 | - use std::{ |
| 404 | - io, |
| 405 | - io::{Read, Write}, |
| 406 | - }; |
| 407 | + use std::time::Instant; |
| 408 | |
| 409 | - use ayllu_database::build::Step; |
| 410 | + use ayllu_database::build::{LogLine, Output, Step}; |
| 411 | use serde::Deserialize; |
| 412 | use tracing::log::{debug, info}; |
| 413 | |
| 414 | use crate::error::Error; |
| 415 | |
| 416 | - /// standard stream when logging output |
| 417 | - enum Output { |
| 418 | - Stdout, |
| 419 | - Stderr, |
| 420 | - } |
| 421 | - |
| 422 | - impl Output { |
| 423 | - fn enabled(self, tee: bool) -> Option<Self> { |
| 424 | - if !tee { |
| 425 | - return None; |
| 426 | - }; |
| 427 | - match self { |
| 428 | - Output::Stdout => Some(Output::Stdout), |
| 429 | - Output::Stderr => Some(Output::Stderr), |
| 430 | - } |
| 431 | - } |
| 432 | - } |
| 433 | - |
| 434 | // context exposed to the runtime of individual steps, also used in other |
| 435 | // parts of the build process. |
| 436 | #[derive(Deserialize, Debug, Clone, Default)] |
| 437 | @@ -49,8 +30,7 @@ pub struct Context { |
| 438 | // An executor runs the step in a process container of some kind. Currently |
| 439 | // the only executor that exists is the Local exector. |
| 440 | pub trait Executor { |
| 441 | - fn execute(&self, step: &Step, context: Context) |
| 442 | - -> Result<(String, String, ExitStatus), Error>; |
| 443 | + fn execute(&self, step: &Step, context: Context) -> Result<(Vec<LogLine>, ExitStatus), Error>; |
| 444 | } |
| 445 | |
| 446 | // build executor that runs with the same permissions as the build server. |
| 447 | @@ -59,47 +39,39 @@ pub struct Local { |
| 448 | pub tee_output: bool, |
| 449 | } |
| 450 | |
| 451 | - fn write_stream( |
| 452 | - mut stream: impl Read, |
| 453 | - filename: &Path, |
| 454 | - output: Option<Output>, |
| 455 | - ) -> std::io::Result<()> { |
| 456 | - let mut file = File::create(filename)?; |
| 457 | - |
| 458 | - let mut buf = [0u8; 1024]; |
| 459 | + fn write_stream(stream: impl Read, filename: &Path, output: Output) -> std::io::Result<()> { |
| 460 | + tracing::info!("Allocating new log file {filename:?}"); |
| 461 | + let mut file = OpenOptions::new() |
| 462 | + .create(true) |
| 463 | + .write(true) |
| 464 | + .truncate(false) |
| 465 | + .open(filename)?; |
| 466 | + let buf_reader = BufReader::new(stream); |
| 467 | + let start = Instant::now(); |
| 468 | + |
| 469 | + let mut lines = buf_reader.lines(); |
| 470 | loop { |
| 471 | - let num_read = stream.read(&mut buf)?; |
| 472 | - if num_read == 0 { |
| 473 | - break; |
| 474 | - } |
| 475 | - |
| 476 | - let buf = &buf[..num_read]; |
| 477 | - |
| 478 | - match output { |
| 479 | - Some(Output::Stdout) => { |
| 480 | - let mut stdout = io::stdout().lock(); |
| 481 | - stdout.write_all(buf).expect("failed to copy stdout"); |
| 482 | - } |
| 483 | - Some(Output::Stderr) => { |
| 484 | - let mut stderr = io::stderr().lock(); |
| 485 | - stderr.write_all(buf).expect("failed to copy stderr"); |
| 486 | + match lines.next() { |
| 487 | + Some(Ok(line)) => { |
| 488 | + let log_line = LogLine { |
| 489 | + output: output.clone(), |
| 490 | + runtime: Instant::now().duration_since(start), |
| 491 | + line, |
| 492 | + }; |
| 493 | + let line_str = serde_json::ser::to_string(&log_line).unwrap(); |
| 494 | + file.write_all(line_str.as_bytes())?; |
| 495 | + file.write_all("\n".as_bytes())?; |
| 496 | } |
| 497 | - None => {} |
| 498 | - }; |
| 499 | - |
| 500 | - file.write_all(buf)?; |
| 501 | + Some(Err(e)) => return Err(e), |
| 502 | + None => break, |
| 503 | + } |
| 504 | } |
| 505 | - |
| 506 | Ok(()) |
| 507 | } |
| 508 | |
| 509 | impl Executor for Local { |
| 510 | // TODO: once parallelism is enabled this should be converted to tokio |
| 511 | - fn execute( |
| 512 | - &self, |
| 513 | - step: &Step, |
| 514 | - context: Context, |
| 515 | - ) -> Result<(String, String, ExitStatus), Error> { |
| 516 | + fn execute(&self, step: &Step, context: Context) -> Result<(Vec<LogLine>, ExitStatus), Error> { |
| 517 | let work_dir = self |
| 518 | .temp_dir |
| 519 | .as_path() |
| 520 | @@ -137,6 +109,7 @@ impl Executor for Local { |
| 521 | (String::from("AYLLU_REPO_URL"), context.repo_url.clone()), |
| 522 | ]); |
| 523 | info!("starting command: {} {}", step.shell, step.input); |
| 524 | + |
| 525 | // TODO: update this to write to files / support "tee" style logging |
| 526 | // when debugging output is enabled. |
| 527 | let mut proc = Command::new(step.shell.clone()) |
| 528 | @@ -148,39 +121,40 @@ impl Executor for Local { |
| 529 | .stderr(Stdio::piped()) |
| 530 | .spawn()?; |
| 531 | |
| 532 | - let stdout_log_path = log_dir.clone().join("stdout.log"); |
| 533 | - info!("writing stdout to: {:?}", stdout_log_path); |
| 534 | let proc_stdout = take(&mut proc.stdout).expect("cannot get stdout"); |
| 535 | - let tee_output = self.tee_output; |
| 536 | + let stdout_log_path = log_dir.clone().join("stdout.json"); |
| 537 | let stdout_thd = thread::spawn(move || { |
| 538 | - write_stream( |
| 539 | - proc_stdout, |
| 540 | - stdout_log_path.as_path(), |
| 541 | - Output::Stdout.enabled(tee_output), |
| 542 | - ) |
| 543 | - .unwrap(); |
| 544 | + write_stream(proc_stdout, stdout_log_path.as_path(), Output::Stdout).unwrap(); |
| 545 | }); |
| 546 | - let stderr_log_path = log_dir.clone().join("stderr.log"); |
| 547 | - info!("writing stderr to: {:?}", stderr_log_path); |
| 548 | let proc_stderr = take(&mut proc.stderr).expect("cannot get stderr"); |
| 549 | + let stderr_log_path = log_dir.clone().join("stderr.json"); |
| 550 | let stderr_thd = thread::spawn(move || { |
| 551 | - write_stream( |
| 552 | - proc_stderr, |
| 553 | - stderr_log_path.as_path(), |
| 554 | - Output::Stderr.enabled(tee_output), |
| 555 | - ) |
| 556 | - .unwrap(); |
| 557 | + write_stream(proc_stderr, stderr_log_path.as_path(), Output::Stderr).unwrap(); |
| 558 | }); |
| 559 | |
| 560 | stdout_thd.join().unwrap(); |
| 561 | stderr_thd.join().unwrap(); |
| 562 | |
| 563 | - let stdout_str = read_to_string(Path::new(&log_dir).join("stdout.log"))?; |
| 564 | - let stderr_str = read_to_string(Path::new(&log_dir).join("stderr.log"))?; |
| 565 | - |
| 566 | debug!("waiting for process to complete"); |
| 567 | let status_code = proc.wait().expect("failed to wait on process"); |
| 568 | - Ok((stdout_str, stderr_str, status_code)) |
| 569 | + |
| 570 | + let stdout_content = std::fs::read_to_string(log_dir.join("stdout.json").as_path())?; |
| 571 | + let stderr_content = std::fs::read_to_string(log_dir.join("stderr.json").as_path())?; |
| 572 | + |
| 573 | + let mut log_lines: Vec<LogLine> = stdout_content |
| 574 | + .lines() |
| 575 | + .map(|content| serde_json::de::from_str::<LogLine>(content).unwrap()) |
| 576 | + .collect(); |
| 577 | + |
| 578 | + let stderr_lines: Vec<LogLine> = stderr_content |
| 579 | + .lines() |
| 580 | + .map(|content| serde_json::de::from_str::<LogLine>(content).unwrap()) |
| 581 | + .collect(); |
| 582 | + |
| 583 | + log_lines.extend(stderr_lines); |
| 584 | + log_lines.sort_by(|first, second| first.runtime.cmp(&second.runtime)); |
| 585 | + |
| 586 | + Ok((log_lines, status_code)) |
| 587 | } |
| 588 | } |
| 589 | |
| 590 | diff --git a/ayllu/src/web2/routes/build.rs b/ayllu/src/web2/routes/build.rs |
| 591 | index b13fe2d..57ac3f4 100644 |
| 592 | --- a/ayllu/src/web2/routes/build.rs |
| 593 | +++ b/ayllu/src/web2/routes/build.rs |
| 594 | @@ -12,7 +12,21 @@ use crate::web2::middleware::repository::Preamble; |
| 595 | use crate::web2::template::Base; |
| 596 | use crate::web2::template::{components, filters}; |
| 597 | |
| 598 | - use ayllu_database::build::{BuildExt, Manifest, ManifestItem, Step, Workflow}; |
| 599 | + use ayllu_database::build::{BuildExt, LogLine, Manifest, ManifestItem, Step, Workflow}; |
| 600 | + |
| 601 | + fn concat_log_lines(lines: &[LogLine]) -> String { |
| 602 | + let n_lines = lines.len(); |
| 603 | + lines |
| 604 | + .iter() |
| 605 | + .enumerate() |
| 606 | + .fold(String::default(), |mut text, (i, line)| { |
| 607 | + text.push_str(&line.line); |
| 608 | + if i < n_lines { |
| 609 | + text.push('\n'); |
| 610 | + } |
| 611 | + text |
| 612 | + }) |
| 613 | + } |
| 614 | |
| 615 | #[derive(Deserialize)] |
| 616 | pub struct BuildParams { |
| 617 | @@ -32,7 +46,7 @@ struct BuildTemplate<'a> { |
| 618 | pub dag_svg: String, |
| 619 | pub current_workflow: i64, |
| 620 | pub current_step: Option<(i64, Step)>, |
| 621 | - pub step_output: Option<String>, |
| 622 | + pub step_output: Option<&'a [LogLine]>, |
| 623 | } |
| 624 | |
| 625 | pub async fn raw( |
| 626 | @@ -45,14 +59,9 @@ pub async fn raw( |
| 627 | }): Path<BuildParams>, |
| 628 | ) -> Result<String, Error> { |
| 629 | let db = builds.db()?; |
| 630 | - let step = db.read_step(step_id.unwrap()).await?; |
| 631 | - // FIXME: We need to change the db representaiton of log lines and merge |
| 632 | - // them together here with stdout/stderr highlighting. For now I concat |
| 633 | - // them together which is excessively stupid / annoying. |
| 634 | - let stdout = step.stdout.as_ref().cloned().unwrap_or_default(); |
| 635 | - let stderr = step.stderr.as_ref().cloned().unwrap_or_default(); |
| 636 | - let combined = format!("{stdout}\n{stderr}"); |
| 637 | - Ok(combined) |
| 638 | + // let step = db.read_step(step_id.unwrap()).await?; |
| 639 | + let lines = db.read_log_lines(step_id.unwrap()).await?; |
| 640 | + Ok(concat_log_lines(lines.as_slice())) |
| 641 | } |
| 642 | |
| 643 | pub async fn build( |
| 644 | @@ -87,11 +96,10 @@ pub async fn build( |
| 645 | // FIXME: We need to change the db representaiton of log lines and merge |
| 646 | // them together here with stdout/stderr highlighting. For now I concat |
| 647 | // them together which is excessively stupid / annoying. |
| 648 | - let stdout = step.stdout.as_ref().cloned().unwrap_or_default(); |
| 649 | - let stderr = step.stderr.as_ref().cloned().unwrap_or_default(); |
| 650 | - let combined = format!("{stdout}\n{stderr}"); |
| 651 | - let (_, highlighted) = highlighter.highlight(&combined, None, None, None, true); |
| 652 | - Some(highlighted) |
| 653 | + Some(db.read_log_lines(step.id).await?) |
| 654 | + // let (_, highlighted) = |
| 655 | + // highlighter.highlight(&concat_log_lines(lines.as_slice()), None, None, None, true); |
| 656 | + // Some(highlighted) |
| 657 | } |
| 658 | None => None, |
| 659 | }; |
| 660 | @@ -111,7 +119,7 @@ pub async fn build( |
| 661 | dag_svg, |
| 662 | current_workflow: workflow_id.unwrap_or(-1), |
| 663 | current_step, |
| 664 | - step_output, |
| 665 | + step_output: step_output.as_deref(), |
| 666 | } |
| 667 | .render()?, |
| 668 | )) |
| 669 | diff --git a/ayllu/src/web2/template.rs b/ayllu/src/web2/template.rs |
| 670 | index 520cc1b..d9267d2 100644 |
| 671 | --- a/ayllu/src/web2/template.rs |
| 672 | +++ b/ayllu/src/web2/template.rs |
| 673 | @@ -87,6 +87,7 @@ impl Default for Base { |
| 674 | |
| 675 | pub mod components { |
| 676 | use askama::{Template, filters::Safe}; |
| 677 | + use ayllu_database::build::{LogLine, Output}; |
| 678 | |
| 679 | use crate::web2::template::DEFAULT_RSS_ICON; |
| 680 | |
| 681 | @@ -94,6 +95,37 @@ pub mod components { |
| 682 | #[template( |
| 683 | ext = "html", |
| 684 | source = r#" |
| 685 | + <section id="log-viewer"> |
| 686 | + <table class="code"> |
| 687 | + {%- for entry in lines -%} |
| 688 | + <tr> |
| 689 | + <td class="line-number">{{loop.index}} </td> |
| 690 | + {%- match entry.output -%} |
| 691 | + {% when Output::Stderr %} |
| 692 | + <td class="negative"> |
| 693 | + {% else %} |
| 694 | + <td> |
| 695 | + {%- endmatch -%} |
| 696 | + {{ entry.line }} |
| 697 | + </td> |
| 698 | + </tr> |
| 699 | + {% endfor -%} |
| 700 | + </table> |
| 701 | + </section> |
| 702 | + "# |
| 703 | + )] |
| 704 | + struct LogViewer<'a> { |
| 705 | + lines: &'a [LogLine], |
| 706 | + } |
| 707 | + |
| 708 | + pub fn log_viewer(lines: &[LogLine]) -> Safe<String> { |
| 709 | + Safe(LogViewer { lines }.render().unwrap()) |
| 710 | + } |
| 711 | + |
| 712 | + #[derive(Template)] |
| 713 | + #[template( |
| 714 | + ext = "html", |
| 715 | + source = r#" |
| 716 | <section class="rss-links"> |
| 717 | <section class="rss-icon"> |
| 718 | {{ feed_icon }} |
| 719 | diff --git a/ayllu/templates/build.html b/ayllu/templates/build.html |
| 720 | index d3ea142..943da12 100644 |
| 721 | --- a/ayllu/templates/build.html |
| 722 | +++ b/ayllu/templates/build.html |
| 723 | @@ -76,9 +76,9 @@ |
| 724 | </tbody> |
| 725 | </table> |
| 726 | |
| 727 | - {%- if let Some(step_output) = step_output -%} |
| 728 | + {%- if let Some(log_lines) = step_output -%} |
| 729 | <section id="code-viewer" class="scrollable"> |
| 730 | - {{ step_output | safe}} |
| 731 | + {{ components::log_viewer(log_lines)| safe}} |
| 732 | </section> |
| 733 | {%- endif -%} |
| 734 | |
| 735 | diff --git a/crates/database/queries/steps_list.sql b/crates/database/queries/steps_list.sql |
| 736 | index 4ab824b..f3f22d8 100644 |
| 737 | --- a/crates/database/queries/steps_list.sql |
| 738 | +++ b/crates/database/queries/steps_list.sql |
| 739 | @@ -2,8 +2,6 @@ SELECT id, |
| 740 | name, |
| 741 | shell, |
| 742 | input, |
| 743 | - stdout, |
| 744 | - stderr, |
| 745 | started_at, |
| 746 | finished_at, |
| 747 | exit_code |
| 748 | diff --git a/crates/database/queries/steps_read.sql b/crates/database/queries/steps_read.sql |
| 749 | index 2b5c4e3..9228c4d 100644 |
| 750 | --- a/crates/database/queries/steps_read.sql |
| 751 | +++ b/crates/database/queries/steps_read.sql |
| 752 | @@ -3,8 +3,6 @@ SELECT |
| 753 | name, |
| 754 | shell, |
| 755 | input, |
| 756 | - stdout, |
| 757 | - stderr, |
| 758 | started_at, |
| 759 | finished_at, |
| 760 | exit_code |
| 761 | diff --git a/crates/database/queries/steps_read_log_lines.sql b/crates/database/queries/steps_read_log_lines.sql |
| 762 | new file mode 100644 |
| 763 | index 0000000..5515e25 |
| 764 | --- /dev/null |
| 765 | +++ b/crates/database/queries/steps_read_log_lines.sql |
| 766 | @@ -0,0 +1 @@ |
| 767 | + SELECT runtime, stream, line FROM logs WHERE step_id = ? |
| 768 | diff --git a/crates/database/queries/steps_update_finish.sql b/crates/database/queries/steps_update_finish.sql |
| 769 | index d80c0e3..5faaf81 100644 |
| 770 | --- a/crates/database/queries/steps_update_finish.sql |
| 771 | +++ b/crates/database/queries/steps_update_finish.sql |
| 772 | @@ -1,8 +1,6 @@ |
| 773 | UPDATE steps |
| 774 | SET |
| 775 | finished_at = UNIXEPOCH(), |
| 776 | - stdout = ?, |
| 777 | - stderr = ?, |
| 778 | exit_code = ? |
| 779 | WHERE |
| 780 | id = ? |
| 781 | diff --git a/crates/database/queries/steps_update_log_line.sql b/crates/database/queries/steps_update_log_line.sql |
| 782 | new file mode 100644 |
| 783 | index 0000000..8e3bae0 |
| 784 | --- /dev/null |
| 785 | +++ b/crates/database/queries/steps_update_log_line.sql |
| 786 | @@ -0,0 +1,4 @@ |
| 787 | + INSERT INTO logs |
| 788 | + (step_id, runtime, stream, line) |
| 789 | + VALUES |
| 790 | + (?, ?, ?, ?) |
| 791 | diff --git a/crates/database/src/build.rs b/crates/database/src/build.rs |
| 792 | index 6168a82..e193a78 100644 |
| 793 | --- a/crates/database/src/build.rs |
| 794 | +++ b/crates/database/src/build.rs |
| 795 | @@ -1,4 +1,4 @@ |
| 796 | - use std::collections::HashMap; |
| 797 | + use std::{collections::HashMap, fmt::Display, time::Duration}; |
| 798 | |
| 799 | use async_trait::async_trait; |
| 800 | use serde::{Deserialize, Serialize}; |
| 801 | @@ -93,8 +93,6 @@ pub struct Step { |
| 802 | pub name: String, |
| 803 | pub shell: String, |
| 804 | pub input: String, |
| 805 | - pub stdout: Option<String>, |
| 806 | - pub stderr: Option<String>, |
| 807 | pub started_at: Option<i64>, |
| 808 | pub finished_at: Option<i64>, |
| 809 | pub exit_code: Option<i64>, |
| 810 | @@ -114,6 +112,29 @@ pub enum Identifier { |
| 811 | Id(i64), |
| 812 | } |
| 813 | |
| 814 | + #[derive(Serialize, Deserialize, Debug, Clone)] |
| 815 | + pub struct LogLine { |
| 816 | + pub output: Output, |
| 817 | + pub runtime: Duration, |
| 818 | + pub line: String, |
| 819 | + } |
| 820 | + |
| 821 | + /// standard stream when logging output |
| 822 | + #[derive(Serialize, Deserialize, Debug, Clone)] |
| 823 | + pub enum Output { |
| 824 | + Stdout, |
| 825 | + Stderr, |
| 826 | + } |
| 827 | + |
| 828 | + impl Display for Output { |
| 829 | + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { |
| 830 | + match self { |
| 831 | + Output::Stdout => write!(f, "stdout"), |
| 832 | + Output::Stderr => write!(f, "stderr"), |
| 833 | + } |
| 834 | + } |
| 835 | + } |
| 836 | + |
| 837 | #[async_trait] |
| 838 | pub trait BuildTx { |
| 839 | async fn create_manifest( |
| 840 | @@ -233,14 +254,13 @@ pub trait BuildExt { |
| 841 | async fn update_step_finish( |
| 842 | &self, |
| 843 | step_id: i64, |
| 844 | - stdout: &str, |
| 845 | - stderr: &str, |
| 846 | + lines: &[LogLine], |
| 847 | exit_code: i8, |
| 848 | ) -> Result<(), Error>; |
| 849 | + async fn read_log_lines(&self, step_id: i64) -> Result<Vec<LogLine>, Error>; |
| 850 | |
| 851 | // misc |
| 852 | async fn create_sample(&self, manifest_id: i64, sample: Sample) -> Result<i64, Error>; |
| 853 | - |
| 854 | async fn read_dag(&self, manifest_id: i64) -> Result<Dag, Error>; |
| 855 | } |
| 856 | |
| 857 | @@ -516,22 +536,50 @@ impl BuildExt for Database { |
| 858 | async fn update_step_finish( |
| 859 | &self, |
| 860 | step_id: i64, |
| 861 | - stdout: &str, |
| 862 | - stderr: &str, |
| 863 | + logs: &[LogLine], |
| 864 | exit_code: i8, |
| 865 | ) -> Result<(), Error> { |
| 866 | - sqlx::query_file!( |
| 867 | - "queries/steps_update_finish.sql", |
| 868 | - stdout, |
| 869 | - stderr, |
| 870 | - exit_code, |
| 871 | - step_id, |
| 872 | - ) |
| 873 | - .execute(&self.pool) |
| 874 | - .await?; |
| 875 | + sqlx::query_file!("queries/steps_update_finish.sql", exit_code, step_id) |
| 876 | + .execute(&self.pool) |
| 877 | + .await?; |
| 878 | + for log_line in logs { |
| 879 | + let runtime_ms = log_line.runtime.as_millis() as i64; |
| 880 | + let output = log_line.output.to_string(); |
| 881 | + let line = log_line.line.clone(); |
| 882 | + sqlx::query_file!( |
| 883 | + "queries/steps_update_log_line.sql", |
| 884 | + step_id, |
| 885 | + runtime_ms, |
| 886 | + output, |
| 887 | + line, |
| 888 | + ) |
| 889 | + .execute(&self.pool) |
| 890 | + .await?; |
| 891 | + } |
| 892 | + |
| 893 | Ok(()) |
| 894 | } |
| 895 | |
| 896 | + async fn read_log_lines(&self, step_id: i64) -> Result<Vec<LogLine>, Error> { |
| 897 | + let records = sqlx::query_file!("queries/steps_read_log_lines.sql", step_id) |
| 898 | + .fetch_all(&self.pool) |
| 899 | + .await?; |
| 900 | + Ok(records |
| 901 | + .into_iter() |
| 902 | + .map(|record| LogLine { |
| 903 | + output: match record.stream.as_str() { |
| 904 | + "stdout" => Output::Stdout, |
| 905 | + "stderr" => Output::Stderr, |
| 906 | + other => unimplemented!("Unknown stream: {other}"), |
| 907 | + }, |
| 908 | + runtime: Duration::from_millis(record.runtime as u64), |
| 909 | + line: record.line.unwrap_or_default(), |
| 910 | + }) |
| 911 | + .collect()) |
| 912 | + } |
| 913 | + |
| 914 | + // misc |
| 915 | + |
| 916 | async fn create_sample(&self, manifest_id: i64, sample: Sample) -> Result<i64, Error> { |
| 917 | let ret = sqlx::query_file!( |
| 918 | "queries/samples_create.sql", |
| 919 | diff --git a/migrations/20231204194038_init.sql b/migrations/20231204194038_init.sql |
| 920 | index 2882f7e..4528e18 100644 |
| 921 | --- a/migrations/20231204194038_init.sql |
| 922 | +++ b/migrations/20231204194038_init.sql |
| 923 | @@ -30,6 +30,14 @@ CREATE TABLE steps ( |
| 924 | exit_code INTEGER CHECK (exit_code <= 255) |
| 925 | ) STRICT ; |
| 926 | |
| 927 | + CREATE TABLE logs ( |
| 928 | + id INTEGER PRIMARY KEY NOT NULL, |
| 929 | + step_id INTEGER REFERENCES steps(id) ON DELETE CASCADE NOT NULL, |
| 930 | + runtime INTEGER NOT NULL, |
| 931 | + stream TEXT NOT NULL CHECK (stream IN ('stdout', 'stderr', 'stdin')), |
| 932 | + line TEXT |
| 933 | + ) STRICT ; |
| 934 | + |
| 935 | CREATE TABLE steps_env_vars ( |
| 936 | id INTEGER PRIMARY KEY NOT NULL, |
| 937 | step_id INTEGER REFERENCES steps(id) ON DELETE CASCADE NOT NULL, |