Compare commits: c1a5934215 ... master
31 Commits

060a8dfb3b
3d7ebcf9bc
51f10b7944
9ed6b48806
a867809cb8
053071f4be
eccaa06d98
ee9377d6da
7da6aa8ae9
a5553d75f4
fe8a55b4c1
0a7e6873a3
1a61701c53
f5b47c4e74
8728867cc3
bb64b87752
4ad0b7d2ff
700b2a22cc
97caaebc09
72eb29d766
56d0628ece
4884d551e2
b37e61a223
c06091d576
fa7b537106
4ca5ed4d7c
89185f5016
b57618889e
c2f1255406
9e63b5b1dd
1a9f052396
277 pttodoest/Cargo.lock (generated)
@@ -2,6 +2,24 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "android_system_properties"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.21"
|
||||
@@ -52,12 +70,53 @@ dependencies = [
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "35900b6c8d709fb1d854671ae27aeaa9eec2f8b01b364e1619a40da3e6fe2afe"
|
||||
dependencies = [
|
||||
"find-msvc-tools",
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
|
||||
dependencies = [
|
||||
"iana-time-zone",
|
||||
"js-sys",
|
||||
"num-traits",
|
||||
"wasm-bindgen",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.51"
|
||||
@@ -104,6 +163,21 @@ version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation-sys"
|
||||
version = "0.8.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
|
||||
|
||||
[[package]]
|
||||
name = "cron-parser"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "702858ce99daf23d8822fb22ec363b641b4bdcd9704182211fc113b01870f6de"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.2"
|
||||
@@ -120,6 +194,12 @@ dependencies = [
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "find-msvc-tools"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
|
||||
|
||||
[[package]]
|
||||
name = "fuchsia-cprng"
|
||||
version = "0.1.1"
|
||||
@@ -148,6 +228,30 @@ version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone"
|
||||
version = "0.1.64"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
|
||||
dependencies = [
|
||||
"android_system_properties",
|
||||
"core-foundation-sys",
|
||||
"iana-time-zone-haiku",
|
||||
"js-sys",
|
||||
"log",
|
||||
"wasm-bindgen",
|
||||
"windows-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone-haiku"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
|
||||
dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.12.0"
|
||||
@@ -170,6 +274,16 @@ version = "1.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.82"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "json-patch"
|
||||
version = "4.1.0"
|
||||
@@ -204,12 +318,33 @@ version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
|
||||
|
||||
[[package]]
|
||||
name = "once_cell_polyfill"
|
||||
version = "1.70.2"
|
||||
@@ -229,10 +364,13 @@ dependencies = [
|
||||
name = "pttodoest"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"clap",
|
||||
"cron-parser",
|
||||
"gethostname",
|
||||
"json-patch",
|
||||
"jsonptr",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
@@ -285,6 +423,35 @@ dependencies = [
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.12.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
|
||||
|
||||
[[package]]
|
||||
name = "remove_dir_all"
|
||||
version = "0.5.3"
|
||||
@@ -307,6 +474,12 @@ dependencies = [
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.20"
|
||||
@@ -369,6 +542,12 @@ dependencies = [
|
||||
"unsafe-libyaml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
@@ -434,6 +613,51 @@ version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.105"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"rustversion",
|
||||
"wasm-bindgen-macro",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.105"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.105"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.105"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
@@ -456,12 +680,65 @@ version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.62.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
|
||||
dependencies = [
|
||||
"windows-implement",
|
||||
"windows-interface",
|
||||
"windows-link",
|
||||
"windows-result",
|
||||
"windows-strings",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-implement"
|
||||
version = "0.60.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-interface"
|
||||
version = "0.59.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
|
||||
|
||||
[[package]]
|
||||
name = "windows-result"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-strings"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
pttodoest/Cargo.toml
@@ -4,10 +4,13 @@ version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.42"
clap = { version = "4.5.51", features = ["derive"] }
cron-parser = "0.11.0"
gethostname = "1.1.0"
json-patch = "4.1.0"
jsonptr = "0.7.1"
regex = "1.12.2"
serde = { version = "1.0.228", features = ["serde_derive"] }
serde_json = "1.0.145"
serde_yaml = "0.9.34"

@@ -8,35 +8,33 @@ fn main() {
|
||||
let files = flags.files().expect("failed to files");
|
||||
|
||||
if !flags.dry_run {
|
||||
for file in files.files.iter() {
|
||||
file.stage_new_persisted()
|
||||
.expect("failed to stage new log files");
|
||||
file.persist_stage()
|
||||
.expect("failed to persist staged changes to log file");
|
||||
file.stage_persisted().expect("failed to stage log files");
|
||||
}
|
||||
files.reconcile().expect("failed to reconcile");
|
||||
|
||||
if let Some(add) = flags.add {
|
||||
let patch: json_patch::PatchOperation =
|
||||
json_patch::PatchOperation::Add(json_patch::AddOperation {
|
||||
path: jsonptr::PointerBuf::parse("/-").expect("cannot create path to /-"),
|
||||
value: serde_json::json!(add),
|
||||
});
|
||||
let task = match flags.add_schedule.clone() {
|
||||
None => Task(serde_yaml::Value::String(add)),
|
||||
Some(add_schedule) => {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), add_schedule.into());
|
||||
m.insert("do".into(), add.into());
|
||||
Task(serde_yaml::Value::Mapping(m))
|
||||
}
|
||||
};
|
||||
let now = Delta::now_time();
|
||||
files.files[0]
|
||||
.append(Delta::now(patch))
|
||||
.append(match task.next_due(now.clone()) {
|
||||
None => Delta::add(task),
|
||||
Some(due) => Delta::add_at(task, if due > now { due } else { now }),
|
||||
})
|
||||
.expect("failed to add");
|
||||
if !flags.enqueue_add {
|
||||
files.files[0]
|
||||
.stage_persisted()
|
||||
.expect("failed to stage added");
|
||||
}
|
||||
|
||||
files.reconcile().expect("failed to reconcile");
|
||||
}
|
||||
}
|
||||
|
||||
for file in files.files.iter() {
|
||||
println!(
|
||||
"{} => {}",
|
||||
file.file,
|
||||
"{}",
|
||||
serde_yaml::to_string(&file.events().unwrap().snapshot().unwrap()).unwrap(),
|
||||
);
|
||||
}
|
||||
@@ -60,8 +58,8 @@ struct Flags {
|
||||
#[arg(short = 'd', long = "dry-run", default_value = "false")]
|
||||
dry_run: bool,
|
||||
|
||||
#[arg(short = 'q', long = "enqueue", default_value = "false")]
|
||||
enqueue_add: bool,
|
||||
#[arg(short = 's', long = "add-schedule")]
|
||||
add_schedule: Option<String>,
|
||||
}
|
||||
|
||||
impl Flags {
|
||||
@@ -122,7 +120,7 @@ mod test_flags {
|
||||
add: None,
|
||||
edit: false,
|
||||
dry_run: true,
|
||||
enqueue_add: false,
|
||||
add_schedule: None,
|
||||
};
|
||||
let files = flags.files().expect("failed to files from dir");
|
||||
assert_eq!(1, files.files.len());
|
||||
@@ -143,6 +141,14 @@ impl Files {
|
||||
files: files.into_iter().map(|x| File::new(&x)).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reconcile(&self) -> Result<(), String> {
|
||||
for file in self.files.iter() {
|
||||
file.persist_stage()?;
|
||||
file.stage_persisted()?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -159,8 +165,8 @@ impl File {
|
||||
Events::new(&self.file)
|
||||
}
|
||||
|
||||
pub fn stage_new_persisted(&self) -> Result<(), String> {
|
||||
let events = self.events()?;
|
||||
pub fn persist_stage(&self) -> Result<(), String> {
|
||||
let old_snapshot = self.events()?.last_snapshot();
|
||||
let stage_mod_time = std::fs::metadata(&self.file)
|
||||
.unwrap()
|
||||
.modified()
|
||||
@@ -168,48 +174,44 @@ impl File {
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
let new_persisted: Vec<Delta> = events
|
||||
.0
|
||||
.iter()
|
||||
.filter(|x| x.ts > stage_mod_time)
|
||||
.map(|x| x.clone())
|
||||
.collect();
|
||||
panic!("not impl: apply filtered deltas to stage");
|
||||
Ok(())
|
||||
self.persist_delta_at(old_snapshot, self.stage()?, stage_mod_time)
|
||||
}
|
||||
|
||||
pub fn stage_persisted(&self) -> Result<(), String> {
|
||||
let stage = self.events()?.snapshot()?;
|
||||
let plaintext = serde_yaml::to_string(&stage).unwrap();
|
||||
let persisted_as_snapshot = self.events()?.snapshot()?;
|
||||
if persisted_as_snapshot != self.events()?.last_snapshot() {
|
||||
self.append(Delta::snapshot(persisted_as_snapshot.clone()))?;
|
||||
}
|
||||
let plaintext = serde_yaml::to_string(&persisted_as_snapshot).unwrap();
|
||||
let mut f = std::fs::File::create(&self.file).expect("failed to open file for writing");
|
||||
writeln!(f, "{}", plaintext).expect("failed to write");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn persist_stage(&self) -> Result<(), String> {
|
||||
let persisted = self.events()?.snapshot()?;
|
||||
|
||||
let stage = self.stage()?;
|
||||
|
||||
self.persist_delta(persisted, stage)
|
||||
pub fn persist_delta(&self, before: Vec<Task>, after: Vec<Task>) -> Result<(), String> {
|
||||
self.persist_delta_at(before, after, Delta::now_time())
|
||||
}
|
||||
|
||||
pub fn persist_delta(&self, before: Vec<Task>, after: Vec<Task>) -> Result<(), String> {
|
||||
let before = serde_json::to_string(&before).unwrap();
|
||||
let before = before.as_str();
|
||||
let before: serde_json::Value = serde_json::from_str(&before).unwrap();
|
||||
|
||||
let after = serde_json::to_string(&after).unwrap();
|
||||
let after: serde_json::Value = serde_json::from_str(after.as_str()).unwrap();
|
||||
|
||||
let patches = json_patch::diff(&before, &after);
|
||||
let deltas: Vec<Delta> = patches
|
||||
.iter()
|
||||
.map(|patch| patch.clone())
|
||||
.map(|patch| Delta::now(patch.clone()))
|
||||
.collect();
|
||||
for delta in deltas.iter() {
|
||||
self.append(delta.clone())?;
|
||||
fn persist_delta_at(
|
||||
&self,
|
||||
before: Vec<Task>,
|
||||
after: Vec<Task>,
|
||||
now: u64,
|
||||
) -> Result<(), String> {
|
||||
for before in before.iter() {
|
||||
if !after.contains(before) {
|
||||
self.append(Delta::remove_at(before.clone(), now))?;
|
||||
let now = Delta::now_time();
|
||||
let due = before.must_next_due(now.clone());
|
||||
if due >= now {
|
||||
self.append(Delta::add_at(before.clone(), due))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
for after in after.iter() {
|
||||
if !before.contains(after) {
|
||||
self.append(Delta::add_at(after.clone(), now))?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -305,10 +307,11 @@ mod test_file {
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(2, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "[hello, world]");
|
||||
tests::file_contains(&d, "plain", "hello");
|
||||
tests::file_contains(&d, "plain", "world");
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(2, f.events().unwrap().0.len());
|
||||
assert_eq!(3, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "- hello\n- world");
|
||||
});
|
||||
@@ -322,32 +325,39 @@ mod test_file {
|
||||
&d,
|
||||
".plain.host_a",
|
||||
r#"
|
||||
{"ts":1, "patch":{"op":"replace", "path":"", "value": ["initial"]}}
|
||||
{"ts":3, "patch":{"op":"add", "path":"/-", "value": {"k":"v"}}}
|
||||
{"ts":1, "op":"Add", "task": "initial"}
|
||||
{"ts":3, "op":"Add", "task": {"k":"v"}}
|
||||
{"ts":3, "op":"Snapshot", "task": null, "tasks": ["initial", 1, {"k":"v"}]}
|
||||
"#,
|
||||
);
|
||||
tests::write_file(
|
||||
&d,
|
||||
".plain.host_b",
|
||||
r#"
|
||||
{"ts":2, "patch":{"op":"add", "path":"/-", "value": 1}}
|
||||
{"ts":2, "op":"Add", "task": 1}
|
||||
"#,
|
||||
);
|
||||
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
assert_eq!(3, f.events().unwrap().0.len());
|
||||
assert_eq!(4, f.events().unwrap().0.len());
|
||||
assert_eq!(0, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "[]");
|
||||
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(6, f.events().unwrap().0.len());
|
||||
assert_eq!(7, f.events().unwrap().0.len());
|
||||
assert_eq!(0, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "[]");
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(6, f.events().unwrap().0.len());
|
||||
assert_eq!(0, f.stage().unwrap().len());
|
||||
assert_eq!(
|
||||
0,
|
||||
f.events().unwrap().snapshot().unwrap().len(),
|
||||
"{:?}",
|
||||
f.events().unwrap().snapshot().unwrap(),
|
||||
);
|
||||
assert_eq!(8, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(0, f.stage().unwrap().len(), "{:?}", f.stage().unwrap());
|
||||
tests::file_contains(&d, "plain", "[]");
|
||||
});
|
||||
}
|
||||
@@ -360,21 +370,22 @@ mod test_file {
|
||||
&d,
|
||||
".plain.host_a",
|
||||
r#"
|
||||
{"ts":1, "patch":{"op":"replace", "path":"", "value": ["initial"]}}
|
||||
{"ts":3, "patch":{"op":"add", "path":"/-", "value": {"k":"v"}}}
|
||||
{"ts":1, "op":"Add", "task": "initial"}
|
||||
{"ts":3, "op":"Add", "task": {"k":"v"}}
|
||||
"#,
|
||||
);
|
||||
tests::write_file(
|
||||
&d,
|
||||
".plain.host_b",
|
||||
r#"
|
||||
{"ts":2, "patch":{"op":"add", "path":"/-", "value": 1}}
|
||||
{"ts":2, "op":"Add", "task": 1}
|
||||
{"ts":2, "op":"Snapshot", "task": null, "tasks": ["initial", 1]}
|
||||
"#,
|
||||
);
|
||||
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
assert_eq!(3, f.events().unwrap().0.len());
|
||||
assert_eq!(4, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "- initial\n- 1");
|
||||
|
||||
@@ -384,14 +395,14 @@ mod test_file {
|
||||
tests::file_contains(&d, "plain", "- initial\n- 1");
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(4, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "- initial\n- 1");
|
||||
assert_eq!(5, f.events().unwrap().0.len());
|
||||
assert_eq!(3, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "- initial\n- 1\n- k: v");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stage_new_persisted() {
|
||||
fn test_persist_stage() {
|
||||
tests::with_dir(|d| {
|
||||
tests::write_file(&d, "plain", "- old\n- new");
|
||||
tests::write_file(
|
||||
@@ -399,36 +410,151 @@ mod test_file {
|
||||
".plain.host",
|
||||
format!(
|
||||
r#"
|
||||
{{"ts":{}, "patch":{{"op":"replace", "path":"/0", "value": "enqueued for persistence"}}}}
|
||||
{{"ts":1, "op":"Add", "task": "removed"}}
|
||||
{{"ts":2, "op":"Add", "task": "old"}}
|
||||
{{"ts":2, "op":"Snapshot", "task": null, "tasks": ["removed", "old"]}}
|
||||
{{"ts":{}, "op":"Add", "task": "persisted but not snapshotted"}}
|
||||
{{"ts":{}, "op":"Add", "task": "doesnt exist yet"}}
|
||||
"#,
|
||||
2147483647,
|
||||
).as_str(),
|
||||
Delta::now_time() - 50,
|
||||
Delta::now_time() + 50,
|
||||
)
|
||||
.as_str(),
|
||||
);
|
||||
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
assert_eq!(1, f.events().unwrap().0.len());
|
||||
assert_eq!(5, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "old");
|
||||
tests::file_contains(&d, "plain", "new");
|
||||
|
||||
f.stage_new_persisted().unwrap();
|
||||
tests::file_contains(&d, "plain", "enqueued");
|
||||
tests::file_contains(&d, "plain", "new");
|
||||
assert_eq!(1, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(3, f.events().unwrap().0.len());
|
||||
assert_eq!(
|
||||
7,
|
||||
f.events().unwrap().0.len(),
|
||||
"events: {:?}",
|
||||
f.events().unwrap()
|
||||
);
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "enqueued");
|
||||
tests::file_contains(&d, "plain", "new");
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(3, f.events().unwrap().0.len());
|
||||
assert_eq!(2, f.stage().unwrap().len());
|
||||
tests::file_contains(&d, "plain", "enqueued");
|
||||
assert_eq!(8, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(3, f.stage().unwrap().len(), "{:?}", f.stage().unwrap());
|
||||
tests::file_contains(&d, "plain", "new");
|
||||
tests::file_contains(&d, "plain", "old");
|
||||
tests::file_contains(&d, "plain", "persisted but not snapshotted");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_date_future() {
|
||||
tests::with_dir(|d| {
|
||||
tests::write_file(&d, "plain", "[]");
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "2036-01-02".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
|
||||
f.append(Delta::add(task)).unwrap();
|
||||
assert_eq!(1, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(0, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(1, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(0, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(1, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(0, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_cron_resolve_reschedules() {
|
||||
panic!("not impl");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_duration_resolve_reschedules() {
|
||||
panic!("not impl");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_date_resolve_does_not_reschedule() {
|
||||
panic!("not impl");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_date_future_with_snapshot_between_scheduled_and_fired() {
|
||||
tests::with_dir(|d| {
|
||||
tests::write_file(&d, "plain", "- stage");
|
||||
tests::write_file(
|
||||
&d,
|
||||
".plain.host",
|
||||
format!(
|
||||
r#"
|
||||
{{"ts":3, "op":"Add", "task": "scheduled add for after snapshot"}}
|
||||
{{"ts":2, "op":"Snapshot", "task": null, "tasks": ["removed"]}}
|
||||
"#,
|
||||
)
|
||||
.as_str(),
|
||||
);
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
assert_eq!(2, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(1, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(4, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(1, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(5, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(2, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_schedule_date_past() {
|
||||
tests::with_dir(|d| {
|
||||
tests::write_file(&d, "plain", "[]");
|
||||
let f = File::new(&d.path().join("plain").to_str().unwrap().to_string());
|
||||
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "2006-01-02".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
|
||||
f.append(Delta::add(task)).unwrap();
|
||||
assert_eq!(1, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(0, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
|
||||
f.persist_stage().unwrap();
|
||||
assert_eq!(
|
||||
1,
|
||||
f.events().unwrap().0.len(),
|
||||
"events after 1 add scheduled: {:?}",
|
||||
f.events().unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
1,
|
||||
f.events().unwrap().snapshot().unwrap().len(),
|
||||
"events.snapshot after 1 add scheduled: {:?}",
|
||||
f.events().unwrap().snapshot().unwrap(),
|
||||
);
|
||||
tests::file_contains(&d, "plain", "[]");
|
||||
assert_eq!(
|
||||
0,
|
||||
f.stage().unwrap().len(),
|
||||
"stage after 1 add scheduled: {:?}",
|
||||
f.stage()
|
||||
);
|
||||
|
||||
f.stage_persisted().unwrap();
|
||||
assert_eq!(2, f.events().unwrap().0.len(), "{:?}", f.events().unwrap());
|
||||
assert_eq!(1, f.stage().unwrap().len(), "{:?}", f.stage());
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -436,36 +562,223 @@ mod test_file {
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct Delta {
|
||||
ts: u64,
|
||||
patch: json_patch::PatchOperation,
|
||||
op: Op,
|
||||
task: Task,
|
||||
tasks: Option<Vec<Task>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
enum Op {
|
||||
Add,
|
||||
Remove,
|
||||
Snapshot,
|
||||
}
|
||||
|
||||
impl Delta {
|
||||
pub fn new(patch: json_patch::PatchOperation, ts: u64) -> Delta {
|
||||
pub fn new(ts: u64, op: Op, task: Task) -> Delta {
|
||||
Delta {
|
||||
patch: patch,
|
||||
ts: ts,
|
||||
op: op,
|
||||
task: task,
|
||||
tasks: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn now(patch: json_patch::PatchOperation) -> Delta {
|
||||
Self::new(
|
||||
patch,
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs()
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
)
|
||||
pub fn snapshot(tasks: Vec<Task>) -> Delta {
|
||||
Delta {
|
||||
ts: Self::now_time(),
|
||||
op: Op::Snapshot,
|
||||
task: Task(serde_yaml::Value::Null),
|
||||
tasks: Some(tasks),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add(task: Task) -> Delta {
|
||||
Self::add_at(task, Self::now_time())
|
||||
}
|
||||
|
||||
pub fn add_at(task: Task, at: u64) -> Delta {
|
||||
Self::new(at, Op::Add, task)
|
||||
}
|
||||
|
||||
pub fn remove_at(task: Task, at: u64) -> Delta {
|
||||
Self::new(at, Op::Remove, task)
|
||||
}
|
||||
|
||||
fn now_time() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs()
|
||||
.try_into()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
struct Task(serde_yaml::Value);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
impl Task {
|
||||
pub fn _due(&self, after: u64) -> bool {
|
||||
match self.next_due(after) {
|
||||
Some(ts) => Delta::now_time() > ts,
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn must_next_due(&self, after: u64) -> u64 {
|
||||
self.next_due(after).unwrap_or(1)
|
||||
}
|
||||
|
||||
pub fn next_due(&self, after: u64) -> Option<u64> {
|
||||
match self.schedule() {
|
||||
Some(schedule) => self.parse_schedule_next(schedule, after),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_schedule_next(&self, schedule: String, after: u64) -> Option<u64> {
|
||||
let mut schedule = schedule;
|
||||
|
||||
if regex::Regex::new(r"^[0-9]+h$").unwrap().is_match(&schedule) {
|
||||
let hours = &schedule[..schedule.len() - 1];
|
||||
match hours.parse::<u64>() {
|
||||
Ok(hours) => return Some(after + hours * 60 * 60),
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
|
||||
if regex::Regex::new(r"[0-9]{4}-[0-9]{2}-[0-9]{2}$")
|
||||
.unwrap()
|
||||
.is_match(&schedule)
|
||||
{
|
||||
schedule += "T00";
|
||||
}
|
||||
|
||||
if regex::Regex::new(r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}$")
|
||||
.unwrap()
|
||||
.is_match(&schedule)
|
||||
{
|
||||
let date = schedule.clone() + ":00:00";
|
||||
match chrono::NaiveDateTime::parse_from_str(&date, "%Y-%m-%dT%H:%M:%S") {
|
||||
Ok(datehour) => {
|
||||
let seconds = datehour.format("%s").to_string();
|
||||
match seconds.parse::<u64>() {
|
||||
Ok(n) => return Some(n),
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
Err(msg) => panic!("{}", msg),
|
||||
};
|
||||
}
|
||||
|
||||
if regex::Regex::new(r"^([^ ]+ ){4}[^ ]+$")
|
||||
.unwrap()
|
||||
.is_match(&schedule)
|
||||
{
|
||||
let after = chrono::DateTime::from_timestamp(after as i64, 0).unwrap();
|
||||
if let Ok(next) = cron_parser::parse(&schedule, &after) {
|
||||
let seconds = next.format("%s").to_string();
|
||||
match seconds.parse::<u64>() {
|
||||
Ok(n) => return Some(n),
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn schedule(&self) -> Option<String> {
|
||||
match &self.0 {
|
||||
serde_yaml::Value::Mapping(m) => match m.get("schedule".to_string()) {
|
||||
Some(schedule) => match schedule {
|
||||
serde_yaml::Value::String(s) => Some(s.clone()),
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_task {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_unscheduled() {
|
||||
let task = Task(serde_yaml::Value::String("hello world".to_string()));
|
||||
assert_eq!(None, task.schedule());
|
||||
assert_eq!(1 as u64, task.must_next_due(100));
|
||||
assert!(task._due(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduled_date_before() {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "2006-01-02".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
assert_eq!(Some("2006-01-02".to_string()), task.schedule());
|
||||
assert_eq!(Some(1136160000 as u64), task.next_due(100));
|
||||
assert!(task._due(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduled_date_after() {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "2036-01-02".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
assert_eq!(Some("2036-01-02".to_string()), task.schedule());
|
||||
assert_eq!(Some(2082844800 as u64), task.next_due(100));
|
||||
assert!(!task._due(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduled_hour_after() {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "2036-01-02T16".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
assert_eq!(Some("2036-01-02T16".to_string()), task.schedule());
|
||||
assert_eq!(Some(2082902400 as u64), task.next_due(100));
|
||||
assert!(!task._due(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduled_duration() {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "1h".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
assert_eq!(Some("1h".to_string()), task.schedule());
|
||||
assert_eq!(Some(3700), task.next_due(100));
|
||||
assert!(task._due(100));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduled_cron() {
|
||||
let mut m = serde_yaml::Mapping::new();
|
||||
m.insert("schedule".into(), "* * * * *".into());
|
||||
let task = Task(serde_yaml::Value::Mapping(m));
|
||||
assert_eq!(Some("* * * * *".to_string()), task.schedule());
|
||||
assert_eq!(Some(120 as u64), task.next_due(100));
|
||||
assert!(task._due(100));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Events(Vec<Delta>);
|
||||
|
||||
impl std::fmt::Debug for Events {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut arr = vec![];
|
||||
for i in self.0.iter() {
|
||||
arr.push(format!("{:?}", i.clone()));
|
||||
}
|
||||
write!(f, "[\n {}\n]", arr.join("\n "))
|
||||
}
|
||||
}
|
||||
|
||||
impl Events {
|
||||
pub fn new(file: &String) -> Result<Events, String> {
|
||||
let logs = match std::fs::read_dir(Self::dir(&file)) {
|
||||
@@ -527,21 +840,49 @@ impl Events {
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn last_snapshot(&self) -> Vec<Task> {
|
||||
let reversed_events = {
|
||||
let mut e = self.0.clone();
|
||||
e.reverse();
|
||||
e
|
||||
};
|
||||
for event in reversed_events.iter() {
|
||||
match &event.op {
|
||||
Op::Snapshot => return event.tasks.clone().unwrap(),
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn snapshot(&self) -> Result<Vec<Task>, String> {
|
||||
let mut result = serde_json::json!([]);
|
||||
for event in self.0.iter() {
|
||||
match json_patch::patch(&mut result, vec![event.patch.clone()].as_slice()) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(msg) => Err(format!(
|
||||
"failed to patch {} onto {}: {}",
|
||||
&event.patch, &result, msg
|
||||
)),
|
||||
}?;
|
||||
}
|
||||
match serde_json::from_str(serde_json::to_string(&result).unwrap().as_str()) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(msg) => Err(format!("failed turning patched into events: {}", msg)),
|
||||
let mut result = vec![];
|
||||
for event in self.0.iter().filter(|t| t.ts <= Delta::now_time()) {
|
||||
match &event.op {
|
||||
Op::Add => match event.task.next_due(event.ts) {
|
||||
Some(next_due) => match next_due <= Delta::now_time() {
|
||||
true => result.push(event.task.clone()),
|
||||
false => {}
|
||||
},
|
||||
None => result.push(event.task.clone()),
|
||||
},
|
||||
Op::Remove => {
|
||||
let mut i = (result.len() - 1) as i32;
|
||||
while i >= 0 {
|
||||
if event.task == result[i as usize] {
|
||||
result.remove(i as usize);
|
||||
if i == result.len() as i32 {
|
||||
i -= 1
|
||||
}
|
||||
} else {
|
||||
i -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
Op::Snapshot => result = event.tasks.clone().unwrap(),
|
||||
};
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -549,6 +890,31 @@ impl Events {
|
||||
mod test_events {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_events_op_snapshot() {
|
||||
tests::with_dir(|d| {
|
||||
tests::write_file(&d, "plain", "- who cares");
|
||||
tests::write_file(
|
||||
&d,
|
||||
".plain.some_host",
|
||||
r#"
|
||||
{"ts":1, "op":"Snapshot", "task":"", "tasks":["snapshotted"]}
|
||||
"#,
|
||||
);
|
||||
|
||||
let events =
|
||||
Events::new(&d.path().join("plain").to_str().unwrap().to_string()).unwrap();
|
||||
assert_eq!(1, events.0.len(), "events: {:?}", events);
|
||||
|
||||
let snapshot = events.snapshot().unwrap();
|
||||
assert_eq!(1, snapshot.len());
|
||||
assert_eq!(
|
||||
serde_yaml::Value::String("snapshotted".to_string()),
|
||||
snapshot[0].0
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_events_oplog_to_snapshot_one() {
|
||||
tests::with_dir(|d| {
|
||||
@@ -557,7 +923,7 @@ mod test_events {
|
||||
&d,
|
||||
".plain.some_host",
|
||||
r#"
|
||||
{"ts":1, "patch":{"op":"replace", "path":"", "value":["persisted"]}}
|
||||
{"ts":1, "op":"Add", "task":"persisted"}
|
||||
"#,
|
||||
);
|
||||
|
||||
@@ -582,19 +948,20 @@ mod test_events {
|
||||
&d,
|
||||
".plain.host_a",
|
||||
r#"
|
||||
{"ts":1, "patch":{"op":"replace", "path":"", "value":["persisted"]}}
|
||||
{"ts":3, "patch":{"op":"add", "path":"/-", "value":"persisted 3"}}
|
||||
{"ts":2, "patch":{"op":"add", "path":"/-", "value":"persisted 2"}}
|
||||
{"ts":6, "patch":{"op":"replace", "path":"/4", "value":"persisted 5'"}}
|
||||
{"ts":7, "patch":{"op":"remove", "path":"/3"}}
|
||||
{"ts":1, "op":"Add", "task":"persisted"}
|
||||
{"ts":3, "op":"Add", "task":"persisted 3"}
|
||||
{"ts":2, "op":"Add", "task":"persisted 2"}
|
||||
{"ts":6, "op":"Remove", "task":"persisted 5"}
|
||||
{"ts":6, "op":"Add", "task":"persisted 5'"}
|
||||
{"ts":7, "op":"Remove", "task":"persisted 4"}
|
||||
"#,
|
||||
);
|
||||
tests::write_file(
|
||||
&d,
|
||||
".plain.host_b",
|
||||
r#"
|
||||
{"ts":4, "patch":{"op":"add", "path":"/-", "value":"persisted 4"}}
|
||||
{"ts":5, "patch":{"op":"add", "path":"/-", "value":"persisted 5"}}
|
||||
{"ts":4, "op":"Add", "task":"persisted 4"}
|
||||
{"ts":5, "op":"Add", "task":"persisted 5"}
|
||||
"#,
|
||||
);
|
||||
|
||||
@@ -636,6 +1003,7 @@ mod tests {
|
||||
f.sync_all().unwrap();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn file_contains(d: &tempdir::TempDir, fname: &str, content: &str) {
|
||||
let p = d.path().join(&fname);
|
||||
let file_content = file_content(&p.to_str().unwrap().to_string());
|
||||
|
||||
@@ -1,4 +1,32 @@
|
||||
{"ts":1762884455,"patch":{"op":"replace","path":"","value":["read; https://topicpartition.io/blog/postgres-pubsub-queue-benchmarks","pglogical vs ha\n\n# api.git#breel/keys-620-pglogical-always-set-cr/2-user-survives-cr\n$ mise run pulsegres-new ^logical/toggl\n","drive; VERIFY spoc posts daily summary w/ unresolved","drive; VERIFY spoc refreshes summary w/ thread comment contianing 'refresh'","637; reconcile deploy if replicas wrong; https://github.com/renderinc/api/pull/26540/files","https://linear.app/render-com/issue/KEYS-633/add-3-when-max-connections-overridden-for-3-superuser-connections","https://linear.app/render-com/issue/KEYS-637/billing-resume-should-1-unsuspend-pg-in-cloudsql-2-unsuspend-pg-in-cr","https://linear.app/render-com/issue/KEYS-638/pgoperator-generates-new-ha-patroni-cert-every-reconcile-no-matter","pg; how2partition; https://renderinc.slack.com/archives/C0319NYCSSG/p1756357545556659?thread_ts=1756357467.613369&cid=C0319NYCSSG","pitr; backup purge cronjob for PL types","pg11 pgbackup doesnt write to envsetting mucked env key","incident io; teach spocbotvr to read slacks","userdb to internal; peer packages can use internal as userdb","fcr; cannot pitr because pgbackrest doesnt know wal spans thus pgexporter and friends cant know pitr works","etcd statefulset of 1 (for no random podname, no conflict, k8s ensures pod replace)\npatroni always\n","maher; https://slab.render.com/posts/hopes-and-dreams-blegf8fx#hdsyt-valkey-bundle","maher; shadow lizhi pm loops","maher; get more interviewers","maher; get concrete career and project plans so i can get promo in 2y; no manager to advocate","read; https://trychroma.com/engineering/wal3","read; https://github.com/renderinc/dashboard/pull/8883","read; https://litestream.io/getting-started/","kr\nto del gcloud old key\nie https://console.cloud.google.com/iam-admin/serviceaccounts/details/104206017956912104938/keys?hl=en&project=render-prod\n",{"subtasks":["","pitr\nhttps://slab.render.com/posts/pitr-as-a-service-health-abvnqx11\nmore aggressive alert autotune backup cores\nmore aggressive alert on MOAR backup cores\ncreate alert autotune archive-push cores\ncreate alert MOAR archive-push cores\n","cr; frontend","cr; cli.git","cr; public-api-schema.git; https://github.com/renderinc/public-api-schema/pull/407 STILL NEED EVENTS","cr; website.git","cr; changelog","ops; pgproxy rate limits 50ps 100burst; https://github.com/renderinc/dbproxy/pull/91","2873; no conn patroni if upgradeInProgressWithoutHA; https://github.com/renderinc/api/pull/26328","2733; only EnvSettings; https://github.com/renderinc/api/pull/25322/files","pg18; after cred rotation works, re enable e2e","655; pg18; pub api sch; https://github.com/renderinc/public-api-schema/pull/421","655; pg18; go generate pub api sch; https://github.com/renderinc/api/pull/26694","663; das; show status in /info; https://github.com/renderinc/dashboard/pull/9616","664; pg18; go gen terraform; https://github.com/renderinc/api/pull/26701","664; pg18; ga; push terraform.git#breel/keys-664-pg18","656; pg18; website; https://github.com/renderinc/website/pull/985/files","663; das; note disk cannot decrease even if autoscaled; https://github.com/renderinc/dashboard/pull/9621","pulsegres; pls let me keep my test emails; https://github.com/renderinc/api/pull/26741","pgup; restore view owner; https://github.com/renderinc/api/pull/26814","pgup; resync if missing resync; https://github.com/renderinc/api/pull/26817","pgup; replicas use $RESYNC; 
https://github.com/renderinc/api/pull/26878"],"todo":"blocked"}]}}
|
||||
{"ts":1762885026,"patch":{"op":"add","path":"/-","value":"hi"}}
|
||||
{"ts":1762915959,"patch":{"op":"add","path":"/-","value":"enqueued add"}}
|
||||
{"ts":1762915973,"patch":{"op":"remove","path":"/25"}}
|
||||
{"ts":1762915973,"op":"Add","task":"read; https://topicpartition.io/blog/postgres-pubsub-queue-benchmarks","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"pglogical vs ha\n\n# api.git#breel/keys-620-pglogical-always-set-cr/2-user-survives-cr\n$ mise run pulsegres-new ^logical/toggl\n","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"drive; VERIFY spoc posts daily summary w/ unresolved","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"drive; VERIFY spoc refreshes summary w/ thread comment contianing 'refresh'","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"637; reconcile deploy if replicas wrong; https://github.com/renderinc/api/pull/26540/files","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"https://linear.app/render-com/issue/KEYS-633/add-3-when-max-connections-overridden-for-3-superuser-connections","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"https://linear.app/render-com/issue/KEYS-637/billing-resume-should-1-unsuspend-pg-in-cloudsql-2-unsuspend-pg-in-cr","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"https://linear.app/render-com/issue/KEYS-638/pgoperator-generates-new-ha-patroni-cert-every-reconcile-no-matter","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"pg; how2partition; https://renderinc.slack.com/archives/C0319NYCSSG/p1756357545556659?thread_ts=1756357467.613369&cid=C0319NYCSSG","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"pitr; backup purge cronjob for PL types","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"pg11 pgbackup doesnt write to envsetting mucked env key","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"incident io; teach spocbotvr to read slacks","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"userdb to internal; peer packages can use internal as userdb","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"fcr; cannot pitr because pgbackrest doesnt know wal spans thus pgexporter and friends cant know pitr works","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"etcd statefulset of 1 (for no random podname, no conflict, k8s ensures pod replace)\npatroni always\n","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"maher; https://slab.render.com/posts/hopes-and-dreams-blegf8fx#hdsyt-valkey-bundle","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"maher; shadow lizhi pm loops","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"maher; get more interviewers","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"maher; get concrete career and project plans so i can get promo in 2y; no manager to advocate","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"read; https://trychroma.com/engineering/wal3","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"read; https://github.com/renderinc/dashboard/pull/8883","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"read; https://litestream.io/getting-started/","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"kr\nto del gcloud old key\nie https://console.cloud.google.com/iam-admin/serviceaccounts/details/104206017956912104938/keys?hl=en&project=render-prod\n","tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":{"subtasks":["","pitr\nhttps://slab.render.com/posts/pitr-as-a-service-health-abvnqx11\nmore aggressive alert autotune backup cores\nmore aggressive alert on MOAR backup cores\ncreate alert autotune archive-push cores\ncreate alert MOAR archive-push cores\n","cr; frontend","cr; cli.git","cr; public-api-schema.git; https://github.com/renderinc/public-api-schema/pull/407 STILL NEED EVENTS","cr; website.git","cr; changelog","ops; pgproxy rate limits 50ps 100burst; https://github.com/renderinc/dbproxy/pull/91","2873; no conn patroni if upgradeInProgressWithoutHA; https://github.com/renderinc/api/pull/26328","2733; only EnvSettings; https://github.com/renderinc/api/pull/25322/files","pg18; after cred rotation works, re enable e2e","655; pg18; pub api sch; https://github.com/renderinc/public-api-schema/pull/421","655; pg18; go generate pub api sch; https://github.com/renderinc/api/pull/26694","663; das; show status in /info; https://github.com/renderinc/dashboard/pull/9616","664; pg18; go gen terraform; https://github.com/renderinc/api/pull/26701","664; pg18; ga; push terraform.git#breel/keys-664-pg18","656; pg18; website; https://github.com/renderinc/website/pull/985/files","663; das; note disk cannot decrease even if autoscaled; https://github.com/renderinc/dashboard/pull/9621","pulsegres; pls let me keep my test emails; https://github.com/renderinc/api/pull/26741","pgup; restore view owner; https://github.com/renderinc/api/pull/26814","pgup; resync if missing resync; https://github.com/renderinc/api/pull/26817","pgup; replicas use $RESYNC; https://github.com/renderinc/api/pull/26878"],"todo":"blocked"},"tasks":null}
|
||||
{"ts":1762915973,"op":"Add","task":"hi","tasks":null}
|
||||
{"ts":1764635053,"op":"Snapshot","task":null,"tasks":["read; https://topicpartition.io/blog/postgres-pubsub-queue-benchmarks","pglogical vs ha\n\n# api.git#breel/keys-620-pglogical-always-set-cr/2-user-survives-cr\n$ mise run pulsegres-new ^logical/toggl\n","drive; VERIFY spoc posts daily summary w/ unresolved","drive; VERIFY spoc refreshes summary w/ thread comment contianing 'refresh'","637; reconcile deploy if replicas wrong; https://github.com/renderinc/api/pull/26540/files","https://linear.app/render-com/issue/KEYS-633/add-3-when-max-connections-overridden-for-3-superuser-connections","https://linear.app/render-com/issue/KEYS-637/billing-resume-should-1-unsuspend-pg-in-cloudsql-2-unsuspend-pg-in-cr","https://linear.app/render-com/issue/KEYS-638/pgoperator-generates-new-ha-patroni-cert-every-reconcile-no-matter","pg; how2partition; https://renderinc.slack.com/archives/C0319NYCSSG/p1756357545556659?thread_ts=1756357467.613369&cid=C0319NYCSSG","pitr; backup purge cronjob for PL types","pg11 pgbackup doesnt write to envsetting mucked env key","incident io; teach spocbotvr to read slacks","userdb to internal; peer packages can use internal as userdb","fcr; cannot pitr because pgbackrest doesnt know wal spans thus pgexporter and friends cant know pitr works","etcd statefulset of 1 (for no random podname, no conflict, k8s ensures pod replace)\npatroni always\n","maher; https://slab.render.com/posts/hopes-and-dreams-blegf8fx#hdsyt-valkey-bundle","maher; shadow lizhi pm loops","maher; get more interviewers","maher; get concrete career and project plans so i can get promo in 2y; no manager to advocate","read; https://trychroma.com/engineering/wal3","read; https://github.com/renderinc/dashboard/pull/8883","read; https://litestream.io/getting-started/","kr\nto del gcloud old key\nie https://console.cloud.google.com/iam-admin/serviceaccounts/details/104206017956912104938/keys?hl=en&project=render-prod\n",{"subtasks":["","pitr\nhttps://slab.render.com/posts/pitr-as-a-service-health-abvnqx11\nmore aggressive alert autotune backup cores\nmore aggressive alert on MOAR backup cores\ncreate alert autotune archive-push cores\ncreate alert MOAR archive-push cores\n","cr; frontend","cr; cli.git","cr; public-api-schema.git; https://github.com/renderinc/public-api-schema/pull/407 STILL NEED EVENTS","cr; website.git","cr; changelog","ops; pgproxy rate limits 50ps 100burst; https://github.com/renderinc/dbproxy/pull/91","2873; no conn patroni if upgradeInProgressWithoutHA; https://github.com/renderinc/api/pull/26328","2733; only EnvSettings; https://github.com/renderinc/api/pull/25322/files","pg18; after cred rotation works, re enable e2e","655; pg18; pub api sch; https://github.com/renderinc/public-api-schema/pull/421","655; pg18; go generate pub api sch; https://github.com/renderinc/api/pull/26694","663; das; show status in /info; https://github.com/renderinc/dashboard/pull/9616","664; pg18; go gen terraform; https://github.com/renderinc/api/pull/26701","664; pg18; ga; push terraform.git#breel/keys-664-pg18","656; pg18; website; https://github.com/renderinc/website/pull/985/files","663; das; note disk cannot decrease even if autoscaled; https://github.com/renderinc/dashboard/pull/9621","pulsegres; pls let me keep my test emails; https://github.com/renderinc/api/pull/26741","pgup; restore view owner; https://github.com/renderinc/api/pull/26814","pgup; resync if missing resync; https://github.com/renderinc/api/pull/26817","pgup; replicas use $RESYNC; 
https://github.com/renderinc/api/pull/26878"],"todo":"blocked"},"hi"]}
|
||||
{"ts":1764636274,"op":"Add","task":{"schedule":"2026-01-01","todo":"not yet due"},"tasks":null}
|
||||
{"ts":1764721753,"op":"Add","task":"just_add","tasks":null}
|
||||
{"ts":1764721753,"op":"Snapshot","task":null,"tasks":["read; https://topicpartition.io/blog/postgres-pubsub-queue-benchmarks","pglogical vs ha\n\n# api.git#breel/keys-620-pglogical-always-set-cr/2-user-survives-cr\n$ mise run pulsegres-new ^logical/toggl\n","drive; VERIFY spoc posts daily summary w/ unresolved","drive; VERIFY spoc refreshes summary w/ thread comment contianing 'refresh'","637; reconcile deploy if replicas wrong; https://github.com/renderinc/api/pull/26540/files","https://linear.app/render-com/issue/KEYS-633/add-3-when-max-connections-overridden-for-3-superuser-connections","https://linear.app/render-com/issue/KEYS-637/billing-resume-should-1-unsuspend-pg-in-cloudsql-2-unsuspend-pg-in-cr","https://linear.app/render-com/issue/KEYS-638/pgoperator-generates-new-ha-patroni-cert-every-reconcile-no-matter","pg; how2partition; https://renderinc.slack.com/archives/C0319NYCSSG/p1756357545556659?thread_ts=1756357467.613369&cid=C0319NYCSSG","pitr; backup purge cronjob for PL types","pg11 pgbackup doesnt write to envsetting mucked env key","incident io; teach spocbotvr to read slacks","userdb to internal; peer packages can use internal as userdb","fcr; cannot pitr because pgbackrest doesnt know wal spans thus pgexporter and friends cant know pitr works","etcd statefulset of 1 (for no random podname, no conflict, k8s ensures pod replace)\npatroni always\n","maher; https://slab.render.com/posts/hopes-and-dreams-blegf8fx#hdsyt-valkey-bundle","maher; shadow lizhi pm loops","maher; get more interviewers","maher; get concrete career and project plans so i can get promo in 2y; no manager to advocate","read; https://trychroma.com/engineering/wal3","read; https://github.com/renderinc/dashboard/pull/8883","read; https://litestream.io/getting-started/","kr\nto del gcloud old key\nie https://console.cloud.google.com/iam-admin/serviceaccounts/details/104206017956912104938/keys?hl=en&project=render-prod\n",{"subtasks":["","pitr\nhttps://slab.render.com/posts/pitr-as-a-service-health-abvnqx11\nmore aggressive alert autotune backup cores\nmore aggressive alert on MOAR backup cores\ncreate alert autotune archive-push cores\ncreate alert MOAR archive-push cores\n","cr; frontend","cr; cli.git","cr; public-api-schema.git; https://github.com/renderinc/public-api-schema/pull/407 STILL NEED EVENTS","cr; website.git","cr; changelog","ops; pgproxy rate limits 50ps 100burst; https://github.com/renderinc/dbproxy/pull/91","2873; no conn patroni if upgradeInProgressWithoutHA; https://github.com/renderinc/api/pull/26328","2733; only EnvSettings; https://github.com/renderinc/api/pull/25322/files","pg18; after cred rotation works, re enable e2e","655; pg18; pub api sch; https://github.com/renderinc/public-api-schema/pull/421","655; pg18; go generate pub api sch; https://github.com/renderinc/api/pull/26694","663; das; show status in /info; https://github.com/renderinc/dashboard/pull/9616","664; pg18; go gen terraform; https://github.com/renderinc/api/pull/26701","664; pg18; ga; push terraform.git#breel/keys-664-pg18","656; pg18; website; https://github.com/renderinc/website/pull/985/files","663; das; note disk cannot decrease even if autoscaled; https://github.com/renderinc/dashboard/pull/9621","pulsegres; pls let me keep my test emails; https://github.com/renderinc/api/pull/26741","pgup; restore view owner; https://github.com/renderinc/api/pull/26814","pgup; resync if missing resync; https://github.com/renderinc/api/pull/26817","pgup; replicas use $RESYNC; 
https://github.com/renderinc/api/pull/26878"],"todo":"blocked"},"hi","just_add"]}
|
||||
{"ts":1764721753,"op":"Add","task":{"schedule":"2000-01-01","do":"add_past"},"tasks":null}
|
||||
{"ts":1764721753,"op":"Snapshot","task":null,"tasks":["read; https://topicpartition.io/blog/postgres-pubsub-queue-benchmarks","pglogical vs ha\n\n# api.git#breel/keys-620-pglogical-always-set-cr/2-user-survives-cr\n$ mise run pulsegres-new ^logical/toggl\n","drive; VERIFY spoc posts daily summary w/ unresolved","drive; VERIFY spoc refreshes summary w/ thread comment contianing 'refresh'","637; reconcile deploy if replicas wrong; https://github.com/renderinc/api/pull/26540/files","https://linear.app/render-com/issue/KEYS-633/add-3-when-max-connections-overridden-for-3-superuser-connections","https://linear.app/render-com/issue/KEYS-637/billing-resume-should-1-unsuspend-pg-in-cloudsql-2-unsuspend-pg-in-cr","https://linear.app/render-com/issue/KEYS-638/pgoperator-generates-new-ha-patroni-cert-every-reconcile-no-matter","pg; how2partition; https://renderinc.slack.com/archives/C0319NYCSSG/p1756357545556659?thread_ts=1756357467.613369&cid=C0319NYCSSG","pitr; backup purge cronjob for PL types","pg11 pgbackup doesnt write to envsetting mucked env key","incident io; teach spocbotvr to read slacks","userdb to internal; peer packages can use internal as userdb","fcr; cannot pitr because pgbackrest doesnt know wal spans thus pgexporter and friends cant know pitr works","etcd statefulset of 1 (for no random podname, no conflict, k8s ensures pod replace)\npatroni always\n","maher; https://slab.render.com/posts/hopes-and-dreams-blegf8fx#hdsyt-valkey-bundle","maher; shadow lizhi pm loops","maher; get more interviewers","maher; get concrete career and project plans so i can get promo in 2y; no manager to advocate","read; https://trychroma.com/engineering/wal3","read; https://github.com/renderinc/dashboard/pull/8883","read; https://litestream.io/getting-started/","kr\nto del gcloud old key\nie https://console.cloud.google.com/iam-admin/serviceaccounts/details/104206017956912104938/keys?hl=en&project=render-prod\n",{"subtasks":["","pitr\nhttps://slab.render.com/posts/pitr-as-a-service-health-abvnqx11\nmore aggressive alert autotune backup cores\nmore aggressive alert on MOAR backup cores\ncreate alert autotune archive-push cores\ncreate alert MOAR archive-push cores\n","cr; frontend","cr; cli.git","cr; public-api-schema.git; https://github.com/renderinc/public-api-schema/pull/407 STILL NEED EVENTS","cr; website.git","cr; changelog","ops; pgproxy rate limits 50ps 100burst; https://github.com/renderinc/dbproxy/pull/91","2873; no conn patroni if upgradeInProgressWithoutHA; https://github.com/renderinc/api/pull/26328","2733; only EnvSettings; https://github.com/renderinc/api/pull/25322/files","pg18; after cred rotation works, re enable e2e","655; pg18; pub api sch; https://github.com/renderinc/public-api-schema/pull/421","655; pg18; go generate pub api sch; https://github.com/renderinc/api/pull/26694","663; das; show status in /info; https://github.com/renderinc/dashboard/pull/9616","664; pg18; go gen terraform; https://github.com/renderinc/api/pull/26701","664; pg18; ga; push terraform.git#breel/keys-664-pg18","656; pg18; website; https://github.com/renderinc/website/pull/985/files","663; das; note disk cannot decrease even if autoscaled; https://github.com/renderinc/dashboard/pull/9621","pulsegres; pls let me keep my test emails; https://github.com/renderinc/api/pull/26741","pgup; restore view owner; https://github.com/renderinc/api/pull/26814","pgup; resync if missing resync; https://github.com/renderinc/api/pull/26817","pgup; replicas use $RESYNC; 
https://github.com/renderinc/api/pull/26878"],"todo":"blocked"},"hi","just_add",{"schedule":"2000-01-01","do":"add_past"}]}
|
||||
{"ts":2051222400,"op":"Add","task":{"schedule":"2035-01-01","do":"add_future"},"tasks":null}
3 pttodoest/src/testdata/root.yaml (vendored)
@@ -61,4 +61,7 @@
- pgup; replicas use $RESYNC; https://github.com/renderinc/api/pull/26878
todo: blocked
- hi
- just_add
- schedule: 2000-01-01
do: add_past