diff --git a/.gitignore b/.gitignore
index 9fcb0d8..d07fda9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ stackql*.pkg
stackql_history.txt
stackql.log
.env
+nohup.out
diff --git a/Cargo.lock b/Cargo.lock
index 598ed1c..ce89a44 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -102,17 +102,6 @@ dependencies = [
"windows-sys 0.59.0",
]
-[[package]]
-name = "async-trait"
-version = "0.1.88"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
[[package]]
name = "autocfg"
version = "1.4.0"
@@ -140,18 +129,34 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
-[[package]]
-name = "base64"
-version = "0.22.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
-
[[package]]
name = "base64ct"
version = "1.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3"
+[[package]]
+name = "bindgen"
+version = "0.64.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4"
+dependencies = [
+ "bitflags 1.3.2",
+ "cexpr",
+ "clang-sys",
+ "lazy_static",
+ "lazycell",
+ "log",
+ "peeking_take_while",
+ "proc-macro2",
+ "quote",
+ "regex",
+ "rustc-hash",
+ "shlex",
+ "syn 1.0.109",
+ "which",
+]
+
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -175,9 +180,9 @@ dependencies = [
[[package]]
name = "bstr"
-version = "1.11.3"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0"
+checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
dependencies = [
"memchr",
"serde",
@@ -223,15 +228,24 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.2.16"
+version = "1.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
+checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362"
dependencies = [
"jobserver",
"libc",
"shlex",
]
+[[package]]
+name = "cexpr"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
+dependencies = [
+ "nom",
+]
+
[[package]]
name = "cfg-if"
version = "1.0.0"
@@ -246,7 +260,9 @@ checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c"
dependencies = [
"android-tzdata",
"iana-time-zone",
+ "js-sys",
"num-traits",
+ "wasm-bindgen",
"windows-link",
]
@@ -282,11 +298,22 @@ dependencies = [
"inout",
]
+[[package]]
+name = "clang-sys"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
+dependencies = [
+ "glob",
+ "libc",
+ "libloading",
+]
+
[[package]]
name = "clap"
-version = "4.5.29"
+version = "4.5.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8acebd8ad879283633b343856142139f2da2317c96b05b4dd6181c61e2480184"
+checksum = "2df961d8c8a0d08aa9945718ccf584145eee3f3aa06cddbeac12933781102e04"
dependencies = [
"clap_builder",
"clap_derive",
@@ -294,9 +321,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.29"
+version = "4.5.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6ba32cbda51c7e1dfd49acc1457ba1a7dec5b64fe360e828acb13ca8dc9c2f9"
+checksum = "132dbda40fb6753878316a489d5a1242a8ef2f0d9e47ba01c951ea8aa7d013a5"
dependencies = [
"anstream",
"anstyle",
@@ -306,14 +333,14 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "4.5.28"
+version = "4.5.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed"
+checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
dependencies = [
"heck",
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -439,9 +466,9 @@ dependencies = [
[[package]]
name = "deranged"
-version = "0.3.11"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
+checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
dependencies = [
"powerfmt",
]
@@ -492,9 +519,15 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
[[package]]
name = "encode_unicode"
version = "1.0.0"
@@ -516,6 +549,19 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
+[[package]]
+name = "env_logger"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
+dependencies = [
+ "humantime",
+ "is-terminal",
+ "log",
+ "regex",
+ "termcolor",
+]
+
[[package]]
name = "equivalent"
version = "1.0.2"
@@ -524,9 +570,9 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
-version = "0.3.10"
+version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
+checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e"
dependencies = [
"libc",
"windows-sys 0.59.0",
@@ -542,12 +588,6 @@ dependencies = [
"str-buf",
]
-[[package]]
-name = "fallible-iterator"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
-
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -567,9 +607,9 @@ dependencies = [
[[package]]
name = "flate2"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
+checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -612,7 +652,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
dependencies = [
"futures-core",
- "futures-sink",
]
[[package]]
@@ -627,17 +666,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
-[[package]]
-name = "futures-macro"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
[[package]]
name = "futures-sink"
version = "0.3.31"
@@ -658,8 +686,6 @@ checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
"futures-core",
"futures-io",
- "futures-macro",
- "futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
@@ -690,14 +716,14 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
+checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0"
dependencies = [
"cfg-if",
"libc",
- "wasi 0.13.3+wasi-0.2.2",
- "windows-targets 0.52.6",
+ "r-efi",
+ "wasi 0.14.2+wasi-0.2.4",
]
[[package]]
@@ -706,6 +732,12 @@ version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
+[[package]]
+name = "glob"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
+
[[package]]
name = "globset"
version = "0.4.16"
@@ -761,6 +793,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+[[package]]
+name = "hermit-abi"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e"
+
[[package]]
name = "hmac"
version = "0.12.1"
@@ -770,6 +808,15 @@ dependencies = [
"digest",
]
+[[package]]
+name = "home"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "http"
version = "0.2.12"
@@ -813,6 +860,12 @@ dependencies = [
"libm",
]
+[[package]]
+name = "humantime"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f"
+
[[package]]
name = "hyper"
version = "0.14.32"
@@ -852,14 +905,15 @@ dependencies = [
[[package]]
name = "iana-time-zone"
-version = "0.1.61"
+version = "0.1.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
+checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
+ "log",
"wasm-bindgen",
"windows-core",
]
@@ -914,9 +968,9 @@ dependencies = [
[[package]]
name = "icu_locid_transform_data"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
+checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d"
[[package]]
name = "icu_normalizer"
@@ -938,9 +992,9 @@ dependencies = [
[[package]]
name = "icu_normalizer_data"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
+checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7"
[[package]]
name = "icu_properties"
@@ -959,9 +1013,9 @@ dependencies = [
[[package]]
name = "icu_properties_data"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
+checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2"
[[package]]
name = "icu_provider"
@@ -988,7 +1042,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -1030,9 +1084,9 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "2.8.0"
+version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058"
+checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
"hashbrown",
@@ -1066,6 +1120,17 @@ version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
+[[package]]
+name = "is-terminal"
+version = "0.4.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@@ -1080,10 +1145,11 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
-version = "0.1.32"
+version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
+checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a"
dependencies = [
+ "getrandom 0.3.2",
"libc",
]
@@ -1103,11 +1169,27 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+[[package]]
+name = "lazycell"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
+
[[package]]
name = "libc"
-version = "0.2.171"
+version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6"
+checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
+
+[[package]]
+name = "libloading"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
+dependencies = [
+ "cfg-if",
+ "windows-targets 0.52.6",
+]
[[package]]
name = "libm"
@@ -1115,6 +1197,30 @@ version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
+[[package]]
+name = "libpq"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57eb9f8893722a29eab34ec11b42a0455abf265162871cf5d6fa4f04842b8fc5"
+dependencies = [
+ "bitflags 2.9.0",
+ "libc",
+ "libpq-sys",
+ "log",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "libpq-sys"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ef060ac05c207c85da15f4eb629100c8782e0db4c06a3c91c86be9c18ae8a23"
+dependencies = [
+ "bindgen",
+ "pkg-config",
+ "vcpkg",
+]
+
[[package]]
name = "libredox"
version = "0.1.3"
@@ -1133,9 +1239,9 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab"
[[package]]
name = "linux-raw-sys"
-version = "0.9.2"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9"
+checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
[[package]]
name = "litemap"
@@ -1143,31 +1249,11 @@ version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856"
-[[package]]
-name = "lock_api"
-version = "0.4.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
-dependencies = [
- "autocfg",
- "scopeguard",
-]
-
[[package]]
name = "log"
-version = "0.4.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
-
-[[package]]
-name = "md-5"
-version = "0.10.6"
+version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
-dependencies = [
- "cfg-if",
- "digest",
-]
+checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "memchr"
@@ -1181,11 +1267,17 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
[[package]]
name = "miniz_oxide"
-version = "0.8.5"
+version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
+checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
dependencies = [
"adler2",
]
@@ -1239,6 +1331,16 @@ dependencies = [
"libc",
]
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
[[package]]
name = "num-conv"
version = "0.1.0"
@@ -1271,15 +1373,15 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.20.3"
+version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "openssl"
-version = "0.10.71"
+version = "0.10.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd"
+checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
dependencies = [
"bitflags 2.9.0",
"cfg-if",
@@ -1298,7 +1400,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -1309,9 +1411,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-sys"
-version = "0.9.106"
+version = "0.9.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd"
+checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
dependencies = [
"cc",
"libc",
@@ -1319,29 +1421,6 @@ dependencies = [
"vcpkg",
]
-[[package]]
-name = "parking_lot"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall",
- "smallvec",
- "windows-targets 0.52.6",
-]
-
[[package]]
name = "parse-zoneinfo"
version = "0.3.1"
@@ -1358,7 +1437,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700"
dependencies = [
"base64ct",
- "rand_core 0.6.4",
+ "rand_core",
"subtle",
]
@@ -1374,6 +1453,12 @@ dependencies = [
"sha2",
]
+[[package]]
+name = "peeking_take_while"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
+
[[package]]
name = "percent-encoding"
version = "2.3.1"
@@ -1382,9 +1467,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
-version = "2.7.15"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
+checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6"
dependencies = [
"memchr",
"thiserror 2.0.12",
@@ -1393,9 +1478,9 @@ dependencies = [
[[package]]
name = "pest_derive"
-version = "2.7.15"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e"
+checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5"
dependencies = [
"pest",
"pest_generator",
@@ -1403,28 +1488,39 @@ dependencies = [
[[package]]
name = "pest_generator"
-version = "2.7.15"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b"
+checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
name = "pest_meta"
-version = "2.7.15"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea"
+checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0"
dependencies = [
"once_cell",
"pest",
"sha2",
]
+[[package]]
+name = "pgwire-lite"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85b08a19f39360a988ed911d66fd586f5c03f14252618b62941cc9af061456c0"
+dependencies = [
+ "libpq",
+ "libpq-sys",
+ "log",
+]
+
[[package]]
name = "phf"
version = "0.11.3"
@@ -1451,7 +1547,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
- "rand 0.8.5",
+ "rand",
]
[[package]]
@@ -1487,49 +1583,6 @@ version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
-[[package]]
-name = "postgres"
-version = "0.19.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "363e6dfbdd780d3aa3597b6eb430db76bb315fa9bad7fae595bb8def808b8470"
-dependencies = [
- "bytes",
- "fallible-iterator",
- "futures-util",
- "log",
- "tokio",
- "tokio-postgres",
-]
-
-[[package]]
-name = "postgres-protocol"
-version = "0.6.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76ff0abab4a9b844b93ef7b81f1efc0a366062aaef2cd702c76256b5dc075c54"
-dependencies = [
- "base64 0.22.1",
- "byteorder",
- "bytes",
- "fallible-iterator",
- "hmac",
- "md-5",
- "memchr",
- "rand 0.9.0",
- "sha2",
- "stringprep",
-]
-
-[[package]]
-name = "postgres-types"
-version = "0.2.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "613283563cd90e1dfc3518d548caee47e0e725455ed619881f5cf21f36de4b48"
-dependencies = [
- "bytes",
- "fallible-iterator",
- "postgres-protocol",
-]
-
[[package]]
name = "powerfmt"
version = "0.2.0"
@@ -1547,22 +1600,28 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.93"
+version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
+checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.38"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
+[[package]]
+name = "r-efi"
+version = "5.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
+
[[package]]
name = "radix_trie"
version = "0.2.1"
@@ -1580,19 +1639,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
- "rand_chacha 0.3.1",
- "rand_core 0.6.4",
-]
-
-[[package]]
-name = "rand"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
-dependencies = [
- "rand_chacha 0.9.0",
- "rand_core 0.9.3",
- "zerocopy",
+ "rand_chacha",
+ "rand_core",
]
[[package]]
@@ -1602,17 +1650,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
- "rand_core 0.6.4",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
-dependencies = [
- "ppv-lite86",
- "rand_core 0.9.3",
+ "rand_core",
]
[[package]]
@@ -1624,24 +1662,6 @@ dependencies = [
"getrandom 0.2.15",
]
-[[package]]
-name = "rand_core"
-version = "0.9.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
-dependencies = [
- "getrandom 0.3.1",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.5.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
-dependencies = [
- "bitflags 2.9.0",
-]
-
[[package]]
name = "redox_users"
version = "0.4.6"
@@ -1688,7 +1708,7 @@ version = "0.11.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
dependencies = [
- "base64 0.21.7",
+ "base64",
"bytes",
"encoding_rs",
"futures-core",
@@ -1728,6 +1748,12 @@ version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
[[package]]
name = "rustix"
version = "0.38.44"
@@ -1743,14 +1769,14 @@ dependencies = [
[[package]]
name = "rustix"
-version = "1.0.2"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825"
+checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf"
dependencies = [
"bitflags 2.9.0",
"errno",
"libc",
- "linux-raw-sys 0.9.2",
+ "linux-raw-sys 0.9.4",
"windows-sys 0.59.0",
]
@@ -1760,7 +1786,7 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
- "base64 0.21.7",
+ "base64",
]
[[package]]
@@ -1862,7 +1888,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -1889,6 +1915,19 @@ dependencies = [
"serde",
]
+[[package]]
+name = "serde_yaml"
+version = "0.9.34+deprecated"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+ "unsafe-libyaml",
+]
+
[[package]]
name = "sha1"
version = "0.10.6"
@@ -1944,15 +1983,15 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.14.0"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
+checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
[[package]]
name = "socket2"
-version = "0.5.8"
+version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8"
+checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef"
dependencies = [
"libc",
"windows-sys 0.52.0",
@@ -1968,13 +2007,21 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
name = "stackql-deploy"
version = "0.1.0"
dependencies = [
+ "chrono",
"clap",
"colored",
+ "env_logger",
"indicatif",
- "postgres",
+ "log",
+ "once_cell",
+ "pgwire-lite",
"reqwest",
"rustyline",
+ "serde",
+ "serde_json",
+ "serde_yaml",
"tera",
+ "thiserror 1.0.69",
"unicode-width 0.1.14",
"zip",
]
@@ -1985,17 +2032,6 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e08d8363704e6c71fc928674353e6b7c23dcea9d82d7012c8faf2a3a025f8d0"
-[[package]]
-name = "stringprep"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
-dependencies = [
- "unicode-bidi",
- "unicode-normalization",
- "unicode-properties",
-]
-
[[package]]
name = "strsim"
version = "0.11.1"
@@ -2010,9 +2046,20 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
-version = "2.0.98"
+version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
dependencies = [
"proc-macro2",
"quote",
@@ -2033,7 +2080,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -2059,14 +2106,14 @@ dependencies = [
[[package]]
name = "tempfile"
-version = "3.19.0"
+version = "3.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "488960f40a3fd53d72c2a29a58722561dee8afdd175bd88e3db4677d7b2ba600"
+checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf"
dependencies = [
"fastrand",
- "getrandom 0.3.1",
+ "getrandom 0.3.2",
"once_cell",
- "rustix 1.0.2",
+ "rustix 1.0.5",
"windows-sys 0.59.0",
]
@@ -2084,7 +2131,7 @@ dependencies = [
"percent-encoding",
"pest",
"pest_derive",
- "rand 0.8.5",
+ "rand",
"regex",
"serde",
"serde_json",
@@ -2092,6 +2139,15 @@ dependencies = [
"unic-segment",
]
+[[package]]
+name = "termcolor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
+dependencies = [
+ "winapi-util",
+]
+
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -2118,7 +2174,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -2129,14 +2185,14 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
name = "time"
-version = "0.3.39"
+version = "0.3.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8"
+checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
dependencies = [
"deranged",
"num-conv",
@@ -2147,9 +2203,9 @@ dependencies = [
[[package]]
name = "time-core"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef"
+checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "tinystr"
@@ -2161,26 +2217,11 @@ dependencies = [
"zerovec",
]
-[[package]]
-name = "tinyvec"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71"
-dependencies = [
- "tinyvec_macros",
-]
-
-[[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
-
[[package]]
name = "tokio"
-version = "1.44.1"
+version = "1.44.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a"
+checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
dependencies = [
"backtrace",
"bytes",
@@ -2201,32 +2242,6 @@ dependencies = [
"tokio",
]
-[[package]]
-name = "tokio-postgres"
-version = "0.7.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c95d533c83082bb6490e0189acaa0bbeef9084e60471b696ca6988cd0541fb0"
-dependencies = [
- "async-trait",
- "byteorder",
- "bytes",
- "fallible-iterator",
- "futures-channel",
- "futures-util",
- "log",
- "parking_lot",
- "percent-encoding",
- "phf",
- "pin-project-lite",
- "postgres-protocol",
- "postgres-types",
- "rand 0.9.0",
- "socket2",
- "tokio",
- "tokio-util",
- "whoami",
-]
-
[[package]]
name = "tokio-util"
version = "0.7.14"
@@ -2333,32 +2348,11 @@ dependencies = [
"unic-common",
]
-[[package]]
-name = "unicode-bidi"
-version = "0.3.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
-
[[package]]
name = "unicode-ident"
-version = "1.0.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034"
-
-[[package]]
-name = "unicode-normalization"
-version = "0.1.24"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
-dependencies = [
- "tinyvec",
-]
-
-[[package]]
-name = "unicode-properties"
-version = "0.1.3"
+version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-segmentation"
@@ -2378,6 +2372,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
+[[package]]
+name = "unsafe-libyaml"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
+
[[package]]
name = "url"
version = "2.5.4"
@@ -2446,19 +2446,13 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasi"
-version = "0.13.3+wasi-0.2.2"
+version = "0.14.2+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
+checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
dependencies = [
"wit-bindgen-rt",
]
-[[package]]
-name = "wasite"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
-
[[package]]
name = "wasm-bindgen"
version = "0.2.100"
@@ -2481,7 +2475,7 @@ dependencies = [
"log",
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
"wasm-bindgen-shared",
]
@@ -2516,7 +2510,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -2551,14 +2545,15 @@ dependencies = [
]
[[package]]
-name = "whoami"
-version = "1.5.2"
+name = "which"
+version = "4.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d"
+checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
dependencies = [
- "redox_syscall",
- "wasite",
- "web-sys",
+ "either",
+ "home",
+ "once_cell",
+ "rustix 0.38.44",
]
[[package]]
@@ -2594,18 +2589,62 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-core"
-version = "0.52.0"
+version = "0.61.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
+checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
dependencies = [
- "windows-targets 0.52.6",
+ "windows-implement",
+ "windows-interface",
+ "windows-link",
+ "windows-result",
+ "windows-strings",
+]
+
+[[package]]
+name = "windows-implement"
+version = "0.60.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.100",
+]
+
+[[package]]
+name = "windows-interface"
+version = "0.59.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.100",
]
[[package]]
name = "windows-link"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
+checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
+
+[[package]]
+name = "windows-result"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-strings"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97"
+dependencies = [
+ "windows-link",
+]
[[package]]
name = "windows-sys"
@@ -2767,9 +2806,9 @@ dependencies = [
[[package]]
name = "wit-bindgen-rt"
-version = "0.33.0"
+version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
+checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
"bitflags 2.9.0",
]
@@ -2806,28 +2845,28 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
"synstructure",
]
[[package]]
name = "zerocopy"
-version = "0.8.23"
+version = "0.8.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6"
+checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
-version = "0.8.23"
+version = "0.8.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154"
+checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -2847,7 +2886,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
"synstructure",
]
@@ -2870,7 +2909,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.100",
]
[[package]]
@@ -2914,9 +2953,9 @@ dependencies = [
[[package]]
name = "zstd-sys"
-version = "2.0.14+zstd.1.5.7"
+version = "2.0.15+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fb060d4926e4ac3a3ad15d864e99ceb5f343c6b34f5bd6d81ae6ed417311be5"
+checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237"
dependencies = [
"cc",
"pkg-config",
diff --git a/Cargo.toml b/Cargo.toml
index 464732d..ffd7aa6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -6,10 +6,18 @@ edition = "2021"
[dependencies]
clap = { version = "4.3", features = ["derive"] }
colored = "2.0"
+rustyline = "10.0"
+tera = "1.19.0"
+log = "0.4"
+env_logger = "0.10"
+pgwire-lite = "0.1.0"
+zip = "0.6"
reqwest = { version = "0.11", features = ["blocking", "json"] }
indicatif = "0.17"
-zip = "0.6"
unicode-width = "0.1.10"
-postgres = "0.19"
-rustyline = "10.0"
-tera = "1.19.0"
\ No newline at end of file
+once_cell = "1.17.0"
+chrono = "0.4"
+serde = { version = "1.0", features = ["derive"] }
+serde_yaml = "0.9"
+serde_json = "1.0"
+thiserror = "1.0"
\ No newline at end of file
diff --git a/README.md b/README.md
index c97762c..b59b930 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,8 @@ cargo run -- build --env prod --provider aws --region us-east-1
./target/release/stackql-deploy test my-stack dev
+./target/release/stackql-deploy test examples/aws/aws-stack dev
+
./target/release/stackql-deploy teardown my-stack dev
./target/release/stackql-deploy build
@@ -38,6 +40,8 @@ cargo run -- build --env prod --provider aws --region us-east-1
./target/release/stackql-deploy upgrade
+./target/release/stackql-deploy start-server
+
# Using built-in provider template
./target/release/stackql-deploy init my-project --provider aws
diff --git a/ci-scripts/build-local.sh b/ci-scripts/build-local.sh
index daf2bb3..2cb53a4 100644
--- a/ci-scripts/build-local.sh
+++ b/ci-scripts/build-local.sh
@@ -6,6 +6,7 @@ chmod +x ci-scripts/format.sh
chmod +x ci-scripts/lint.sh
chmod +x ci-scripts/test.sh
chmod +x ci-scripts/build.sh
+chmod +x ci-scripts/doc.sh
# Print banner
echo "==============================================="
@@ -13,17 +14,20 @@ echo " Running Full Local Build Process"
echo "==============================================="
# Run each step in sequence
-printf "\n[STEP 1/4] Formatting code...\n"
+printf "\n[STEP 1/5] Formatting code...\n"
./ci-scripts/format.sh
-printf "\n[STEP 2/4] Running linter...\n"
+printf "\n[STEP 2/5] Running linter...\n"
./ci-scripts/lint.sh
-printf "\n[STEP 3/4] Running tests...\n"
+printf "\n[STEP 3/5] Running tests...\n"
# ./ci-scripts/test.sh
-printf "\n[STEP 4/4] Building binary...\n"
+printf "\n[STEP 4/5] Building binary...\n"
./ci-scripts/build.sh
+printf "\n[STEP 5/5] Generating documentation...\n"
+# ./ci-scripts/doc.sh
+
printf "\n🚀 Local build process completed successfully!\n"
echo "Binary is available at: ./target/release/stackql-deploy"
\ No newline at end of file
diff --git a/ci-scripts/doc.sh b/ci-scripts/doc.sh
new file mode 100644
index 0000000..2e97723
--- /dev/null
+++ b/ci-scripts/doc.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+set -e
+
+echo "==============================================="
+echo " Generating Documentation with cargo doc"
+echo "==============================================="
+
+# Generate documentation and report the outcome.
+# NOTE: with `set -e`, a bare failing command aborts the script before any
+# `$?` check can run, so test the command directly in the `if` condition.
+if cargo doc --no-deps; then
+    echo -e "\n✅ Documentation generated successfully!"
+    echo "Open the documentation with: open target/doc/index.html"
+else
+    echo -e "\n❌ Documentation generation failed!"
+    exit 1
+fi
diff --git a/docs/build.md b/docs/build.md
new file mode 100644
index 0000000..2df18a0
--- /dev/null
+++ b/docs/build.md
@@ -0,0 +1,62 @@
+```mermaid
+sequenceDiagram
+ participant User as User/Caller
+ participant Deploy as StackQL Deploy
+ participant Resources as Resource Collection
+ participant DB as Cloud Provider
+
+ User->>Deploy: Start deployment
+ activate Deploy
+ Deploy->>Deploy: Load global variables
+
+ loop For each resource in resources
+ Deploy->>Resources: Get next resource
+ activate Resources
+ Resources-->>Deploy: Resource definition
+ deactivate Resources
+
+ alt Has createorupdate anchor
+ Deploy->>DB: Execute createorupdate query
+ activate DB
+ DB-->>Deploy: Operation result
+ deactivate DB
+ else Standard flow
+ Deploy->>DB: Execute statecheck query
+ activate DB
+ DB-->>Deploy: Current state
+ deactivate DB
+
+ alt No data exists
+ Deploy->>DB: Execute create query
+ activate DB
+ DB-->>Deploy: Creation result
+ deactivate DB
+ else Data exists but not in desired state
+ Deploy->>DB: Execute update query
+ activate DB
+ DB-->>Deploy: Update result
+ deactivate DB
+ else Data exists and in desired state
+ Note over Deploy: Skip operation
+ end
+ end
+
+ Deploy->>DB: Verify state after operation
+ activate DB
+ DB-->>Deploy: Current state
+ deactivate DB
+
+ alt In desired state
+ Deploy->>Deploy: Export variables
+ Note over Deploy: Continue to next resource
+ else Not in desired state
+ Deploy-->>User: Return error
+ break Deployment failed
+ Note over Deploy, User: Error handling
+ end
+ end
+ end
+
+ Deploy-->>User: Deployment successful
+ deactivate Deploy
+```
\ No newline at end of file
diff --git a/src/resource/operation.rs b/docs/plan.md
similarity index 100%
rename from src/resource/operation.rs
rename to docs/plan.md
diff --git a/src/resource/query.rs b/docs/teardown.md
similarity index 100%
rename from src/resource/query.rs
rename to docs/teardown.md
diff --git a/docs/test.md b/docs/test.md
new file mode 100644
index 0000000..e69de29
diff --git a/examples/aws/aws-stack/README.md b/examples/aws/aws-stack/README.md
new file mode 100644
index 0000000..f05f129
--- /dev/null
+++ b/examples/aws/aws-stack/README.md
@@ -0,0 +1,75 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy my_stack --provider=azure` or `stackql-deploy my_stack --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_inet_gateway.iql b/examples/aws/aws-stack/resources/example_inet_gateway.iql
new file mode 100644
index 0000000..473b4c0
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_inet_gateway.iql
@@ -0,0 +1,52 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.internet_gateways (
+ Tags,
+ region
+)
+SELECT
+'{{ inet_gateway_tags }}',
+'{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT internet_gateway_id FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.internet_gateways
+WHERE data__Identifier = '{{ internet_gateway_id }}'
+AND region = '{{ region }}';
diff --git a/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql b/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql
new file mode 100644
index 0000000..28138a8
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql
@@ -0,0 +1,39 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_gateway_attachments (
+ InternetGatewayId,
+ VpcId,
+ region
+)
+SELECT
+ '{{ internet_gateway_id }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_gateway_attachments
+WHERE data__Identifier = 'IGW|{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_inet_route.iql b/examples/aws/aws-stack/resources/example_inet_route.iql
new file mode 100644
index 0000000..105b06b
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ GatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ internet_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t;
+
+/*+ exports */
+SELECT data__Identifier as inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_route_table.iql b/examples/aws/aws-stack/resources/example_route_table.iql
new file mode 100644
index 0000000..6a56af8
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_route_table.iql
@@ -0,0 +1,57 @@
+/*+ exists */
+SELECT count(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.route_tables (
+ Tags,
+ VpcId,
+ region
+)
+SELECT
+ '{{ route_table_tags }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT count(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT route_table_id FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.route_tables
+WHERE data__Identifier = '{{ route_table_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_security_group.iql b/examples/aws/aws-stack/resources/example_security_group.iql
new file mode 100644
index 0000000..485a761
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_security_group.iql
@@ -0,0 +1,72 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT group_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.security_groups (
+ GroupName,
+ GroupDescription,
+ VpcId,
+ SecurityGroupIngress,
+ SecurityGroupEgress,
+ Tags,
+ region
+)
+SELECT
+ '{{ group_name }}',
+ '{{ group_description }}',
+ '{{ vpc_id }}',
+ '{{ security_group_ingress }}',
+ '{{ security_group_egress }}',
+ '{{ sg_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT group_id,
+security_group_ingress,
+security_group_egress,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT group_id as 'security_group_id' FROM
+(
+SELECT group_id,
+security_group_ingress,
+security_group_egress,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.security_groups
+WHERE data__Identifier = '{{ security_group_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_subnet.iql b/examples/aws/aws-stack/resources/example_subnet.iql
new file mode 100644
index 0000000..5f62cb0
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_subnet.iql
@@ -0,0 +1,66 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.subnets (
+ VpcId,
+ CidrBlock,
+ MapPublicIpOnLaunch,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ subnet_cidr_block }}',
+ true,
+ '{{ subnet_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ exports */
+SELECT subnet_id, availability_zone FROM
+(
+SELECT subnet_id,
+availability_zone,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnets
+WHERE data__Identifier = '{{ subnet_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql b/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql
new file mode 100644
index 0000000..58c80f4
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql
@@ -0,0 +1,42 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.subnet_route_table_associations (
+ RouteTableId,
+ SubnetId,
+ region
+)
+SELECT
+ '{{ route_table_id }}',
+ '{{ subnet_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}'
+) t;
+
+/*+ exports */
+SELECT id as route_table_assn_id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnet_route_table_associations
+WHERE data__Identifier = '{{ route_table_assn_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_vpc.iql b/examples/aws/aws-stack/resources/example_vpc.iql
new file mode 100644
index 0000000..35b2733
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_vpc.iql
@@ -0,0 +1,63 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/example_web_server.iql b/examples/aws/aws-stack/resources/example_web_server.iql
new file mode 100644
index 0000000..e479969
--- /dev/null
+++ b/examples/aws/aws-stack/resources/example_web_server.iql
@@ -0,0 +1,71 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT instance_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.instances (
+ ImageId,
+ InstanceType,
+ SubnetId,
+ SecurityGroupIds,
+ UserData,
+ Tags,
+ region
+)
+SELECT
+ '{{ ami_id }}',
+ '{{ instance_type }}',
+ '{{ instance_subnet_id }}',
+ '{{ sg_ids }}',
+ '{{ user_data | base64_encode }}',
+ '{{ instance_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT instance_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ exports */
+SELECT instance_id, public_dns_name FROM
+(
+SELECT instance_id, public_dns_name,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.instances
+WHERE data__Identifier = '{{ instance_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/aws/aws-stack/resources/get_web_server_url.iql b/examples/aws/aws-stack/resources/get_web_server_url.iql
new file mode 100644
index 0000000..047bcd5
--- /dev/null
+++ b/examples/aws/aws-stack/resources/get_web_server_url.iql
@@ -0,0 +1,2 @@
+/*+ exports */
+SELECT 'http://' || '{{ public_dns_name }}' as web_server_url
\ No newline at end of file
diff --git a/examples/aws/aws-stack/stackql_manifest.yml b/examples/aws/aws-stack/stackql_manifest.yml
new file mode 100644
index 0000000..19f6251
--- /dev/null
+++ b/examples/aws/aws-stack/stackql_manifest.yml
@@ -0,0 +1,153 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "aws-stack"
+description: description for "aws-stack"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge:
+ - global_tags
+ exports:
+ - vpc_id
+ - vpc_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_cidr_block
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ - name: subnet_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-subnet"
+ merge: ['global_tags']
+ exports:
+ - subnet_id
+ - availability_zone
+ - name: example_inet_gateway
+ props:
+ - name: inet_gateway_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-inet-gateway"
+ merge: ['global_tags']
+ exports:
+ - internet_gateway_id
+ - name: example_inet_gw_attachment
+ props: []
+ - name: example_route_table
+ props:
+ - name: route_table_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-route-table"
+ merge: ['global_tags']
+ exports:
+ - route_table_id
+ - name: example_subnet_rt_assn
+ props: []
+ exports:
+ - route_table_assn_id
+ - name: example_inet_route
+ props: []
+ exports:
+ - inet_route_indentifer
+ - name: example_security_group
+ props:
+ - name: group_description
+ value: "web security group for {{ stack_name }} ({{ stack_env }} environment)"
+ - name: group_name
+ value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ - name: sg_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ merge: ['global_tags']
+ - name: security_group_ingress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow HTTP traffic
+ FromPort: 80
+ ToPort: 80
+ IpProtocol: "tcp"
+ - CidrIp: "{{ vpc_cidr_block }}"
+ Description: Allow SSH traffic from the internal network
+ FromPort: 22
+ ToPort: 22
+ IpProtocol: "tcp"
+ - name: security_group_egress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow all outbound traffic
+ FromPort: 0
+ ToPort: 0
+ IpProtocol: "-1"
+ exports:
+ - security_group_id
+ - name: example_web_server
+ props:
+ - name: instance_name
+ value: "{{ stack_name }}-{{ stack_env }}-instance"
+ - name: ami_id
+ value: ami-030a5acd7c996ef60
+ - name: instance_type
+ value: t2.micro
+ - name: instance_subnet_id
+ value: "{{ subnet_id }}"
+ - name: sg_ids
+ value:
+ - "{{ security_group_id }}"
+ - name: user_data
+ value: |
+ #!/bin/bash
+ yum update -y
+ yum install -y httpd
+ systemctl start httpd
+ systemctl enable httpd
+      echo '<h1>StackQL on AWS</h1>' > /var/www/html/index.html
+ echo '' >> /var/www/html/index.html
+ - name: instance_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-instance"
+ merge: ['global_tags']
+ exports:
+ - instance_id
+ - public_dns_name
+ - name: get_web_server_url
+ type: query
+ props: []
+ exports:
+ - web_server_url
\ No newline at end of file
diff --git a/examples/aws/patch-doc-test/README.md b/examples/aws/patch-doc-test/README.md
new file mode 100644
index 0000000..0b72a5a
--- /dev/null
+++ b/examples/aws/patch-doc-test/README.md
@@ -0,0 +1,80 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy patch-doc-test --provider=azure` or `stackql-deploy patch-doc-test --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named patch-doc-test to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2 \
+--show-queries
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/examples/aws/patch-doc-test/resources/bucket1.iql b/examples/aws/patch-doc-test/resources/bucket1.iql
new file mode 100644
index 0000000..b11970b
--- /dev/null
+++ b/examples/aws/patch-doc-test/resources/bucket1.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT
+COUNT(*) as count
+FROM aws.s3.buckets
+WHERE region = '{{ region }}' AND data__Identifier = '{{ bucket1_name }}'
+
+/*+ create */
+INSERT INTO aws.s3.buckets (
+ BucketName,
+ VersioningConfiguration,
+ Tags,
+ region
+)
+SELECT
+ '{{ bucket1_name }}',
+ '{{ bucket1_versioning_config }}',
+ '{{ bucket1_tags }}',
+ '{{ region }}'
+
+/*+ statecheck, retries=2, retry_delay=1 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+JSON_EQUAL(versioning_configuration, '{{ bucket1_versioning_config }}') as test_versioning_config
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}'
+) t
+WHERE test_versioning_config = 1;
+
+/*+ exports, retries=2, retry_delay=1 */
+SELECT bucket_name as bucket1_name, arn as bucket1_arn FROM
+(
+SELECT
+bucket_name,
+arn
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}'
+) t
+
+/*+ update */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "VersioningConfiguration": bucket1_versioning_config,
+ "Tags": bucket1_tags
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}';
+
+/*+ delete */
+DELETE FROM aws.s3.buckets
+WHERE data__Identifier = '{{ bucket1_name }}'
+AND region = '{{ region }}'
diff --git a/examples/aws/patch-doc-test/stackql_manifest.yml b/examples/aws/patch-doc-test/stackql_manifest.yml
new file mode 100644
index 0000000..0244891
--- /dev/null
+++ b/examples/aws/patch-doc-test/stackql_manifest.yml
@@ -0,0 +1,34 @@
+version: 1
+name: "patch-doc-test"
+description: description for "patch-doc-test"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: bucket1
+ props:
+ - name: bucket1_name
+ value: "{{ stack_name }}-{{ stack_env }}-bucket1"
+ - name: bucket1_versioning_config
+ value:
+ Status: Enabled
+ - name: bucket1_tags
+ merge:
+ - global_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-bucket1"
+ exports:
+ - bucket1_name
+ - bucket1_arn
diff --git a/examples/azure/azure-stack/README.md b/examples/azure/azure-stack/README.md
new file mode 100644
index 0000000..dc2feac
--- /dev/null
+++ b/examples/azure/azure-stack/README.md
@@ -0,0 +1,79 @@
+# `stackql-deploy` starter project for `azure`
+
+> for starter projects using other providers, try `stackql-deploy my_stack --provider=aws` or `stackql-deploy my_stack --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `azure` provider:
+
+- [`azure` provider docs](https://stackql.io/registry/azure)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `azure` and `aws` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `azure` provider, `AZURE_TENANT_ID`, `AZURE_CLIENT_ID` and `AZURE_CLIENT_SECRET` must be set, for more information on authentication to `azure` see the [`azure` provider documentation](https://azure.stackql.io/providers/azure).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+export AZURE_VM_ADMIN_PASSWORD="Your_password_here1"
+stackql-deploy build \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
\ No newline at end of file
diff --git a/examples/azure/azure-stack/resources/example_nic.iql b/examples/azure/azure-stack/resources/example_nic.iql
new file mode 100644
index 0000000..27be6fc
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_nic.iql
@@ -0,0 +1,35 @@
+/*+ createorupdate */
+INSERT INTO azure.network.interfaces(
+ networkInterfaceName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ nic_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"ipConfigurations": [ {{ nic_ip_config }} ], "networkSecurityGroup": { "id": "{{ network_security_group_id }}"}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
+
+/*+ exports */
+SELECT id as network_interface_id
+FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
+
+/*+ delete */
+DELETE FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
diff --git a/examples/azure/azure-stack/resources/example_nsg.iql b/examples/azure/azure-stack/resources/example_nsg.iql
new file mode 100644
index 0000000..5d37386
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_nsg.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.network.security_groups(
+ networkSecurityGroupName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ nsg_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"securityRules":{{ security_rules }}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
+AND JSON_EXTRACT(properties, '$.securityRules') IS NOT NULL
+
+/*+ exports */
+SELECT id as network_security_group_id
+FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
diff --git a/examples/azure/azure-stack/resources/example_public_ip.iql b/examples/azure/azure-stack/resources/example_public_ip.iql
new file mode 100644
index 0000000..5636482
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_public_ip.iql
@@ -0,0 +1,37 @@
+/*+ createorupdate */
+INSERT INTO azure.network.public_ip_addresses(
+ publicIpAddressName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ public_ip_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"publicIPAllocationMethod":"Static"}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
+
+/*+ exports */
+SELECT '{{ public_ip_name }}' as public_ip_name,
+JSON_EXTRACT(properties, '$.ipAddress') as public_ip_address,
+id as public_ip_id
+FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
diff --git a/examples/azure/azure-stack/resources/example_resource_group.iql b/examples/azure/azure-stack/resources/example_resource_group.iql
new file mode 100644
index 0000000..dc9c4b6
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_resource_group.iql
@@ -0,0 +1,31 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+
+/*+ create */
+INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND location = '{{ location }}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+/*+ exports */
+SELECT '{{ resource_group_name }}' as resource_group_name
+
+/*+ delete */
+DELETE FROM azure.resources.resource_groups
+WHERE resourceGroupName = '{{ resource_group_name }}' AND subscriptionId = '{{ subscription_id }}'
diff --git a/examples/azure/azure-stack/resources/example_subnet.iql b/examples/azure/azure-stack/resources/example_subnet.iql
new file mode 100644
index 0000000..fffb317
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_subnet.iql
@@ -0,0 +1,38 @@
+/*+ createorupdate */
+INSERT INTO azure.network.subnets(
+ subnetName,
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__properties
+)
+SELECT
+ '{{ subnet_name }}',
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{"addressPrefix": "{{ subnet_cidr }}"}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressPrefix') = '{{ subnet_cidr }}'
+
+/*+ exports */
+SELECT '{{ subnet_name }}' as subnet_name,
+id as subnet_id
+FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
\ No newline at end of file
diff --git a/examples/azure/azure-stack/resources/example_vm_ext.iql b/examples/azure/azure-stack/resources/example_vm_ext.iql
new file mode 100644
index 0000000..6291d15
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_vm_ext.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.compute.virtual_machine_extensions(
+ resourceGroupName,
+ subscriptionId,
+ vmExtensionName,
+ vmName,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ vm_ext_name }}',
+ '{{ vm_name }}',
+ '{{ location }}',
+ '{ "publisher": "Microsoft.Azure.Extensions", "type": "CustomScript", "typeHandlerVersion": "2.1", "settings": { "commandToExecute": "{{ command_to_execute }}"} }',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM azure.compute.virtual_machine_extensions
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmExtensionName = '{{ vm_ext_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ exports */
+SELECT 'http://' || '{{ public_ip_address }}' || ':8080' as web_url
+
+/*+ delete */
+DELETE FROM azure.compute.virtual_machine_extensions
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmExtensionName = '{{ vm_ext_name }}'
+AND vmName = '{{ vm_name }}'
\ No newline at end of file
diff --git a/examples/azure/azure-stack/resources/example_vnet.iql b/examples/azure/azure-stack/resources/example_vnet.iql
new file mode 100644
index 0000000..55fc558
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_vnet.iql
@@ -0,0 +1,33 @@
+/*+ createorupdate */
+INSERT INTO azure.network.virtual_networks(
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"addressSpace": {"addressPrefixes":["{{ vnet_cidr_block }}"]}}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressSpace.addressPrefixes[0]') = '{{ vnet_cidr_block }}'
+
+/*+ exports */
+SELECT '{{ vnet_name }}' as vnet_name,
+'{{ vnet_cidr_block }}' as vnet_cidr_block
+
+/*+ delete */
+DELETE FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
diff --git a/examples/azure/azure-stack/resources/example_web_server.iql b/examples/azure/azure-stack/resources/example_web_server.iql
new file mode 100644
index 0000000..a069441
--- /dev/null
+++ b/examples/azure/azure-stack/resources/example_web_server.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.compute.virtual_machines(
+ resourceGroupName,
+ subscriptionId,
+ vmName,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ vm_name }}',
+ '{{ location }}',
+ '{"hardwareProfile": {{ hardwareProfile }}, "storageProfile": {{ storageProfile }}, "osProfile": {{ osProfile }}, "networkProfile": {{ networkProfile }}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ exports */
+SELECT id as vm_id, '{{ vm_name }}' as vm_name
+FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ delete */
+DELETE FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
diff --git a/examples/azure/azure-stack/resources/hello-stackql.html b/examples/azure/azure-stack/resources/hello-stackql.html
new file mode 100644
index 0000000..5454a02
--- /dev/null
+++ b/examples/azure/azure-stack/resources/hello-stackql.html
@@ -0,0 +1,41 @@
+
+
+
+
+
+ StackQL on Azure
+
+
+
+
+
+
diff --git a/examples/azure/azure-stack/stackql_manifest.yml b/examples/azure/azure-stack/stackql_manifest.yml
new file mode 100644
index 0000000..acba86c
--- /dev/null
+++ b/examples/azure/azure-stack/stackql_manifest.yml
@@ -0,0 +1,154 @@
+#
+# azure starter project manifest file, add and update values as needed
+#
+version: 1
+name: "azure-stack"
+description: description for "azure-stack"
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ description: default location for resources
+ value: eastus
+ - name: admin_password
+ description: vm admin password
+ value: "{{ AZURE_VM_ADMIN_PASSWORD }}"
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stackName: "{{ stack_name }}"
+ stackEnv: "{{ stack_env }}"
+resources:
+ - name: example_resource_group
+ props:
+ - name: resource_group_name
+ value: "{{ stack_name }}-{{ stack_env }}-rg"
+ exports:
+ - resource_group_name
+ - name: example_vnet
+ props:
+ - name: vnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-vnet"
+ - name: vnet_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ exports:
+ - vnet_name
+ - vnet_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-subnet-1"
+ - name: subnet_cidr
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ exports:
+ - subnet_name
+ - subnet_id
+ - name: example_public_ip
+ props:
+ - name: public_ip_name
+ value: "{{ stack_name }}-{{ stack_env }}-public-ip"
+ exports:
+ - public_ip_name
+ - public_ip_id
+ - public_ip_address
+ - name: example_nsg
+ props:
+ - name: nsg_name
+ value: "{{ stack_name }}-{{ stack_env }}-nsg"
+ - name: security_rules
+ value:
+ - name: AllowHTTP
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 100
+ sourceAddressPrefix: "*"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "8080"
+ - name: AllowSSH
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 200
+ sourceAddressPrefix: "{{ vnet_cidr_block }}"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "22"
+ exports:
+ - network_security_group_id
+ - name: example_nic
+ props:
+ - name: nic_name
+ value: "{{ stack_name }}-{{ stack_env }}-nic"
+ - name: nic_ip_config
+ value:
+ name: ipconfig1
+ properties:
+ subnet:
+ id: "{{ subnet_id }}"
+ privateIPAllocationMethod: Dynamic
+ publicIPAddress:
+ id: "{{ public_ip_id }}"
+ exports:
+ - network_interface_id
+ - name: example_web_server
+ props:
+ - name: vm_name
+ value: "{{ stack_name }}-{{ stack_env }}-vm"
+ - name: hardwareProfile
+ value:
+ vmSize: Standard_DS1_v2
+ - name: storageProfile
+ value:
+ imageReference:
+ publisher: Canonical
+ offer: UbuntuServer
+ sku: 18.04-LTS
+ version: latest
+ osDisk:
+ name: "{{ stack_name }}-{{ stack_env }}-vm-disk1"
+ createOption: FromImage
+ managedDisk:
+ storageAccountType: Standard_LRS
+ diskSizeGB: 30
+ - name: osProfile
+ value:
+ computerName: myVM-{{ stack_name }}-{{ stack_env }}
+ adminUsername: azureuser
+        adminPassword: "{{ admin_password }}"
+ linuxConfiguration:
+ disablePasswordAuthentication: false
+ - name: networkProfile
+ value:
+ networkInterfaces:
+ - id: "{{ network_interface_id }}"
+ exports:
+ - vm_name
+ - vm_id
+ - name: example_vm_ext
+ props:
+ - name: vm_ext_name
+ value: "{{ stack_name }}-{{ stack_env }}-microsoft.custom-script-linux"
+ - name: command_to_execute
+ value: |
+ wget -O index.html https://raw.githubusercontent.com/stackql/stackql-deploy/main/examples/azure/azure-stack/resources/hello-stackql.html && nohup busybox httpd -f -p 8080 &
+ exports:
+ - web_url
\ No newline at end of file
diff --git a/examples/confluent/cmd-specific-auth/README.md b/examples/confluent/cmd-specific-auth/README.md
new file mode 100644
index 0000000..e56f49d
--- /dev/null
+++ b/examples/confluent/cmd-specific-auth/README.md
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy cmd-specific-auth --provider=azure` or `stackql-deploy cmd-specific-auth --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named cmd-specific-auth to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/examples/confluent/cmd-specific-auth/resources/example_vpc.iql b/examples/confluent/cmd-specific-auth/resources/example_vpc.iql
new file mode 100644
index 0000000..463dbc1
--- /dev/null
+++ b/examples/confluent/cmd-specific-auth/resources/example_vpc.iql
@@ -0,0 +1,67 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/confluent/cmd-specific-auth/stackql_manifest.yml b/examples/confluent/cmd-specific-auth/stackql_manifest.yml
new file mode 100644
index 0000000..7450964
--- /dev/null
+++ b/examples/confluent/cmd-specific-auth/stackql_manifest.yml
@@ -0,0 +1,40 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "cmd-specific-auth"
+description: description for "cmd-specific-auth"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ description: example vpc resource
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge: ['global_tags']
+ exports:
+ - vpc_id
+ - vpc_cidr_block
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/README.md b/examples/databricks/all-purpose-cluster/README.md
new file mode 100644
index 0000000..404f7bc
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/README.md
@@ -0,0 +1,245 @@
+# `stackql-deploy` example project for `databricks`
+
+This exercise is to bootstrap a databricks / aws tenancy using `stackql-deploy`. It is an important use case for platform bootstrap and we are excited to perform it with the `stackql` toolchain. We hope you enjoy and find this valuable. Please drop us a note with your forthright opinion on this and check out our issues on github.
+
+## A word of caution
+
+Please take the greatest care in performing this exercise; it will incur expenses, as it involves creating (and destroying) resources which cost money. Please be aware that you **must** cancel your databricks subscription after completing this exercise, otherwise you will incur ongoing expenses. That is, do **not** skip the section [Cancel databricks subscription](#cancel-databricks-subsription). We strongly advise that you verify all resources are destroyed at the conclusion of this exercise. Web pages and certain behaviours may change, so please be thorough in your verification. We will keep this page up-to-date on a best effort basis only. It is very much a case of owner onus applies.
+
+## Manual Setup
+
+Dependencies:
+
+- aws Account Created.
+- Required clickops to set up databricks on aws:
+ - Turn on aws Marketplace `databricks` offering, using [the aws manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), per Figure S1.
+ - Follow the suggested setup flow as directed, from this page. These clickops steps are necessary at this time for initial account setup. The way I followed this, it created a workspace for me at setup, per Figure S3. We shall not use this one and rather, later on we shall dispose of it; because we do not trust auto-created resources out of hand. In the process of creating the databricks subscription, a second aws account is created.
+  - Copy the databricks account id from basically any web page in the databricks console. This is done by clicking on the user icon at the top RHS and then the UI provides a copy shortcut, per Figure U1. Save this locally for later use, expanded below.
+  - We need the aws account id that was created for the databricks subscription. It is not exactly heralded by the web pages, nor is it actively hidden. It can be captured in a couple of places, including the databricks storage account created in the subscription flow, per Figure XA1. Copy and save this locally for later use, expanded below.
+ - Create a service principal to use as a "CICD agent", using the page shown in Figure S4.
+ - Grant the CICD agent account admin role, using the page shown in Figure S5.
+ - Create a secret for the CICD agent, using the page shown in Figure S6. At the time you create this, you will need to safely store the client secret and client id, as prompted by the web page. These will be used below.
+- Setup your virtual environment, from the root of this repository `cicd/setup/setup-env.sh`.
+
+Now, it is convenient to use environment variables for context. Note that for our example, there is only one aws account apropos, however this is not always the case for an active professional, so while `DATABRICKS_AWS_ACCOUNT_ID` is the same as `AWS_ACCOUNT_ID` here, it need not always be the case. Create a file in the path `examples/databricks/all-purpose-cluster/sec/env.sh` (relative to the root of this repository) with contents of the form:
+
+```bash
+#!/usr/bin/env bash
+
+export AWS_REGION='us-east-1' # or wherever you want
+export AWS_ACCOUNT_ID=''
+export DATABRICKS_ACCOUNT_ID=''
+export DATABRICKS_AWS_ACCOUNT_ID=''
+
+# These need to be created by clickops under [the account level user management page](https://accounts.cloud.databricks.com/user-management).
+export DATABRICKS_CLIENT_ID=''
+export DATABRICKS_CLIENT_SECRET=''
+
+## These can be skipped if you run on [aws cloud shell](https://docs.aws.amazon.com/cloudshell/latest/userguide/welcome.html).
+export AWS_SECRET_ACCESS_KEY=''
+export AWS_ACCESS_KEY_ID=''
+
+```
+
+## Optional step: sanity checks with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+source examples/databricks/all-purpose-cluster/convenience.sh
+stackql shell
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here, that will be shared in a corresponding video):
+
+
+```sql
+registry pull databricks_account v24.12.00279;
+registry pull databricks_workspace v24.12.00279;
+
+-- This will fail if accounts, subscription, or credentials are in error.
+select account_id FROM databricks_account.provisioning.credentials WHERE account_id = '';
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+```
+
+For extra credit, you can (asynchronously) delete the unnecessary workspace with `delete from databricks_account.provisioning.workspaces where account_id = '' and workspace_id = '';`, where you obtain the workspace id from the above query. I have noted that due to some response caching it takes a while to disappear from select queries (much longer than disappearance from the web page), and you may want to bounce the `stackql` session to hurry things along. This is not happening on the `stackql` side, but session bouncing forces a token refresh which can help cache busting.
+
+## Lifecycle management
+
+Time to get down to business. From the root of this repository:
+
+```bash
+python3 -m venv myenv
+source examples/databricks/all-purpose-cluster/convenience.sh
+source myenv/bin/activate
+pip install stackql-deploy
+```
+
+> alternatively set the `AWS_REGION`, `AWS_ACCOUNT_ID`, `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_AWS_ACCOUNT_ID` along with provider credentials `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `DATABRICKS_CLIENT_ID`, `DATABRICKS_CLIENT_SECRET`
+
+Then, do a dry run (good for catching **some** environmental issues):
+
+```bash
+stackql-deploy build \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--dry-run
+```
+
+You will see a verbose rendition of what `stackql-deploy` intends to do.
+
+
+Now, let us do it for real:
+
+```bash
+stackql-deploy build \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+The output is quite verbose, concludes in:
+
+```
+2025-02-08 12:51:25,914 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - ā
successfully deployed databricks_workspace
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - deployment completed in 0:04:09.603631
+š build complete
+```
+
+Success!!!
+
+We can also use `stackql-deploy` to assess if our infra is shipshape:
+
+```bash
+stackql-deploy test \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Again, the output is quite verbose, concludes in:
+
+```
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - ā
test passed for databricks_workspace
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - deployment completed in 0:02:30.255860
+š tests complete (dry run: False)
+```
+
+Success!!!
+
+Now, let us teardown our `stackql-deploy` managed infra:
+
+```bash
+stackql-deploy teardown \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Takes its time, again verbose, concludes in:
+
+```
+2025-02-08 13:24:17,941 - stackql-deploy - INFO - ā
successfully deleted AWS_iam_cross_account_role
+2025-02-08 13:24:17,942 - stackql-deploy - INFO - deployment completed in 0:03:21.191788
+š§ teardown complete (dry run: False)
+```
+
+Success!!!
+
+## Optional step: verify destruction with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+
+source examples/databricks/all-purpose-cluster/convenience.sh
+
+stackql shell
+
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here):
+
+
+```sql
+
+registry pull databricks_account v24.12.00279;
+
+registry pull databricks_workspace v24.12.00279;
+
+
+
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+
+```
+
+## Cancel databricks subsription
+
+This is **very** important.
+
+Go to [the aws Marketplace manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), navigate to databricks and then cancel the subscription.
+
+## Figures
+
+
+
+
+**Figure S1**: Create aws databricks subscription.
+
+---
+
+
+
+**Figure S2**: Awaiting aws databricks subscription resources.
+
+---
+
+
+
+**Figure S3**: Auto provisioned workspace.
+
+---
+
+
+
+**Figure U1**: Capture databricks account id.
+
+---
+
+
+
+**Figure XA1**: Capture cross databricks aws account id.
+
+---
+
+
+
+**Figure S4**: Create CICD agent.
+
+---
+
+
+
+**Figure S5**: Grant account admin to CICD agent.
+
+---
+
+
+
+**Figure S6**: Generate secret for CICD agent.
+
+---
diff --git a/examples/databricks/all-purpose-cluster/assets/auto-provisioned-worskpace.png b/examples/databricks/all-purpose-cluster/assets/auto-provisioned-worskpace.png
new file mode 100644
index 0000000..a9fbcb6
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/auto-provisioned-worskpace.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/awaiting-subscription-resources.png b/examples/databricks/all-purpose-cluster/assets/awaiting-subscription-resources.png
new file mode 100644
index 0000000..9505100
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/awaiting-subscription-resources.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/capture-cross-databricks-aws-account-id.png b/examples/databricks/all-purpose-cluster/assets/capture-cross-databricks-aws-account-id.png
new file mode 100644
index 0000000..6fdb3c4
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/capture-cross-databricks-aws-account-id.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/capture-databricks-account-id.png b/examples/databricks/all-purpose-cluster/assets/capture-databricks-account-id.png
new file mode 100644
index 0000000..c890299
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/capture-databricks-account-id.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/create-aws-databricks-subscription.png b/examples/databricks/all-purpose-cluster/assets/create-aws-databricks-subscription.png
new file mode 100644
index 0000000..b5c9e7f
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/create-aws-databricks-subscription.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/create-cicd-agent.png b/examples/databricks/all-purpose-cluster/assets/create-cicd-agent.png
new file mode 100644
index 0000000..faf1643
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/create-cicd-agent.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/generate-secret-ui.png b/examples/databricks/all-purpose-cluster/assets/generate-secret-ui.png
new file mode 100644
index 0000000..daf4f23
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/generate-secret-ui.png differ
diff --git a/examples/databricks/all-purpose-cluster/assets/grant-account-admin-cicd-agent.png b/examples/databricks/all-purpose-cluster/assets/grant-account-admin-cicd-agent.png
new file mode 100644
index 0000000..f50e0c0
Binary files /dev/null and b/examples/databricks/all-purpose-cluster/assets/grant-account-admin-cicd-agent.png differ
diff --git a/examples/databricks/all-purpose-cluster/convenience.sh b/examples/databricks/all-purpose-cluster/convenience.sh
new file mode 100644
index 0000000..d4913f6
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/convenience.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+
+CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+export REPOSITORY_ROOT="$(realpath $CURRENT_DIR/../../..)"
+
+
+if [ -f "${REPOSITORY_ROOT}/examples/databricks/all-purpose-cluster/sec/env.sh" ];
+then
+ source "${REPOSITORY_ROOT}/examples/databricks/all-purpose-cluster/sec/env.sh"
+fi
+
+if [ "${AWS_REGION}" = "" ];
+then
+ AWS_REGION='us-east-1'
+fi
+
+if [ "${AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "AWS_ACCOUNT_ID must be set" >&2
+  exit 1
+fi
+
+if [ "${DATABRICKS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_AWS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_ID}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_SECRET}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_SECRET must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_SECRET_ACCESS_KEY}" = "" ];
+then
+ echo "AWS_SECRET_ACCESS_KEY must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_ACCESS_KEY_ID}" = "" ];
+then
+ echo "AWS_ACCESS_KEY_ID must be set" >&2
+ exit 1
+fi
+
+export AWS_REGION
+export AWS_ACCOUNT_ID
+export DATABRICKS_ACCOUNT_ID
+export DATABRICKS_AWS_ACCOUNT_ID
+
+export DATABRICKS_CLIENT_ID
+export DATABRICKS_CLIENT_SECRET
+
+
+export AWS_SECRET_ACCESS_KEY
+export AWS_ACCESS_KEY_ID
+
+
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/iam/iam_role.iql b/examples/databricks/all-purpose-cluster/resources/aws/iam/iam_role.iql
new file mode 100644
index 0000000..ba2d140
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/iam/iam_role.iql
@@ -0,0 +1,59 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ create */
+INSERT INTO aws.iam.roles (
+ RoleName,
+ Description,
+ Path,
+ AssumeRolePolicyDocument,
+ Policies,
+ Tags,
+ region
+)
+SELECT
+'{{ role_name }}',
+'{{ description }}',
+'{{ path }}',
+'{{ assume_role_policy_document }}',
+'{{ policies }}',
+'{{ global_tags }}',
+'us-east-1'
+
+/*+ update */
+update aws.iam.roles
+set data__PatchDocument = string('{{ {
+ "Description": description,
+ "Path": path,
+ "AssumeRolePolicyDocument": assume_role_policy_document,
+ "Policies": policies,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ max_session_duration,
+ path,
+ JSON_EQUAL(assume_role_policy_document, '{{ assume_role_policy_document }}') as test_assume_role_policy_doc,
+ JSON_EQUAL(policies, '{{ policies }}') as test_policies
+ FROM aws.iam.roles
+ WHERE data__Identifier = '{{ role_name }}')t
+WHERE test_assume_role_policy_doc = 1
+AND test_policies = 1
+AND path = '{{ path }}';
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+arn as aws_iam_role_arn
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ delete */
+DELETE FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1'
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket.iql b/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket.iql
new file mode 100644
index 0000000..a20c908
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket.iql
@@ -0,0 +1,61 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ create */
+INSERT INTO aws.s3.buckets (
+ BucketName,
+ OwnershipControls,
+ BucketEncryption,
+ PublicAccessBlockConfiguration,
+ VersioningConfiguration,
+ Tags,
+ region
+)
+SELECT
+ '{{ bucket_name }}',
+ '{{ ownership_controls }}',
+ '{{ bucket_encryption }}',
+ '{{ public_access_block_configuration }}',
+ '{{ versioning_configuration }}',
+ '{{ global_tags }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "OwnershipControls": ownership_controls,
+ "BucketEncryption": bucket_encryption,
+ "PublicAccessBlockConfiguration": public_access_block_configuration,
+ "VersioningConfiguration": versioning_configuration,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ JSON_EQUAL(ownership_controls, '{{ ownership_controls }}') as test_ownership_controls,
+ JSON_EQUAL(bucket_encryption, '{{ bucket_encryption }}') as test_encryption,
+ JSON_EQUAL(public_access_block_configuration, '{{ public_access_block_configuration }}') as test_public_access_block_configuration,
+ JSON_EQUAL(versioning_configuration, '{{ versioning_configuration }}') as test_versioning_configuration
+ FROM aws.s3.buckets
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ bucket_name }}'
+)t
+WHERE test_ownership_controls = 1
+AND test_encryption = 1
+AND test_public_access_block_configuration = 1
+AND test_versioning_configuration = 1
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+arn as aws_s3_workspace_bucket_arn,
+bucket_name as aws_s3_workspace_bucket_name
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket_policy.iql b/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket_policy.iql
new file mode 100644
index 0000000..cead151
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/s3/workspace_bucket_policy.iql
@@ -0,0 +1,36 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.bucket_policies
+WHERE region = '{{ region }}'
+AND bucket = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ create */
+INSERT INTO aws.s3.bucket_policies (
+ Bucket,
+ PolicyDocument,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ aws_s3_workspace_bucket_name }}',
+ '{{ policy_document }}',
+ '{{ uuid() }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.bucket_policies
+set data__PatchDocument = string('{{ {
+ "PolicyDocument": policy_document
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ JSON_EQUAL(policy_document, '{{ policy_document }}') as test_policy_document
+ FROM aws.s3.bucket_policies
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}')t
+WHERE test_policy_document = 1;
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/elastic_ip.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/elastic_ip.iql
new file mode 100644
index 0000000..d4dd982
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/elastic_ip.iql
@@ -0,0 +1,56 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT allocation_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.eips (
+ NetworkBorderGroup,
+ Tags,
+ ClientToken,
+ region
+)
+SELECT
+'{{ region }}',
+'{{ tags }}',
+'{{ idempotency_token }}',
+'{{ region }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT allocation_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT allocation_id as eip_allocation_id, public_ip as eip_public_id FROM
+(
+SELECT allocation_id, public_ip,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ delete */
+DELETE FROM aws.ec2.eips
+WHERE data__Identifier = '{{ eip_public_id }}|{{ eip_allocation_id}}'
+AND region = '{{ region }}'
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/get_main_route_table_id.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/get_main_route_table_id.iql
new file mode 100644
index 0000000..7679dd2
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/get_main_route_table_id.iql
@@ -0,0 +1,6 @@
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+route_table_id as main_route_table_id
+FROM aws.ec2.route_tables
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gateway.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gateway.iql
new file mode 100644
index 0000000..dc42032
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gateway.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.internet_gateways (
+ Tags,
+ ClientToken,
+ region
+)
+SELECT
+'{{ tags }}',
+'{{ idempotency_token }}',
+'{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT internet_gateway_id FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.internet_gateways
+WHERE data__Identifier = '{{ internet_gateway_id }}'
+AND region = '{{ region }}';
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gw_attachment.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gw_attachment.iql
new file mode 100644
index 0000000..31b9d25
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_gw_attachment.iql
@@ -0,0 +1,39 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_gateway_attachments (
+ InternetGatewayId,
+ VpcId,
+ region
+)
+SELECT
+ '{{ internet_gateway_id }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_gateway_attachments
+WHERE data__Identifier = 'IGW|{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_route.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_route.iql
new file mode 100644
index 0000000..b46cc0f
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ GatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ internet_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT data__Identifier as inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_gateway.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_gateway.iql
new file mode 100644
index 0000000..081fbd2
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_gateway.iql
@@ -0,0 +1,53 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY nat_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.nat_gateways (
+ AllocationId,
+ SubnetId,
+ Tags,
+ region
+)
+SELECT
+ '{{ eip_allocation_id }}',
+ '{{ nat_subnet_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY nat_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY nat_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.nat_gateways
+WHERE data__Identifier = '{{ nat_gateway_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_inet_route.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_inet_route.iql
new file mode 100644
index 0000000..9e750f6
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/nat_inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ NatGatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ nat_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT data__Identifier as nat_inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ nat_inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/route_table.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/route_table.iql
new file mode 100644
index 0000000..7b0aa76
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/route_table.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.route_tables (
+ VpcId,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.route_tables
+WHERE data__Identifier = '{{ route_table_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group.iql
new file mode 100644
index 0000000..15e9061
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+
+/*+ create */
+INSERT INTO aws.ec2.security_groups (
+ GroupName,
+ GroupDescription,
+ VpcId,
+ Tags,
+ region
+)
+SELECT
+ '{{ group_name }}',
+ '{{ group_description }}',
+ '{{ vpc_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+AND group_description = '{{ group_description }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT group_id as security_group_id
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.security_groups
+WHERE data__Identifier = '{{ security_group_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group_rules.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group_rules.iql
new file mode 100644
index 0000000..62f79eb
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/security_group_rules.iql
@@ -0,0 +1,27 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ security_group_id }}'
+
+/*+ createorupdate */
+update aws.ec2.security_groups
+set data__PatchDocument = string('{{ {
+ "SecurityGroupIngress": security_group_ingress,
+ "SecurityGroupEgress": security_group_egress
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ security_group_id }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+ SELECT
+ JSON_EQUAL(security_group_ingress, '{{ security_group_ingress }}') as ingress_test,
+ JSON_EQUAL(security_group_egress, '{{ security_group_egress }}') as egress_test
+ FROM aws.ec2.security_groups
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ security_group_id }}'
+ AND ingress_test = 1
+ AND egress_test = 1
+) t;
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet.iql
new file mode 100644
index 0000000..83667f5
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet.iql
@@ -0,0 +1,43 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}'
+
+/*+ create */
+INSERT INTO aws.ec2.subnets (
+ VpcId,
+ CidrBlock,
+ AvailabilityZone,
+ MapPublicIpOnLaunch,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ cidr_block }}',
+ '{{ availability_zone }}',
+ false,
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}'
+AND availability_zone = '{{ availability_zone }}';
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT subnet_id
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnets
+WHERE data__Identifier = '{{ subnet_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet_rt_assn.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet_rt_assn.iql
new file mode 100644
index 0000000..d0c8b33
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/subnet_rt_assn.iql
@@ -0,0 +1,34 @@
+/*+ exists */
+select regexp_like(associationSet, '.*{{ subnet_id }}.*') as count from
+aws.ec2_native.route_tables where region = '{{ region }}'
+and routeTableId = '{{ route_table_id }}'
+
+/*+ create */
+INSERT INTO aws.ec2.subnet_route_table_associations (
+ RouteTableId,
+ SubnetId,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ route_table_id }}',
+ '{{ subnet_id }}',
+ '{{ idempotency_token }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+select regexp_like(associationSet, '.*{{ subnet_id }}.*') as count from
+aws.ec2_native.route_tables where region = '{{ region }}'
+and routeTableId = '{{ route_table_id }}'
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT id as route_table_assn_id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnet_route_table_associations
+WHERE data__Identifier = '{{ route_table_assn_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/tag_main_vpc_route_table.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/tag_main_vpc_route_table.iql
new file mode 100644
index 0000000..cc03c2a
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/tag_main_vpc_route_table.iql
@@ -0,0 +1,7 @@
+/*+ command */
+update aws.ec2.route_tables
+set data__PatchDocument = string('{{ {
+ "Tags": tags
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ main_route_table_id }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc.iql
new file mode 100644
index 0000000..56e1c54
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+ SELECT vpc_id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_tags
+ WHERE region = '{{ region }}'
+ AND cidr_block = '{{ cidr_block }}'
+ GROUP BY vpc_id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ cidr_block }}',
+ '{{ tags }}',
+ true,
+ true,
+ '{{ idempotency_token }}',
+ '{{ region }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+ SELECT vpc_id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_tags
+ WHERE region = '{{ region }}'
+ AND cidr_block = '{{ cidr_block }}'
+ GROUP BY vpc_id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc_endpoint.iql b/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc_endpoint.iql
new file mode 100644
index 0000000..d40f522
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/aws/vpc/vpc_endpoint.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+ SELECT id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_endpoint_tags
+ WHERE region = '{{ region }}'
+ AND service_name = '{{ service_name }}'
+ GROUP BY id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_endpoints (
+ ServiceName,
+ VpcEndpointType,
+ VpcId,
+ RouteTableIds,
+ Tags,
+ region
+)
+SELECT
+ '{{ service_name }}',
+ '{{ vpc_endpoint_type }}',
+ '{{ vpc_id }}',
+ '{{ route_table_ids }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+ SELECT id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_endpoint_tags
+ WHERE region = '{{ region }}'
+ AND service_name = '{{ service_name }}'
+ GROUP BY id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT id as s3_gateway_endpoint_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_endpoint_tags
+WHERE region = '{{ region }}'
+AND service_name = '{{ service_name }}'
+GROUP BY id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_endpoints
+WHERE data__Identifier = '{{ s3_gateway_endpoint_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/credentials.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/credentials.iql
new file mode 100644
index 0000000..d83abc6
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/credentials.iql
@@ -0,0 +1,39 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.credentials (
+account_id,
+data__credentials_name,
+data__aws_credentials
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ credentials_name }}',
+'{{ aws_credentials }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+credentials_id
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+AND JSON_EXTRACT(aws_credentials, '$.sts_role.role_arn') = '{{ aws_iam_cross_account_role_arn }}'
+) t
+
+/*+ exports */
+SELECT credentials_id as databricks_credentials_id,
+JSON_EXTRACT(aws_credentials, '$.sts_role.external_id') as databricks_role_external_id
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}' AND
+credentials_id = '{{ databricks_credentials_id }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/get_users.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/get_users.iql
new file mode 100644
index 0000000..2a978d7
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/get_users.iql
@@ -0,0 +1,6 @@
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+JSON_GROUP_ARRAY(JSON_OBJECT('value', id)) as databricks_workspace_group_members
+FROM databricks_account.iam.users
+WHERE account_id = '{{ databricks_account_id }}'
+AND userName in {{ users | sql_list }};
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/network.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/network.iql
new file mode 100644
index 0000000..45e0b0a
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/network.iql
@@ -0,0 +1,46 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.networks (
+account_id,
+data__network_name,
+data__vpc_id,
+data__subnet_ids,
+data__security_group_ids
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_network_name }}',
+'{{ vpc_id }}',
+'{{ subnet_ids }}',
+'{{ security_group_ids }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+JSON_EQUAL(subnet_ids, '{{ subnet_ids }}') as subnet_test,
+JSON_EQUAL(security_group_ids, '{{ security_group_ids }}') as sg_test
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_test = 1
+AND sg_test = 1
+)t
+
+/*+ exports */
+SELECT
+network_id as databricks_network_id
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}' AND
+network_name = '{{ databricks_network_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}' AND
+network_id = '{{ databricks_network_id }}'
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/storage_configuration.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/storage_configuration.iql
new file mode 100644
index 0000000..4e60cfc
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/storage_configuration.iql
@@ -0,0 +1,35 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.storage (
+account_id,
+data__storage_configuration_name,
+data__root_bucket_info
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ storage_configuration_name }}',
+'{{ root_bucket_info }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+AND JSON_EXTRACT(root_bucket_info, '$.bucket_name') = '{{ aws_s3_workspace_bucket_name }}'
+
+/*+ exports */
+SELECT
+storage_configuration_id as databricks_storage_configuration_id
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}' AND
+storage_configuration_id = '{{ databricks_storage_configuration_id }}'
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/update_group_membership.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/update_group_membership.iql
new file mode 100644
index 0000000..375d926
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/update_group_membership.iql
@@ -0,0 +1,6 @@
+/*+ command */
+update databricks_account.iam.groups
+set data__schemas = '["urn:ietf:params:scim:api:messages:2.0:PatchOp"]',
+data__Operations = '[{"op": "replace", "path": "members", "value": {{ databricks_workspace_group_members }} }]'
+WHERE account_id = '{{ databricks_account_id }}'
+AND id = '{{ databricks_group_id }}';
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace.iql
new file mode 100644
index 0000000..9da2dea
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace.iql
@@ -0,0 +1,44 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.workspaces (
+account_id,
+data__workspace_name,
+data__aws_region,
+data__credentials_id,
+data__storage_configuration_id,
+data__pricing_tier
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ workspace_name }}',
+'{{ aws_region }}',
+'{{ credentials_id }}',
+'{{ storage_configuration_id }}',
+'{{ pricing_tier }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+AND aws_region = '{{ aws_region }}'
+AND credentials_id = '{{ credentials_id }}'
+AND storage_configuration_id = '{{ storage_configuration_id }}'
+AND pricing_tier = '{{ pricing_tier }}'
+
+/*+ exports */
+SELECT workspace_id AS databricks_workspace_id,
+deployment_name AS databricks_deployment_name
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_group.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_group.iql
new file mode 100644
index 0000000..4d3494a
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_group.iql
@@ -0,0 +1,31 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.iam.groups (
+account_id,
+data__displayName
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ display_name }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ exports */
+SELECT id AS databricks_group_id
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}' AND
+id = '{{ databricks_group_id }}';
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_permission_assignments.iql b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_permission_assignments.iql
new file mode 100644
index 0000000..00387e3
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_account/workspace_permission_assignments.iql
@@ -0,0 +1,32 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ createorupdate */
+INSERT INTO databricks_account.iam.workspace_permission_assignments (
+account_id,
+principal_id,
+workspace_id,
+data__permissions
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_group_id }}',
+'{{ databricks_workspace_id }}',
+'["ADMIN"]'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ delete */
+DELETE FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+principal_id = '{{ databricks_group_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/examples/databricks/all-purpose-cluster/resources/databricks_workspace/all_purpose_cluster.iql b/examples/databricks/all-purpose-cluster/resources/databricks_workspace/all_purpose_cluster.iql
new file mode 100644
index 0000000..44b3703
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/resources/databricks_workspace/all_purpose_cluster.iql
@@ -0,0 +1,52 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ create */
+INSERT INTO databricks_workspace.compute.clusters (
+deployment_name,
+data__cluster_name,
+data__num_workers,
+data__is_single_node,
+data__kind,
+data__spark_version,
+data__node_type_id,
+data__data_security_mode,
+data__runtime_engine,
+data__single_user_name,
+data__aws_attributes,
+data__custom_tags
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ cluster_name }}',
+ {{ num_workers }},
+ {{ is_single_node }},
+'{{ kind }}',
+'{{ spark_version }}',
+'{{ node_type_id }}',
+'{{ data_security_mode }}',
+'{{ runtime_engine }}',
+'{{ single_user_name }}',
+'{{ aws_attributes }}',
+'{{ custom_tags }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ exports */
+SELECT cluster_id AS databricks_cluster_id,
+state AS databricks_cluster_state
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ delete */
+DELETE FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_id = '{{ databricks_cluster_id }}'
diff --git a/examples/databricks/all-purpose-cluster/sec/.gitignore b/examples/databricks/all-purpose-cluster/sec/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/sec/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/examples/databricks/all-purpose-cluster/stackql_manifest.yml b/examples/databricks/all-purpose-cluster/stackql_manifest.yml
new file mode 100644
index 0000000..7a6a4bd
--- /dev/null
+++ b/examples/databricks/all-purpose-cluster/stackql_manifest.yml
@@ -0,0 +1,689 @@
+version: 1
+name: "databricks-all-purpose-cluster"
+description: creates a databricks workspace and all-purpose cluster
+providers:
+ - aws
+ - databricks_account
+ - databricks_workspace
+globals:
+ - name: databricks_account_id
+ description: databricks account id
+ value: "{{ DATABRICKS_ACCOUNT_ID }}"
+ - name: databricks_aws_account_id
+ description: databricks AWS account id
+ value: "{{ DATABRICKS_AWS_ACCOUNT_ID }}"
+ - name: aws_account
+ description: aws_account id
+ value: "{{ AWS_ACCOUNT_ID }}"
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+# ====================================================================================
+# AWS IAM
+# ====================================================================================
+ - name: aws/iam/cross_account_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: ""
+ Effect: "Allow"
+ Principal:
+ AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ databricks_account_id }}"
+ - name: description
+ value: 'allows Databricks to access resources in ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyDocument:
+ Statement:
+ - Sid: Stmt1403287045000
+ Effect: Allow
+ Action:
+ - "ec2:AllocateAddress"
+ - "ec2:AssociateDhcpOptions"
+ - "ec2:AssociateIamInstanceProfile"
+ - "ec2:AssociateRouteTable"
+ - "ec2:AttachInternetGateway"
+ - "ec2:AttachVolume"
+ - "ec2:AuthorizeSecurityGroupEgress"
+ - "ec2:AuthorizeSecurityGroupIngress"
+ - "ec2:CancelSpotInstanceRequests"
+ - "ec2:CreateDhcpOptions"
+ - "ec2:CreateInternetGateway"
+ - "ec2:CreateKeyPair"
+ - "ec2:CreateNatGateway"
+ - "ec2:CreatePlacementGroup"
+ - "ec2:CreateRoute"
+ - "ec2:CreateRouteTable"
+ - "ec2:CreateSecurityGroup"
+ - "ec2:CreateSubnet"
+ - "ec2:CreateTags"
+ - "ec2:CreateVolume"
+ - "ec2:CreateVpc"
+ - "ec2:CreateVpcEndpoint"
+ - "ec2:DeleteDhcpOptions"
+ - "ec2:DeleteInternetGateway"
+ - "ec2:DeleteKeyPair"
+ - "ec2:DeleteNatGateway"
+ - "ec2:DeletePlacementGroup"
+ - "ec2:DeleteRoute"
+ - "ec2:DeleteRouteTable"
+ - "ec2:DeleteSecurityGroup"
+ - "ec2:DeleteSubnet"
+ - "ec2:DeleteTags"
+ - "ec2:DeleteVolume"
+ - "ec2:DeleteVpc"
+ - "ec2:DeleteVpcEndpoints"
+ - "ec2:DescribeAvailabilityZones"
+ - "ec2:DescribeIamInstanceProfileAssociations"
+ - "ec2:DescribeInstanceStatus"
+ - "ec2:DescribeInstances"
+ - "ec2:DescribeInternetGateways"
+ - "ec2:DescribeNatGateways"
+ - "ec2:DescribePlacementGroups"
+ - "ec2:DescribePrefixLists"
+ - "ec2:DescribeReservedInstancesOfferings"
+ - "ec2:DescribeRouteTables"
+ - "ec2:DescribeSecurityGroups"
+ - "ec2:DescribeSpotInstanceRequests"
+ - "ec2:DescribeSpotPriceHistory"
+ - "ec2:DescribeSubnets"
+ - "ec2:DescribeVolumes"
+ - "ec2:DescribeVpcs"
+ - "ec2:DescribeVpcAttribute"
+ - "ec2:DescribeNetworkAcls"
+ - "ec2:DetachInternetGateway"
+ - "ec2:DisassociateIamInstanceProfile"
+ - "ec2:DisassociateRouteTable"
+ - "ec2:ModifyVpcAttribute"
+ - "ec2:ReleaseAddress"
+ - "ec2:ReplaceIamInstanceProfileAssociation"
+ - "ec2:ReplaceRoute"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RevokeSecurityGroupEgress"
+ - "ec2:RevokeSecurityGroupIngress"
+ - "ec2:RunInstances"
+ - "ec2:TerminateInstances"
+ Resource:
+ - "*"
+ - Effect: Allow
+ Action:
+ - "iam:CreateServiceLinkedRole"
+ - "iam:PutRolePolicy"
+ Resource:
+ - arn:aws:iam::*:role/aws-service-role/spot.amazonaws.com/AWSServiceRoleForEC2Spot
+ Condition:
+ StringLike:
+ "iam:AWSServiceName": spot.amazonaws.com
+ Version: '2012-10-17'
+ PolicyName: "{{ stack_name }}-{{ stack_env }}-policy"
+ exports:
+ - aws_iam_role_arn: aws_iam_cross_account_role_arn
+ - name: databricks_account/credentials
+ props:
+ - name: credentials_name
+ value: "{{ stack_name }}-{{ stack_env }}-credentials"
+ - name: aws_credentials
+ value:
+ sts_role:
+ role_arn: "{{ aws_iam_cross_account_role_arn }}"
+ exports:
+ - databricks_credentials_id
+ - databricks_role_external_id
+ - name: aws/iam/databricks_compute_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-compute-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Action: "sts:AssumeRole"
+ Effect: "Allow"
+ Principal:
+ AWS: "{{ 'arn:aws:iam::314146311478:root' if trustInternalAccount == 'true' else 'arn:aws:iam::414351767826:root' }}"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ databricks_account_id }}"
+ - name: description
+ value: 'allows Databricks to access compute resources in ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyName: "Base"
+ PolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: "CreateEC2ResourcesWithRequestTag"
+ Effect: "Allow"
+ Action:
+ - "ec2:CreateFleet"
+ - "ec2:CreateLaunchTemplate"
+ - "ec2:CreateVolume"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RunInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ aws:RequestTag/Vendor: "Databricks"
+ - Sid: "AllowDatabricksTagOnCreate"
+ Effect: "Allow"
+ Action: ["ec2:CreateTags"]
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:CreateAction:
+ - "CreateFleet"
+ - "CreateLaunchTemplate"
+ - "CreateVolume"
+ - "RequestSpotInstances"
+ - "RunInstances"
+ - Sid: "UpdateByResourceTags"
+ Effect: "Allow"
+ Action:
+ - "ec2:AssignPrivateIpAddresses"
+ - "ec2:AssociateIamInstanceProfile"
+ - "ec2:AttachVolume"
+ - "ec2:AuthorizeSecurityGroupEgress"
+ - "ec2:AuthorizeSecurityGroupIngress"
+ - "ec2:CancelSpotInstanceRequests"
+ - "ec2:CreateFleet"
+ - "ec2:CreateLaunchTemplate"
+ - "ec2:CreateLaunchTemplateVersion"
+ - "ec2:CreateVolume"
+ - "ec2:DetachVolume"
+ - "ec2:DisassociateIamInstanceProfile"
+ - "ec2:ModifyFleet"
+ - "ec2:ModifyLaunchTemplate"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RevokeSecurityGroupEgress"
+ - "ec2:RevokeSecurityGroupIngress"
+ - "ec2:RunInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "GetByResourceTags"
+ Effect: "Allow"
+ Action: ["ec2:GetLaunchTemplateData"]
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "DeleteByResourceTags"
+ Effect: "Allow"
+ Action:
+ - "ec2:DeleteFleets"
+ - "ec2:DeleteLaunchTemplate"
+ - "ec2:DeleteLaunchTemplateVersions"
+ - "ec2:DeleteTags"
+ - "ec2:DeleteVolume"
+ - "ec2:TerminateInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "DescribeResources"
+ Effect: "Allow"
+ Action:
+ - "ec2:DescribeAvailabilityZones"
+ - "ec2:DescribeFleets"
+ - "ec2:DescribeIamInstanceProfileAssociations"
+ - "ec2:DescribeInstances"
+ - "ec2:DescribeInstanceStatus"
+ - "ec2:DescribeInternetGateways"
+ - "ec2:DescribeLaunchTemplates"
+ - "ec2:DescribeLaunchTemplateVersions"
+ - "ec2:DescribeNatGateways"
+ - "ec2:DescribeNetworkAcls"
+ - "ec2:DescribePrefixLists"
+ - "ec2:DescribeReservedInstancesOfferings"
+ - "ec2:DescribeRouteTables"
+ - "ec2:DescribeSecurityGroups"
+ - "ec2:DescribeSpotInstanceRequests"
+ - "ec2:DescribeSpotPriceHistory"
+ - "ec2:DescribeSubnets"
+ - "ec2:DescribeVolumes"
+ - "ec2:DescribeVpcs"
+ - "ec2:GetSpotPlacementScores"
+ Resource: ["*"]
+ exports:
+ - aws_iam_role_arn: databricks_compute_role_arn
+# ====================================================================================
+# AWS VPC Networking
+# ====================================================================================
+ - name: aws/vpc/vpc
+ props:
+ - name: cidr_block
+ values:
+ prd:
+ value: "10.53.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge:
+ - global_tags
+ - name: idempotency_token
+ value: 019447a0-b84a-7b7f-bca5-2ee320207e51
+ exports:
+ - vpc_id
+ - name: aws/vpc/nat_subnet
+ file: aws/vpc/subnet.iql
+ props:
+ - name: availability_zone
+ value: "us-east-1a"
+ - name: cidr_block
+ values:
+ prd:
+ value: "10.53.0.0/24"
+ sit:
+ value: "10.1.0.0/19"
+ dev:
+ value: "10.2.0.0/19"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-nat-subnet"
+ merge:
+ - global_tags
+ exports:
+ - subnet_id: nat_subnet_id
+ - name: aws/vpc/cluster_subnet1
+ file: aws/vpc/subnet.iql
+ props:
+ - name: availability_zone
+ value: "us-east-1b"
+ - name: cidr_block
+ values:
+ prd:
+ value: "10.53.160.0/19"
+ sit:
+ value: "10.1.0.0/19"
+ dev:
+ value: "10.2.0.0/19"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-subnet-1"
+ merge:
+ - global_tags
+ exports:
+ - subnet_id: cluster_subnet1_id
+ - name: aws/vpc/cluster_subnet2
+ file: aws/vpc/subnet.iql
+ props:
+ - name: availability_zone
+ value: "us-east-1c"
+ - name: cidr_block
+ values:
+ prd:
+ value: "10.53.192.0/19"
+ sit:
+ value: "10.1.32.0/19"
+ dev:
+ value: "10.2.32.0/19"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-subnet-2"
+ merge:
+ - global_tags
+ exports:
+ - subnet_id: cluster_subnet2_id
+ - name: aws/vpc/inet_gateway
+ props:
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-inet-gateway"
+ merge: ['global_tags']
+ - name: idempotency_token
+ value: 019447a5-f076-75f8-9173-092df5a66d35
+ exports:
+ - internet_gateway_id
+ - name: aws/vpc/inet_gw_attachment
+ props: []
+ - name: aws/vpc/nat_route_table
+ file: aws/vpc/route_table.iql
+ props:
+ - name: route_table_name
+ value: "{{ stack_name }}-{{ stack_env }}-nat-route-table"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-nat-route-table"
+ merge: ['global_tags']
+ exports:
+ - route_table_id: nat_route_table_id
+ - name: aws/vpc/nat_route_to_inet
+ file: aws/vpc/inet_route.iql
+ props:
+ - name: route_table_id
+ value: "{{ nat_route_table_id }}"
+ exports:
+ - inet_route_indentifer: nat_inet_route_indentifer
+ - name: aws/vpc/nat_subnet_rt_assn
+ file: aws/vpc/subnet_rt_assn.iql
+ props:
+ - name: subnet_id
+ value: "{{ nat_subnet_id }}"
+ - name: route_table_id
+ value: "{{ nat_route_table_id }}"
+ - name: idempotency_token
+ value: 3eaf3040-1c8e-41a6-8be6-512ccaf5ff4e
+ exports:
+ - route_table_assn_id: nat_subnet_rt_assn_id
+ - name: aws/vpc/private_route_table
+ file: aws/vpc/route_table.iql
+ props:
+ - name: route_table_name
+ value: "{{ stack_name }}-{{ stack_env }}-private-route-table"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-private-route-table"
+ merge: ['global_tags']
+ exports:
+ - route_table_id: private_route_table_id
+ - name: aws/vpc/subnet_rt_assn1
+ file: aws/vpc/subnet_rt_assn.iql
+ props:
+ - name: route_table_id
+ value: "{{ private_route_table_id }}"
+ - name: subnet_id
+ value: "{{ cluster_subnet1_id }}"
+ - name: idempotency_token
+ value: 019447aa-1c7a-775b-91dc-04db7c49f4a7
+ exports:
+ - route_table_assn_id: cluster_subnet1_rt_assn_id
+ - name: aws/vpc/subnet_rt_assn2
+ file: aws/vpc/subnet_rt_assn.iql
+ props:
+ - name: route_table_id
+ value: "{{ private_route_table_id }}"
+ - name: subnet_id
+ value: "{{ cluster_subnet2_id }}"
+ - name: idempotency_token
+ value: c19c9077-c25d-46a4-a299-7bd93d773e58
+ exports:
+ - route_table_assn_id: cluster_subnet2_rt_assn_id
+ - name: aws/vpc/elastic_ip
+ props:
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-eip"
+ merge: ['global_tags']
+ - name: idempotency_token
+ value: 01945908-b80d-7e51-b52c-5e93dea9cbdb
+ exports:
+ - eip_allocation_id
+ - eip_public_id
+ - name: aws/vpc/nat_gateway
+ props:
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-nat-gateway"
+ merge: ['global_tags']
+ - name: idempotency_token
+        value: 019447a5-f076-75f8-9173-092df5a66d36
+ exports:
+ - nat_gateway_id
+ - name: aws/vpc/nat_inet_route
+ props:
+ - name: route_table_id
+ value: "{{ private_route_table_id }}"
+ - name: nat_gateway_id
+ value: "{{ nat_gateway_id }}"
+ exports:
+ - nat_inet_route_indentifer
+ - name: aws/vpc/security_group
+ props:
+ - name: group_name
+ value: "{{ stack_name }}-{{ stack_env }}-sg"
+ - name: group_description
+ value: "security group for {{ stack_name }} ({{ stack_env }} environment)"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-sg"
+ merge: ['global_tags']
+ exports:
+ - security_group_id
+ - name: aws/vpc/security_group_rules
+ props:
+ - name: security_group_ingress
+ value:
+ - FromPort: 0
+ ToPort: 65535
+ SourceSecurityGroupOwnerId: "{{ aws_account }}"
+ IpProtocol: tcp
+ SourceSecurityGroupId: "{{ security_group_id }}"
+ - FromPort: 0
+ ToPort: 65535
+ SourceSecurityGroupOwnerId: "{{ aws_account }}"
+ IpProtocol: "udp"
+ SourceSecurityGroupId: "{{ security_group_id }}"
+ - CidrIp: "3.237.73.224/28"
+ FromPort: 443
+ ToPort: 443
+ IpProtocol: "tcp"
+ - CidrIp: "54.156.226.103/32"
+ FromPort: 443
+ ToPort: 443
+ IpProtocol: "tcp"
+ - name: security_group_egress
+ value:
+ - FromPort: 0
+ ToPort: 65535
+ IpProtocol: "tcp"
+ DestinationSecurityGroupId: "{{ security_group_id }}"
+ Description: "Allow all TCP outbound access to the same security group"
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow all outbound traffic
+ FromPort: -1
+ ToPort: -1
+ IpProtocol: "-1"
+ - CidrIp: "0.0.0.0/0"
+ FromPort: 3306
+ ToPort: 3306
+ IpProtocol: "tcp"
+ Description: "Allow accessing the Databricks metastore"
+ - FromPort: 0
+ ToPort: 65535
+ IpProtocol: "udp"
+ DestinationSecurityGroupId: "{{ security_group_id }}"
+ Description: "Allow all UDP outbound access to the same security group"
+ - CidrIp: "0.0.0.0/0"
+ FromPort: 443
+ ToPort: 443
+ IpProtocol: "tcp"
+ Description: "Allow accessing Databricks infrastructure, cloud data sources, and library repositories"
+ - name: databricks_account/network
+ props:
+ - name: databricks_network_name
+ value: "{{ stack_name }}-{{ stack_env }}-network"
+ - name: subnet_ids
+ value:
+ - "{{ cluster_subnet1_id }}"
+ - "{{ cluster_subnet2_id }}"
+ - name: security_group_ids
+ value:
+ - "{{ security_group_id }}"
+ exports:
+ - databricks_network_id
+# ====================================================================================
+# AWS Storage
+# ====================================================================================
+ - name: aws/s3/workspace_bucket
+ props:
+ - name: bucket_name
+ value: "{{ stack_name }}-{{ stack_env }}-root-bucket"
+ - name: ownership_controls
+ value:
+ Rules:
+ - ObjectOwnership: "BucketOwnerPreferred"
+ - name: bucket_encryption
+ value:
+ ServerSideEncryptionConfiguration:
+ - BucketKeyEnabled: true
+ ServerSideEncryptionByDefault:
+ SSEAlgorithm: "AES256"
+ - name: public_access_block_configuration
+ value:
+ BlockPublicAcls: true
+ IgnorePublicAcls: true
+ BlockPublicPolicy: true
+ RestrictPublicBuckets: true
+ - name: versioning_configuration
+ value:
+ Status: "Suspended"
+ exports:
+ - aws_s3_workspace_bucket_name
+ - aws_s3_workspace_bucket_arn
+ - name: aws/s3/workspace_bucket_policy
+ props:
+ - name: policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: Grant Databricks Access
+ Effect: Allow
+ Principal:
+ AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ Action:
+ - "s3:GetObject"
+ - "s3:GetObjectVersion"
+ - "s3:PutObject"
+ - "s3:DeleteObject"
+ - "s3:ListBucket"
+ - "s3:GetBucketLocation"
+ Resource:
+ - "{{ aws_s3_workspace_bucket_arn }}/*"
+ - "{{ aws_s3_workspace_bucket_arn }}"
+ - name: aws/vpc/vpc_endpoint
+ props:
+ - name: service_name
+ value: "com.amazonaws.{{ region }}.s3"
+ - name: vpc_endpoint_type
+ value: "Gateway"
+ - name: route_table_ids
+ value:
+ - "{{ private_route_table_id }}"
+ - name: tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-s3-vpc-endpoint"
+ merge:
+ - global_tags
+ exports:
+ - s3_gateway_endpoint_id
+ - name: databricks_account/storage_configuration
+ props:
+ - name: storage_configuration_name
+ value: "{{ stack_name }}-{{ stack_env }}-storage"
+ - name: root_bucket_info
+ value:
+ bucket_name: "{{ aws_s3_workspace_bucket_name }}"
+ exports:
+ - databricks_storage_configuration_id
+# ====================================================================================
+# DBX Workspace
+# ====================================================================================
+ - name: databricks_account/workspace
+ props:
+ - name: workspace_name
+ value: "{{ stack_name }}-{{ stack_env }}-workspace"
+ - name: network_id
+ value: "{{ databricks_network_id }}"
+ - name: aws_region
+ value: "{{ region }}"
+ - name: credentials_id
+ value: "{{ databricks_credentials_id }}"
+ - name: storage_configuration_id
+ value: "{{ databricks_storage_configuration_id }}"
+ - name: pricing_tier
+ value: PREMIUM
+ exports:
+ - databricks_workspace_id
+ - databricks_deployment_name
+ - name: databricks_account/workspace_group
+ props:
+ - name: display_name
+ value: "{{ stack_name }}-{{ stack_env }}-workspace-admins"
+ exports:
+ - databricks_group_id
+ - name: databricks_account/get_users
+ type: query
+ props:
+ - name: users
+ value:
+ - "javen@stackql.io"
+ - "krimmer@stackql.io"
+ exports:
+ - databricks_workspace_group_members
+ - name: databricks_account/update_group_membership
+ type: command
+ props: []
+ - name: databricks_account/workspace_permission_assignments
+ props: []
+ - name: databricks_workspace/all_purpose_cluster
+ props:
+ - name: cluster_name
+ value: single-user-single-node-cluster
+ - name: num_workers
+ value: 0
+ - name: is_single_node
+ value: true
+ - name: kind
+ value: CLASSIC_PREVIEW
+ - name: spark_version
+ value: 15.4.x-scala2.12
+ - name: node_type_id
+ value: m7g.large
+ - name: data_security_mode
+ value: SINGLE_USER
+ - name: runtime_engine
+ value: PHOTON
+ - name: single_user_name
+ value: javen@stackql.io
+ - name: aws_attributes
+ value:
+ ebs_volume_count: 1
+ ebs_volume_size: 100
+ - name: custom_tags
+ description: Additional tags for cluster resources (max 45 tags)
+ value:
+ Provisioner: stackql
+ StackName: "{{ stack_name }}"
+ StackEnv: "{{ stack_env }}"
+ exports:
+ - databricks_cluster_id
+ - databricks_cluster_state
diff --git a/examples/google/k8s-the-hard-way/README.md b/examples/google/k8s-the-hard-way/README.md
new file mode 100644
index 0000000..4ef7189
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/README.md
@@ -0,0 +1,66 @@
+# example `stackql-deploy` stack
+
+Based upon the [Kubernetes the Hard Way](https://github.com/kelseyhightower/kubernetes-the-hard-way) project.
+
+## about `stackql-deploy`
+
+[`stackql-deploy`](https://pypi.org/project/stackql-deploy/) is a multi-cloud deployment automation and testing framework which is an alternative to Terraform or similar IaC tools. `stackql-deploy` uses a declarative model/ELT-based approach to cloud resource deployment (inspired by [`dbt`](https://www.getdbt.com/)). Advantages of `stackql-deploy` include:
+
+- declarative framework
+- no state file (state is determined from the target environment)
+- multi-cloud/omni-cloud ready
+- includes resource tests which can include secure config tests
+
+## installing `stackql-deploy`
+
+`stackql-deploy` is installed as a Python-based CLI using...
+
+```bash
+pip install stackql-deploy
+# or
+pip3 install stackql-deploy
+```
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## getting started with `stackql-deploy`
+
+Once installed, use the `init` command to scaffold a sample project directory to get started:
+
+```bash
+stackql-deploy init k8s-the-hard-way
+```
+
+this will create a directory named `k8s-the-hard-way` which can be updated for your stack, as you can see in this project.
+
+## deploying using `stackql-deploy`
+
+```bash
+export GOOGLE_CREDENTIALS=$(cat ./testcreds/k8s-the-hard-way-project-demo-service-account.json)
+# deploy a stack
+stackql-deploy build \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run \
+--log-level DEBUG
+
+# test a stack
+stackql-deploy test \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+
+# teardown a stack
+stackql-deploy teardown \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+```
diff --git a/examples/google/k8s-the-hard-way/resources/firewalls.iql b/examples/google/k8s-the-hard-way/resources/firewalls.iql
new file mode 100644
index 0000000..d69607b
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/firewalls.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+
+/*+ create */
+INSERT INTO google.compute.firewalls
+(
+ project,
+ data__name,
+ data__network,
+ data__direction,
+ data__sourceRanges,
+ data__allowed
+)
+SELECT
+ '{{ project }}',
+ '{{ fw_name}}',
+ '{{ vpc_link }}',
+ '{{ fw_direction }}',
+ '{{ fw_source_ranges }}',
+ '{{ fw_allowed }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+network = '{{ vpc_link }}' as test_network,
+direction = '{{ fw_direction }}' as test_direction,
+JSON_EQUAL(allowed, '{{ fw_allowed }}') as test_allowed,
+JSON_EQUAL(sourceRanges, '{{ fw_source_ranges }}') as test_source_ranges
+FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+) t
+WHERE test_network = 1
+AND test_direction = 1
+AND test_allowed = 1
+AND test_source_ranges = 1;
+
+/*+ update */
+UPDATE google.compute.firewalls
+SET
+ data__network = '{{ vpc_link }}',
+ data__direction = '{{ fw_direction }}',
+ data__sourceRanges = '{{ fw_source_ranges }}',
+ data__allowed = '{{ fw_allowed }}'
+WHERE firewall = '{{ fw_name}}'
+AND project = '{{ project }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND firewall = '{{ fw_name }}'
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql b/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql
new file mode 100644
index 0000000..2f25e4e
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql
@@ -0,0 +1,36 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.forwarding_rules
+WHERE region = '{{ region }}'
+AND project = '{{ project }}'
+AND forwardingRule = '{{ forwarding_rule_name }}'
+
+/*+ create */
+INSERT INTO google.compute.forwarding_rules(
+ project,
+ region,
+ data__name,
+ data__IPAddress,
+ data__loadBalancingScheme,
+ data__portRange,
+ data__target
+)
+SELECT
+ '{{ project }}',
+ '{{ region }}',
+ '{{ forwarding_rule_name }}',
+ '{{ address }}',
+ '{{ forwarding_rule_load_balancing_scheme }}',
+ '{{ forwarding_rule_port_range }}',
+ '{{ target_pool_link }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.forwarding_rules
+WHERE region = '{{ region }}'
+AND project = '{{ project }}'
+AND forwardingRule = '{{ forwarding_rule_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.forwarding_rules
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND forwardingRule = '{{ forwarding_rule_name }}'
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql b/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql
new file mode 100644
index 0000000..36d7aef
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql
@@ -0,0 +1,6 @@
+/*+ exports */
+SELECT JSON_GROUP_ARRAY(json_object('instance', selfLink)) as controller_instances
+FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name like '%-{{ stack_env }}-controller-%'
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/health_checks.iql b/examples/google/k8s-the-hard-way/resources/health_checks.iql
new file mode 100644
index 0000000..7154450
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/health_checks.iql
@@ -0,0 +1,45 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ create */
+INSERT INTO google.compute.http_health_checks(
+ project,
+ data__name,
+ data__checkIntervalSec,
+ data__description,
+ data__healthyThreshold,
+ data__host,
+ data__port,
+ data__requestPath,
+ data__timeoutSec,
+ data__unhealthyThreshold
+)
+SELECT
+ '{{ project }}',
+ '{{ health_check_name }}',
+ {{ health_check_interval_sec }},
+ '{{ health_check_description }}',
+ {{ health_check_healthy_threshold }},
+ '{{ health_check_host }}',
+ {{ health_check_port }},
+ '{{ health_check_path }}',
+ {{ health_check_timeout_sec }},
+ {{ health_check_unhealthy_threshold }}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ exports */
+SELECT selfLink as health_check_link
+FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/instances.iql b/examples/google/k8s-the-hard-way/resources/instances.iql
new file mode 100644
index 0000000..bf482fa
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/instances.iql
@@ -0,0 +1,61 @@
+/*+ exists */
+SELECT
+CASE
+ WHEN COUNT(*) = {{ num_instances | int }} THEN 1
+ ELSE 0
+END AS count
+FROM google.compute.instances
+WHERE
+project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name IN ({% for i in range(num_instances | int) %}'{{ instance_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ create */
+{% for network_interface in network_interfaces | from_json %}
+INSERT INTO google.compute.instances
+ (
+ zone,
+ project,
+ data__name,
+ data__machineType,
+ data__canIpForward,
+ data__deletionProtection,
+ data__scheduling,
+ data__networkInterfaces,
+ data__disks,
+ data__serviceAccounts,
+ data__tags
+ )
+ SELECT
+'{{ default_zone }}',
+'{{ project }}',
+'{{ instance_name_prefix }}-{{ loop.index }}',
+'{{ machine_type }}',
+true,
+false,
+'{{ scheduling }}',
+'[ {{ network_interface | tojson }} ]',
+'{{ disks }}',
+'{{ service_accounts }}',
+'{{ tags }}';
+{% endfor %}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT
+CASE
+ WHEN COUNT(*) = {{ num_instances | int }} THEN 1
+ ELSE 0
+END AS count
+FROM google.compute.instances
+WHERE
+project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name IN ({% for i in range(num_instances | int) %}'{{ instance_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ delete */
+{% for network_interface in network_interfaces | from_json %}
+DELETE FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND instance = '{{ instance_name_prefix }}-{{ loop.index }}';
+{% endfor %}
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/network.iql b/examples/google/k8s-the-hard-way/resources/network.iql
new file mode 100644
index 0000000..c1b39d7
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/network.iql
@@ -0,0 +1,43 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+
+/*+ create */
+INSERT INTO google.compute.networks
+(
+ project,
+ data__name,
+ data__autoCreateSubnetworks,
+ data__routingConfig
+)
+SELECT
+'{{ project }}',
+'{{ vpc_name }}',
+false,
+'{"routingMode": "REGIONAL"}'
+
+/*+ update */
+UPDATE google.compute.networks
+SET data__autoCreateSubnetworks = false,
+data__routingConfig = '{"routingMode": "REGIONAL"}'
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+AND autoCreateSubnetworks = false
+AND JSON_EXTRACT(routingConfig, '$.routingMode') = 'REGIONAL'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.networks
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ exports */
+SELECT
+'{{ vpc_name }}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
diff --git a/examples/google/k8s-the-hard-way/resources/public_address.iql b/examples/google/k8s-the-hard-way/resources/public_address.iql
new file mode 100644
index 0000000..022db98
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/public_address.iql
@@ -0,0 +1,35 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ create */
+INSERT INTO google.compute.addresses
+(
+ project,
+ region,
+ data__name
+)
+SELECT
+'{{ project }}',
+'{{ region }}',
+'{{ address_name }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ delete */
+DELETE FROM google.compute.addresses
+WHERE address = '{{ address_name }}' AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ exports */
+SELECT address
+FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/examples/google/k8s-the-hard-way/resources/routes.iql b/examples/google/k8s-the-hard-way/resources/routes.iql
new file mode 100644
index 0000000..e40be78
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/routes.iql
@@ -0,0 +1,45 @@
+/*+ exists */
+SELECT
+ CASE
+ WHEN COUNT(*) = {{ num_routes | int }} THEN 1
+ ELSE 0
+ END AS count
+FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND name IN ({% for i in range(num_routes | int) %}'{{ route_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ create */
+{% for route in route_data | from_json %}
+INSERT INTO google.compute.routes(
+ project,
+ data__destRange,
+ data__name,
+ data__network,
+ data__nextHopIp,
+ data__priority
+)
+SELECT
+ '{{ project }}',
+ '{{ route.dest_range }}',
+ '{{ route_name_prefix }}-{{ loop.index }}',
+ '{{ vpc_link }}',
+ '{{ route.next_hop_ip }}',
+ {{ route_priority }};
+{% endfor %}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT
+ CASE
+ WHEN COUNT(*) = {{ num_routes | int }} THEN 1
+ ELSE 0
+ END AS count
+FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND name IN ({% for i in range(num_routes | int) %}'{{ route_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ delete, retries=20, retry_delay=10 */
+{% for route in route_data | from_json %}
+DELETE FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND route = '{{ route_name_prefix }}-{{ loop.index }}';
+{% endfor %}
\ No newline at end of file
diff --git a/examples/google/k8s-the-hard-way/resources/subnetwork.iql b/examples/google/k8s-the-hard-way/resources/subnetwork.iql
new file mode 100644
index 0000000..7d55eb7
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/subnetwork.iql
@@ -0,0 +1,56 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+AND network = '{{ vpc_link }}'
+
+/*+ create, retries=5, retry_delay=10 */
+INSERT INTO google.compute.subnetworks
+(
+ project,
+ region,
+ data__name,
+ data__network,
+ data__ipCidrRange,
+ data__privateIpGoogleAccess
+)
+SELECT
+'{{ project }}',
+'{{ region }}',
+'{{ subnet_name }}',
+'{{ vpc_link }}',
+'{{ ip_cidr_range }}',
+true
+
+/*+ update */
+UPDATE google.compute.subnetworks
+SET
+data__name = '{{ subnet_name }}',
+data__network = '{{ vpc_link }}',
+data__ipCidrRange = '{{ ip_cidr_range }}',
+data__privateIpGoogleAccess = true
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.subnetworks
+WHERE project = '{{ project }}'
+AND region = '{{ region }}'
+AND subnetwork = '{{ subnet_name }}'
+AND network = '{{ vpc_link }}'
+
+/*+ delete */
+DELETE FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}' AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ exports */
+SELECT
+name as subnet_name,
+selfLink as subnet_link
+FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/examples/google/k8s-the-hard-way/resources/target_pool.iql b/examples/google/k8s-the-hard-way/resources/target_pool.iql
new file mode 100644
index 0000000..66db671
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/resources/target_pool.iql
@@ -0,0 +1,42 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ create */
+INSERT INTO google.compute.target_pools(
+ project,
+ region,
+ data__name,
+ data__healthChecks,
+ data__instances,
+ data__sessionAffinity
+)
+SELECT
+ '{{ project }}',
+ '{{ region }}',
+ '{{ target_pool_name }}',
+ '{{ target_pool_health_checks }}',
+ '{{ target_pool_instances }}',
+ '{{ target_pool_session_affinity }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ exports */
+SELECT
+selfLink as target_pool_link
+FROM google.compute.target_pools
+WHERE targetPool = '{{ target_pool_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/examples/google/k8s-the-hard-way/stackql_manifest.yml b/examples/google/k8s-the-hard-way/stackql_manifest.yml
new file mode 100644
index 0000000..e3f0d0e
--- /dev/null
+++ b/examples/google/k8s-the-hard-way/stackql_manifest.yml
@@ -0,0 +1,254 @@
+version: 1
+name: kubernetes-the-hard-way
+description: stackql-deploy example for kubernetes-the-hard-way
+providers:
+ - google
+globals:
+- name: project
+ description: google project name
+ value: "{{ GOOGLE_PROJECT }}"
+- name: region
+ value: australia-southeast1
+- name: default_zone
+ value: australia-southeast1-a
+resources:
+- name: network
+ description: vpc network for k8s-the-hard-way sample app
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ exports:
+ - vpc_name
+ - vpc_link
+- name: subnetwork
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-subnet"
+ - name: ip_cidr_range
+ values:
+ prd:
+ value: 192.168.0.0/16
+ sit:
+ value: 10.10.0.0/16
+ dev:
+ value: 10.240.0.0/24
+ exports:
+ - subnet_name
+ - subnet_link
+- name: public_address
+ props:
+ - name: address_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-ip-addr"
+ exports:
+ - address
+- name: controller_instances
+ file: instances.iql
+ type: multi
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-controller"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "controller"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ sit:
+ value:
+ - {networkIP: "10.10.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ prd:
+ value:
+ - {networkIP: "192.168.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: worker_instances
+ file: instances.iql
+ type: multi
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-worker"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "worker"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ sit:
+ value:
+ - {networkIP: "10.10.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ prd:
+ value:
+ - {networkIP: "192.168.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: health_checks
+ props:
+ - name: health_check_name
+ value: "{{ stack_name }}-{{ stack_env }}-kubernetes-health-check"
+ - name: health_check_interval_sec
+ value: 5
+ - name: health_check_description
+ value: Kubernetes Health Check
+ - name: health_check_timeout_sec
+ value: 5
+ - name: health_check_healthy_threshold
+ value: 2
+ - name: health_check_unhealthy_threshold
+ value: 2
+ - name: health_check_host
+ value: kubernetes.default.svc.cluster.local
+ - name: health_check_port
+ value: 80
+ - name: health_check_path
+ value: /healthz
+ exports:
+ - health_check_link
+- name: internal_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-internal-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["10.240.0.0/24", "10.200.0.0/16"]
+ prd:
+ value: ["192.168.0.0/16"]
+ sit:
+ value: ["10.10.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}, {IPProtocol: udp}, {IPProtocol: icmp}]
+- name: external_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-external-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ value: ["0.0.0.0/0"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp, ports: ["22"]}, {IPProtocol: tcp, ports: ["6443"]},{IPProtocol: icmp}]
+- name: health_check_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-health-check-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ value: ["209.85.152.0/22", "209.85.204.0/22", "35.191.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}]
+- name: get_controller_instances
+ type: query
+ exports:
+ - controller_instances
+- name: target_pool
+ props:
+ - name: target_pool_name
+ value: "{{ stack_name }}-{{ stack_env }}-target-pool"
+ - name: target_pool_session_affinity
+ value: NONE
+ - name: target_pool_health_checks
+ value: ["{{ health_check_link }}"]
+ - name: target_pool_instances
+ value: "{{ controller_instances }}"
+ exports:
+ - target_pool_link
+- name: forwarding_rule
+ props:
+ - name: forwarding_rule_name
+ value: "{{ stack_name }}-{{ stack_env }}-forwarding-rule"
+ - name: forwarding_rule_load_balancing_scheme
+ value: EXTERNAL
+ - name: forwarding_rule_port_range
+ value: 6443
+- name: routes
+ props:
+ - name: num_routes
+ value: 3
+ - name: route_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-route"
+ - name: route_priority
+ value: 1000
+ - name: route_data
+ values:
+ dev:
+ value:
+ - {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
+ - {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
+ - {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
+ sit:
+ value:
+ - {dest_range: "10.12.0.0/24", next_hop_ip: "10.10.0.20"}
+ - {dest_range: "10.12.1.0/24", next_hop_ip: "10.10.0.21"}
+ - {dest_range: "10.12.2.0/24", next_hop_ip: "10.10.0.22"}
+ prd:
+ value:
+ - {dest_range: "172.16.1.0/24", next_hop_ip: "192.168.0.20"}
+ - {dest_range: "172.16.2.0/24", next_hop_ip: "192.168.0.21"}
+ - {dest_range: "172.16.3.0/24", next_hop_ip: "192.168.0.22"}
\ No newline at end of file
diff --git a/examples/google/load-balanced-vms/README.md b/examples/google/load-balanced-vms/README.md
new file mode 100644
index 0000000..486de76
--- /dev/null
+++ b/examples/google/load-balanced-vms/README.md
@@ -0,0 +1,72 @@
+# example `stackql-deploy` stack
+
+Based upon the [__terraform-google-load-balanced-vms__](https://github.com/GoogleCloudPlatform/terraform-google-load-balanced-vms) project.
+
+
+
+## about `stackql-deploy`
+
+[`stackql-deploy`](https://pypi.org/project/stackql-deploy/) is a multi cloud deployment automation and testing framework which is an alternative to Terraform or similar IaC tools. `stackql-deploy` uses a declarative model/ELT based approach to cloud resource deployment (inspired by [`dbt`](https://www.getdbt.com/)). Advantages of `stackql-deploy` include:
+
+- declarative framework
+- no state file (state is determined from the target environment)
+- multi-cloud/omni-cloud ready
+- includes resource tests which can include secure config tests
+
+## installing `stackql-deploy`
+
+`stackql-deploy` is installed as a python based CLI using...
+
+```bash
+pip install stackql-deploy
+# or
+pip3 install stackql-deploy
+```
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## getting started with `stackql-deploy`
+
+Once installed, use the `init` command to scaffold a sample project directory to get started:
+
+```bash
+stackql-deploy init load-balanced-vms
+```
+
+this will create a directory named `load-balanced-vms` which can be updated for your stack, as you can see in this project.
+
+## deploying using `stackql-deploy`
+
+```bash
+export GOOGLE_CREDENTIALS=$(cat ./testcreds/stackql-deploy-project-demo-service-account.json)
+# deploy a stack
+stackql-deploy build \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run \
+--log-level DEBUG
+
+# test a stack
+stackql-deploy test \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+
+# teardown a stack
+stackql-deploy teardown \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+```
+
+
+
+stackql-deploy-project
\ No newline at end of file
diff --git a/examples/google/load-balanced-vms/example.tf b/examples/google/load-balanced-vms/example.tf
new file mode 100644
index 0000000..24e7b24
--- /dev/null
+++ b/examples/google/load-balanced-vms/example.tf
@@ -0,0 +1,107 @@
+
+# Create a Network Security Group and rule
+resource "azurerm_network_security_group" "tfexample" {
+ name = "my-terraform-nsg"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+
+ security_rule {
+ name = "HTTP"
+ priority = 1001
+ direction = "Inbound"
+ access = "Allow"
+ protocol = "Tcp"
+ source_port_range = "*"
+ destination_port_range = "8080"
+ source_address_prefix = "*"
+ destination_address_prefix = "*"
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Create a Network Interface
+resource "azurerm_network_interface" "tfexample" {
+ name = "my-terraform-nic"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+
+ ip_configuration {
+ name = "my-terraform-nic-ip-config"
+ subnet_id = azurerm_subnet.tfexample.id
+ private_ip_address_allocation = "Dynamic"
+ public_ip_address_id = azurerm_public_ip.tfexample.id
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Create a Network Interface Security Group association
+resource "azurerm_network_interface_security_group_association" "tfexample" {
+ network_interface_id = azurerm_network_interface.tfexample.id
+ network_security_group_id = azurerm_network_security_group.tfexample.id
+}
+
+# Create a Virtual Machine
+resource "azurerm_linux_virtual_machine" "tfexample" {
+ name = "my-terraform-vm"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+ network_interface_ids = [azurerm_network_interface.tfexample.id]
+ size = "Standard_DS1_v2"
+ computer_name = "myvm"
+ admin_username = "azureuser"
+ admin_password = "Password1234!"
+ disable_password_authentication = false
+
+ source_image_reference {
+ publisher = "Canonical"
+ offer = "UbuntuServer"
+ sku = "18.04-LTS"
+ version = "latest"
+ }
+
+ os_disk {
+ name = "my-terraform-os-disk"
+ storage_account_type = "Standard_LRS"
+ caching = "ReadWrite"
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Configurate to run automated tasks in the VM start-up
+resource "azurerm_virtual_machine_extension" "tfexample" {
+ name = "hostname"
+ virtual_machine_id = azurerm_linux_virtual_machine.tfexample.id
+ publisher = "Microsoft.Azure.Extensions"
+ type = "CustomScript"
+ type_handler_version = "2.1"
+
+  settings = <<SETTINGS
+    {
+      "commandToExecute": "echo 'Hello World' > index.html ; nohup busybox httpd -f -p 8080 &"
+    }
+  SETTINGS
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Data source to access the properties of an existing Azure Public IP Address
+data "azurerm_public_ip" "tfexample" {
+ name = azurerm_public_ip.tfexample.name
+ resource_group_name = azurerm_linux_virtual_machine.tfexample.resource_group_name
+}
+
+# Output variable: Public IP address
+output "public_ip" {
+ value = data.azurerm_public_ip.tfexample.ip_address
+}
\ No newline at end of file
diff --git a/examples/google/load-balanced-vms/resources/project_services.iql b/examples/google/load-balanced-vms/resources/project_services.iql
new file mode 100644
index 0000000..d6a1fcb
--- /dev/null
+++ b/examples/google/load-balanced-vms/resources/project_services.iql
@@ -0,0 +1,47 @@
+/*+ exists */
+SELECT name FROM google.serviceusage.services
+WHERE parent = '219788095364'
+AND parentType = 'projects'
+AND filter = 'state:ENABLED'
+AND name = 'compute.googleapis.com';
+
+
+projects//services/cloudtrace.googleapis.com
+
+SELECT * FROM google.serviceusage.services
+WHERE name = 'projects/123/services/serviceusage.googleapis.com'
+
+parent, parentType
+
+
+name string The resource name of the consumer and service. A valid name would be: - projects/123/services/serviceusage.googleapis.com
+config object The configuration of the service.
+parent string The resource name of the consumer. A valid name would be: - projects/123
+state string Whether or not the service has been enabled for use by the consumer.
+
+
+
+/*+ createorupdate */
+{% for network_interface in network_interfaces | from_json %}
+DELETE FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND instance = '{{ instance_name_prefix }}-{{ loop.index }}';
+{% endfor %}
+
+
+
+
+{{ range .root_projects }}
+{{ $project := . }}
+{{ range .apis }}
+EXEC google.serviceusage.services.enable
+@name = (
+ SELECT
+ 'projects/' || name || '/services/{{ . }}'
+ FROM google.cloudresourcemanager.projects
+ WHERE parent='{{ $global.organization_id }}'
+ and displayName= '{{ $project.displayName }}'
+);
+{{end}}
+{{end}}
\ No newline at end of file
diff --git a/examples/google/load-balanced-vms/stackql_manifest.yml b/examples/google/load-balanced-vms/stackql_manifest.yml
new file mode 100644
index 0000000..3b0feb2
--- /dev/null
+++ b/examples/google/load-balanced-vms/stackql_manifest.yml
@@ -0,0 +1,153 @@
+version: 1
+name: "gcp-stack"
+description: StackQL-Deploy example for GCP infrastructure setup
+providers:
+ - google
+globals:
+ - name: project_id
+ description: Google Cloud Project ID
+ value: "{{ GOOGLE_PROJECT_ID }}"
+ - name: region
+ description: GCP region
+ value: "us-central1"
+ - name: zone
+ description: GCP zone
+ value: "us-central1-a"
+resources:
+ - name: project_services
+ props:
+ - name: apis
+ value:
+ - compute.googleapis.com
+ # - name: vpc_network
+ # props:
+ # - name: network_name
+ # value: "{{ stack_name }}-network"
+ # - name: subnets
+ # value:
+ # - name: "{{ stack_name }}-subnet"
+ # region: "{{ region }}"
+ # cidr_block: "10.10.10.0/24"
+ # exports:
+ # - network_id
+ # - subnet_id
+ # - name: firewall_rules
+ # props:
+ # - name: allow_ssh
+ # value:
+ # - name: "{{ stack_name }}-allow-ssh"
+ # network: "{{ network_id }}"
+ # allow:
+ # - protocol: "tcp"
+ # ports: ["22"]
+ # source_ranges: ["0.0.0.0/0"]
+ # - name: allow_healthchecks
+ # value:
+ # - name: "{{ stack_name }}-allow-healthchecks"
+ # network: "{{ network_id }}"
+ # allow:
+ # - protocol: "tcp"
+ # source_ranges: ["35.191.0.0/16", "209.85.152.0/22", "209.85.204.0/22"]
+ # exports:
+ # - firewall_rule_ids
+ # - name: compute_instance
+ # props:
+ # - name: instance_name
+ # value: "{{ stack_name }}-exemplar"
+ # - name: machine_type
+ # value: "e2-medium"
+ # - name: boot_disk
+ # value:
+ # - image: "debian-10"
+ # size: 200
+ # - name: network_interface
+ # value:
+ # - subnet: "{{ subnet_id }}"
+ # access_config: []
+ # - name: metadata_startup_script
+ # value: |
+ # apt-get update -y
+ # apt-get install nginx -y
+ # echo 'Hello, StackQL!' > /var/www/html/index.html
+ # exports:
+ # - instance_id
+ # - instance_self_link
+ # - name: instance_snapshot
+ # props:
+ # - name: snapshot_name
+ # value: "{{ stack_name }}-snapshot"
+ # - name: source_disk
+ # value: "{{ instance_self_link }}"
+ # - name: storage_locations
+ # value: ["{{ region }}"]
+ # exports:
+ # - snapshot_id
+ # - name: compute_image
+ # props:
+ # - name: image_name
+ # value: "{{ stack_name }}-image"
+ # - name: source_snapshot
+ # value: "{{ snapshot_id }}"
+ # exports:
+ # - image_id
+ # - name: instance_template
+ # props:
+ # - name: template_name
+ # value: "{{ stack_name }}-template"
+ # - name: machine_type
+ # value: "e2-micro"
+ # - name: disk
+ # value:
+ # - source_image: "{{ image_id }}"
+ # auto_delete: true
+ # - name: network_interface
+ # value:
+ # - subnet: "{{ subnet_id }}"
+ # exports:
+ # - template_id
+ # - name: managed_instance_group
+ # props:
+ # - name: mig_name
+ # value: "{{ stack_name }}-mig"
+ # - name: zone
+ # value: "{{ zone }}"
+ # - name: target_size
+ # value: 3
+ # - name: instance_template
+ # value: "{{ template_id }}"
+ # exports:
+ # - mig_id
+ # - name: load_balancer
+ # props:
+ # - name: lb_name
+ # value: "{{ stack_name }}-lb"
+ # - name: backend_services
+ # value:
+ # - backend:
+ # group: "{{ mig_id }}"
+ # balancing_mode: UTILIZATION
+ # capacity_scaler: 1
+ # - name: health_checks
+ # value:
+ # - name: "{{ stack_name }}-health-check"
+ # port: 80
+ # request_path: "/"
+ # exports:
+ # - lb_ip
+ # - name: health_check_firewall
+ # props:
+ # - name: fw_name
+ # value: "{{ stack_name }}-allow-health-check-fw"
+ # - name: fw_direction
+ # value: "INGRESS"
+ # - name: fw_source_ranges
+ # value: ["35.191.0.0/16", "209.85.152.0/22", "209.85.204.0/22"]
+ # - name: fw_allowed
+ # value:
+ # - protocol: "tcp"
+ # exports:
+ # - fw_id
+ # - name: health_check_test
+  #   type: query
+  #   exports:
+  #     - health_check_result
diff --git a/ref-python-packages/pystackql/.devcontainer/devcontainer.json b/ref-python-packages/pystackql/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..683ff27
--- /dev/null
+++ b/ref-python-packages/pystackql/.devcontainer/devcontainer.json
@@ -0,0 +1,25 @@
+{
+ "image": "mcr.microsoft.com/devcontainers/universal:2",
+ "containerEnv": {
+ },
+ "hostRequirements": {
+ "cpus": 2
+ },
+ "waitFor": "onCreateCommand",
+ "updateContentCommand": "pip install -e .",
+ "postCreateCommand": "",
+ "postStartCommand": "git reset --hard && git clean -fd",
+ "customizations": {
+ "codespaces": {
+ "openFiles": [
+ "notebooks/demo.ipynb"
+ ]
+ },
+ "vscode": {
+ "extensions": [
+ "ms-toolsai.jupyter",
+ "ms-python.python"
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/bug_report.md b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..0d88430
--- /dev/null
+++ b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,38 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: '[BUG]'
+labels: 'bug'
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Browser [e.g. chrome, safari]
+ - Version [e.g. 22]
+
+**Smartphone (please complete the following information):**
+ - Device: [e.g. iPhone6]
+ - OS: [e.g. iOS8.1]
+ - Browser [e.g. stock browser, safari]
+ - Version [e.g. 22]
+
+**Additional context**
+Add any other context about the problem here.
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/feature_request.md b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..9d33cbe
--- /dev/null
+++ b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[FEATURE]"
+labels: enhancement
+assignees: ''
+
+---
+
+**Feature Description**
+A clear and concise description of what you want to happen.
+
+**Example(s)**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Possible Approaches or Libraries to Consider**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/question.md b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 0000000..b72dd78
--- /dev/null
+++ b/ref-python-packages/pystackql/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,15 @@
+---
+name: Question
+about: Pose a question to the StackQL team
+title: "[QUESTION]"
+labels: question
+assignees: ''
+
+---
+
+
+## Question
+
+This channel is an opportunity to ask ad-hoc questions to the `stackql` team. This channel is in lieu of an official platform for ongoing discussions and questions. Please ask your question :)
+
+**Note**: Questions over github issues will be deprecated and retired once we settle on a platform / process ongoing.
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.github/workflows/claude.yaml b/ref-python-packages/pystackql/.github/workflows/claude.yaml
new file mode 100644
index 0000000..d07f4be
--- /dev/null
+++ b/ref-python-packages/pystackql/.github/workflows/claude.yaml
@@ -0,0 +1,36 @@
+name: Claude PR Assistant
+
+on:
+ issue_comment:
+ types: [created]
+ pull_request_review_comment:
+ types: [created]
+ issues:
+ types: [opened, assigned]
+ pull_request_review:
+ types: [submitted]
+
+jobs:
+ claude-code-action:
+ if: |
+ (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
+ (github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+ id-token: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Run Claude PR Action
+ uses: anthropics/claude-code-action@beta
+ with:
+ anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
+ timeout_minutes: "60"
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.github/workflows/test.yaml b/ref-python-packages/pystackql/.github/workflows/test.yaml
new file mode 100644
index 0000000..d3623cf
--- /dev/null
+++ b/ref-python-packages/pystackql/.github/workflows/test.yaml
@@ -0,0 +1,127 @@
+name: 'Run Tests'
+on:
+ pull_request:
+ branches:
+ - main
+
+jobs:
+ run-tests:
+ strategy:
+ matrix:
+ os:
+ - ubuntu-latest
+ - windows-latest
+ - macos-latest
+ python-version:
+ - "3.9"
+ - "3.10"
+ - "3.11"
+ - "3.12"
+ - "3.13"
+ runs-on: ${{matrix.os}}
+ name: '${{matrix.os}} Python ${{matrix.python-version}}'
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4.2.2
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5.6.0
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Upgrade pip
+ shell: bash
+ run: |
+ python3 -m pip install --upgrade pip
+
+ - name: Install pystackql with all dependencies
+ run: |
+ pip install -e .
+
+ - name: Install test dependencies
+ run: |
+          pip install "pytest>=6.2.5" "pytest-cov>=2.12.0" "nose>=1.3.7"
+
+ - name: setup-stackql
+ uses: stackql/setup-stackql@v2.2.3
+ with:
+ use_wrapper: true
+
+ - name: Show stackql version (Linux/macOS)
+ if: matrix.os != 'windows-latest'
+ shell: bash
+ run: |
+ stackql --version
+
+ - name: Show stackql version (Windows)
+ if: matrix.os == 'windows-latest'
+ shell: cmd
+ run: |
+ stackql-bin.exe --version
+
+ - name: Move stackql binary to temp dir (Linux/macOS)
+ if: matrix.os != 'windows-latest'
+ shell: bash
+ run: |
+ STACKQL_PATH=$(which stackql)
+ mkdir -p /tmp || true
+ cp "$STACKQL_PATH" /tmp/stackql
+ echo "StackQL binary moved from ${STACKQL_PATH} to /tmp/stackql"
+
+ - name: Move stackql binary to temp dir (Windows)
+ if: matrix.os == 'windows-latest'
+ shell: pwsh
+ run: |
+ $bin = Join-Path $Env:STACKQL_CLI_PATH 'stackql-bin.exe'
+ if (-Not (Test-Path $bin)) {
+ throw "Binary not found at $bin"
+ }
+ Copy-Item $bin -Destination "C:\Temp\stackql.exe" -Force
+ Write-Host "Moved real StackQL binary to C:\Temp\stackql.exe"
+
+ - name: Run non-server tests
+ env:
+ GITHUB_ACTIONS: 'true'
+ run: |
+ python3 run_tests.py
+
+ - name: Start StackQL server and run tests (Linux/macOS)
+ if: matrix.os != 'windows-latest'
+ shell: bash
+ env:
+ GITHUB_ACTIONS: 'true'
+ run: |
+ nohup /tmp/stackql -v --pgsrv.port=5466 srv &
+ sleep 5
+ python3 run_server_tests.py
+
+ - name: Start StackQL server (Windows)
+ if: matrix.os == 'windows-latest'
+ shell: pwsh
+ run: |
+ Start-Process -FilePath "C:\Temp\stackql.exe" `
+ -ArgumentList "-v", "--pgsrv.port=5466", "srv"
+ Start-Sleep -Seconds 5
+
+ - name: Stop StackQL server (Linux/macOS)
+ if: matrix.os != 'windows-latest'
+ shell: bash
+ run: |
+ echo "Stopping StackQL server on Unix/macOS..."
+ PID=$(pgrep -f "/tmp/stackql.*srv" || pgrep -f "stackql.*srv" || echo "")
+ if [ -z "$PID" ]; then
+ echo "No stackql server process found."
+ else
+ echo "stopping stackql server (PID: $PID)..."
+ kill -9 $PID
+ echo "stackql server stopped."
+ fi
+
+ - name: Stop StackQL server (Windows)
+ if: matrix.os == 'windows-latest'
+ shell: cmd
+ run: |
+ echo "Stopping StackQL server on Windows..."
+ taskkill /F /IM stackql.exe 2>nul || echo "No stackql.exe process found"
+ echo "StackQL server stopped (Windows)"
diff --git a/ref-python-packages/pystackql/.gitignore b/ref-python-packages/pystackql/.gitignore
new file mode 100644
index 0000000..173aff7
--- /dev/null
+++ b/ref-python-packages/pystackql/.gitignore
@@ -0,0 +1,181 @@
+# Miscellaneous
+.pypirc
+/.vscode
+/.vscode/*
+
+# Virtual environments
+.venv/
+venv/
+env/
+ENV/
+
+# stackql
+.stackql/
+stackql
+stackql-*.sh
+.env
+nohup.out
+
+# Byte-compiled / optimized / DLL files
+__pycache__
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+stackql
+stackql-zip
+
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/.readthedocs.yaml b/ref-python-packages/pystackql/.readthedocs.yaml
new file mode 100644
index 0000000..39a5da7
--- /dev/null
+++ b/ref-python-packages/pystackql/.readthedocs.yaml
@@ -0,0 +1,22 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ configuration: docs/source/conf.py
+
+# Optionally declare the Python requirements required to build your docs
+python:
+ install:
+ - method: pip
+ path: .
+ - requirements: docs/requirements.txt
diff --git a/ref-python-packages/pystackql/.readthedocs.yml b/ref-python-packages/pystackql/.readthedocs.yml
new file mode 100644
index 0000000..d93a9dd
--- /dev/null
+++ b/ref-python-packages/pystackql/.readthedocs.yml
@@ -0,0 +1,35 @@
+# Read the Docs configuration file for Sphinx projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+ # You can also specify other tool versions:
+ # nodejs: "20"
+ # rust: "1.70"
+ # golang: "1.20"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+ configuration: docs/source/conf.py
+ # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
+ # builder: "dirhtml"
+ # Fail on all warnings to avoid broken references
+ # fail_on_warning: true
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+# - pdf
+# - epub
+
+# Optional but recommended, declare the Python requirements required
+# to build your documentation
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+python:
+ install:
+ - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/CHANGELOG.md b/ref-python-packages/pystackql/CHANGELOG.md
new file mode 100644
index 0000000..6033a75
--- /dev/null
+++ b/ref-python-packages/pystackql/CHANGELOG.md
@@ -0,0 +1,228 @@
+# Changelog
+
+## v3.8.2 (2025-11-09)
+
+### New Features
+
+- **Centralized Error Detection**: Added centralized error detection system with configurable patterns
+ - New `errors.yaml` configuration file with error patterns
+ - Supports three pattern types: fuzzy matches, exact matches, and regex matches
+ - Automatically detects errors in stdout and moves them to error field
+ - Eliminates need for external applications to parse error messages
+ - Includes patterns for HTTP 4xx/5xx errors, DNS failures, connection errors, and timeouts
+ - Added `ErrorDetector` class for pattern-based error detection
+
+- **Markdown-KV Output Format**: Added `markdownkv` output format optimized for LLM understanding
+ - Based on research showing 60.7% LLM accuracy vs 44.3% for CSV
+ - Ideal for RAG pipelines and AI-based systems processing tabular data
+ - Hierarchical structure with markdown headers and code blocks
+ - Supported in both local and server modes
+ - Reference: [Which Table Format Do LLMs Understand Best?](https://www.empiricalagents.com/blog/which-table-format-do-llms-understand-best)
+
+### Dependencies
+
+- Added `PyYAML>=5.4.0` for error pattern configuration
+
+### Testing
+
+- Added comprehensive test suite for error detection (`tests/test_error_detection.py`)
+- Added test suite for Markdown-KV format (`tests/test_markdownkv_format.py`)
+- Tests for regex pattern matching, DNS errors, connection errors, and timeouts
+- Tests for LLM-friendly data formatting
+
+## v3.8.1 (2025-06-25)
+
+### Updates
+
+- Added `--csv-download` argument for stackql magic commands
+- Refactor
+- Enhanced test coverage
+
+## v3.7.2 (2024-11-19)
+
+### Updates
+
+- Added `http_debug` constructor argument to return HTTP log information
+
+### Bug Fixes
+
+- Fixed issue passing JSON strings to queries, added test
+
+## v3.7.0 (2024-11-08)
+
+### Updates
+
+- Added support for setting command specific environment variables (`env_vars` and `custom_auth`) in `execute` and `executeStmt`.
+- Upgraded to use `psycopg`
+
+## v3.6.5 (2024-09-19)
+
+### Bug Fixes
+
+- Fix(MacOS): Enhanced platform check for stackql installation
+- Fix(Test Code): Removed loading of `test.env` in test execution script and Add mocking logic for `pystackql.StackQL`'s methods.
+
+## v3.6.4 (2024-07-17)
+
+### Updates
+
+- added dataflow dependency arguments
+
+## v3.6.3 (2024-06-22)
+
+### Updates
+
+- build updates
+
+## v3.6.2 (2024-05-06)
+
+### Updates
+
+- added `rowsaffected` to dict response for `executeStmt`
+
+## v3.6.1 (2024-04-18)
+
+### Updates
+
+- modified dict response for `executeStmt`
+- modified error response for `execute`, should never return `None`
+
+## v3.5.4 (2024-04-11)
+
+### Updates
+
+- added `suppress_errors` argument to the `execute` function
+
+## v3.5.3 (2024-04-08)
+
+### Updates
+
+- added `backend_storage_mode` and `backend_file_storage_location` constructor args for specifying a file based backend (not applicable in `server_mode`)
+
+## v3.5.2 (2024-03-21)
+
+### Updates
+
+- added `custom_registry` constructor arg for specifying a non-default registry
+
+## v3.5.1 (2024-03-15)
+
+### Updates
+
+- included `pandas` and `IPython` install requirements
+- optional required import of `psycopg2` only if in `server_mode`
+
+## v3.2.5 (2023-12-07)
+
+### Updates
+
+- included `app_root` and `execution_concurrency_limit` options in `StackQL` constructor
+
+## v3.2.4 (2023-10-24)
+
+### Updates
+
+- implemented non `server_mode` magic extension
+- updated dataframe output for statements
+- `pandas` type updates
+- updated class parameters
+- added additional tests
+- bin path defaults for codespaces notebooks
+
+## v3.0.0 (2023-10-11)
+
+### Updates
+
+- added `StackqlMagic` class for `jupyter`, `IPython` integration
+- `server_mode` is now used to connect to a `stackql` server process using `pyscopg2`
+- added additional tests
+
+## v2.0.0 (2023-08-15)
+
+### Updates
+
+- added `executeQueriesAsync` stackql class method
+
+## v1.5.0 (2023-04-04)
+
+### Updates
+
+- added `server_mode` to run a background stackql server process
+
+## v1.0.2 (2023-02-23)
+
+### Updates
+
+- enabled custom `download_dir` argument
+
+## v1.0.1 (2023-02-23)
+
+### Minor updates
+
+- updated `setup.py`
+
+## v1.0.0 (2023-02-22)
+
+### Refactor
+
+- refactored package
+- added support for `kwargs` for the `StackQL` constructor
+- added `setup.py`
+- added `docs` using `sphinx`
+- added additional tests
+
+## v0.9.0 (2022-06-06)
+
+### Bug Fixes
+
+- added exception handling
+
+## v0.5.0 (2022-06-03)
+
+### Bug Fixes
+
+- added local registry support
+- updated docs
+
+## v0.4.1 (2022-05-31)
+
+### Bug Fixes
+
+- added `str` handling
+- updated docs
+
+## v0.4.0 (2022-02-08)
+
+### Updates
+
+- updated `version` output
+- updated docs
+
+## v0.3.0 (2022-02-07)
+
+### Initial release as `pystackql`
+
+- added `auth` switch
+- converted `byte` output to `str`
+
+## v0.2.0 (2021-07-19)
+
+### Updates to `pyinfraql`
+
+- added `version` method
+- updates to accept `None` for arguments
+- updated docs
+
+## v0.1.1 (2021-07-16)
+
+### Updates to `pyinfraql`
+
+- added `dbfilepath` argument
+
+## v0.1.0 (2021-02-15)
+
+### Initial Release (as `pyinfraql`)
+
+- class constructor for `pyinfraql`
+- support for `google` provider
+- support for integration with `pandas`, `matplotlib` and `jupyter`
diff --git a/ref-python-packages/pystackql/LICENSE b/ref-python-packages/pystackql/LICENSE
new file mode 100644
index 0000000..f0e0b3c
--- /dev/null
+++ b/ref-python-packages/pystackql/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022-2025 StackQL Studios
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/MANIFEST.in b/ref-python-packages/pystackql/MANIFEST.in
new file mode 100644
index 0000000..ab11a96
--- /dev/null
+++ b/ref-python-packages/pystackql/MANIFEST.in
@@ -0,0 +1 @@
+include pystackql/errors.yaml
diff --git a/ref-python-packages/pystackql/README.rst b/ref-python-packages/pystackql/README.rst
new file mode 100644
index 0000000..e84b8fd
--- /dev/null
+++ b/ref-python-packages/pystackql/README.rst
@@ -0,0 +1,200 @@
+.. image:: https://stackql.io/img/stackql-logo-bold.png
+ :alt: "stackql logo"
+ :target: https://github.com/stackql/stackql
+ :align: center
+
+======================================
+PyStackQL - Python Wrapper for StackQL
+======================================
+
+.. image:: https://readthedocs.org/projects/pystackql/badge/?version=latest
+ :target: https://pystackql.readthedocs.io/en/latest/
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/pypi/v/pystackql
+ :target: https://pypi.org/project/pystackql/
+ :alt: PyPI
+
+.. image:: https://img.shields.io/pypi/dm/pystackql?label=pypi%20downloads
+ :target: https://pypi.org/project/pystackql/
+ :alt: PyPI - Downloads
+
+StackQL is an open source developer tool which allows you to query and interact with cloud and SaaS provider APIs using SQL grammar.
+StackQL can be used for cloud inventory analysis, cloud cost optimization, cloud security and compliance, provisioning/IaC, assurance, XOps, and more.
+
+`PyStackQL <https://github.com/stackql/pystackql>`_ is a Python wrapper for StackQL which allows you to use StackQL within Python applications and to use the power of Python to extend StackQL.
+PyStackQL can be used with ``pandas``, ``matplotlib``, ``plotly``, ``jupyter`` and other Python libraries to create powerful data analysis and visualization applications.
+
+For detailed documentation, including the API reference, see `Read the Docs <https://pystackql.readthedocs.io/en/latest/>`_.
+
+Installing PyStackQL
+--------------------
+
+PyStackQL can be installed with pip as follows:
+
+::
+
+ pip install pystackql
+
+You can install from source by cloning this repository and running a pip install command in the root directory of the repository:
+
+::
+
+ git clone https://github.com/stackql/pystackql
+ cd pystackql
+ pip install .
+
+Using PyStackQL
+---------------
+
+The following example demonstrates how to run a query and return the results as a ``pandas.DataFrame``:
+
+::
+
+ from pystackql import StackQL
+ region = "ap-southeast-2"
+ stackql = StackQL(output='pandas')
+
+ query = """
+ SELECT instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '%s'
+ GROUP BY instance_type
+ """ % (region)
+
+ df = stackql.execute(query)
+ print(df)
+
+Using PyStackQL with Jupyter Notebook
+-------------------------------------
+
+To use the integrated Jupyter magic commands provided by PyStackQL:
+
+1. **Load the Extension**:
+
+.. code-block:: python
+
+ %load_ext pystackql.magic
+
+2. **Execute a Query Using Line Magic**:
+
+.. code-block:: python
+
+ %stackql SHOW SERVICES IN azure
+
+3. **Or Using Cell Magic**:
+
+.. code-block:: python
+
+ %%stackql
+ SELECT status, count(*) as num_instances
+ FROM google.compute.instances
+ WHERE project = '$project'
+ AND zone = '$zone'
+ GROUP BY status
+
+You can find more examples in the `stackql docs <https://stackql.io/docs>`_ or the examples in `readthedocs <https://pystackql.readthedocs.io/en/latest/examples.html>`_.
+
+Supported Operating Systems
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+PyStackQL (and StackQL) are supported on:
+
+- MacOS (arm and amd)
+- Linux
+- Windows
+
+Supported Python Versions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+PyStackQL has been tested on:
+
+- Python 3.9
+- Python 3.10
+- Python 3.11
+- Python 3.12
+- Python 3.13
+
+Licensing
+~~~~~~~~~
+PyStackQL is licensed under the MIT License. The license is available `here <https://github.com/stackql/pystackql/blob/main/LICENSE>`_
+
+Building the docs
+~~~~~~~~~~~~~~~~~
+
+To build the docs, you will need to install the following packages:
+
+::
+
+ pip install sphinx sphinx_rtd_theme sphinx-autodoc-typehints
+
+Then, from the root directory of the repository, run:
+
+::
+
+ cd docs
+ make html
+
+The docs will be built in the ``docs/build/html`` directory.
+
+Building the package
+~~~~~~~~~~~~~~~~~~~~
+
+To build the package, you will need to install the following packages:
+
+::
+
+ pip install build
+
+Then, from the root directory of the repository, run:
+
+::
+
+ rm -rf dist/*
+ python3 -m build
+
+The package will be built in the ``dist`` directory.
+
+Testing Locally
+---------------
+
+Before testing, ensure you have all the required packages installed:
+
+::
+
+ pip install -r requirements.txt
+ pip install psycopg
+
+Once the dependencies are installed, you can run the tests using the provided script:
+
+::
+
+ sh run_tests
+
+This script sets up the necessary environment variables and then runs the unit tests.
+
+Note: Make sure to set up the environment variables in the `tests/creds/env_vars/test.env` file or supply them in another way before running the tests. The tests may require specific configurations or access keys to connect to services.
+
+For better isolation and reproducibility, consider using a virtual environment:
+
+::
+
+ python3 -m venv venv
+ source venv/bin/activate
+ pip install -r requirements.txt
+
+Once you're done testing, you can deactivate the virtual environment:
+
+::
+
+ deactivate
+
+Publishing the package
+~~~~~~~~~~~~~~~~~~~~~~
+
+To publish the package to PyPI, run the following command:
+
+::
+
+ twine upload --config-file .pypirc dist/*
+
diff --git a/ref-python-packages/pystackql/docs/Makefile b/ref-python-packages/pystackql/docs/Makefile
new file mode 100644
index 0000000..8bba488
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS ?=
+SPHINXBUILD = sphinx-build
+SPHINXPROJ = pystackql
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/docs/make.bat b/ref-python-packages/pystackql/docs/make.bat
new file mode 100644
index 0000000..bb21b16
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/make.bat
@@ -0,0 +1,36 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+set SPHINXPROJ=pystackql
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/docs/requirements.txt b/ref-python-packages/pystackql/docs/requirements.txt
new file mode 100644
index 0000000..228ef30
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/requirements.txt
@@ -0,0 +1,4 @@
+sphinx_rtd_theme
+pandas
+requests
+IPython
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/docs/source/conf.py b/ref-python-packages/pystackql/docs/source/conf.py
new file mode 100644
index 0000000..472b4e5
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/conf.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+#
+# Configuration file for the Sphinx documentation builder.
+#
+# This file does only contain a selection of the most common options. For a
+# full list see the documentation:
+# http://www.sphinx-doc.org/en/stable/config
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+sys.path.insert(0, os.path.abspath('../..'))
+
+
+# -- Project information -----------------------------------------------------
+
+project = 'pystackql'
+copyright = '2021-2025, StackQL Studios'
+author = 'StackQL Studios'
+
+# The short X.Y version
+version = ''
+# The full version, including alpha/beta/rc tags
+release = 'v3.8.1'
+
+
+# -- General configuration ---------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.viewcode',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.napoleon',
+]
+
+# Autodoc configuration
+autodoc_default_options = {
+ 'members': True,
+ 'member-order': 'bysource',
+ 'special-members': '__init__',
+ 'undoc-members': True,
+ 'exclude-members': '__weakref__'
+}
+
+# Napoleon settings for Google/NumPy style docstrings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_init_with_doc = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
+napoleon_preprocess_types = False
+napoleon_type_aliases = None
+napoleon_attr_annotations = True
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['../_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path .
+exclude_patterns = []
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'default'
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['../_static']
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# The default sidebars (for documents that don't match any pattern) are
+# defined by theme itself. Builtin themes are using these templates by
+# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+# 'searchbox.html']``.
+#
+# html_sidebars = {}
+
+
+# -- Options for HTMLHelp output ---------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'stackqldoc'
+
+
+# -- Options for LaTeX output ------------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+
+ 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+
+ 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+
+ 'preamble': '',
+
+ # Latex figure (float) alignment
+
+ 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'stackql.tex', 'stackql Documentation',
+ 'StackQL Studios', 'manual'),
+]
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'stackql', 'stackql Documentation',
+ [author], 1)
+]
+
+
+# -- Options for Texinfo output ----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'stackql', 'stackql Documentation',
+ author, 'stackql', 'Query and interact with cloud providers using SQL.',
+ 'Miscellaneous'),
+]
+
+
+# -- Extension configuration -------------------------------------------------
diff --git a/ref-python-packages/pystackql/docs/source/examples.rst b/ref-python-packages/pystackql/docs/source/examples.rst
new file mode 100644
index 0000000..14e8e8c
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/examples.rst
@@ -0,0 +1,241 @@
+Examples
+=============
+
+The following examples demonstrate running a StackQL query against a cloud or SaaS provider and returning the results as a ``pandas.DataFrame``.
+For brevity, the examples below assume that the appropriate imports have been specified and that an instance of the :class:`pystackql.StackQL` has been instantiated with the appropriate provider authentication.
+For more information, see :ref:`auth-overview` and the `StackQL provider docs <https://registry.stackql.io/>`_.
+
+.. contents:: Table of Contents
+ :local:
+ :depth: 2
+
+.. code-block:: python
+
+ from pystackql import StackQL
+ import pandas as pd
+ stackql = StackQL()
+
+Discover Provider Metadata
+**************************
+
+StackQL provider definitions are extensions of the provider's OpenAPI specification, which exposes all of the provider's services, resources, and operations - making them accessible using SQL grammar.
+StackQL allows you to explore the provider's metadata using the ``SHOW`` and ``DESCRIBE`` commands as demonstrated here.
+
+.. code-block:: python
+
+ query = "SHOW SERVICES in aws"
+ df = pd.read_json(stackql.execute(query))
+ print(df)
+
+.. code-block:: python
+
+ query = "SHOW RESOURCES in azure.compute"
+ df = pd.read_json(stackql.execute(query))
+ print(df)
+
+.. code-block:: python
+
+ query = "DESCRIBE EXTENDED google.compute.instances"
+ df = pd.read_json(stackql.execute(query))
+ print(df)
+
+Analyze Cloud Resource Inventory
+********************************
+
+StackQL can be used to collect, analyze, summarize, and report on cloud resource inventory data. The following example shows how to query the AWS EC2 inventory and return the number of instances by instance type.
+
+.. code-block:: python
+
+ regions = ["ap-southeast-2", "us-east-1"]
+ queries = [
+ f"""
+ SELECT '{region}' as region, instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '{region}'
+ GROUP BY instance_type
+ """
+ for region in regions
+ ]
+
+ res = stackql.executeQueriesAsync(queries)
+ df = pd.read_json(json.dumps(res))
+ print(df)
+
+Using `pystackql` with Pandas and Matplotlib
+********************************************
+
+:mod:`pystackql` can be used with `pandas <https://pandas.pydata.org/>`_ and `matplotlib <https://matplotlib.org/>`_ to create visualizations of the data returned by StackQL queries. Typically, this would be done in a Jupyter notebook. The following code can be used to generate a bar chart using :mod:`pystackql`, ``pandas`` and ``matplotlib``:
+
+.. code-block:: python
+
+ org = "my-okta-org"
+ query = """
+ SELECT status, COUNT(*) as num
+ FROM okta.user.users
+ WHERE subdomain = '%s'
+ GROUP BY status
+ """ % (org)
+
+ res = stackql.execute(query)
+ df = pd.read_json(res)
+ df.plot(kind='bar', title='User Status', x='status', y='num')
+
+.. image:: https://rawcdn.githack.com/stackql/stackql-jupyter-demo/46c330faab9d03a3cf79c3bc06571b5e7a3bf1e7/images/stackql-jupyter.png
+ :alt: StackQL Jupyter Demo
+
+Run CSPM Queries
+****************
+
+StackQL can perform point-in-time or interactive queries against cloud resources to determine if they comply with your organization's security policies. This is an example of a CSPM query to find buckets with public access enabled in a Google project.
+
+.. code-block:: python
+
+ project = "stackql-demo"
+ query = """
+ SELECT name,
+ JSON_EXTRACT(iamConfiguration, '$.publicAccessPrevention') as publicAccessPrevention
+ FROM google.storage.buckets
+ WHERE project = '%s'
+ """ % (project)
+
+ res = stackql.execute(query)
+ df = pd.read_json(res)
+ print(df)
+
+Run Cross Cloud Provider Queries
+********************************
+
+StackQL can be used to run queries across multiple cloud providers, this can be useful for cross cloud reporting or analysis. StackQL supports standard SQL set-based operators, including ``UNION`` and ``JOIN``. Here is an example of a ``UNION`` operation between AWS and GCP.
+
+.. code-block:: python
+
+ project = "stackql-demo"
+ gcp_zone = "australia-southeast1-a"
+ region = "ap-southeast-2"
+ google_query = f"""
+ select
+ 'google' as vendor,
+ name,
+ split_part(split_part(type, '/', 11), '-', 2) as type,
+ status,
+ sizeGb as size
+ from google.compute.disks
+ where project = '{project}'
+ and zone = '{gcp_zone}'
+ """
+ aws_query = f"""
+ select
+ 'aws' as vendor,
+ volumeId as name,
+ volumeType as type,
+ status,
+ size
+ from aws.ec2.volumes
+ where region = '{region}'
+ """
+ res = stackql.executeQueriesAsync([google_query, aws_query])
+ df = pd.read_json(json.dumps(res))
+ print(df)
+
+Deploy Cloud Resources
+**********************
+
+StackQL can be used as an Infrastructure-as-Code solution to deploy cloud resources using the ``INSERT`` command. Here is an example of deploying a 10GB disk in GCP. Note that ``INSERT`` operations do not return a dataset, so the :meth:`pystackql.StackQL.executeStmt` is used in this case.
+
+.. code-block:: python
+
+ project = "stackql-demo"
+ gcp_zone = "australia-southeast1-a"
+ query = """
+ INSERT INTO google.compute.disks (project, zone, name, sizeGb)
+ SELECT '%s',
+ '%s',
+ 'test10gbdisk', 10;
+ """ % (project, gcp_zone)
+
+ res = stackql.executeStmt(query)
+ print(res)
+
+``DELETE`` and ``UPDATE`` operations are also supported.
+
+.. note::
+
+ By default StackQL provider mutation operations are asynchronous (non-blocking), you can make them synchronous by using the ``/*+ AWAIT */`` query hint, for example:
+
+ .. code-block:: sql
+
+ INSERT /*+ AWAIT */ INTO google.compute.disks (project, zone, name, sizeGb)
+ SELECT 'stackql-demo',
+ 'australia-southeast1-a',
+ 'test10gbdisk', 10;
+
+Perform Lifecycle Operations
+****************************
+
+In addition to query, reporting, and analysis operations using ``SELECT`` and mutation operations using ``INSERT``, ``UPDATE``, and ``DELETE``, StackQL can also be used to perform lifecycle operations on cloud resources using the ``EXEC`` command. An example of a lifecycle operation is to start a GCP instance.
+
+.. code-block:: python
+
+ project = "stackql-demo"
+ gcp_zone = "australia-southeast1-a"
+ query = """
+ EXEC compute.instances.start
+ @instance = 'demo-instance-1',
+ @project = '%s',
+ @zone = '%s';
+ """ % (project, gcp_zone)
+
+ res = stackql.executeStmt(query)
+ print(res)
+
+To make the lifecycle operation synchronous (blocking), use the ``/*+ AWAIT */`` query hint, for example:
+
+.. code-block:: python
+
+ project = "stackql-demo"
+ gcp_zone = "australia-southeast1-a"
+ query = """
+ EXEC /*+ AWAIT */ compute.instances.start
+ @instance = 'demo-instance-1',
+ @project = '%s',
+ @zone = '%s';
+ """ % (project, gcp_zone)
+
+ res = stackql.executeStmt(query)
+ print(res)
+
+Jupyter Notebook Examples
+*************************
+
+Using StackQL in a Jupyter Notebook provides a seamless way to execute and visualize your queries. You can make use of Jupyter's line and cell magics to run your StackQL commands directly within notebook cells.
+
+To get started, you'd first load the StackQL magic extension:
+
+.. container:: jupyter-cell
+
+ .. code-block:: ipython
+
+ %load_ext pystackql
+
+Once loaded, you can run StackQL commands as either line or cell magics. Here are some of the previous examples rendered in a Jupyter-like style:
+
+.. container:: jupyter-cell
+
+ .. code-block:: ipython
+
+ %%stackql
+ SHOW SERVICES in aws
+
+.. container:: jupyter-cell
+
+ .. code-block:: ipython
+
+ %%stackql
+ SHOW RESOURCES in azure.compute
+
+.. container:: jupyter-cell
+
+ .. code-block:: ipython
+
+ %%stackql
+ DESCRIBE EXTENDED google.compute.instances
diff --git a/ref-python-packages/pystackql/docs/source/index.rst b/ref-python-packages/pystackql/docs/source/index.rst
new file mode 100644
index 0000000..bab138a
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/index.rst
@@ -0,0 +1,26 @@
+.. _StackQL: https://github.com/stackql/stackql
+
+pystackql documentation
+=======================
+`StackQL`_ is a dev tool which allows you to query and interact with cloud provider APIs using SQL grammar.
+StackQL can be used for cloud inventory analysis, security audits, Infrastructure-as-Code, lifecycle operations (such as starting or stopping a VM).
+
+The :mod:`pystackql` package is a Python library for using `StackQL <https://github.com/stackql/stackql>`_.
+For information on the StackQL language specification, grammar and built-in functions, see the `StackQL documentation <https://stackql.io/docs>`_.
+For StackQL provider information, see the `StackQL Provider Registry <https://registry.stackql.io/>`_.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents
+
+ intro
+ examples
+ pystackql
+ magic_ext
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/docs/source/intro.rst b/ref-python-packages/pystackql/docs/source/intro.rst
new file mode 100644
index 0000000..1060314
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/intro.rst
@@ -0,0 +1,283 @@
+Getting Started
+###############
+
+:mod:`pystackql` allows you to run StackQL queries against cloud and SaaS providers within a native Python environment.
+The :class:`pystackql.StackQL` class can be used with Pandas, Matplotlib, Jupyter and more.
+
+.. contents:: Contents
+ :local:
+ :depth: 2
+
+Installation
+************
+
+`pystackql` can be installed from `PyPi <https://pypi.org/project/pystackql/>`_ using pip:
+
+.. code-block:: sh
+
+ $ pip install pystackql
+
+or you can use the ``setup.py`` script:
+
+.. code-block:: sh
+
+ $ git clone https://github.com/stackql/pystackql && cd pystackql
+ $ python setup.py install
+
+To confirm that the installation was successful, you can run the following command:
+
+.. code-block:: python
+
+ from pystackql import StackQL
+ stackql= StackQL()
+
+ print(stackql.version)
+
+You should see a result like:
+
+.. code-block:: sh
+
+ v0.5.396
+
+.. _auth-overview:
+
+Authentication Overview
+***********************
+
+StackQL providers will have different authentication methods. To see the available authentication methods for a provider, consult the `StackQL provider docs <https://registry.stackql.io>`_.
+In general, most providers will use API keys or service account files, which can be generated and revoked from the provider's console.
+
+StackQL will use the designated environment variable or variables for each respective provider for authentication.
+For instance, if the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables are set on the machine you are running `pystackql` on, these will be used to authenticate requests to the `aws` provider.
+
+If you wish to use custom variables for providers you can override the defaults by supplying the ``auth`` keyword/named argument to the :class:`pystackql.StackQL` class constructor.
+The ``auth`` argument can be set to a dictionary or a string. If a dictionary is used, the keys should be the provider name and the values should be the authentication method.
+If a string is supplied, it needs to be a stringified JSON object with the same structure as the dictionary.
+
+.. note::
+
+ Keyword arguments to the :class:`pystackql.StackQL` class constructor are simply command line arguments to the `stackql exec command `_.
+
+Running Queries
+***************
+
+The :class:`pystackql.StackQL` class provides the :meth:`pystackql.StackQL.execute` method, which can be used to run StackQL queries and return results in ``json``, ``csv``, ``text`` or ``table`` format.
+
+Using Pandas
+============
+
+The following example demonstrates how to run a query and return the results as a ``pandas.DataFrame``:
+
+.. code-block:: python
+
+ from pystackql import StackQL
+ region = "ap-southeast-2"
+ stackql = StackQL(output='pandas')
+
+ query = """
+ SELECT instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '%s'
+ GROUP BY instance_type
+ """ % (region)
+
+ df = stackql.execute(query)
+ print(df)
+
+Using ``UNION`` and ``JOIN`` operators
+======================================
+
+StackQL is a fully functional SQL programming environment, enabling the full set of SQL relational algebra (including ``UNION`` and ``JOIN``) operations, here is an example of a simple ``UNION`` query:
+
+.. code-block:: python
+
+ ...
+ regions = ["ap-southeast-2", "us-east-1"]
+ query = """
+ SELECT '%s' as region, instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '%s'
+ GROUP BY instance_type
+ UNION
+ SELECT '%s' as region, instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '%s'
+ GROUP BY instance_type
+ """ % (regions[0], regions[0], regions[1], regions[1])
+
+ df = stackql.execute(query)
+ print(df)
+
+The preceding example will print a ``pandas.DataFrame`` which would look like this:
+
+.. code-block:: sh
+
+ instance_type num_instances region
+ 0 t2.medium 2 ap-southeast-2
+ 1 t2.micro 7 ap-southeast-2
+ 2 t2.small 4 ap-southeast-2
+ 3 t2.micro 6 us-east-1
+
+Running Queries Asynchronously
+==============================
+
+In addition to ``UNION`` DML operators, you can also run a batch (list) of queries asynchronously using the :meth:`pystackql.StackQL.executeQueriesAsync` method. The results of each query will be combined and returned as a single result set.
+
+.. code-block:: python
+
+ ...
+ regions = ["ap-southeast-2", "us-east-1"]
+
+ queries = [
+ f"""
+ SELECT '{region}' as region, instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '{region}'
+ GROUP BY instance_type
+ """
+ for region in regions
+ ]
+
+ df = stackql.executeQueriesAsync(queries)
+ print(df)
+
+
+Using built-in functions
+========================
+
+StackQL has a complete library of built-in functions and operators for manipulating scalar and complex fields (JSON objects); for more information on the available functions and operators, see the `StackQL docs <https://stackql.io/docs>`_.
+Here is an example of using the ``json_extract`` function to extract a field from a JSON object as well as the ``split_part`` function to extract a field from a string:
+
+.. code-block:: python
+
+ from pystackql import StackQL
+ subscriptionId = "273769f6-545f-45b2-8ab8-2f14ec5768dc"
+ resourceGroupName = "stackql-ops-cicd-dev-01"
+ stackql = StackQL() # output format defaults to 'dict'
+
+ query = """
+ SELECT name,
+ split_part(id, '/', 3) as subscription,
+ split_part(id, '/', 5) as resource_group,
+ json_extract(properties, '$.hardwareProfile.vmSize') as vm_size
+ FROM azure.compute.virtual_machines
+ WHERE resourceGroupName = '%s'
+ AND subscriptionId = '%s';
+ """ % (resourceGroupName, subscriptionId)
+
+ res = stackql.execute(query)
+ print(res)
+
+Overriding Parameters per Query
+================================
+
+The :meth:`pystackql.StackQL.execute` and :meth:`pystackql.StackQL.executeStmt` methods support keyword arguments that can override parameters set in the constructor for individual query executions. This is useful when you need to:
+
+- Change the output format for specific queries
+- Adjust CSV formatting (separator, headers) for specific exports
+- Override authentication for specific providers
+- Change other execution parameters on a per-query basis
+
+**Example: Overriding Output Format**
+
+You can create a StackQL instance with a default output format, then override it for specific queries:
+
+.. code-block:: python
+
+ from pystackql import StackQL
+
+ # Create instance with CSV output by default
+ provider_auth = {
+ "github": {
+ "credentialsenvvar": "GITHUBCREDS",
+ "type": "basic"
+ }
+ }
+ stackql = StackQL(auth=provider_auth, output="csv")
+
+ # This returns CSV format (default)
+ csv_result = stackql.execute("select id, name from github.repos.repos where org = 'stackql'")
+ print(csv_result)
+ # Output:
+ # id,name
+ # 443987542,stackql
+ # 441087132,stackql-provider-registry
+ # ...
+
+ # This overrides to dict format for this query only
+ dict_result = stackql.execute("select id, name from github.repos.repos where org = 'stackql'", output="dict")
+ print(dict_result)
+ # Output:
+ # [{"id":"443987542","name":"stackql"},{"id":"441087132","name":"stackql-provider-registry"},...]
+
+ # Subsequent calls without override use the original CSV format
+ csv_result2 = stackql.execute("select id, name from github.repos.repos where org = 'stackql' limit 1")
+
+**Example: Overriding CSV Formatting**
+
+You can also override CSV-specific parameters like separator and headers:
+
+.. code-block:: python
+
+ from pystackql import StackQL
+
+ # Create instance with default CSV settings
+ stackql = StackQL(output="csv", sep=",", header=False)
+
+ # Override to use pipe separator and include headers for this query
+ result = stackql.execute(
+ "select id, name from github.repos.repos where org = 'stackql' limit 3",
+ sep="|",
+ header=True
+ )
+
+**Supported Override Parameters**
+
+The following parameters can be overridden in :meth:`pystackql.StackQL.execute` and :meth:`pystackql.StackQL.executeStmt`:
+
+- ``output``: Output format ('dict', 'pandas', or 'csv')
+- ``sep``: CSV delimiter/separator (when output='csv')
+- ``header``: Include headers in CSV output (when output='csv')
+- ``auth``: Custom authentication for providers
+- ``custom_registry``: Custom StackQL provider registry URL
+- ``max_results``: Maximum results per HTTP request
+- ``page_limit``: Maximum pages per resource
+- ``max_depth``: Maximum depth for indirect queries
+- ``api_timeout``: API request timeout
+- ``http_debug``: Enable HTTP debug logging
+- Proxy settings: ``proxy_host``, ``proxy_port``, ``proxy_user``, ``proxy_password``, ``proxy_scheme``
+- Backend settings: ``backend_storage_mode``, ``backend_file_storage_location``, ``app_root``
+- Execution settings: ``execution_concurrency_limit``, ``dataflow_dependency_max``, ``dataflow_components_max``
+
+.. note::
+
+ Parameter overrides only affect the specific query execution and do not modify the StackQL instance's configuration. Subsequent queries will use the original constructor parameters unless overridden again.
+
+
+Using the Jupyter Magic Extension
+=================================
+
+For those using Jupyter Notebook or Jupyter Lab, `pystackql` offers a Jupyter magic extension that makes it even simpler to execute StackQL commands directly within your Jupyter cells.
+
+To get started with the magic extension, first load it into your Jupyter environment:
+
+.. code-block:: ipython
+
+ %load_ext pystackql.magic
+
+After loading the magic extension, you can use the `%%stackql` magic to execute StackQL commands in a dedicated Jupyter cell. The output will be displayed directly below the cell, just like any other Jupyter command output.
+
+Example:
+
+.. code-block:: ipython
+
+ %%stackql
+ SHOW SERVICES in aws
+
+This Jupyter magic extension provides a seamless integration of `pystackql` into your Jupyter workflows, allowing you to explore cloud and SaaS provider data interactively within your notebooks.
+
+To use the magic extension to run queries against a StackQL server, you can use the following command:
+
+.. code-block:: ipython
+
+ %load_ext pystackql.magics
diff --git a/ref-python-packages/pystackql/docs/source/magic_ext.rst b/ref-python-packages/pystackql/docs/source/magic_ext.rst
new file mode 100644
index 0000000..898a8c2
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/magic_ext.rst
@@ -0,0 +1,128 @@
+StackqlMagic Extension for Jupyter
+==================================
+
+The ``StackqlMagic`` extension for Jupyter notebooks provides a convenient interface to run StackQL queries against cloud or SaaS providers directly from within the notebook environment. Results can be visualized in a tabular format using Pandas DataFrames.
+
+Setup
+-----
+
+To enable the `StackqlMagic` extension in your Jupyter notebook, use the following command:
+
+.. code-block:: python
+
+ %load_ext pystackql.magic
+
+To use the `StackqlMagic` extension in your Jupyter notebook to run queries against a StackQL server, use the following command:
+
+.. code-block:: python
+
+ %load_ext pystackql.magics
+
+Usage
+-----
+
+The extension provides both line and cell magic functionalities:
+
+1. **Line Magic**:
+
+ You can run StackQL queries directly from a single line:
+
+ .. code-block:: python
+
+ %stackql DESCRIBE aws.ec2.instances
+
+2. **Cell Magic**:
+
+ For multi-line queries or when you need to use specific options:
+
+ .. code-block:: python
+
+ %%stackql
+ SELECT instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '$region' GROUP BY instance_type
+
+Options
+-------
+
+When using `StackqlMagic` as cell magic, you can pass in the following options:
+
+- ``--no-display`` : Suppresses the display of the results. Even when this option is enabled, the results are still saved in the `stackql_df` Pandas DataFrame.
+- ``--csv-download`` : Adds a download button below the query results that allows you to download the data as a CSV file.
+
+Examples
+--------
+
+Basic Query
+~~~~~~~~~~~
+
+.. code-block:: python
+
+ project = 'stackql-demo'
+ zone = 'australia-southeast1-a'
+ region = 'australia-southeast1'
+
+ %%stackql
+ SELECT SPLIT_PART(machineType, '/', -1) as machine_type, count(*) as num_instances
+ FROM google.compute.instances
+ WHERE project = '$project' AND zone = '$zone'
+ GROUP BY machine_type
+
+Suppressing Display
+~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ %%stackql --no-display
+ SELECT SPLIT_PART(machineType, '/', -1) as machine_type, count(*) as num_instances
+ FROM google.compute.instances
+ WHERE project = '$project' AND zone = '$zone'
+ GROUP BY machine_type
+
+This will run the query but won't display the results in the notebook. Instead, you can later access the results via the `stackql_df` variable.
+
+Downloading Results as CSV
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ %%stackql --csv-download
+ SELECT
+ 'Python' as language,
+ 'Development' as mode,
+ 'PyStackQL' as package
+
+This will display the query results in the notebook and add a download button below the results. Clicking the button will download the data as a CSV file named ``stackql_results.csv``.
+
+Combining Options
+~~~~~~~~~~~~~~~~~
+
+You can also combine options. For example, if you want to suppress the display but still want a download button:
+
+.. code-block:: python
+
+ # First run the query with no display
+ %%stackql --no-display
+ SELECT instance_type, COUNT(*) as num_instances
+ FROM aws.ec2.instances
+ WHERE region = '$region' GROUP BY instance_type
+
+ # Then manually display with the download button
+ from IPython.display import display
+ display(stackql_df)
+ from pystackql import StackqlMagic
+ StackqlMagic(get_ipython())._display_with_csv_download(stackql_df)
+
+.. note::
+
+ The results of the queries are always saved in a Pandas DataFrame named `stackql_df` in the notebook's current namespace. This allows you to further process or visualize the data as needed.
+
+An example of visualizing the results using Pandas is shown below:
+
+.. code-block:: python
+
+ stackql_df.plot(kind='pie', y='num_instances', labels=stackql_df['machine_type'], title='Instances by Type', autopct='%1.1f%%')
+
+--------
+
+This documentation provides a basic overview and usage guide for the `StackqlMagic` extension. For advanced usage or any additional features provided by the extension, refer to the source code or any other accompanying documentation.
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/docs/source/pystackql.rst b/ref-python-packages/pystackql/docs/source/pystackql.rst
new file mode 100644
index 0000000..890d389
--- /dev/null
+++ b/ref-python-packages/pystackql/docs/source/pystackql.rst
@@ -0,0 +1,12 @@
+API Reference for pystackql
+===========================
+
+StackQL Class
+-------------
+.. autoclass:: pystackql.StackQL
+ :members:
+ :undoc-members:
+ :show-inheritance:
+ :special-members: __init__
+
+
diff --git a/ref-python-packages/pystackql/launch_venv.sh b/ref-python-packages/pystackql/launch_venv.sh
new file mode 100644
index 0000000..741d635
--- /dev/null
+++ b/ref-python-packages/pystackql/launch_venv.sh
@@ -0,0 +1,140 @@
+#!/bin/bash
+# launch_venv.sh - Script to create, set up and activate a Python virtual environment for PyStackQL
+
+# Use simpler code without colors when running with sh
+if [ -n "$BASH_VERSION" ]; then
+ # Color definitions for bash
+ GREEN='\033[0;32m'
+ YELLOW='\033[1;33m'
+ BLUE='\033[0;34m'
+ RED='\033[0;31m'
+ NC='\033[0m' # No Color
+
+ # Function to print colored text in bash
+ cecho() {
+ printf "%b%s%b\n" "$1" "$2" "$NC"
+ }
+else
+ # No colors for sh
+ cecho() {
+ echo "$2"
+ }
+fi
+
+# Default virtual environment name
+VENV_NAME=".venv"
+REQUIREMENTS_FILE="requirements.txt"
+
+# Function to check if command exists
+command_exists() {
+ command -v "$1" >/dev/null 2>&1
+}
+
+# Banner
+cecho "$BLUE" "======================================="
+cecho "$BLUE" " PyStackQL Development Environment "
+cecho "$BLUE" "======================================="
+echo ""
+
+# Check for Python
+if ! command_exists python3; then
+ cecho "$RED" "Error: Python 3 is not installed."
+ echo "Please install Python 3 and try again."
+ exit 1
+fi
+
+# Print Python version
+cecho "$YELLOW" "Using Python:"
+python3 --version
+echo ""
+
+# Create virtual environment if it doesn't exist
+if [ ! -d "$VENV_NAME" ]; then
+ cecho "$YELLOW" "Creating virtual environment in ${VENV_NAME}..."
+ python3 -m venv "$VENV_NAME"
+ if [ $? -ne 0 ]; then
+ cecho "$RED" "Error: Failed to create virtual environment."
+ exit 1
+ fi
+ cecho "$GREEN" "Virtual environment created successfully."
+else
+ cecho "$YELLOW" "Using existing virtual environment in ${VENV_NAME}"
+fi
+
+# Determine the activate script based on OS
+case "$OSTYPE" in
+ msys*|win*|cygwin*)
+ # Windows
+ ACTIVATE_SCRIPT="$VENV_NAME/Scripts/activate"
+ ;;
+ *)
+ # Unix-like (Linux, macOS)
+ ACTIVATE_SCRIPT="$VENV_NAME/bin/activate"
+ ;;
+esac
+
+# Check if activation script exists
+if [ ! -f "$ACTIVATE_SCRIPT" ]; then
+ cecho "$RED" "Error: Activation script not found at $ACTIVATE_SCRIPT"
+ echo "The virtual environment may be corrupt. Try removing the $VENV_NAME directory and running this script again."
+ exit 1
+fi
+
+# Source the activation script
+cecho "$YELLOW" "Activating virtual environment..."
+. "$ACTIVATE_SCRIPT"
+if [ $? -ne 0 ]; then
+ cecho "$RED" "Error: Failed to activate virtual environment."
+ exit 1
+fi
+
+# Install/upgrade pip, setuptools, and wheel
+cecho "$YELLOW" "Upgrading pip, setuptools, and wheel..."
+pip install --upgrade pip setuptools wheel
+if [ $? -ne 0 ]; then
+ cecho "$RED" "Warning: Failed to upgrade pip, setuptools, or wheel. Continuing anyway."
+fi
+
+# Check if requirements.txt exists
+if [ ! -f "$REQUIREMENTS_FILE" ]; then
+ cecho "$RED" "Error: $REQUIREMENTS_FILE not found."
+ echo "Please make sure the file exists in the current directory."
+ cecho "$YELLOW" "Continuing with an activated environment without installing dependencies."
+else
+ # Install requirements
+ cecho "$YELLOW" "Installing dependencies from $REQUIREMENTS_FILE..."
+ pip install -r "$REQUIREMENTS_FILE"
+ if [ $? -ne 0 ]; then
+ cecho "$RED" "Warning: Some dependencies may have failed to install."
+ else
+ cecho "$GREEN" "Dependencies installed successfully."
+ fi
+fi
+
+# Install the package in development mode if setup.py exists
+if [ -f "setup.py" ]; then
+ cecho "$YELLOW" "Installing PyStackQL in development mode..."
+ pip install .
+ if [ $? -ne 0 ]; then
+ cecho "$RED" "Warning: Failed to install package in development mode."
+ else
+ cecho "$GREEN" "Package installed in development mode."
+ fi
+fi
+
+# Success message
+echo ""
+cecho "$GREEN" "Virtual environment is now set up and activated!"
+cecho "$YELLOW" "You can use PyStackQL and run tests."
+echo ""
+cecho "$BLUE" "To run tests:"
+echo " python run_tests.py"
+echo ""
+cecho "$BLUE" "To deactivate the virtual environment when done:"
+echo " deactivate"
+echo ""
+cecho "$BLUE" "======================================="
+
+# Keep the terminal open with the activated environment
+# The script will be source'd, so the environment stays active
+exec "${SHELL:-bash}"
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/notebooks/demo.ipynb b/ref-python-packages/pystackql/notebooks/demo.ipynb
new file mode 100644
index 0000000..7009386
--- /dev/null
+++ b/ref-python-packages/pystackql/notebooks/demo.ipynb
@@ -0,0 +1,148 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# PyStackQL Development Demo\n",
+ "\n",
+ "This notebook demonstrates how to use the development version of PyStackQL directly from the source code. Any changes you make to the PyStackQL code will be immediately reflected here."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from pystackql import StackQL\n",
+ "stackql = StackQL()\n",
+ "print(stackql.version)\n",
+ "print(stackql.package_version)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Load the magic extension\n",
+ "%load_ext pystackql.magic"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Basic Query Test\n",
+ "\n",
+ "Let's run a simple query to test that everything is working:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%stackql SELECT 42 as answer"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## CSV Download Test\n",
+ "\n",
+ "Let's test the CSV download functionality:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%%stackql --csv-download\n",
+ "SELECT \n",
+ " 'Python' as language,\n",
+ " 'Development' as mode,\n",
+ " 'PyStackQL' as package"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Test Cloud Provider Functionality\n",
+ "\n",
+ "If you have credentials configured, you can test actual cloud provider queries:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Uncomment and run the appropriate provider query based on your available credentials\n",
+ "\n",
+ "# AWS Example\n",
+ "# %stackql DESCRIBE aws.ec2.instances\n",
+ "\n",
+ "# GitHub Example\n",
+ "# %stackql registry pull github\n",
+ "# %stackql SELECT login FROM github.users.followers WHERE username = 'stackql'"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Development Tips\n",
+ "\n",
+ "1. After modifying PyStackQL code, you don't need to reinstall the package - changes are reflected immediately\n",
+ "2. You can run tests from the terminal with `pytest tests/`\n",
+ "3. If you modify deep core functionality, you may need to restart the kernel\n",
+ "4. To debug issues, you can use Python's built-in debugging tools:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Example debugging a PyStackQL function\n",
+ "from pystackql.core import StackQL\n",
+ "\n",
+ "# Get instance properties\n",
+ "stackql = StackQL()\n",
+ "props = stackql.properties()\n",
+ "print(props)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python (PyStackQL Dev)",
+ "language": "python",
+ "name": "pystackql-dev"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.4"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/ref-python-packages/pystackql/pyproject.toml b/ref-python-packages/pystackql/pyproject.toml
new file mode 100644
index 0000000..49f5698
--- /dev/null
+++ b/ref-python-packages/pystackql/pyproject.toml
@@ -0,0 +1,45 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "pystackql"
+version = "3.8.2"
+description = "A Python interface for StackQL"
+readme = "README.rst"
+authors = [
+ {name = "Jeffrey Aven", email = "javen@stackql.io"}
+]
+
+license = "MIT"
+classifiers = [
+ "Operating System :: Microsoft :: Windows",
+ "Operating System :: MacOS",
+ "Operating System :: POSIX :: Linux",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+]
+requires-python = ">=3.9"
+dependencies = [
+ "requests",
+ "pandas",
+ "IPython",
+ "psycopg[binary]>=3.1.0",
+ "nest-asyncio>=1.5.5",
+ "termcolor>=1.1.0",
+ "tqdm>=4.61.0",
+ "PyYAML>=5.4.0",
+]
+
+[tool.setuptools.packages.find]
+include = ["pystackql", "pystackql.*"]
+
+[tool.setuptools]
+include-package-data = true
+
+[project.urls]
+Homepage = "https://github.com/stackql/pystackql"
+Documentation = "https://pystackql.readthedocs.io"
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/__init__.py b/ref-python-packages/pystackql/pystackql/__init__.py
new file mode 100644
index 0000000..74d3d3f
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/__init__.py
@@ -0,0 +1,17 @@
+# pystackql/__init__.py
+
+"""
+PyStackQL - Python wrapper for StackQL
+
+This package provides a Python interface to the StackQL query language
+for cloud resource querying.
+"""
+
+# Import the core StackQL class
+from .core import StackQL
+
+# Import the magic classes for Jupyter integration
+from .magic_ext import StackqlMagic, StackqlServerMagic
+
+# Define the public API
+__all__ = ['StackQL', 'StackqlMagic', 'StackqlServerMagic']
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/__init__.py b/ref-python-packages/pystackql/pystackql/core/__init__.py
new file mode 100644
index 0000000..b1231b2
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/__init__.py
@@ -0,0 +1,16 @@
+# pystackql/core/__init__.py
+
+"""
+Core functionality for PyStackQL.
+
+This module provides the core functionality for the PyStackQL package,
+including the main StackQL class.
+"""
+
+from .binary import BinaryManager
+from .server import ServerConnection
+from .query import QueryExecutor, AsyncQueryExecutor
+from .output import OutputFormatter
+from .stackql import StackQL
+
+__all__ = ['StackQL']
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/binary.py b/ref-python-packages/pystackql/pystackql/core/binary.py
new file mode 100644
index 0000000..023f671
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/binary.py
@@ -0,0 +1,79 @@
+# pystackql/core/binary.py
+
+"""
+Binary management module for PyStackQL.
+
+This module handles the installation, version checking, and management
+of the StackQL binary executable.
+"""
+
+import os
+from ..utils import (
+ is_binary_local,
+ get_platform,
+ get_download_dir,
+ get_binary_name,
+ setup_binary,
+ get_binary_version
+)
+
+class BinaryManager:
+ """Manages the StackQL binary installation and versions.
+
+ This class is responsible for ensuring the StackQL binary is available
+ and correctly configured for use.
+ """
+
+ def __init__(self, download_dir=None):
+ """Initialize the BinaryManager.
+
+ Args:
+ download_dir (str, optional): Directory to store the binary. Defaults to None.
+ """
+ self.platform_info, self.system = get_platform()
+
+ # Determine binary location
+ if self.system == 'Linux' and is_binary_local(self.system) and download_dir is None:
+ self.bin_path = '/usr/local/bin/stackql'
+ self.download_dir = '/usr/local/bin'
+ else:
+ # Use provided download_dir or default
+ self.download_dir = download_dir if download_dir else get_download_dir()
+ self.bin_path = os.path.join(self.download_dir, get_binary_name(self.system))
+
+ # Check if binary exists and get version
+ self._ensure_binary_exists()
+
+ def _ensure_binary_exists(self):
+ """Ensure the binary exists, download it if not."""
+ if os.path.exists(self.bin_path):
+ # Binary exists, get version
+ self.version, self.sha = get_binary_version(self.bin_path)
+ else:
+ # Binary doesn't exist, download it
+ setup_binary(self.download_dir, self.system)
+ self.version, self.sha = get_binary_version(self.bin_path)
+
+ def upgrade(self, showprogress=True):
+ """Upgrade the StackQL binary to the latest version.
+
+ Args:
+ showprogress (bool, optional): Whether to show download progress. Defaults to True.
+
+ Returns:
+ str: A message indicating the new version
+ """
+ setup_binary(self.download_dir, self.system, showprogress)
+ self.version, self.sha = get_binary_version(self.bin_path)
+ return f"stackql upgraded to version {self.version}"
+
+ def get_version_info(self):
+ """Get the version information for the binary.
+
+ Returns:
+ dict: Version information including version and sha
+ """
+ return {
+ "version": self.version,
+ "sha": self.sha
+ }
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/error_detector.py b/ref-python-packages/pystackql/pystackql/core/error_detector.py
new file mode 100644
index 0000000..9bd77ef
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/error_detector.py
@@ -0,0 +1,151 @@
+# pystackql/core/error_detector.py
+
+"""
+Error detection module for PyStackQL.
+
+This module provides centralized error detection logic that checks messages
+against predefined error patterns loaded from errors.yaml.
+"""
+
+import os
+import re
+import yaml
+
+
+class ErrorDetector:
+ """Detects errors in query results based on predefined patterns.
+
+ This class loads error patterns from errors.yaml and provides methods
+ to check if a message contains any of these error patterns.
+ """
+
+ def __init__(self):
+ """Initialize the ErrorDetector by loading error patterns from errors.yaml."""
+ self.fuzzy_patterns = []
+ self.exact_patterns = []
+ self.regex_patterns = [] # List of compiled regex pattern objects
+ self._load_error_patterns()
+
+ def _load_error_patterns(self):
+ """Load error patterns from the errors.yaml file.
+
+ The errors.yaml file should be located in the same directory as this module.
+ """
+ # Get the directory containing the pystackql package
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ package_dir = os.path.dirname(current_dir)
+ errors_file = os.path.join(package_dir, 'errors.yaml')
+
+ try:
+ if os.path.exists(errors_file):
+ with open(errors_file, 'r') as f:
+ error_config = yaml.safe_load(f)
+
+ if error_config and 'errors' in error_config:
+ errors = error_config['errors']
+
+ # Load fuzzy match patterns (case-insensitive substring matching)
+ if 'fuzzy_matches' in errors:
+ self.fuzzy_patterns = [
+ pattern.lower()
+ for pattern in errors['fuzzy_matches']
+ if pattern
+ ]
+
+ # Load exact match patterns (case-sensitive exact/prefix matching)
+ if 'exact_matches' in errors:
+ self.exact_patterns = [
+ pattern
+ for pattern in errors['exact_matches']
+ if pattern
+ ]
+
+ # Load regex patterns (compile them for efficiency)
+ if 'regex_matches' in errors:
+ self.regex_patterns = []
+ for pattern in errors['regex_matches']:
+ if pattern:
+ try:
+ # Compile with IGNORECASE flag for case-insensitive matching
+ compiled = re.compile(pattern, re.IGNORECASE)
+ self.regex_patterns.append((pattern, compiled))
+ except re.error as regex_err:
+ print(f"Warning: Invalid regex pattern '{pattern}': {regex_err}")
+ except Exception as e:
+ # If we can't load the error patterns, continue with empty lists
+ # This ensures the module doesn't break existing functionality
+ print(f"Warning: Could not load error patterns from {errors_file}: {e}")
+
+ def is_error(self, message):
+ """Check if a message contains any error patterns.
+
+ Args:
+ message (str): The message to check for error patterns
+
+ Returns:
+ bool: True if the message matches any error pattern, False otherwise
+ """
+ if not message or not isinstance(message, str):
+ return False
+
+ message_lower = message.lower()
+
+ # Check fuzzy matches (case-insensitive substring matching)
+ for pattern in self.fuzzy_patterns:
+ if pattern in message_lower:
+ return True
+
+ # Check exact matches (exact string or starts with prefix)
+ for pattern in self.exact_patterns:
+ if message == pattern or message.startswith(pattern):
+ return True
+
+ # Check regex matches
+ for pattern_str, compiled_pattern in self.regex_patterns:
+ if compiled_pattern.search(message):
+ return True
+
+ return False
+
+ def extract_error_info(self, message):
+ """Extract error information from a message.
+
+ Args:
+ message (str): The error message
+
+ Returns:
+ dict: Dictionary containing error details with 'error' and 'detected_pattern' keys
+ """
+ if not self.is_error(message):
+ return None
+
+ message_lower = message.lower()
+ detected_pattern = None
+ pattern_type = None
+
+ # Find which pattern was matched (check in order: fuzzy, exact, regex)
+ for pattern in self.fuzzy_patterns:
+ if pattern in message_lower:
+ detected_pattern = pattern
+ pattern_type = "fuzzy"
+ break
+
+ if not detected_pattern:
+ for pattern in self.exact_patterns:
+ if message == pattern or message.startswith(pattern):
+ detected_pattern = pattern
+ pattern_type = "exact"
+ break
+
+ if not detected_pattern:
+ for pattern_str, compiled_pattern in self.regex_patterns:
+ if compiled_pattern.search(message):
+ detected_pattern = pattern_str
+ pattern_type = "regex"
+ break
+
+ return {
+ "error": message,
+ "detected_pattern": detected_pattern,
+ "pattern_type": pattern_type
+ }
diff --git a/ref-python-packages/pystackql/pystackql/core/output.py b/ref-python-packages/pystackql/pystackql/core/output.py
new file mode 100644
index 0000000..79b1213
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/output.py
@@ -0,0 +1,378 @@
+# pystackql/core/output.py
+
+"""
+Output formatting module for PyStackQL.
+
+This module handles the formatting of query results into different output formats.
+"""
+
+import json
+from io import StringIO
+from .error_detector import ErrorDetector
+
class OutputFormatter:
    """Formats query results into different output formats.

    This class is responsible for converting raw query results into
    the desired output format (dict, pandas, csv, or markdownkv).
    """

    def __init__(self, output_format='dict'):
        """Initialize the OutputFormatter.

        Args:
            output_format (str, optional): The output format. Defaults to 'dict'.
                Allowed values: 'dict', 'pandas', 'csv', 'markdownkv'

        Raises:
            ValueError: If an invalid output format is specified
        """
        ALLOWED_OUTPUTS = {'dict', 'pandas', 'csv', 'markdownkv'}
        if output_format.lower() not in ALLOWED_OUTPUTS:
            raise ValueError(f"Invalid output format. Expected one of {ALLOWED_OUTPUTS}, got {output_format}.")
        self.output_format = output_format.lower()
        self.error_detector = ErrorDetector()

    def format_query_result(self, result, suppress_errors=True):
        """Format a query result.

        Args:
            result (dict): The raw query result from the executor; may carry
                'exception', 'data' (stdout) and/or 'error' (stderr) keys
            suppress_errors (bool, optional): Whether to suppress errors. Defaults to True.

        Returns:
            The formatted result in the specified output format
        """
        # Exceptions take precedence over any data/error payload
        if "exception" in result:
            return self._format_exception(result["exception"])

        # Data payload (query stdout)
        if "data" in result:
            return self._format_data(result["data"])

        # stderr is only surfaced when error suppression is off
        if "error" in result and not suppress_errors:
            return self._format_error(result["error"])

        # No data, no (unsuppressed) error: return an empty result
        return self._format_empty()

    def _format_exception(self, exception_msg):
        """Format an exception message.

        Args:
            exception_msg (str): The exception message

        Returns:
            The formatted exception in the specified output format
        """
        # Exceptions and errors render identically; delegate to avoid drift
        return self._format_error(exception_msg)

    def _format_error(self, error_msg):
        """Format an error message.

        Args:
            error_msg (str): The error message

        Returns:
            The formatted error in the specified output format
        """
        if self.output_format == 'pandas':
            import pandas as pd
            return pd.DataFrame({'error': [error_msg]}) if error_msg else pd.DataFrame({'error': []})
        elif self.output_format == 'csv':
            return error_msg
        elif self.output_format == 'markdownkv':
            return self._format_markdownkv_error(error_msg)
        else:  # dict
            return [{"error": error_msg}]

    def _wrap_error_rows(self, rows):
        """Return error-row dicts as a DataFrame in 'pandas' mode, else as-is.

        Fixes a latent NameError: the previous code referenced `pd` in
        JSON-parse error paths without ever importing pandas.

        Args:
            rows (list): List of dicts describing the error

        Returns:
            list or DataFrame: The rows, wrapped for the active output format
        """
        if self.output_format == 'pandas':
            import pandas as pd
            return pd.DataFrame(rows)
        return rows

    def _format_data(self, data):
        """Format data.

        This method processes SQL type objects from StackQL:
        - SQL NULL values: {'String': '', 'Valid': False} -> None
        - Regular values: {'String': 'value', 'Valid': True} -> 'value'
        - Empty strings: {'String': '', 'Valid': True} -> '' (preserved as empty string)

        Additionally, this method checks for error patterns in the data and
        converts them to proper error responses.

        Args:
            data (str): The data string

        Returns:
            The formatted data in the specified output format
        """
        if self.output_format == 'csv':
            # CSV passes raw output through unchanged (error text included)
            return data

        # If the raw response string itself is an error message, surface it
        # through the error path (handles 'markdownkv' rendering as well).
        if isinstance(data, str) and self.error_detector.is_error(data):
            return self._format_error(data)

        try:
            # Attempt to parse JSON first
            raw_json_data = json.loads(data)
        except json.JSONDecodeError as e:
            # Handle specific JSON parsing errors
            return self._wrap_error_rows([{"error": f"Invalid JSON format: {str(e)}", "position": e.pos, "line": e.lineno, "column": e.colno}])
        except TypeError as e:
            # Handle cases where data is not a string or buffer
            return self._wrap_error_rows([{"error": f"Invalid data type for JSON parsing: {str(e)}", "data_type": str(type(data))}])
        except Exception as e:
            # Catch any other unexpected errors
            return self._wrap_error_rows([{"error": f"Unexpected error parsing JSON: {str(e)}", "exception_type": type(e).__name__}])

        try:
            # Process the JSON data to clean up SQL type objects
            processed_json_data = self._process_sql_types(raw_json_data)

            # Handle empty data
            if not processed_json_data:
                if self.output_format == 'pandas':
                    import pandas as pd
                    return pd.DataFrame()
                if self.output_format == 'markdownkv':
                    # Consistent with _format_empty: render "No records found"
                    return self._format_markdownkv(processed_json_data)
                return []

            # Check if the processed data contains error patterns
            # (StackQL sometimes returns error messages inside structured data)
            detected_error = self._check_data_for_errors(processed_json_data)
            if detected_error:
                return self._format_error(detected_error)

            if self.output_format == 'pandas':
                import pandas as pd
                # Convert the preprocessed JSON data to a DataFrame
                return pd.DataFrame(processed_json_data)
            elif self.output_format == 'markdownkv':
                return self._format_markdownkv(processed_json_data)

            # Return the preprocessed dictionary data
            return processed_json_data

        except Exception as e:
            # Handle any errors during processing
            error_msg = f"Error processing data: {str(e)}"
            if self.output_format == 'pandas':
                import pandas as pd
                return pd.DataFrame([{"error": error_msg}])
            return [{"error": error_msg}]

    def _check_data_for_errors(self, data):
        """Check if processed data contains error patterns.

        Recursively checks all string values in the data structure to detect
        error patterns that might have been returned as valid data.

        Args:
            data: The processed data (list, dict, or primitive type)

        Returns:
            str: The error message if an error pattern is detected, None otherwise
        """
        if isinstance(data, list):
            # Check each item in the list
            for item in data:
                error = self._check_data_for_errors(item)
                if error:
                    return error

        elif isinstance(data, dict):
            # Check each value in the dictionary
            for key, value in data.items():
                # Check string values for error patterns
                if isinstance(value, str) and self.error_detector.is_error(value):
                    return value
                # Recursively check nested structures
                error = self._check_data_for_errors(value)
                if error:
                    return error

        elif isinstance(data, str):
            # Check if the string itself is an error
            if self.error_detector.is_error(data):
                return data

        return None

    def _process_sql_types(self, data):
        """Process SQL type objects in the data.

        Args:
            data: The parsed JSON data

        Returns:
            The processed data with SQL type objects transformed
        """
        # Handle lists (most common case from StackQL)
        if isinstance(data, list):
            return [self._process_sql_types(item) for item in data]

        # Handle dictionaries (individual records or nested objects)
        elif isinstance(data, dict):
            # An SQL type object looks like {'String': ..., 'Valid': bool}
            # (or Int64/Float64) with at most those two keys
            if 'Valid' in data and len(data) <= 2 and ('String' in data or 'Int64' in data or 'Float64' in data):
                if data.get('Valid', False):
                    # Valid: True -> return the actual value
                    for type_key in ['String', 'Int64', 'Float64']:
                        if type_key in data:
                            return data.get(type_key)
                    return None  # Fallback if no value field found
                else:
                    # Valid: False -> return None (SQL NULL)
                    return None
            else:
                # Regular dictionary - process each value recursively
                result = {}
                for key, value in data.items():
                    result[key] = self._process_sql_types(value)
                return result

        # All other data types (strings, numbers, booleans, None) - return as is
        return data

    def _format_empty(self):
        """Format an empty result.

        Returns:
            An empty result in the specified output format
        """
        if self.output_format == 'pandas':
            import pandas as pd
            return pd.DataFrame()
        elif self.output_format == 'csv':
            return ""
        elif self.output_format == 'markdownkv':
            return "# Query Results\n\nNo records found.\n"
        else:  # dict
            return []

    def _format_markdownkv(self, data):
        """Format data as Markdown Key-Value pairs.

        This format is optimized for LLM understanding based on research showing
        it achieves 60.7% accuracy vs 44.3% for CSV when LLMs process tabular data.

        Args:
            data: The processed data (list of dicts)

        Returns:
            str: Markdown-formatted key-value representation
        """
        if not data:
            return "# Query Results\n\nNo records found.\n"

        # Handle single dict (convert to list for consistency)
        if isinstance(data, dict):
            data = [data]

        output = ["# Query Results\n"]

        for idx, record in enumerate(data, 1):
            output.append(f"## Record {idx}\n")
            output.append("```")

            # Format each key-value pair
            for key, value in record.items():
                # Render SQL NULL / missing values explicitly
                if value is None:
                    value = "null"
                output.append(f"{key}: {value}")

            output.append("```\n")

        return "\n".join(output)

    def _format_markdownkv_error(self, error_msg):
        """Format an error message in Markdown-KV style.

        Args:
            error_msg (str): The error message

        Returns:
            str: Markdown-formatted error
        """
        return f"""# Query Results

## Error

```
error: {error_msg}
```
"""

    def format_statement_result(self, result):
        """Format a statement result.

        Args:
            result (dict): The raw statement result from the executor

        Returns:
            The formatted result in the specified output format
        """
        # Handle exceptions
        if "exception" in result:
            return self._format_exception(result["exception"])

        # Message on stderr or empty message
        message = result.get("error", "")

        # Check if the message contains error patterns
        if message and self.error_detector.is_error(message):
            # Return as error instead of as a regular message
            return self._format_error(message)

        if self.output_format == 'pandas':
            import pandas as pd
            return pd.DataFrame({'message': [message]}) if message else pd.DataFrame({'message': []})
        elif self.output_format == 'csv':
            return message
        elif self.output_format == 'markdownkv':
            return self._format_markdownkv_statement(message)
        else:  # dict
            return {'message': message.rstrip('\n')}

    def _format_markdownkv_statement(self, message):
        """Format a statement result message in Markdown-KV style.

        Args:
            message (str): The statement result message

        Returns:
            str: Markdown-formatted statement result
        """
        if not message:
            return "# Statement Result\n\nNo message returned.\n"

        return f"""# Statement Result

## Result

```
message: {message.rstrip()}
```
"""
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/query.py b/ref-python-packages/pystackql/pystackql/core/query.py
new file mode 100644
index 0000000..427ced0
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/query.py
@@ -0,0 +1,215 @@
+# pystackql/core/query.py
+
+"""
+Query execution module for PyStackQL.
+
+This module handles the execution of StackQL queries via the binary or server.
+"""
+
+import json
+import os
+import shlex
+import subprocess
+import tempfile
+from io import StringIO
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
+
class QueryExecutor:
    """Executes StackQL queries using a subprocess.

    This class is responsible for executing StackQL queries using either
    a local binary or a server connection.
    """

    def __init__(self, binary_path, params=None, debug=False, debug_log_file=None):
        """Initialize the QueryExecutor.

        Args:
            binary_path (str): Path to the StackQL binary
            params (list, optional): Additional parameters for the binary. Defaults to None.
            debug (bool, optional): Whether to enable debug logging. Defaults to False.
            debug_log_file (str, optional): Path to debug log file. Defaults to None.
        """
        self.bin_path = binary_path
        # Fall back to a fresh list so no shared mutable default is used
        self.params = params or []
        self.debug = debug
        self.debug_log_file = debug_log_file

        # Determine platform for command formatting
        import platform
        self.platform = platform.system()

    def _debug_log(self, message):
        """Log a debug message.

        Appends to the configured log file; a no-op unless debug mode is on
        and a log file path was provided.

        Args:
            message (str): The message to log
        """
        if self.debug and self.debug_log_file:
            with open(self.debug_log_file, "a") as log_file:
                log_file.write(message + "\n")

    def execute(self, query, custom_auth=None, env_vars=None, override_params=None):
        """Execute a StackQL query.

        Builds a shell command from the binary path, the configured (or
        overridden) parameters and the quoted query, runs it via
        subprocess.run(shell=True), and returns a dict carrying any of the
        keys 'data' (stdout), 'error' (stderr) and 'exception'.

        Args:
            query (str): The query to execute
            custom_auth (dict, optional): Custom authentication dictionary. Defaults to None.
            env_vars (dict, optional): Environment variables for the subprocess. Defaults to None.
            override_params (list, optional): Override parameters for this execution. Defaults to None.

        Returns:
            dict: The query results
        """
        # Copy so per-call mutations never leak back into self.params
        local_params = (override_params if override_params is not None else self.params).copy()
        script_path = None

        # Format query for platform
        if self.platform.startswith("Windows"):
            # Escape double quotes and wrap in double quotes for Windows
            escaped_query = query.replace('"', '\\"')
            safe_query = f'"{escaped_query}"'
        else:
            # Use shlex.quote for Unix-like systems
            safe_query = shlex.quote(query)

        # NOTE(review): inserting at index 1 assumes params[0] is the
        # subcommand (e.g. 'exec') so the query slots in right after it —
        # confirm against the params built by setup_local_mode.
        local_params.insert(1, safe_query)

        # Handle custom authentication if provided
        if custom_auth:
            if '--auth' in local_params:
                # override auth set in the constructor with the command-specific auth
                auth_index = local_params.index('--auth')
                local_params.pop(auth_index)  # remove --auth
                local_params.pop(auth_index)  # remove the auth string
            authstr = json.dumps(custom_auth)
            local_params.extend(["--auth", f"'{authstr}'"])

        output = {}
        env_command_prefix = ""

        # Determine platform and set environment command prefix accordingly
        if env_vars:
            if self.platform.startswith("Windows"):
                # Windows: write a temporary PowerShell script that sets the
                # environment variables and then invokes the binary
                with tempfile.NamedTemporaryFile(delete=False, suffix=".ps1", mode="w") as script_file:
                    # Write environment variable setup and command to script file
                    for key, value in env_vars.items():
                        script_file.write(f'$env:{key} = "{value}";\n')
                    script_file.write(f"{self.bin_path} " + " ".join(local_params) + "\n")
                    script_path = script_file.name
                full_command = f"powershell -File {script_path}"
            else:
                # For Linux/Mac, use standard env variable syntax
                env_command_prefix = "env " + " ".join([f'{key}="{value}"' for key, value in env_vars.items()]) + " "
                full_command = env_command_prefix + " ".join([self.bin_path] + local_params)
        else:
            full_command = " ".join([self.bin_path] + local_params)

        try:
            # Replace newlines to ensure command works in shell
            full_command = full_command.replace("\n", " ")

            # Execute the command
            result = subprocess.run(
                full_command,
                shell=True,
                text=True,
                capture_output=True
            )

            stdout = result.stdout
            stderr = result.stderr
            returncode = result.returncode

            # Log debug information if enabled
            if self.debug:
                self._debug_log(f"fullcommand: {full_command}")
                self._debug_log(f"returncode: {returncode}")
                self._debug_log(f"stdout: {stdout}")
                self._debug_log(f"stderr: {stderr}")

            # Process stdout and stderr
            # NOTE(review): with text=True these are already str, so the
            # bytes-decode branches look like defensive dead code. Also note
            # stderr populates 'error' even when returncode == 0 (warnings).
            if stderr:
                output["error"] = stderr.decode('utf-8') if isinstance(stderr, bytes) else str(stderr)
            if stdout:
                output["data"] = stdout.decode('utf-8') if isinstance(stdout, bytes) else str(stdout)

        except FileNotFoundError:
            output["exception"] = f"ERROR: {self.bin_path} not found"
        except Exception as e:
            # Capture whatever partial state exists; stdout/stderr may be
            # unset if the failure happened before/inside subprocess.run
            error_details = {
                "exception": str(e),
                "doc": e.__doc__,
                "params": local_params,
                "stdout": stdout.decode('utf-8') if 'stdout' in locals() and isinstance(stdout, bytes) else "",
                "stderr": stderr.decode('utf-8') if 'stderr' in locals() and isinstance(stderr, bytes) else ""
            }
            output["exception"] = f"ERROR: {json.dumps(error_details)}"
        finally:
            # Clean up the temporary script file
            if script_path is not None:
                os.remove(script_path)
        return output
+
class AsyncQueryExecutor:
    """Executes StackQL queries asynchronously in local mode.

    This class provides methods for executing multiple StackQL queries
    concurrently using asyncio. Server mode is not supported for async queries.
    """

    def __init__(self, sync_query_func, output_format='dict'):
        """Initialize the AsyncQueryExecutor.

        Args:
            sync_query_func (callable): Function to execute a single query synchronously
            output_format (str, optional): Output format (dict or pandas). Defaults to 'dict'.
        """
        self.sync_query_func = sync_query_func
        self.output_format = output_format

    async def execute_queries(self, queries):
        """Execute multiple queries asynchronously.

        Each query is dispatched to a thread pool (the sync query function
        is blocking), and results are gathered in input order.

        Args:
            queries (list): List of query strings to execute

        Returns:
            list or DataFrame: Results of all queries

        Raises:
            ValueError: If output_format is not supported
        """
        if self.output_format not in ['dict', 'pandas']:
            raise ValueError("executeQueriesAsync supports only 'dict' or 'pandas' output modes.")

        # get_running_loop() replaces the deprecated get_event_loop();
        # it is always safe here because this coroutine only runs in a loop.
        loop = asyncio.get_running_loop()

        with ThreadPoolExecutor() as executor:
            # Passing the query as a positional argument to run_in_executor
            # avoids the closure late-binding pitfall entirely.
            futures = [
                loop.run_in_executor(executor, self.sync_query_func, query)
                for query in queries
            ]
            # gather preserves the order of the input futures
            results = await asyncio.gather(*futures)

        # Process results based on output format
        if self.output_format == 'pandas':
            import pandas as pd
            # Concatenate the per-query DataFrames into one
            return pd.concat(results, ignore_index=True)

        # 'dict' mode: flatten the per-query result lists into a single list
        return [item for sublist in results for item in sublist]
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/server.py b/ref-python-packages/pystackql/pystackql/core/server.py
new file mode 100644
index 0000000..c42d5b5
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/server.py
@@ -0,0 +1,164 @@
+# pystackql/core/server.py
+
+"""
+Server connection management for PyStackQL.
+
+This module handles connections to a StackQL server using the Postgres wire protocol.
+"""
+
class ServerConnection:
    """Manages connections to a StackQL server.

    This class handles connecting to and querying a StackQL server
    using the Postgres wire protocol.
    """

    def __init__(self, server_address='127.0.0.1', server_port=5466):
        """Initialize the ServerConnection.

        Args:
            server_address (str, optional): Address of the server. Defaults to '127.0.0.1'.
            server_port (int, optional): Port of the server. Defaults to 5466.

        Raises:
            ImportError: If the optional psycopg dependency is not installed.
        """
        self.server_address = server_address
        self.server_port = server_port
        self._conn = None

        # Import psycopg on demand to avoid dependency issues
        # (bound as module-level globals so the other methods can use them)
        try:
            global psycopg, dict_row
            import psycopg
            from psycopg.rows import dict_row
        except ImportError:
            raise ImportError("psycopg is required in server mode but is not installed. "
                              "Please install psycopg and try again.")

        # Connect to the server
        # NOTE(review): connection failures here are printed, not raised — the
        # instance is still created even if the initial connect fails.
        self._connect()

    def _connect(self):
        """Connect to the StackQL server.

        Returns:
            bool: True if connection successful, False otherwise
        """
        try:
            # autocommit avoids implicit transactions; dict_row yields rows as dicts
            self._conn = psycopg.connect(
                dbname='stackql',
                user='stackql',
                host=self.server_address,
                port=self.server_port,
                autocommit=True,
                row_factory=dict_row
            )
            return True
        except psycopg.OperationalError as oe:
            print(f"OperationalError while connecting to the server: {oe}")
        except Exception as e:
            print(f"Unexpected error while connecting to the server: {e}")
        return False

    def is_connected(self):
        """Check if the connection to the server is active.

        Returns:
            bool: True if connected, False otherwise
        """
        return self._conn is not None and not self._conn.closed

    def ensure_connected(self):
        """Ensure the connection to the server is active.

        If the connection is closed, attempt to reconnect.

        Returns:
            bool: True if connected, False otherwise
        """
        if not self.is_connected():
            return self._connect()
        return True

    def execute_query(self, query, is_statement=False):
        """Execute a query on the server.

        Args:
            query (str): The query to execute
            is_statement (bool, optional): Whether this is a statement (non-SELECT). Defaults to False.

        Returns:
            list: Results of the query as a list of dictionaries
                (errors other than connection loss are printed and yield [])

        Raises:
            ConnectionError: If no active connection is available
        """
        if not self.ensure_connected():
            raise ConnectionError("No active connection to the server")

        try:
            with self._conn.cursor() as cur:
                cur.execute(query)
                if is_statement:
                    # Return status message for non-SELECT statements
                    result_msg = cur.statusmessage
                    return [{'message': result_msg}]
                try:
                    # Fetch results for SELECT queries
                    rows = cur.fetchall()
                    return rows
                except psycopg.ProgrammingError as e:
                    # Handle cases with no results
                    if "no results to fetch" in str(e):
                        return []
                    else:
                        raise
        except psycopg.OperationalError as oe:
            print(f"OperationalError during query execution: {oe}")
            # Try to reconnect and retry once
            # NOTE(review): the retry re-enters this method, so repeated
            # OperationalErrors can trigger more than one retry — confirm
            # this unbounded-recursion behavior is intended.
            if self._connect():
                return self.execute_query(query, is_statement)
        except Exception as e:
            print(f"Unexpected error during query execution: {e}")

        return []

    def execute_query_with_new_connection(self, query):
        """Execute a query with a new connection.

        This method creates a new connection to the server, executes the query,
        and then closes the connection.

        Args:
            query (str): The query to execute

        Returns:
            list: Results of the query as a list of dictionaries
        """
        try:
            with psycopg.connect(
                dbname='stackql',
                user='stackql',
                host=self.server_address,
                port=self.server_port,
                row_factory=dict_row
            ) as conn:
                with conn.cursor() as cur:
                    cur.execute(query)
                    try:
                        rows = cur.fetchall()
                    except psycopg.ProgrammingError as e:
                        # NOTE(review): exact string match here vs the substring
                        # match used in execute_query — consider aligning them.
                        if str(e) == "no results to fetch":
                            rows = []
                        else:
                            raise
                    return rows
        except psycopg.OperationalError as oe:
            print(f"OperationalError while connecting to the server: {oe}")
        except Exception as e:
            print(f"Unexpected error while connecting to the server: {e}")

        return []

    def close(self):
        """Close the connection to the server."""
        if self._conn and not self._conn.closed:
            self._conn.close()
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/core/stackql.py b/ref-python-packages/pystackql/pystackql/core/stackql.py
new file mode 100644
index 0000000..1e0be4d
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/core/stackql.py
@@ -0,0 +1,507 @@
+# pystackql/core/stackql.py
+
+"""
+Main StackQL class for PyStackQL.
+
+This module provides the main StackQL class that serves as the primary
+interface for executing StackQL queries.
+"""
+
+import os
+import json
+from .server import ServerConnection
+from .query import QueryExecutor, AsyncQueryExecutor
+from .output import OutputFormatter
+from ..utils import setup_local_mode
+
+class StackQL:
+ """A class representing an instance of the StackQL query engine.
+
+ :param server_mode: Connect to a StackQL server
+ (defaults to `False`)
+ :type server_mode: bool, optional
+ :param server_address: The address of the StackQL server
+ (`server_mode` only, defaults to `'127.0.0.1'`)
+ :type server_address: str, optional
+ :param server_port: The port of the StackQL server
+ (`server_mode` only, defaults to `5466`)
+ :type server_port: int, optional
+ :param backend_storage_mode: Specifies backend storage mode, options are 'memory' and 'file'
+ (defaults to `'memory'`, this option is ignored in `server_mode`)
+ :type backend_storage_mode: str, optional
+ :param backend_file_storage_location: Specifies location for database file, only applicable when `backend_storage_mode` is 'file'
+ (defaults to `'{cwd}/stackql.db'`, this option is ignored in `server_mode`)
+ :type backend_file_storage_location: str, optional
+ :param output: Determines the format of the output, options are 'dict', 'pandas', 'csv', and 'markdownkv'
+ (defaults to `'dict'`, `'csv'` is not supported in `server_mode`, 'markdownkv' is optimized for LLM understanding)
+ :type output: str, optional
+ :param sep: Seperator for values in CSV output
+ (defaults to `','`, `output='csv'` only)
+ :type sep: str, optional
+ :param header: Show column headers in CSV output
+ (defaults to `False`, `output='csv'` only)
+ :type header: bool, optional
+ :param download_dir: The download directory for the StackQL executable
+ (defaults to `site.getuserbase()`, not supported in `server_mode`)
+ :type download_dir: str, optional
+ :param app_root: Application config and cache root path
+ (defaults to `{cwd}/.stackql`)
+ :type app_root: str, optional
+ :param execution_concurrency_limit: Concurrency limit for query execution
+ (defaults to `-1` - unlimited)
+ :type execution_concurrency_limit: int, optional
+ :param dataflow_dependency_max: Max dataflow weakly connected components for a given query
+ (defaults to `50`)
+ :type dataflow_dependency_max: int, optional
+ :param dataflow_components_max: Max dataflow components for a given query
+ (defaults to `50`)
+ :type dataflow_components_max: int, optional
+ :param api_timeout: API timeout
+ (defaults to `45`, not supported in `server_mode`)
+ :type api_timeout: int, optional
+ :param proxy_host: HTTP proxy host
+ (not supported in `server_mode`)
+ :type proxy_host: str, optional
+ :param proxy_password: HTTP proxy password
+ (only applicable when `proxy_host` is set)
+ :type proxy_password: str, optional
+ :param proxy_port: HTTP proxy port
+ (defaults to `-1`, only applicable when `proxy_host` is set)
+ :type proxy_port: int, optional
+ :param proxy_scheme: HTTP proxy scheme
+ (defaults to `'http'`, only applicable when `proxy_host` is set)
+ :type proxy_scheme: str, optional
+ :param proxy_user: HTTP proxy user
+ (only applicable when `proxy_host` is set)
+ :type proxy_user: str, optional
+ :param max_results: Max results per HTTP request
+ (defaults to `-1` for no limit, not supported in `server_mode`)
+ :type max_results: int, optional
+ :param page_limit: Max pages of results that will be returned per resource
+ (defaults to `20`, not supported in `server_mode`)
+ :type page_limit: int, optional
+ :param max_depth: Max depth for indirect queries: views and subqueries
+ (defaults to `5`, not supported in `server_mode`)
+ :type max_depth: int, optional
+ :param custom_registry: Custom StackQL provider registry URL
+ (e.g. https://registry-dev.stackql.app/providers) supplied using the class constructor
+ :type custom_registry: str, optional
+ :param custom_auth: Custom StackQL provider authentication object supplied using the class constructor
+ (not supported in `server_mode`)
+ :type custom_auth: dict, optional
+ :param debug: Enable debug logging
+ (defaults to `False`)
+ :type debug: bool, optional
+ :param debug_log_file: Path to debug log file
+ (defaults to `~/.pystackql/debug.log`, only available if debug is `True`)
+ :type debug_log_file: str, optional
+
+ --- Read-Only Attributes ---
+
+ :param platform: The operating system platform
+ :type platform: str, readonly
+ :param package_version: The version number of the `pystackql` Python package
+ :type package_version: str, readonly
+ :param version: The version number of the `stackql` executable
+ (not supported in `server_mode`)
+ :type version: str, readonly
+ :param params: A list of command-line parameters passed to the `stackql` executable
+ (not supported in `server_mode`)
+ :type params: list, readonly
+ :param bin_path: The full path of the `stackql` executable
+ (not supported in `server_mode`).
+ :type bin_path: str, readonly
+ :param sha: The commit (short) sha for the installed `stackql` binary build
+ (not supported in `server_mode`).
+ :type sha: str, readonly
+ """
+
    def __init__(self,
                 server_mode=False,
                 server_address='127.0.0.1',
                 server_port=5466,
                 output='dict',
                 sep=',',
                 header=False,
                 debug=False,
                 debug_log_file=None,
                 **kwargs):
        """Constructor method.

        See the class docstring for the full parameter reference; extra
        local-mode options (registry, proxy, limits, ...) arrive via kwargs.
        """

        # Get package information from utils
        from ..utils import get_platform, get_package_version
        self.platform, this_os = get_platform()
        self.package_version = get_package_version("pystackql")

        # Setup debug logging
        self.debug = debug
        if debug:
            if debug_log_file is None:
                # Default log location: ~/.pystackql/debug.log
                self.debug_log_file = os.path.join(os.path.expanduser("~"), '.pystackql', 'debug.log')
            else:
                self.debug_log_file = debug_log_file
            # Check if the path exists. If not, try to create it.
            log_dir = os.path.dirname(self.debug_log_file)
            if not os.path.exists(log_dir):
                try:
                    os.makedirs(log_dir, exist_ok=True)
                except OSError as e:
                    raise ValueError(f"Unable to create the log directory {log_dir}: {str(e)}")
        else:
            self.debug_log_file = None

        # Setup output formatter (validates the output format string)
        self.local_output_formatter = OutputFormatter(output)
        self.output = output.lower()

        # Server mode setup
        self.server_mode = server_mode

        if self.server_mode and self.output == 'csv':
            raise ValueError("CSV output is not supported in server mode, use 'dict' or 'pandas' instead.")
        elif self.output == 'csv':
            # sep/header only apply to CSV output
            self.sep = sep
            self.header = header

        if self.server_mode:
            # Server mode - connect to a server via the postgres wire protocol
            self.server_address = server_address
            self.server_port = server_port
            self.server_connection = ServerConnection(server_address, server_port)
        else:
            # Local mode - execute the binary locally
            # Get all parameters from local variables (excluding 'self')
            # NOTE(review): locals() at this point also contains intermediates
            # defined above (e.g. this_os, and log_dir in debug mode), not just
            # the constructor arguments — setup_local_mode presumably ignores
            # what it does not need; confirm against its signature.
            local_params = locals().copy()
            local_params.pop('self')

            # Set up local mode - this sets the instance attributes and returns params
            # (bin_path, version, sha, binary_manager are expected to be set here)
            self.params = setup_local_mode(self, **local_params)

            # Initialize query executor
            self.local_query_executor = QueryExecutor(
                self.bin_path,
                self.params,
                self.debug,
                self.debug_log_file
            )

        # Initialize async query executor (only for local mode)
        if not self.server_mode:
            self.async_executor = AsyncQueryExecutor(
                self._sync_query_wrapper,
                output_format=self.output
            )
+
+ def _sync_query_wrapper(self, query):
+ """Wrapper for synchronous query execution used by AsyncQueryExecutor.
+
+ This method is exclusively used for local mode async queries.
+ Server mode is not supported for async queries.
+
+ Args:
+ query (str): The query to execute
+
+ Returns:
+ The formatted query result
+ """
+ # Execute query
+ query_result = self.local_query_executor.execute(query)
+
+ # Format the result using the OutputFormatter
+ # This will handle SQL type objects through the _format_data method
+ return self.local_output_formatter.format_query_result(query_result)
+
+ def properties(self):
+ """Retrieves the properties of the StackQL instance.
+
+ This method collects all the attributes of the StackQL instance and
+ returns them in a dictionary format.
+
+ :return: A dictionary containing the properties of the StackQL instance.
+ :rtype: dict
+
+ Example:
+ ::
+
+ {
+ "platform": "Darwin x86_64 (macOS-12.0.1-x86_64-i386-64bit), Python 3.10.9",
+ "output": "dict",
+ ...
+ }
+ """
+ props = {}
+ for var in vars(self):
+ # Skip internal objects
+ if var.startswith('_') or var in ['local_output_formatter', 'local_query_executor', 'async_executor', 'binary_manager', 'server_connection']:
+ continue
+ props[var] = getattr(self, var)
+ return props
+
    def upgrade(self, showprogress=True):
        """Upgrades the StackQL binary to the latest version available.

        This method initiates an upgrade of the StackQL binary. Post-upgrade,
        it updates the `version` and `sha` attributes of the StackQL instance
        to reflect the newly installed version.

        :param showprogress: Indicates if progress should be displayed during the upgrade. Defaults to True.
        :type showprogress: bool, optional

        :return: A message indicating the new version of StackQL post-upgrade.
        :rtype: str

        :raises ValueError: If called in server mode (no local binary to upgrade).
        """
        if self.server_mode:
            raise ValueError("The upgrade method is not supported in server mode.")

        # Use the binary manager to upgrade
        # (binary_manager is presumably set during local-mode setup — confirm
        # against setup_local_mode)
        message = self.binary_manager.upgrade(showprogress)

        # Update the version and sha attributes
        self.version = self.binary_manager.version
        self.sha = self.binary_manager.sha

        return message
+
    def executeStmt(self, query, custom_auth=None, env_vars=None, **kwargs):
        """Executes a query using the StackQL instance and returns the output as a string.
        This is intended for operations which do not return a result set, for example a mutation
        operation such as an `INSERT` or a `DELETE` or life cycle method such as an `EXEC` operation
        or a `REGISTRY PULL` operation.

        This method determines the mode of operation (server_mode or local execution) based
        on the `server_mode` attribute of the instance. If `server_mode` is True, it runs the query
        against the server. Otherwise, it executes the query using a subprocess.

        :param query: The StackQL query string to be executed.
        :type query: str, list of dict objects, or Pandas DataFrame
        :param custom_auth: Custom authentication dictionary.
        :type custom_auth: dict, optional
        :param env_vars: Command-specific environment variables for this execution.
        :type env_vars: dict, optional
        :param kwargs: Additional keyword arguments that override constructor parameters for this execution.
            Supported overrides: output, sep, header, auth, custom_registry, max_results, page_limit,
            max_depth, api_timeout, http_debug, proxy_host, proxy_port, proxy_user, proxy_password,
            proxy_scheme, backend_storage_mode, backend_file_storage_location, app_root,
            execution_concurrency_limit, dataflow_dependency_max, dataflow_components_max
        :type kwargs: optional

        :return: The output result of the query in string format. If in `server_mode`, it
                 returns a JSON string representation of the result.
        :rtype: dict, Pandas DataFrame or str (for `csv` output)

        Example:
            >>> from pystackql import StackQL
            >>> stackql = StackQL()
            >>> stackql_query = "REGISTRY PULL okta"
            >>> result = stackql.executeStmt(stackql_query)
            >>> result
        """
        if self.server_mode:
            # Server mode: per-call 'output' kwarg overrides the constructor default.
            output_format = kwargs.get('output', self.output)

            result = self.server_connection.execute_query(query, is_statement=True)

            # Format result based on output type
            if output_format == 'pandas':
                import pandas as pd
                return pd.DataFrame(result)
            elif output_format == 'csv':
                # Statement results carry a single 'message' entry; return it raw.
                return result[0]['message']
            elif output_format == 'markdownkv':
                from .output import OutputFormatter
                temp_formatter = OutputFormatter('markdownkv')
                # Extract message from result (empty string when no rows came back)
                message = result[0].get('message', '') if result else ''
                return temp_formatter._format_markdownkv_statement(message)
            else:
                return result
        else:
            # Local mode: handle parameter overrides
            override_params = None
            output_format = kwargs.get('output', self.output)

            # The 'auth' kwarg takes precedence over the custom_auth argument.
            if 'auth' in kwargs:
                custom_auth = kwargs['auth']

            # Any kwargs at all trigger a rebuild of the CLI parameter list,
            # merging this call's overrides on top of the constructor's kwargs.
            if kwargs:
                from ..utils import generate_params_for_execution
                override_params = generate_params_for_execution(self._base_kwargs, kwargs)

            # Execute the query
            result = self.local_query_executor.execute(query, custom_auth=custom_auth, env_vars=env_vars, override_params=override_params)

            # Format with a one-off formatter only when the output format was
            # overridden for this call; otherwise reuse the instance formatter.
            if output_format != self.output:
                from .output import OutputFormatter
                temp_formatter = OutputFormatter(output_format)
                return temp_formatter.format_statement_result(result)
            else:
                return self.local_output_formatter.format_statement_result(result)
+
    def execute(self, query, suppress_errors=True, custom_auth=None, env_vars=None, **kwargs):
        """
        Executes a StackQL query and returns the output based on the specified output format.

        This method supports execution both in server mode and locally using a subprocess. In server mode,
        the query is sent to a StackQL server, while in local mode, it runs the query using a local binary.

        :param query: The StackQL query string to be executed.
        :type query: str
        :param suppress_errors: If set to True, the method will return an empty list if an error occurs.
            Forced to False when http_debug is enabled so diagnostics are not swallowed.
        :type suppress_errors: bool, optional
        :param custom_auth: Custom authentication dictionary.
        :type custom_auth: dict, optional
        :param env_vars: Command-specific environment variables for this execution.
        :type env_vars: dict, optional
        :param kwargs: Additional keyword arguments that override constructor parameters for this execution.
            Supported overrides: output, sep, header, auth, custom_registry, max_results, page_limit,
            max_depth, api_timeout, http_debug, proxy_host, proxy_port, proxy_user, proxy_password,
            proxy_scheme, backend_storage_mode, backend_file_storage_location, app_root,
            execution_concurrency_limit, dataflow_dependency_max, dataflow_components_max
        :type kwargs: optional

        :return: The output of the query, which can be a list of dictionary objects, a Pandas DataFrame,
                 or a raw CSV string, depending on the configured output format.
        :rtype: list(dict) | pd.DataFrame | str

        :raises ValueError: If an unsupported output format is specified.

        :example:

            >>> stackql = StackQL()
            >>> query = '''
            ... SELECT SPLIT_PART(machineType, '/', -1) as machine_type, status, COUNT(*) as num_instances
            ... FROM google.compute.instances
            ... WHERE project = 'stackql-demo' AND zone = 'australia-southeast1-a'
            ... GROUP BY machine_type, status HAVING COUNT(*) > 2
            ... '''
            >>> result = stackql.execute(query)
        """
        if self.server_mode:
            # Server mode: per-call 'output' kwarg overrides the constructor default.
            output_format = kwargs.get('output', self.output)

            result = self.server_connection.execute_query(query)

            # Format result based on output type
            if output_format == 'pandas':
                import pandas as pd
                import json
                from io import StringIO
                # Round-trip through JSON so pandas infers column dtypes.
                json_str = json.dumps(result)
                return pd.read_json(StringIO(json_str))
            elif output_format == 'csv':
                raise ValueError("CSV output is not supported in server_mode.")
            elif output_format == 'markdownkv':
                from .output import OutputFormatter
                temp_formatter = OutputFormatter('markdownkv')
                return temp_formatter._format_markdownkv(result)
            else:  # Assume 'dict' output
                return result
        else:
            # Local mode: handle parameter overrides
            override_params = None
            output_format = kwargs.get('output', self.output)
            http_debug = kwargs.get('http_debug', self.http_debug)

            # The 'auth' kwarg takes precedence over the custom_auth argument.
            if 'auth' in kwargs:
                custom_auth = kwargs['auth']

            # Any kwargs at all trigger a rebuild of the CLI parameter list,
            # merging this call's overrides on top of the constructor's kwargs.
            if kwargs:
                from ..utils import generate_params_for_execution
                override_params = generate_params_for_execution(self._base_kwargs, kwargs)

            # HTTP debugging implies errors must surface, not be suppressed.
            if http_debug:
                suppress_errors = False

            # Execute the query
            output = self.local_query_executor.execute(query, custom_auth=custom_auth, env_vars=env_vars, override_params=override_params)

            # Format with a one-off formatter only when the output format was
            # overridden for this call; otherwise reuse the instance formatter.
            if output_format != self.output:
                from .output import OutputFormatter
                temp_formatter = OutputFormatter(output_format)
                return temp_formatter.format_query_result(output, suppress_errors)
            else:
                return self.local_output_formatter.format_query_result(output, suppress_errors)
+
+ async def executeQueriesAsync(self, queries):
+ """Executes multiple StackQL queries asynchronously using the current StackQL instance.
+
+ This method utilizes an asyncio event loop to concurrently run a list of provided
+ StackQL queries. Each query is executed independently, and the combined results of
+ all the queries are returned as a list of JSON objects if 'dict' output mode is selected,
+ or as a concatenated DataFrame if 'pandas' output mode is selected.
+
+ The order of the results in the returned list or DataFrame may not necessarily
+ correspond to the order of the queries in the input list due to the asynchronous nature
+ of execution.
+
+ :param queries: A list of StackQL query strings to be executed concurrently.
+ :type queries: list[str], required
+ :return: A list of results corresponding to each query. Each result is a JSON object or a DataFrame.
+ :rtype: list[dict] or pd.DataFrame
+ :raises ValueError: If server_mode is True (async is only supported in local mode).
+ :raises ValueError: If an unsupported output mode is selected (anything other than 'dict' or 'pandas').
+
+ Example:
+ >>> from pystackql import StackQL
+ >>> stackql = StackQL()
+ >>> queries = [
+ >>> \"\"\"SELECT '%s' as region, instance_type, COUNT(*) as num_instances
+ ... FROM aws.ec2.instances
+ ... WHERE region = '%s'
+ ... GROUP BY instance_type\"\"\" % (region, region)
+ >>> for region in regions ]
+ >>> result = stackql.executeQueriesAsync(queries)
+
+ Note:
+ - This method is only supported in local mode.
+ """
+ if self.server_mode:
+ raise ValueError(
+ "The executeQueriesAsync method is not supported in server mode. "
+ "Please use the standard execute method with individual queries instead, "
+ "or switch to local mode if you need to run multiple queries concurrently."
+ )
+
+ # Verify that async_executor is available (should only be initialized in local mode)
+ if not hasattr(self, 'async_executor'):
+ raise RuntimeError("Async executor not initialized. This should not happen.")
+
+ return await self.async_executor.execute_queries(queries)
+
+ def test_connection(self):
+ """Tests if the server connection is working by executing a simple query.
+
+ This method is only valid when server_mode=True.
+
+ Returns:
+ bool: True if the connection is working, False otherwise.
+
+ Raises:
+ ValueError: If called when not in server mode.
+ """
+ if not self.server_mode:
+ raise ValueError("The test_connectivity method is only available in server mode.")
+
+ try:
+ result = self.server_connection.execute_query("SELECT 'test' as test_value")
+ return (isinstance(result, list) and
+ len(result) == 1 and
+ 'test_value' in result[0] and
+ result[0]['test_value'] == 'test')
+ except Exception as e:
+ if self.debug:
+ print(f"Connection test failed: {str(e)}")
+ return False
diff --git a/ref-python-packages/pystackql/pystackql/errors.yaml b/ref-python-packages/pystackql/pystackql/errors.yaml
new file mode 100644
index 0000000..64b4e9e
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/errors.yaml
@@ -0,0 +1,52 @@
+# Error patterns for centralized error detection in PyStackQL
+#
+# This file defines patterns that should be detected as errors when they appear
+# in query results. These patterns are checked against messages returned in stdout
+# to identify error conditions that would otherwise be treated as valid data.
+#
+# Pattern Types:
+# - fuzzy_matches: Substring matching (case-insensitive)
+# - exact_matches: Exact string matching (case-sensitive)
+# - regex_matches: Regular expression matching (for complex patterns with variable parts)
+
+errors:
+ # Fuzzy matches - will match if the pattern appears anywhere in the message
+ fuzzy_matches:
+ # HTTP error status codes (4xx client errors, 5xx server errors)
+ - "http response status code: 4"
+ - "http response status code: 5"
+
+ # StackQL-specific error patterns from stackql-deploy
+ - "disparity in fields"
+ - "cannot find matching operation"
+
+ # Additional StackQL error patterns
+ - "invalid query"
+ - "syntax error"
+
+ # Exact matches - must match the entire message or start with this prefix
+ exact_matches:
+ - "error:"
+ - "ERROR:"
+ - "Error:"
+ - "FAILED"
+ - "FAILURE"
+
+ # Regex matches - regular expressions for complex error patterns
+ # Use standard Python regex syntax (case-insensitive by default)
+ regex_matches:
+ # Network/DNS errors
+ - 'dial tcp:.*no such host'
+ - 'Get ".*".*dial tcp.*lookup.*no such host'
+
+ # Connection errors
+ - 'dial tcp.*connection refused'
+ - 'unable to connect to.*connection refused'
+
+ # Timeout errors
+ - 'context deadline exceeded'
+ - 'timeout.*waiting for'
+
+ # Generic network errors
+ - 'dial tcp.*i/o timeout'
+ - 'net/http.*timeout'
diff --git a/ref-python-packages/pystackql/pystackql/magic.py b/ref-python-packages/pystackql/pystackql/magic.py
new file mode 100644
index 0000000..8fa63b5
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magic.py
@@ -0,0 +1,10 @@
+# pystackql/magic.py
+
+"""
+StackQL Jupyter magic extension (non-server mode).
+"""
+# Import and re-export the load_ipython_extension function
+from .magic_ext.local import load_ipython_extension
+
+# For direct imports (though less common)
+from .magic_ext.local import StackqlMagic
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/magic_ext/__init__.py b/ref-python-packages/pystackql/pystackql/magic_ext/__init__.py
new file mode 100644
index 0000000..b3d503b
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magic_ext/__init__.py
@@ -0,0 +1,14 @@
+# pystackql/magic_ext/__init__.py
+
+"""
+Jupyter magic extensions for PyStackQL.
+
+This module provides Jupyter magic commands for running StackQL queries
+directly in Jupyter notebooks.
+"""
+
+from .base import BaseStackqlMagic
+from .local import StackqlMagic
+from .server import StackqlServerMagic
+
+__all__ = ['BaseStackqlMagic', 'StackqlMagic', 'StackqlServerMagic']
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/magic_ext/base.py b/ref-python-packages/pystackql/pystackql/magic_ext/base.py
new file mode 100644
index 0000000..b48d770
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magic_ext/base.py
@@ -0,0 +1,142 @@
+# pystackql/magic_ext/base.py
+
+"""
+Base Jupyter magic extension for PyStackQL.
+
+This module provides the base class for PyStackQL Jupyter magic extensions.
+"""
+
+from __future__ import print_function
+from IPython.core.magic import Magics, line_cell_magic
+from string import Template
+import argparse
+
class BaseStackqlMagic(Magics):
    """Base Jupyter magic extension enabling running StackQL queries.

    This extension allows users to conveniently run StackQL queries against cloud
    or SaaS resources directly from Jupyter notebooks, and visualize the results in a
    tabular format using Pandas DataFrames.
    """

    def __init__(self, shell, server_mode):
        """Initialize the BaseStackqlMagic class.

        :param shell: The IPython shell instance.
        :param server_mode: Whether to use server mode.
        """
        from ..core import StackQL
        super(BaseStackqlMagic, self).__init__(shell)
        # Magics always render results as DataFrames.
        self.stackql_instance = StackQL(server_mode=server_mode, output='pandas')
        self.server_mode = server_mode

    def get_rendered_query(self, data):
        """Substitute placeholders in a query template with variables from the current namespace.

        :param data: SQL query template containing placeholders.
        :type data: str
        :return: A SQL query with placeholders substituted.
        :rtype: str
        """
        t = Template(data)
        return t.substitute(self.shell.user_ns)

    def run_query(self, query):
        """Execute a StackQL query.

        ``REGISTRY PULL`` commands are routed through executeStmt because
        they do not return a result set.

        :param query: StackQL query to be executed.
        :type query: str
        :return: Query results, returned as a Pandas DataFrame.
        :rtype: pandas.DataFrame
        """
        if query.strip().lower().startswith("registry pull"):
            return self.stackql_instance.executeStmt(query)

        return self.stackql_instance.execute(query)

    @line_cell_magic
    def stackql(self, line, cell=None):
        """A Jupyter magic command to run StackQL queries.

        Can be used as both line and cell magic:
        - As a line magic: `%stackql QUERY`
        - As a cell magic: `%%stackql [OPTIONS]` followed by the QUERY in the next line.

        The result is also stored in the notebook namespace as `stackql_df`.

        :param line: The arguments and/or StackQL query when used as line magic.
        :param cell: The StackQL query when used as cell magic.
        :return: StackQL query results as a named Pandas DataFrame (`stackql_df`).
        """
        is_cell_magic = cell is not None

        if is_cell_magic:
            parser = argparse.ArgumentParser()
            parser.add_argument("--no-display", action="store_true", help="Suppress result display.")
            parser.add_argument("--csv-download", action="store_true", help="Add CSV download link to output.")
            args = parser.parse_args(line.split())
            query_to_run = self.get_rendered_query(cell)
        else:
            args = None
            query_to_run = self.get_rendered_query(line)

        results = self.run_query(query_to_run)
        self.shell.user_ns['stackql_df'] = results

        # Collapsed the previous redundant branch chain: every path except
        # --no-display ultimately returned `results`.
        if is_cell_magic and args:
            if args.no_display:
                return None
            if args.csv_download:
                # Display the DataFrame first, then append the download button
                # (which intentionally does not re-display the DataFrame).
                import IPython.display
                IPython.display.display(results)
                self._display_with_csv_download(results)
        return results

    def _display_with_csv_download(self, df):
        """Display a CSV download link for the DataFrame without displaying the DataFrame again.

        :param df: The DataFrame to make downloadable.
        """
        import IPython.display

        try:
            # Generate CSV data
            import io
            import base64
            csv_buffer = io.StringIO()
            df.to_csv(csv_buffer, index=False)
            csv_data = csv_buffer.getvalue()

            # Embed the CSV in a data URI so no server round-trip is needed.
            csv_base64 = base64.b64encode(csv_data.encode()).decode()
            download_link = f'data:text/csv;base64,{csv_base64}'

            # Fixed: download_link was previously built but never embedded in
            # the HTML fragment, so an empty element was rendered and the
            # download button never appeared.
            download_html = f'''
            <a href="{download_link}" download="stackql_results.csv">
                <button type="button">Download CSV</button>
            </a>
            '''
            IPython.display.display(IPython.display.HTML(download_html))

        except Exception as e:
            # Never let a rendering problem break the magic command itself.
            print(f"Error generating CSV download: {e}")
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/magic_ext/local.py b/ref-python-packages/pystackql/pystackql/magic_ext/local.py
new file mode 100644
index 0000000..1830249
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magic_ext/local.py
@@ -0,0 +1,34 @@
+# pystackql/magic_ext/local.py
+
+"""
+Local Jupyter magic extension for PyStackQL.
+
+This module provides a Jupyter magic command for running StackQL queries
+using a local StackQL binary.
+"""
+
+from IPython.core.magic import magics_class
+from .base import BaseStackqlMagic
+
@magics_class
class StackqlMagic(BaseStackqlMagic):
    """%stackql / %%stackql magic backed by a local StackQL binary."""

    def __init__(self, shell):
        """Create the magic bound to the given IPython shell.

        :param shell: The IPython shell instance.
        """
        # Local mode: queries run against the locally installed binary.
        super().__init__(shell, server_mode=False)
+
def load_ipython_extension(ipython):
    """Register the %stackql / %%stackql magics in local (non-server) mode.

    Invoked by ``%load_ext pystackql.magic`` in a notebook.

    :param ipython: The IPython shell instance
    """
    ipython.register_magics(StackqlMagic(ipython))
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/magic_ext/server.py b/ref-python-packages/pystackql/pystackql/magic_ext/server.py
new file mode 100644
index 0000000..2c6d8f1
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magic_ext/server.py
@@ -0,0 +1,28 @@
+# pystackql/magic_ext/server.py
+
+"""
+Server Jupyter magic extension for PyStackQL.
+
+This module provides a Jupyter magic command for running StackQL queries
+using a StackQL server connection.
+"""
+
+from IPython.core.magic import magics_class
+from .base import BaseStackqlMagic
+
@magics_class
class StackqlServerMagic(BaseStackqlMagic):
    """%stackql / %%stackql magic backed by a StackQL server connection."""

    def __init__(self, shell):
        """Create the magic bound to the given IPython shell.

        :param shell: The IPython shell instance.
        """
        # Server mode: queries are sent to a running StackQL server.
        super().__init__(shell, server_mode=True)
+
def load_ipython_extension(ipython):
    """Register the %stackql / %%stackql magics in server mode.

    :param ipython: The IPython shell instance
    """
    ipython.register_magics(StackqlServerMagic(ipython))
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/magics.py b/ref-python-packages/pystackql/pystackql/magics.py
new file mode 100644
index 0000000..2f52ac3
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/magics.py
@@ -0,0 +1,10 @@
+# pystackql/magics.py
+
+"""
+StackQL Jupyter magic extension (server mode).
+"""
+# Import and re-export the load_ipython_extension function
+from .magic_ext.server import load_ipython_extension
+
+# For direct imports (though less common)
+from .magic_ext.server import StackqlServerMagic
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/__init__.py b/ref-python-packages/pystackql/pystackql/utils/__init__.py
new file mode 100644
index 0000000..c927f48
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/__init__.py
@@ -0,0 +1,50 @@
+# pystackql/utils/__init__.py
+
+"""
+Utility functions for PyStackQL package.
+"""
+from .package import get_package_version
+
+from .platform import (
+ get_platform,
+ is_binary_local
+)
+
+from .binary import (
+ get_binary_name,
+ get_binary_version,
+ setup_binary
+)
+
+from .download import (
+ get_download_dir,
+ get_download_url,
+ download_file
+)
+
+from .auth import format_auth
+from .params import setup_local_mode, generate_params_for_execution
+
+__all__ = [
+ # Platform utilities
+ 'get_platform',
+ 'get_package_version',
+ 'is_binary_local',
+
+ # Binary utilities
+ 'get_binary_name',
+ 'get_binary_version',
+ 'setup_binary',
+
+ # Download utilities
+ 'get_download_dir',
+ 'get_download_url',
+ 'download_file',
+
+ # Auth utilities
+ 'format_auth',
+
+ # Parameter utilities
+ 'setup_local_mode',
+ 'generate_params_for_execution'
+]
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/auth.py b/ref-python-packages/pystackql/pystackql/utils/auth.py
new file mode 100644
index 0000000..809557e
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/auth.py
@@ -0,0 +1,39 @@
+# pystackql/utils/auth.py
+
+"""
+Authentication utility functions for PyStackQL.
+
+This module contains functions for handling authentication.
+"""
+
+import json
+
def format_auth(auth):
    """Formats an authentication object for use with stackql.

    Args:
        auth: The authentication object; a JSON string or a dict.

    Returns:
        tuple: (auth_obj, auth_str)
            - auth_obj: The authentication object as a dict
            - auth_str: The authentication object as a JSON string

    Exits:
        Terminates the process with status 1 if auth is missing,
        malformed JSON, or of an unsupported type.
    """
    try:
        if auth is None:
            # Message no longer repeats the "ERROR: [format_auth]" prefix
            # that the except handler adds below.
            raise Exception("auth key supplied with no value")
        if isinstance(auth, str):
            # May raise json.JSONDecodeError for malformed JSON input.
            return json.loads(auth), auth
        if isinstance(auth, dict):
            return auth, json.dumps(auth)
        # Fixed: unsupported types (e.g. a list) previously fell through to a
        # bare `return authobj, authstr` and crashed with NameError.
        raise Exception(f"unsupported auth type: {type(auth).__name__}")
    except Exception as e:
        # Some exceptions carry no args; fall back to str(e) rather than crash.
        error_message = e.args[0] if e.args else str(e)
        print(f"ERROR: [format_auth] {error_message}")
        exit(1)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/binary.py b/ref-python-packages/pystackql/pystackql/utils/binary.py
new file mode 100644
index 0000000..2411da1
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/binary.py
@@ -0,0 +1,125 @@
+# pystackql/utils/binary.py
+
+"""
+Binary management utility functions for PyStackQL.
+
+This module contains functions for managing the StackQL binary.
+"""
+
+import os
+import subprocess
+from .download import get_download_url, download_file, get_download_dir
+from .platform import get_platform
+
+
def get_binary_name(system_platform):
    """Return the stackql binary name (or relative path) for a platform.

    Args:
        system_platform (str): The operating system platform

    Returns:
        str: The name of the binary, relative to the download directory
    """
    if system_platform.startswith('Darwin'):
        # The macOS pkg expands into a payload directory containing the binary.
        return 'stackql/Payload/stackql'
    if system_platform.startswith('Windows'):
        return 'stackql.exe'
    return 'stackql'
+
+
def get_binary_version(bin_path):
    """Gets the version of the stackql binary.

    Runs ``<bin_path> --version`` and parses the first line of output,
    which is expected to contain the version as the second token and the
    parenthesized sha as the fourth token.

    Args:
        bin_path (str): The path to the binary

    Returns:
        tuple: (version, sha)
            - version: The version number
            - sha: The git commit sha

    Exits:
        Terminates the process with status 1 if the binary is not found
        or the version output cannot be parsed.
    """
    proc = None
    try:
        proc = subprocess.Popen([bin_path, "--version"],
                                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        # communicate() fetches the output and waits for the process to finish.
        output, _ = proc.communicate()
        decoded_output = output.decode('utf-8')
        version_tokens = decoded_output.split('\n')[0].split(' ')
        version = version_tokens[1]
        sha = version_tokens[3].replace('(', '').replace(')', '')
        return version, sha
    except FileNotFoundError:
        print(f"ERROR: [get_binary_version] {bin_path} not found")
        exit(1)
    except Exception as e:
        error_message = e.args[0] if e.args else str(e)
        print(f"ERROR: [get_binary_version] {error_message}")
        exit(1)
    finally:
        # Fixed: proc is None when Popen itself raised (e.g. FileNotFoundError);
        # the previous code referenced an unbound variable here, so the finally
        # block raised NameError and masked the intended error path.
        if proc is not None:
            proc.terminate()
            if proc.stdout:
                proc.stdout.close()
+
+
def setup_binary(download_dir, system_platform, showprogress=False):
    """Sets up the stackql binary by downloading and extracting it.

    Downloads the latest release archive for this platform into
    ``download_dir``, extracts it (pkgutil on macOS, zip elsewhere), and
    marks the resulting binary executable.

    Args:
        download_dir (str): The directory to download to
        system_platform (str): The operating system platform
        showprogress (bool, optional): Whether to show download progress. Defaults to False.

    Exits:
        Terminates the process with status 1 if any step fails or the
        binary is missing after extraction.
    """
    try:
        print('installing stackql...')
        binary_name = get_binary_name(system_platform)
        url = get_download_url()
        print(f"Downloading latest version of stackql from {url} to {download_dir}")

        # Paths
        archive_file_name = os.path.join(download_dir, os.path.basename(url))
        binary_path = os.path.join(download_dir, binary_name)

        # Download and extract
        download_file(url, archive_file_name, showprogress)

        # Handle extraction
        if system_platform.startswith('Darwin'):
            # macOS ships a .pkg; expand it with pkgutil into a fresh directory.
            # NOTE(review): paths are interpolated into shell commands unquoted,
            # which breaks (and is unsafe) if download_dir contains spaces or
            # shell metacharacters — confirm whether shlex.quote/subprocess
            # should be used here.
            unpacked_file_name = os.path.join(download_dir, 'stackql')
            command = f'pkgutil --expand-full {archive_file_name} {unpacked_file_name}'
            if os.path.exists(unpacked_file_name):
                os.system(f'rm -rf {unpacked_file_name}')
            os.system(command)

        else:  # Handle Windows and Linux
            import zipfile
            with zipfile.ZipFile(archive_file_name, 'r') as zip_ref:
                zip_ref.extractall(download_dir)

            # Specific check for Windows to ensure `stackql.exe` is extracted;
            # some archives contain an un-suffixed "stackql" entry instead.
            if system_platform.startswith("Windows"):
                if not os.path.exists(binary_path) and os.path.exists(os.path.join(download_dir, "stackql")):
                    os.rename(os.path.join(download_dir, "stackql"), binary_path)

        # Confirm binary presence and set permissions
        if os.path.exists(binary_path):
            print(f"StackQL executable successfully located at: {binary_path}")
            os.chmod(binary_path, 0o755)
        else:
            print(f"ERROR: Expected binary '{binary_path}' not found after extraction.")
            exit(1)

    except Exception as e:
        print(f"ERROR: [setup_binary] {str(e)}")
        exit(1)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/download.py b/ref-python-packages/pystackql/pystackql/utils/download.py
new file mode 100644
index 0000000..5fd71ea
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/download.py
@@ -0,0 +1,79 @@
+# pystackql/utils/download.py
+
+"""
+Download-related utility functions for PyStackQL.
+
+This module contains functions for downloading and managing the StackQL binary.
+"""
+
+import os
+import site
+import platform
+import requests
+
+
def get_download_dir():
    """Return the directory used to store the stackql binary.

    The user base directory (``site.getuserbase()``) is created on first
    use if it does not already exist.

    Returns:
        str: The directory path
    """
    user_base = site.getuserbase()
    os.makedirs(user_base, exist_ok=True)
    return user_base
+
+
def get_download_url():
    """Return the stackql download URL for the current platform.

    Returns:
        str: The download URL

    Raises:
        Exception: If the platform is not supported
    """
    system_val = platform.system()
    machine_val = platform.machine()

    # Branches are mutually exclusive on system_val, so ordering is immaterial.
    if system_val == 'Darwin':
        # Single multi-arch pkg covers both Intel and Apple Silicon.
        return 'https://storage.googleapis.com/stackql-public-releases/latest/stackql_darwin_multiarch.pkg'
    if system_val == 'Windows':
        return 'https://releases.stackql.io/stackql/latest/stackql_windows_amd64.zip'
    if system_val == 'Linux' and machine_val == 'x86_64':
        return 'https://releases.stackql.io/stackql/latest/stackql_linux_amd64.zip'
    raise Exception(f"ERROR: [get_download_url] unsupported OS type: {system_val} {machine_val}")
+
+
def download_file(url, path, showprogress=True):
    """Downloads a file from a URL to a local path.

    Args:
        url (str): The URL to download from
        path (str): The local path to save the file to
        showprogress (bool, optional): Whether to show a progress bar. Defaults to True.

    Exits:
        Terminates the process with status 1 if the download fails.
    """
    try:
        r = requests.get(url, stream=True)
        r.raise_for_status()
        # Servers may omit Content-Length; in that case the header defaults
        # to 0 and no progress can be reported.
        total_size_in_bytes = int(r.headers.get('content-length', 0))
        block_size = 1024
        with open(path, 'wb') as f:
            downloaded_size = 0
            for data in r.iter_content(block_size):
                # Count actual bytes received (the final chunk may be short).
                downloaded_size += len(data)
                f.write(data)
                # Fixed: the fraction was previously computed unconditionally,
                # raising ZeroDivisionError whenever Content-Length was absent.
                if showprogress and total_size_in_bytes > 0:
                    fraction = min(downloaded_size / total_size_in_bytes, 1.0)
                    progress_bar = '#' * int(fraction * 20)
                    print(f'\r[{progress_bar.ljust(20)}] {int(fraction * 100)}%', end='')

        print("\nDownload complete.")
    except Exception as e:
        print(f"ERROR: [download_file] {str(e)}")
        exit(1)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/helpers.py b/ref-python-packages/pystackql/pystackql/utils/helpers.py
new file mode 100644
index 0000000..de0c329
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/helpers.py
@@ -0,0 +1,284 @@
+# pystackql/utils/helpers.py
+
+"""
+Utility functions for PyStackQL package.
+
+This module contains helper functions for binary management, platform detection,
+and other utilities needed by the PyStackQL package.
+"""
+
+import subprocess
+import platform
+import json
+import site
+import os
+import requests
+import zipfile
+
+# Conditional import for package metadata retrieval
+try:
+ from importlib.metadata import version, PackageNotFoundError
+except ImportError:
+ # This is for Python versions earlier than 3.8
+ from importlib_metadata import version, PackageNotFoundError
+
+
+def is_binary_local(system_platform):
+ """Checks if the binary exists at the specified local path.
+
+ Args:
+ system_platform (str): The operating system platform
+
+ Returns:
+ bool: True if the binary exists at the expected local path
+ """
+ if system_platform == 'Linux' and os.path.exists('/usr/local/bin/stackql'):
+ return True
+ return False
+
+
+def get_package_version(package_name):
+ """Gets the version of the specified package.
+
+ Args:
+ package_name (str): The name of the package
+
+ Returns:
+ str: The version of the package or None if not found
+ """
+ try:
+ pkg_version = version(package_name)
+ if pkg_version is None:
+ print(f"Warning: Retrieved version for '{package_name}' is None!")
+ return pkg_version
+ except PackageNotFoundError:
+ print(f"Warning: Package '{package_name}' not found!")
+ return None
+
+
+def get_platform():
+ """Gets the current platform information.
+
+ Returns:
+ tuple: (platform_string, system_value)
+ - platform_string: A string with platform details
+ - system_value: The operating system name
+ """
+ system_val = platform.system()
+ machine_val = platform.machine()
+ platform_val = platform.platform()
+ python_version_val = platform.python_version()
+ return (
+ f"{system_val} {machine_val} ({platform_val}), Python {python_version_val}",
+ system_val
+ )
+
+
+def get_download_dir():
+ """Gets the directory to download the stackql binary.
+
+ Returns:
+ str: The directory path
+ """
+ # check if site.getuserbase() dir exists
+ if not os.path.exists(site.getuserbase()):
+ # if not, create it
+ os.makedirs(site.getuserbase())
+ return site.getuserbase()
+
+
+def get_binary_name(system_platform):
+ """Gets the binary name based on the platform.
+
+ Args:
+ system_platform (str): The operating system platform
+
+ Returns:
+ str: The name of the binary
+ """
+ if system_platform.startswith('Windows'):
+ return r'stackql.exe'
+ elif system_platform.startswith('Darwin'):
+ return r'stackql/Payload/stackql'
+ else:
+ return r'stackql'
+
+
+def get_download_url():
+ """Gets the download URL for the stackql binary based on the platform.
+
+ Returns:
+ str: The download URL
+
+ Raises:
+ Exception: If the platform is not supported
+ """
+ system_val = platform.system()
+ machine_val = platform.machine()
+
+ if system_val == 'Linux' and machine_val == 'x86_64':
+ return 'https://releases.stackql.io/stackql/latest/stackql_linux_amd64.zip'
+ elif system_val == 'Windows':
+ return 'https://releases.stackql.io/stackql/latest/stackql_windows_amd64.zip'
+ elif system_val == 'Darwin':
+ return 'https://storage.googleapis.com/stackql-public-releases/latest/stackql_darwin_multiarch.pkg'
+ else:
+ raise Exception(f"ERROR: [get_download_url] unsupported OS type: {system_val} {machine_val}")
+
+
+def download_file(url, path, showprogress=True):
+ """Downloads a file from a URL to a local path.
+
+ Args:
+ url (str): The URL to download from
+ path (str): The local path to save the file to
+ showprogress (bool, optional): Whether to show a progress bar. Defaults to True.
+
+    Note:
+        Prints an error and exits the process (exit code 1) if the download fails
+ """
+ try:
+ r = requests.get(url, stream=True)
+ r.raise_for_status()
+ total_size_in_bytes = int(r.headers.get('content-length', 0))
+ block_size = 1024
+ with open(path, 'wb') as f:
+ chunks = 0
+ for data in r.iter_content(block_size):
+ chunks += 1
+ f.write(data)
+ downloaded_size = chunks * block_size
+ progress_bar = '#' * int(downloaded_size / total_size_in_bytes * 20)
+ if showprogress:
+ print(f'\r[{progress_bar.ljust(20)}] {int(downloaded_size / total_size_in_bytes * 100)}%', end='')
+
+ print("\nDownload complete.")
+ except Exception as e:
+ print(f"ERROR: [download_file] {str(e)}")
+ exit(1)
+
+
+def setup_binary(download_dir, system_platform, showprogress=False):
+ """Sets up the stackql binary by downloading and extracting it.
+
+ Args:
+ download_dir (str): The directory to download to
+ system_platform (str): The operating system platform
+ showprogress (bool, optional): Whether to show download progress. Defaults to False.
+
+    Note:
+        Prints an error and exits the process (exit code 1) if the setup fails
+ """
+ try:
+ print('installing stackql...')
+ binary_name = get_binary_name(system_platform)
+ url = get_download_url()
+ print(f"Downloading latest version of stackql from {url} to {download_dir}")
+
+ # Paths
+ archive_file_name = os.path.join(download_dir, os.path.basename(url))
+ binary_path = os.path.join(download_dir, binary_name)
+
+ # Download and extract
+ download_file(url, archive_file_name, showprogress)
+
+ # Handle extraction
+ if system_platform.startswith('Darwin'):
+ unpacked_file_name = os.path.join(download_dir, 'stackql')
+ command = f'pkgutil --expand-full {archive_file_name} {unpacked_file_name}'
+ if os.path.exists(unpacked_file_name):
+ os.system(f'rm -rf {unpacked_file_name}')
+ os.system(command)
+
+ else: # Handle Windows and Linux
+ with zipfile.ZipFile(archive_file_name, 'r') as zip_ref:
+ zip_ref.extractall(download_dir)
+
+ # Specific check for Windows to ensure `stackql.exe` is extracted
+ if system_platform.startswith("Windows"):
+ if not os.path.exists(binary_path) and os.path.exists(os.path.join(download_dir, "stackql")):
+ os.rename(os.path.join(download_dir, "stackql"), binary_path)
+
+ # Confirm binary presence and set permissions
+ if os.path.exists(binary_path):
+ print(f"StackQL executable successfully located at: {binary_path}")
+ os.chmod(binary_path, 0o755)
+ else:
+ print(f"ERROR: Expected binary '{binary_path}' not found after extraction.")
+ exit(1)
+
+ except Exception as e:
+ print(f"ERROR: [setup_binary] {str(e)}")
+ exit(1)
+
+
+def get_binary_version(bin_path):
+ """Gets the version of the stackql binary.
+
+ Args:
+ bin_path (str): The path to the binary
+
+ Returns:
+ tuple: (version, sha)
+ - version: The version number
+ - sha: The git commit sha
+
+ Raises:
+ FileNotFoundError: If the binary is not found
+ Exception: If the version cannot be determined
+ """
+ try:
+ iqlPopen = subprocess.Popen([bin_path] + ["--version"],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ # Use communicate to fetch the outputs and wait for the process to finish
+ output, _ = iqlPopen.communicate()
+ # Decode the output
+ decoded_output = output.decode('utf-8')
+ # Split to get the version tokens
+ version_tokens = decoded_output.split('\n')[0].split(' ')
+ version = version_tokens[1]
+ sha = version_tokens[3].replace('(', '').replace(')', '')
+ return version, sha
+ except FileNotFoundError:
+ print(f"ERROR: [get_binary_version] {bin_path} not found")
+ exit(1)
+ except Exception as e:
+ error_message = e.args[0]
+ print(f"ERROR: [get_binary_version] {error_message}")
+ exit(1)
+ finally:
+ # Ensure the subprocess is terminated and streams are closed
+ iqlPopen.terminate()
+ if hasattr(iqlPopen, 'stdout') and iqlPopen.stdout:
+ iqlPopen.stdout.close()
+
+
+def format_auth(auth):
+ """Formats an authentication object for use with stackql.
+
+ Args:
+ auth: The authentication object, can be a string or a dict
+
+ Returns:
+ tuple: (auth_obj, auth_str)
+ - auth_obj: The authentication object as a dict
+ - auth_str: The authentication object as a JSON string
+
+ Raises:
+ Exception: If the authentication object is invalid
+ """
+ try:
+ if auth is not None:
+ if isinstance(auth, str):
+ authobj = json.loads(auth)
+ authstr = auth
+ elif isinstance(auth, dict):
+ authobj = auth
+ authstr = json.dumps(auth)
+ return authobj, authstr
+ else:
+ raise Exception("ERROR: [format_auth] auth key supplied with no value")
+ except Exception as e:
+ error_message = e.args[0]
+ print(f"ERROR: [format_auth] {error_message}")
+ exit(1)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/package.py b/ref-python-packages/pystackql/pystackql/utils/package.py
new file mode 100644
index 0000000..3f1baf1
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/package.py
@@ -0,0 +1,31 @@
+# pystackql/utils/package.py
+
+"""
+Package related utility functions for PyStackQL.
+
+"""
+
+# Conditional import for package metadata retrieval
+try:
+ from importlib.metadata import version, PackageNotFoundError
+except ImportError:
+ # This is for Python versions earlier than 3.8
+ from importlib_metadata import version, PackageNotFoundError
+
+def get_package_version(package_name):
+ """Gets the version of the specified package.
+
+ Args:
+ package_name (str): The name of the package
+
+ Returns:
+ str: The version of the package or None if not found
+ """
+ try:
+ pkg_version = version(package_name)
+ if pkg_version is None:
+ print(f"Warning: Retrieved version for '{package_name}' is None!")
+ return pkg_version
+ except PackageNotFoundError:
+ print(f"Warning: Package '{package_name}' not found!")
+ return None
diff --git a/ref-python-packages/pystackql/pystackql/utils/params.py b/ref-python-packages/pystackql/pystackql/utils/params.py
new file mode 100644
index 0000000..b4d8eec
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/params.py
@@ -0,0 +1,270 @@
+# pystackql/utils/params.py
+
+"""
+Parameter generation utility for StackQL local mode.
+
+This module provides functions to generate command-line parameters for the StackQL binary
+and helps set instance attributes.
+"""
+
+import json
+from .auth import format_auth
+
+def _set_param(params, param_name, value):
+ """Add a parameter and its value to the params list.
+
+ :param params: List of parameters to append to
+ :param param_name: Parameter name to add
+ :param value: Value to add
+ :return: Updated params list
+ """
+ params.append(f"--{param_name}")
+ params.append(str(value))
+ return params
+
+def generate_params_for_execution(base_kwargs, override_kwargs=None):
+ """Generate parameters for a single execution with optional overrides.
+
+ This function generates command-line parameters for executing a query,
+ optionally overriding base parameters with execution-specific ones.
+
+ :param base_kwargs: Base keyword arguments (from constructor)
+ :param override_kwargs: Keyword arguments to override (from execute/executeStmt)
+ :return: List of parameters for StackQL binary
+ """
+ # Merge kwargs, with override_kwargs taking precedence
+ merged_kwargs = base_kwargs.copy()
+ if override_kwargs:
+ merged_kwargs.update(override_kwargs)
+
+ # Initialize parameter list
+ params = ["exec"]
+
+ # Extract parameters from merged_kwargs
+ output = merged_kwargs.get('output', 'dict')
+ backend_storage_mode = merged_kwargs.get('backend_storage_mode', 'memory')
+ backend_file_storage_location = merged_kwargs.get('backend_file_storage_location', 'stackql.db')
+ app_root = merged_kwargs.get('app_root', None)
+ execution_concurrency_limit = merged_kwargs.get('execution_concurrency_limit', -1)
+ dataflow_dependency_max = merged_kwargs.get('dataflow_dependency_max', 50)
+ dataflow_components_max = merged_kwargs.get('dataflow_components_max', 50)
+ custom_registry = merged_kwargs.get('custom_registry', None)
+ custom_auth = merged_kwargs.get('custom_auth', None)
+ sep = merged_kwargs.get('sep', ',')
+ header = merged_kwargs.get('header', False)
+ max_results = merged_kwargs.get('max_results', -1)
+ page_limit = merged_kwargs.get('page_limit', 20)
+ max_depth = merged_kwargs.get('max_depth', 5)
+ api_timeout = merged_kwargs.get('api_timeout', 45)
+ http_debug = merged_kwargs.get('http_debug', False)
+ proxy_host = merged_kwargs.get('proxy_host', None)
+ proxy_port = merged_kwargs.get('proxy_port', -1)
+ proxy_user = merged_kwargs.get('proxy_user', None)
+ proxy_password = merged_kwargs.get('proxy_password', None)
+ proxy_scheme = merged_kwargs.get('proxy_scheme', 'http')
+
+ # Set output format
+ params.append("--output")
+ if output.lower() == "csv":
+ params.append("csv")
+ else:
+ params.append("json")
+
+ # Backend storage settings
+ if backend_storage_mode == 'file':
+ params.append("--sqlBackend")
+ params.append(json.dumps({ "dsn": f"file:{backend_file_storage_location}" }))
+
+ # If app_root is set, use it
+ if app_root is not None:
+ _set_param(params, 'approot', app_root)
+
+ # Set execution parameters
+ _set_param(params, 'execution.concurrency.limit', execution_concurrency_limit)
+ _set_param(params, 'dataflow.dependency.max', dataflow_dependency_max)
+ _set_param(params, 'dataflow.components.max', dataflow_components_max)
+
+ # If custom_auth is set, use it
+ if custom_auth is not None:
+ authobj, authstr = format_auth(custom_auth)
+ params.append("--auth")
+ params.append(authstr)
+
+ # If custom_registry is set, use it
+ if custom_registry is not None:
+ params.append("--registry")
+ params.append(json.dumps({ "url": custom_registry }))
+
+ # CSV output settings
+ if output.lower() == "csv":
+ _set_param(params, 'delimiter', sep)
+
+ if not header:
+ params.append("--hideheaders")
+
+ # App behavioral properties
+ _set_param(params, 'http.response.maxResults', max_results)
+ _set_param(params, 'http.response.pageLimit', page_limit)
+ _set_param(params, 'indirect.depth.max', max_depth)
+ _set_param(params, 'apirequesttimeout', api_timeout)
+
+ if http_debug:
+ params.append("--http.log.enabled")
+
+ # Proxy settings
+ if proxy_host is not None:
+ # Set basic proxy parameters
+ _set_param(params, 'http.proxy.host', proxy_host)
+ _set_param(params, 'http.proxy.port', proxy_port)
+ _set_param(params, 'http.proxy.user', proxy_user)
+ _set_param(params, 'http.proxy.password', proxy_password)
+
+ # Validate and set proxy scheme
+ ALLOWED_PROXY_SCHEMES = {'http', 'https'}
+ if proxy_scheme.lower() not in ALLOWED_PROXY_SCHEMES:
+ raise ValueError(f"Invalid proxy_scheme. Expected one of {ALLOWED_PROXY_SCHEMES}, got {proxy_scheme}.")
+
+ _set_param(params, 'http.proxy.scheme', proxy_scheme.lower())
+
+ # Return the params list
+ return params
+
+def setup_local_mode(instance, **kwargs):
+ """Set up local mode for a StackQL instance.
+
+ This function generates parameters and sets instance attributes
+ for local mode operation.
+
+ :param instance: The StackQL instance
+ :param kwargs: Keyword arguments from the constructor
+ :return: List of parameters for StackQL binary
+ """
+ # Store base kwargs for later use
+ instance._base_kwargs = kwargs.copy()
+
+ # Initialize parameter list
+ params = ["exec"]
+
+ # Extract parameters from kwargs with defaults matching the StackQL.__init__ defaults
+ output = kwargs.get('output', 'dict')
+ backend_storage_mode = kwargs.get('backend_storage_mode', 'memory')
+ backend_file_storage_location = kwargs.get('backend_file_storage_location', 'stackql.db')
+ app_root = kwargs.get('app_root', None)
+ execution_concurrency_limit = kwargs.get('execution_concurrency_limit', -1)
+ dataflow_dependency_max = kwargs.get('dataflow_dependency_max', 50)
+ dataflow_components_max = kwargs.get('dataflow_components_max', 50)
+ custom_registry = kwargs.get('custom_registry', None)
+ custom_auth = kwargs.get('custom_auth', None)
+ sep = kwargs.get('sep', ',')
+ header = kwargs.get('header', False)
+ max_results = kwargs.get('max_results', -1)
+ page_limit = kwargs.get('page_limit', 20)
+ max_depth = kwargs.get('max_depth', 5)
+ api_timeout = kwargs.get('api_timeout', 45)
+ http_debug = kwargs.get('http_debug', False)
+ proxy_host = kwargs.get('proxy_host', None)
+ proxy_port = kwargs.get('proxy_port', -1)
+ proxy_user = kwargs.get('proxy_user', None)
+ proxy_password = kwargs.get('proxy_password', None)
+ proxy_scheme = kwargs.get('proxy_scheme', 'http')
+ download_dir = kwargs.get('download_dir', None)
+ debug = kwargs.get('debug', False)
+ debug_log_file = kwargs.get('debug_log_file', None)
+
+ # Set output format
+ params.append("--output")
+ if output.lower() == "csv":
+ params.append("csv")
+ else:
+ params.append("json")
+
+ # Backend storage settings
+ if backend_storage_mode == 'file':
+ params.append("--sqlBackend")
+ params.append(json.dumps({ "dsn": f"file:{backend_file_storage_location}" }))
+
+ # If app_root is set, use it
+ if app_root is not None:
+ instance.app_root = app_root
+ _set_param(params, 'approot', app_root)
+
+ # Set execution parameters
+ instance.execution_concurrency_limit = execution_concurrency_limit
+ _set_param(params, 'execution.concurrency.limit', execution_concurrency_limit)
+
+ instance.dataflow_dependency_max = dataflow_dependency_max
+ _set_param(params, 'dataflow.dependency.max', dataflow_dependency_max)
+
+ instance.dataflow_components_max = dataflow_components_max
+ _set_param(params, 'dataflow.components.max', dataflow_components_max)
+
+ # If custom_auth is set, use it
+ if custom_auth is not None:
+ authobj, authstr = format_auth(custom_auth)
+ instance.auth = authobj
+ params.append("--auth")
+ params.append(authstr)
+
+ # If custom_registry is set, use it
+ if custom_registry is not None:
+ instance.custom_registry = custom_registry
+ params.append("--registry")
+ params.append(json.dumps({ "url": custom_registry }))
+
+ # CSV output settings
+ if output.lower() == "csv":
+ instance.sep = sep
+ _set_param(params, 'delimiter', sep)
+
+ instance.header = header
+ if not header:
+ params.append("--hideheaders")
+
+ # App behavioral properties
+ instance.max_results = max_results
+ _set_param(params, 'http.response.maxResults', max_results)
+
+ instance.page_limit = page_limit
+ _set_param(params, 'http.response.pageLimit', page_limit)
+
+ instance.max_depth = max_depth
+ _set_param(params, 'indirect.depth.max', max_depth)
+
+ instance.api_timeout = api_timeout
+ _set_param(params, 'apirequesttimeout', api_timeout)
+
+ instance.http_debug = bool(http_debug)
+ if http_debug:
+ params.append("--http.log.enabled")
+
+ # Proxy settings
+ if proxy_host is not None:
+ # Set attributes
+ instance.proxy_host = proxy_host
+ instance.proxy_port = proxy_port
+ instance.proxy_user = proxy_user
+ instance.proxy_password = proxy_password
+
+ # Set basic proxy parameters
+ _set_param(params, 'http.proxy.host', proxy_host)
+ _set_param(params, 'http.proxy.port', proxy_port)
+ _set_param(params, 'http.proxy.user', proxy_user)
+ _set_param(params, 'http.proxy.password', proxy_password)
+
+ # Validate and set proxy scheme
+ ALLOWED_PROXY_SCHEMES = {'http', 'https'}
+ if proxy_scheme.lower() not in ALLOWED_PROXY_SCHEMES:
+ raise ValueError(f"Invalid proxy_scheme. Expected one of {ALLOWED_PROXY_SCHEMES}, got {proxy_scheme}.")
+
+ instance.proxy_scheme = proxy_scheme.lower()
+ _set_param(params, 'http.proxy.scheme', proxy_scheme.lower())
+
+ # Initialize binary manager
+ from ..core.binary import BinaryManager # Import here to avoid circular imports
+ instance.binary_manager = BinaryManager(download_dir)
+ instance.bin_path = instance.binary_manager.bin_path
+ instance.version = instance.binary_manager.version
+ instance.sha = instance.binary_manager.sha
+
+ # Return the params list
+ return params
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/pystackql/utils/platform.py b/ref-python-packages/pystackql/pystackql/utils/platform.py
new file mode 100644
index 0000000..91889d7
--- /dev/null
+++ b/ref-python-packages/pystackql/pystackql/utils/platform.py
@@ -0,0 +1,40 @@
+# pystackql/utils/platform.py
+
+"""
+Platform-related utility functions for PyStackQL.
+
+This module contains functions for platform detection.
+"""
+
+import os
+import platform
+
+def is_binary_local(system_platform):
+ """Checks if the binary exists at the specified local path.
+
+ Args:
+ system_platform (str): The operating system platform
+
+ Returns:
+ bool: True if the binary exists at the expected local path
+ """
+ if system_platform == 'Linux' and os.path.exists('/usr/local/bin/stackql'):
+ return True
+ return False
+
+def get_platform():
+ """Gets the current platform information.
+
+ Returns:
+ tuple: (platform_string, system_value)
+ - platform_string: A string with platform details
+ - system_value: The operating system name
+ """
+ system_val = platform.system()
+ machine_val = platform.machine()
+ platform_val = platform.platform()
+ python_version_val = platform.python_version()
+ return (
+ f"{system_val} {machine_val} ({platform_val}), Python {python_version_val}",
+ system_val
+ )
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/requirements.txt b/ref-python-packages/pystackql/requirements.txt
new file mode 100644
index 0000000..5270cf7
--- /dev/null
+++ b/ref-python-packages/pystackql/requirements.txt
@@ -0,0 +1,24 @@
+# Core dependencies
+pandas>=1.3.0
+requests>=2.25.0
+IPython>=7.0.0
+termcolor>=1.1.0
+PyYAML>=5.4.0
+
+# Documentation
+sphinx>=4.0.0
+sphinx-rtd-theme>=1.0.0
+
+# Testing
+pytest>=6.2.5
+pytest-cov>=2.12.0
+nose>=1.3.7 # For backward compatibility
+
+# Platform-independent psycopg installation
+psycopg[binary]>=3.1.0 # Uses binary wheels where available
+
+# Async support
+nest-asyncio>=1.5.5 # For running async code in Jupyter
+
+# Optional utilities
+tqdm>=4.61.0 # For progress bars
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/run_server_tests.py b/ref-python-packages/pystackql/run_server_tests.py
new file mode 100644
index 0000000..ce9e554
--- /dev/null
+++ b/ref-python-packages/pystackql/run_server_tests.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+"""
+Test runner script for PyStackQL.
+
+This script runs all the PyStackQL tests in server mode. It can be used to run
+individual test files or all tests.
+
+A running instance of the stackql server is required to run the server tests.
+
+Examples:
+ # Run all tests
+ python run_server_tests.py
+
+ # Run specific test files
+ python run_server_tests.py tests/test_server.py
+
+ # Run with verbose output
+ python run_server_tests.py -v
+"""
+
+import sys
+import os
+import pytest
+from termcolor import colored
+
+# Add the current directory to the Python path
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+# Add the tests directory to the Python path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'tests')))
+
+def main():
+ """Run the tests."""
+ print(colored("\n===== PyStackQL Server Test Runner =====\n", "cyan"))
+
+ # Default pytest arguments
+ args = ["-v"]
+
+ # Add any specific test files passed as arguments
+ if len(sys.argv) > 1:
+ args.extend(sys.argv[1:])
+ else:
+        # If no specific tests were requested, run all server test files
+ args.extend([
+ "tests/test_server.py",
+ "tests/test_server_magic.py"
+ ])
+
+ # Run pytest with the arguments
+ return pytest.main(args)
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/run_tests.py b/ref-python-packages/pystackql/run_tests.py
new file mode 100644
index 0000000..930ba58
--- /dev/null
+++ b/ref-python-packages/pystackql/run_tests.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+"""
+Test runner script for PyStackQL.
+
+This script runs all the PyStackQL tests. It can be used to run
+individual test files or all tests.
+
+Examples:
+ # Run all tests
+ python run_tests.py
+
+ # Run specific test files
+ python run_tests.py tests/test_core.py tests/test_query_execution.py
+
+ # Run with verbose output
+ python run_tests.py -v
+"""
+
+import sys
+import os
+import pytest
+from termcolor import colored
+
+# Add the current directory to the Python path
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+# Add the tests directory to the Python path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'tests')))
+
+def main():
+ """Run the tests."""
+ print(colored("\n===== PyStackQL Test Runner =====\n", "cyan"))
+
+ # Default pytest arguments
+ args = ["-v"]
+
+ # Add any specific test files passed as arguments
+ if len(sys.argv) > 1:
+ args.extend(sys.argv[1:])
+ else:
+ # If no specific tests were requested, run all non-server test files
+ args.extend([
+ "tests/test_core.py",
+ "tests/test_query_execution.py",
+ "tests/test_output_formats.py",
+ "tests/test_magic.py",
+ "tests/test_async.py"
+ ])
+
+ # Run pytest with the arguments
+ return pytest.main(args)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/server-status.sh b/ref-python-packages/pystackql/server-status.sh
new file mode 100644
index 0000000..bdedf52
--- /dev/null
+++ b/ref-python-packages/pystackql/server-status.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Search for the stackql process
+stackql_process=$(ps -ef | grep '[s]tackql')
+
+# Check if the process is running
+if [ -z "$stackql_process" ]; then
+ echo "Server is not running."
+else
+ # Extract the port and PID using awk/sed
+ port=$(echo "$stackql_process" | sed -n 's/.*--pgsrv.port=\([0-9]*\).*/\1/p')
+ pid=$(echo "$stackql_process" | awk '{print $2}')
+
+ # Check if port extraction was successful
+ if [ -z "$port" ]; then
+ echo "Server is running but could not detect the port (PID $pid)"
+ else
+ echo "Server is running on port $port (PID $pid)"
+ fi
+fi
diff --git a/ref-python-packages/pystackql/start-stackql-server.sh b/ref-python-packages/pystackql/start-stackql-server.sh
new file mode 100644
index 0000000..82f8d96
--- /dev/null
+++ b/ref-python-packages/pystackql/start-stackql-server.sh
@@ -0,0 +1,8 @@
+# start server if not running
+echo "checking if server is running"
+if [ -z "$(ps | grep stackql)" ]; then
+ nohup ./stackql -v --pgsrv.port=5466 srv &
+ sleep 5
+else
+ echo "server is already running"
+fi
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/stop-stackql-server.sh b/ref-python-packages/pystackql/stop-stackql-server.sh
new file mode 100644
index 0000000..762f6e8
--- /dev/null
+++ b/ref-python-packages/pystackql/stop-stackql-server.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Find the process ID of the StackQL server
+PID=$(pgrep -f "stackql")
+
+if [ -z "$PID" ]; then
+ echo "stackql server is not running."
+else
+ echo "stopping stackql server (PID: $PID)..."
+ kill $PID
+ echo "stackql server stopped."
+fi
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/README.md b/ref-python-packages/pystackql/tests/README.md
new file mode 100644
index 0000000..6491481
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/README.md
@@ -0,0 +1,158 @@
+# PyStackQL Testing Guide
+
+## Overview
+
+This guide explains the PyStackQL testing framework. The tests have been designed to:
+
+1. Focus on provider-agnostic queries where possible
+2. Use the Homebrew provider for provider-specific tests (no authentication required)
+3. Be organized into logical modules based on functionality
+4. Support both local execution and GitHub Codespaces
+
+## Test Structure
+
+The tests are organized into these main files:
+
+- `test_constants.py`: Common constants and helper functions
+- `conftest.py`: Test fixtures and setup
+- `test_core.py`: Core functionality tests
+- `test_query_execution.py`: Query execution tests
+- `test_output_formats.py`: Output format tests
+- `test_magic.py`: Magic extension tests
+- `test_async.py`: Async functionality tests
+- `test_server.py`: Server mode tests
+
+## Running Tests
+
+### Running All Tests
+
+To run all tests:
+
+```bash
+python run_tests.py
+```
+
+### Running Specific Tests
+
+To run specific test files:
+
+```bash
+python run_tests.py tests/test_core.py tests/test_query_execution.py
+```
+
+### Running with Extra Verbosity
+
+```bash
+python run_tests.py -v
+```
+
+### Running Server Tests
+
+Server tests are skipped by default because they require a running StackQL server. To run these tests:
+
+1. Start a StackQL server:
+ ```bash
+ stackql srv --pgsrv.address 127.0.0.1 --pgsrv.port 5466
+ ```
+
+2. Run the server tests:
+ ```bash
+ python run_tests.py tests/test_server.py -v
+ ```
+
+## Test Categories
+
+### Core Tests
+
+Tests the basic properties and attributes of the `StackQL` class:
+
+- `properties()` method
+- `version`, `package_version`, `platform` attributes
+- Binary path and download directory
+- Upgrade functionality
+
+### Query Execution Tests
+
+Tests the query execution functionality with provider-agnostic queries:
+
+- Literal values (integers, strings, floats)
+- Expressions
+- JSON extraction
+- Homebrew provider queries
+- Registry pull operations
+
+### Output Format Tests
+
+Tests the different output formats:
+
+- Dict output
+- Pandas output with type checking
+- CSV output with different separators and headers
+- Error handling for invalid configurations
+
+### Magic Tests
+
+Tests the Jupyter magic extensions:
+
+- Line and cell magic in non-server mode
+- Line and cell magic in server mode
+- Result storage in user namespace
+- Display options
+
+### Async Tests
+
+Tests the async query execution functionality:
+
+- `executeQueriesAsync` with different output formats
+- Concurrent queries with the Homebrew provider
+- Error handling
+
+### Server Tests
+
+Tests the server mode functionality (requires a running server):
+
+- Server connectivity
+- Query execution in server mode
+- Statement execution in server mode
+- Different output formats in server mode
+
+## Test Data
+
+The tests use:
+
+1. **Simple literals and expressions**:
+ ```sql
+ SELECT 1 as literal_int_value
+ SELECT 1.001 as literal_float_value
+ SELECT 'test' as literal_string_value
+ SELECT 1=1 as expression
+ ```
+
+2. **Homebrew provider queries**:
+ ```sql
+ SELECT name, full_name, tap FROM homebrew.formula.formula WHERE formula_name = 'stackql'
+ SELECT * FROM homebrew.formula.vw_usage_metrics WHERE formula_name = 'stackql'
+ ```
+
+3. **Registry operations**:
+ ```sql
+ REGISTRY PULL homebrew
+ ```
+
+## Testing in GitHub Codespaces
+
+When running in GitHub Codespaces:
+
+1. The tests automatically detect if they're running in GitHub Actions and skip the binary upgrade
+2. The server tests are skipped by default (can be enabled if needed)
+3. Async tests might be skipped on Windows due to asyncio issues
+
+## Adding New Tests
+
+When adding new tests:
+
+1. Use provider-agnostic queries where possible
+2. For provider-specific tests, prefer the Homebrew provider
+3. Add new tests to the appropriate test file based on functionality
+4. Update `run_tests.py` if adding a new test file
+5. Follow the existing patterns for consistency
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/__init__.py b/ref-python-packages/pystackql/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/pystackql/tests/conftest.py b/ref-python-packages/pystackql/tests/conftest.py
new file mode 100644
index 0000000..ecc13cd
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/conftest.py
@@ -0,0 +1,64 @@
+# tests/conftest.py
+
+"""
+Common test setup and fixtures for PyStackQL tests.
+"""
+
+import os
+import sys
+import platform
+import time
+import pytest
+import subprocess
+import signal
+from unittest.mock import MagicMock
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import SERVER_ADDRESS, SERVER_PORT, REGISTRY_PULL_HOMEBREW_QUERY
+
+# Global variable to store the server process
+server_process = None
+
+@pytest.fixture(scope="session", autouse=True)
+def setup_stackql():
+ """
+ Session-wide fixture to download stackql binary and setup server.
+ This runs once before all tests.
+ """
+ print("\nDownloading and setting up stackql binary...")
+ stackql = StackQL()
+
+ # Check if we're running in GitHub Actions
+ is_github_actions = os.environ.get('GITHUB_ACTIONS') == 'true'
+ if not is_github_actions:
+ print("Running tests outside of GitHub Actions, upgrading stackql binary...")
+ stackql.upgrade()
+
+ # Pull the homebrew provider for provider-specific tests
+ print("Pulling homebrew provider for tests...")
+ result = stackql.executeStmt(REGISTRY_PULL_HOMEBREW_QUERY)
+ print(result)
+
+ # Return the StackQL instance for use in tests
+ return stackql
+
+@pytest.fixture
+def mock_interactive_shell():
+ """Create a mock IPython shell for testing."""
+ class MockInteractiveShell:
+ def __init__(self):
+ self.user_ns = {}
+ self.register_magics_called = False
+
+ def register_magics(self, magic_instance):
+ """Mock for registering magics."""
+ self.magics = magic_instance
+ self.register_magics_called = True
+
+ return MockInteractiveShell()
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_async.py b/ref-python-packages/pystackql/tests/test_async.py
new file mode 100644
index 0000000..9bcfdba
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_async.py
@@ -0,0 +1,120 @@
+# tests/test_async.py
+
+"""
+Async functionality tests for PyStackQL in non-server mode.
+
+This module tests the async query execution functionality of the StackQL class.
+"""
+
+import os
+import sys
+import platform
+import pytest
+import pandas as pd
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import (
+ ASYNC_QUERIES,
+ print_test_result,
+ async_test_decorator
+)
+
+# Skip all tests on Windows due to asyncio issues
+pytestmark = pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="Skipping async tests on Windows"
+)
+
+class TestAsyncFunctionality:
+ """Tests for PyStackQL async functionality in non-server mode."""
+
+ @async_test_decorator
+ async def test_execute_queries_async_dict_output(self):
+ """Test executeQueriesAsync with dict output format."""
+ stackql = StackQL()
+ results = await stackql.executeQueriesAsync(ASYNC_QUERIES)
+
+ # Check result structure
+ assert isinstance(results, list), "Results should be a list"
+ assert all(isinstance(item, dict) for item in results), "Each item in results should be a dict"
+
+ # Check result content
+ assert len(results) > 0, "Results should not be empty"
+ assert all("formula_name" in item for item in results), "Each item should have 'formula_name' column"
+
+ # Extract formula names
+ formula_names = [item["formula_name"] for item in results if "formula_name" in item]
+
+ # Check that we have the expected formula names
+ assert any("stackql" in str(name) for name in formula_names), "Results should include 'stackql'"
+ assert any("terraform" in str(name) for name in formula_names), "Results should include 'terraform'"
+
+ print_test_result(f"Async executeQueriesAsync with dict output test\nRESULT COUNT: {len(results)}",
+ isinstance(results, list) and all(isinstance(item, dict) for item in results),
+ is_async=True)
+
+ @async_test_decorator
+ async def test_execute_queries_async_pandas_output(self):
+ """Test executeQueriesAsync with pandas output format."""
+ stackql = StackQL(output='pandas')
+ result = await stackql.executeQueriesAsync(ASYNC_QUERIES)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "formula_name" in result.columns, "DataFrame should have 'formula_name' column"
+
+ # Extract formula names
+ formula_values = result["formula_name"].tolist()
+
+ # Check that we have the expected formula names
+ assert any("stackql" in str(name) for name in formula_values), "Results should include 'stackql'"
+ assert any("terraform" in str(name) for name in formula_values), "Results should include 'terraform'"
+
+ # Check that numeric columns exist
+ numeric_columns = [
+ "installs_30d", "installs_90d", "installs_365d",
+ "install_on_requests_30d", "install_on_requests_90d", "install_on_requests_365d"
+ ]
+ for col in numeric_columns:
+ assert col in result.columns, f"DataFrame should have '{col}' column"
+
+ # Check that the column can be converted to numeric
+ try:
+ pd.to_numeric(result[col])
+ numeric_conversion_success = True
+ except (ValueError, TypeError):
+ numeric_conversion_success = False
+
+ assert numeric_conversion_success, f"Column '{col}' should be convertible to numeric"
+
+ print_test_result(f"Async executeQueriesAsync with pandas output test\nRESULT COUNT: {len(result)}",
+ isinstance(result, pd.DataFrame) and not result.empty,
+ is_async=True)
+
+ @async_test_decorator
+ async def test_execute_queries_async_csv_output(self):
+ """Test that executeQueriesAsync with csv output raises ValueError."""
+ stackql = StackQL(output='csv')
+
+ with pytest.raises(ValueError) as exc_info:
+ await stackql.executeQueriesAsync(ASYNC_QUERIES)
+
+ # Check exception message
+ error_msg = str(exc_info.value)
+ assert "executeQueriesAsync supports only" in error_msg, "Error message should mention supported formats"
+ assert "dict" in error_msg, "Error message should mention 'dict'"
+ assert "pandas" in error_msg, "Error message should mention 'pandas'"
+
+ print_test_result(f"Async executeQueriesAsync with csv output test",
+ "executeQueriesAsync supports only" in error_msg,
+ is_async=True)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_constants.py b/ref-python-packages/pystackql/tests/test_constants.py
new file mode 100644
index 0000000..86caf1c
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_constants.py
@@ -0,0 +1,120 @@
+# tests/test_constants.py
+
+"""
+Test constants and helper functions for PyStackQL tests.
+"""
+
+import os
+import re
+import sys
+import time
+import platform
+import subprocess
+from termcolor import colored
+import pandas as pd
+
+# Server connection settings
+SERVER_PORT = 5466
+SERVER_ADDRESS = "127.0.0.1"
+
+# Expected properties and patterns for validation
+EXPECTED_PROPERTIES = [
+ "bin_path", "params",
+ "output", "platform", "server_mode", "sha", "version",
+ "package_version" # Modified: removed "download_dir" as it's no longer exposed
+]
+
+EXPECTED_VERSION_PATTERN = r'^v?(\d+\.\d+\.\d+)$'
+EXPECTED_PACKAGE_VERSION_PATTERN = r'^(\d+\.\d+\.\d+)$'
+EXPECTED_PLATFORM_PATTERN = r'^(Windows|Linux|Darwin) (\w+) \(([^)]+)\), Python (\d+\.\d+\.\d+)$'
+
+# Get custom download directory based on platform
+def get_custom_download_dir():
+ """Return a platform-specific custom download directory."""
+ custom_download_dirs = {
+ 'windows': 'C:\\temp',
+ 'darwin': '/tmp',
+ 'linux': '/tmp'
+ }
+ return custom_download_dirs.get(platform.system().lower(), '/tmp')
+
+# Basic test queries that don't require authentication
+LITERAL_INT_QUERY = "SELECT 1 as literal_int_value"
+LITERAL_FLOAT_QUERY = "SELECT 1.001 as literal_float_value"
+LITERAL_STRING_QUERY = "SELECT 'test' as literal_string_value"
+EXPRESSION_TRUE_QUERY = "SELECT 1=1 as expression"
+EXPRESSION_FALSE_QUERY = "SELECT 1=0 as expression"
+EMPTY_RESULT_QUERY = "SELECT 1 WHERE 1=0"
+JSON_EXTRACT_QUERY = """
+SELECT
+ json_extract('{"Key":"StackName","Value":"aws-stack"}', '$.Key') as key,
+ json_extract('{"Key":"StackName","Value":"aws-stack"}', '$.Value') as value
+"""
+
+# Homebrew provider queries (no authentication required)
+HOMEBREW_FORMULA_QUERY = "SELECT name, full_name, tap FROM homebrew.formula.formula WHERE formula_name = 'stackql'"
+HOMEBREW_METRICS_QUERY = "SELECT * FROM homebrew.formula.vw_usage_metrics WHERE formula_name = 'stackql'"
+
+# Registry pull queries
+REGISTRY_PULL_HOMEBREW_QUERY = "REGISTRY PULL homebrew"
+
+# Async test queries
+ASYNC_QUERIES = [
+ "SELECT * FROM homebrew.formula.vw_usage_metrics WHERE formula_name = 'stackql'",
+ "SELECT * FROM homebrew.formula.vw_usage_metrics WHERE formula_name = 'terraform'"
+]
+
+# Pattern to match registry pull response
+def registry_pull_resp_pattern(provider):
+ """Returns a regex pattern to match a successful registry pull message."""
+ return r"%s provider, version 'v\d+\.\d+\.\d+' successfully installed\s*" % provider
+
+# Test result printer
+def print_test_result(test_name, condition=True, server_mode=False, is_ipython=False, is_async=False):
+ """Prints a formatted test result.
+
+ Args:
+ test_name: Name or description of the test
+ condition: Whether the test passed (True) or failed (False)
+ server_mode: Whether the test was run in server mode
+ is_ipython: Whether the test involved IPython magic
+ is_async: Whether the test involved async functionality
+ """
+ status_header = colored("[PASSED] ", 'green') if condition else colored("[FAILED] ", 'red')
+ headers = [status_header]
+
+ if server_mode:
+ headers.append(colored("[SERVER MODE]", 'yellow'))
+ if is_ipython:
+ headers.append(colored("[MAGIC EXT]", 'blue'))
+ if is_async:
+ headers.append(colored("[ASYNC]", 'magenta'))
+
+ headers.append(test_name)
+ message = " ".join(headers)
+
+ print("\n" + message)
+
+# Decorators for test setup
+def pystackql_test_setup(**kwargs):
+ """Decorator to set up a StackQL instance with specified parameters."""
+ def decorator(func):
+ def wrapper(self, *args):
+ try:
+ del self.stackql
+ except AttributeError:
+ pass
+ self.stackql = self.StackQL(**kwargs)
+ func(self, *args)
+ return wrapper
+ return decorator
+
+def async_test_decorator(func):
+ """Decorator to run async tests with asyncio."""
+ def wrapper(*args, **kwargs):
+ import asyncio
+ if asyncio.iscoroutinefunction(func):
+ return asyncio.run(func(*args, **kwargs))
+ else:
+ return func(*args, **kwargs)
+ return wrapper
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_core.py b/ref-python-packages/pystackql/tests/test_core.py
new file mode 100644
index 0000000..8295dab
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_core.py
@@ -0,0 +1,143 @@
+# tests/test_core.py
+
+"""
+Core functionality tests for PyStackQL.
+
+This module tests the basic attributes and properties of the StackQL class.
+"""
+
+import os
+import re
+import sys
+import platform
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import (
+ EXPECTED_PROPERTIES,
+ EXPECTED_VERSION_PATTERN,
+ EXPECTED_PACKAGE_VERSION_PATTERN,
+ EXPECTED_PLATFORM_PATTERN,
+ get_custom_download_dir,
+ print_test_result,
+ pystackql_test_setup
+)
+
+class TestStackQLCore:
+ """Tests for core PyStackQL functionality."""
+
+ StackQL = StackQL # For use with pystackql_test_setup decorator
+
+ @pystackql_test_setup()
+ def test_properties_method(self):
+ """Test that the properties() method returns the expected properties."""
+ properties = self.stackql.properties()
+
+ # Check that properties is a dictionary
+ assert isinstance(properties, dict), "properties should be a dictionary"
+
+ # Check that all expected properties are present
+ missing_keys = [key for key in EXPECTED_PROPERTIES if key not in properties]
+ assert len(missing_keys) == 0, f"Missing keys in properties: {', '.join(missing_keys)}"
+
+ # Check property types
+ assert isinstance(properties["bin_path"], str), "bin_path should be of type str"
+ assert isinstance(properties["params"], list), "params should be of type list"
+ assert isinstance(properties["server_mode"], bool), "server_mode should be of type bool"
+ assert isinstance(properties["output"], str), "output should be of type str"
+
+ print_test_result(f"Properties method test\nPROPERTIES: {properties}", True)
+
+ @pystackql_test_setup()
+ def test_version_attribute(self):
+ """Test that the version attribute contains a valid version string."""
+ version = self.stackql.version
+ assert version is not None, "version should not be None"
+
+ is_valid_semver = bool(re.match(EXPECTED_VERSION_PATTERN, version))
+ assert is_valid_semver, f"version '{version}' does not match expected pattern"
+
+ print_test_result(f"Version attribute test\nVERSION: {version}", is_valid_semver)
+
+ @pystackql_test_setup()
+ def test_package_version_attribute(self):
+ """Test that the package_version attribute contains a valid version string."""
+ package_version = self.stackql.package_version
+ assert package_version is not None, "package_version should not be None"
+
+ is_valid_semver = bool(re.match(EXPECTED_PACKAGE_VERSION_PATTERN, package_version))
+ assert is_valid_semver, f"package_version '{package_version}' does not match expected pattern"
+
+ print_test_result(f"Package version attribute test\nPACKAGE VERSION: {package_version}", is_valid_semver)
+
+ @pystackql_test_setup()
+ def test_platform_attribute(self):
+ """Test that the platform attribute contains valid platform information."""
+ platform_string = self.stackql.platform
+ assert platform_string is not None, "platform should not be None"
+
+ is_valid_platform = bool(re.match(EXPECTED_PLATFORM_PATTERN, platform_string))
+ assert is_valid_platform, f"platform '{platform_string}' does not match expected pattern"
+
+ print_test_result(f"Platform attribute test\nPLATFORM: {platform_string}", is_valid_platform)
+
+ @pystackql_test_setup()
+ def test_bin_path_attribute(self):
+ """Test that the bin_path attribute points to an existing binary."""
+ assert os.path.exists(self.stackql.bin_path), f"Binary not found at {self.stackql.bin_path}"
+
+ print_test_result(f"Binary path attribute test\nBINARY PATH: {self.stackql.bin_path}",
+ os.path.exists(self.stackql.bin_path))
+
+ @pystackql_test_setup(download_dir=get_custom_download_dir())
+ def test_custom_download_dir(self):
+ """Test that a custom download_dir is used correctly."""
+ # Check that version is not None (binary was found)
+ version = self.stackql.version
+ assert version is not None, "version should not be None"
+
+ # Check that the binary exists at the expected location in the custom directory
+ expected_download_dir = get_custom_download_dir()
+ binary_name = 'stackql' if platform.system().lower() != 'windows' else 'stackql.exe'
+ expected_binary_path = os.path.join(expected_download_dir, binary_name)
+
+ # Check if binary exists
+ if not os.path.exists(expected_binary_path):
+ # Give it time to download if needed
+ import time
+ time.sleep(5)
+
+ assert os.path.exists(expected_binary_path), f"No binary found at {expected_binary_path}"
+
+ print_test_result(f"Custom download directory test\nCUSTOM_DOWNLOAD_DIR: {expected_download_dir}",
+ version is not None and os.path.exists(expected_binary_path))
+
+ @pytest.mark.skip(reason="Skipping upgrade test to avoid unnecessary downloads during regular testing")
+ @pystackql_test_setup()
+ def test_upgrade_method(self):
+ """Test that the upgrade method updates the binary."""
+ initial_version = self.stackql.version
+ initial_sha = self.stackql.sha
+
+ # Perform the upgrade
+ upgrade_message = self.stackql.upgrade()
+
+ # Check that we got a valid message
+ assert "stackql upgraded to version" in upgrade_message, "Upgrade message not as expected"
+
+ # Verify that the version attributes were updated
+ assert self.stackql.version is not None, "version should not be None after upgrade"
+ assert self.stackql.sha is not None, "sha should not be None after upgrade"
+
+ print_test_result(f"Upgrade method test\nINITIAL VERSION: {initial_version}, SHA: {initial_sha}\n"
+ f"NEW VERSION: {self.stackql.version}, SHA: {self.stackql.sha}",
+ "stackql upgraded to version" in upgrade_message)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_error_detection.py b/ref-python-packages/pystackql/tests/test_error_detection.py
new file mode 100644
index 0000000..b3aa35c
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_error_detection.py
@@ -0,0 +1,371 @@
+# tests/test_error_detection.py
+
+"""
+Error detection tests for PyStackQL.
+
+This module tests the centralized error detection functionality that identifies
+error patterns in query results.
+"""
+
+import os
+import sys
+import json
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from pystackql.core.error_detector import ErrorDetector
+from pystackql.core.output import OutputFormatter
+
+
+class TestErrorDetector:
+ """Tests for the ErrorDetector class."""
+
+ def setup_method(self):
+ """Set up test fixtures."""
+ self.detector = ErrorDetector()
+
+ def test_detector_initialization(self):
+ """Test that ErrorDetector initializes and loads patterns."""
+ assert self.detector is not None
+ assert isinstance(self.detector.fuzzy_patterns, list)
+ assert isinstance(self.detector.exact_patterns, list)
+ # Check that some patterns were loaded
+ assert len(self.detector.fuzzy_patterns) > 0
+ assert len(self.detector.exact_patterns) > 0
+
+ def test_http_4xx_error_detection(self):
+ """Test detection of HTTP 4xx status codes."""
+ messages = [
+ "http response status code: 404",
+ "http response status code: 400, response body: Bad Request",
+ "HTTP RESPONSE STATUS CODE: 403 Forbidden",
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error in: {msg}"
+
+ def test_http_5xx_error_detection(self):
+ """Test detection of HTTP 5xx status codes."""
+ messages = [
+ "http response status code: 500",
+ "http response status code: 503, service unavailable",
+ "HTTP RESPONSE STATUS CODE: 502 Bad Gateway",
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error in: {msg}"
+
+ def test_exact_match_detection(self):
+ """Test detection of exact match patterns."""
+ messages = [
+ "error: invalid syntax",
+ "ERROR: something went wrong",
+ "Error: connection failed",
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error in: {msg}"
+
+ def test_fuzzy_match_detection(self):
+ """Test detection of fuzzy match patterns."""
+ messages = [
+ "An error occurred during processing",
+ "Operation failed due to timeout",
+ "Cannot find matching operation for this request",
+ "Disparity in fields to insert and supplied data",
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error in: {msg}"
+
+ def test_non_error_messages(self):
+ """Test that non-error messages are not detected as errors."""
+ messages = [
+ "Query executed successfully",
+ "Retrieved 10 rows",
+ "Connection established",
+ "Data retrieved from provider",
+ ]
+ for msg in messages:
+ assert not self.detector.is_error(msg), f"Should not detect error in: {msg}"
+
+ def test_case_insensitive_fuzzy_matching(self):
+ """Test that fuzzy matching is case-insensitive."""
+ messages = [
+ "ERROR occurred",
+ "Error Occurred",
+ "error occurred",
+ "An EXCEPTION was raised",
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error in: {msg}"
+
+ def test_extract_error_info(self):
+ """Test extraction of error information."""
+ msg = "http response status code: 404"
+ info = self.detector.extract_error_info(msg)
+ assert info is not None
+ assert "error" in info
+ assert "detected_pattern" in info
+ assert info["error"] == msg
+ assert info["detected_pattern"] is not None
+
+ def test_extract_error_info_non_error(self):
+ """Test that non-error messages return None."""
+ msg = "Success"
+ info = self.detector.extract_error_info(msg)
+ assert info is None
+
+ def test_empty_string_handling(self):
+ """Test handling of empty strings."""
+ assert not self.detector.is_error("")
+ assert not self.detector.is_error(None)
+
+ def test_non_string_handling(self):
+ """Test handling of non-string inputs."""
+ assert not self.detector.is_error(123)
+ assert not self.detector.is_error([])
+ assert not self.detector.is_error({})
+
+ def test_regex_pattern_loading(self):
+ """Test that regex patterns are loaded and compiled."""
+ assert len(self.detector.regex_patterns) > 0
+ # Check that patterns are tuples of (pattern_str, compiled_regex)
+ for item in self.detector.regex_patterns:
+ assert isinstance(item, tuple)
+ assert len(item) == 2
+ pattern_str, compiled = item
+ assert isinstance(pattern_str, str)
+ # Check it's a compiled regex
+ assert hasattr(compiled, 'search')
+
+ def test_regex_dns_error_detection(self):
+ """Test detection of DNS lookup errors using regex."""
+ messages = [
+ 'Get "https://fred.brew.sh/api/formula/stackql.json?": dial tcp: lookup fred.brew.sh on 8.8.8.8:53: no such host',
+ 'dial tcp: lookup example.com on 1.1.1.1:53: no such host',
+ 'Get "http://api.example.com": dial tcp: lookup api.example.com on 192.168.1.1:53: no such host',
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect DNS error in: {msg}"
+
+ def test_regex_connection_refused(self):
+ """Test detection of connection refused errors using regex."""
+ messages = [
+ 'dial tcp 192.168.1.1:5432: connection refused',
+ 'dial tcp [::1]:8080: connection refused',
+ 'unable to connect to server: connection refused',
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect connection error in: {msg}"
+
+ def test_regex_timeout_errors(self):
+ """Test detection of timeout errors using regex."""
+ messages = [
+ 'context deadline exceeded',
+ 'dial tcp 10.0.0.1:443: i/o timeout',
+ 'net/http: request canceled while waiting for connection (Client.Timeout exceeded)',
+ 'timeout while waiting for response',
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect timeout error in: {msg}"
+
+ def test_regex_case_insensitive(self):
+ """Test that regex matching is case-insensitive."""
+ messages = [
+ 'DIAL TCP: NO SUCH HOST',
+ 'Context Deadline Exceeded',
+ 'Connection Refused',
+ ]
+ for msg in messages:
+ assert self.detector.is_error(msg), f"Should detect error (case-insensitive) in: {msg}"
+
+ def test_extract_error_info_with_regex(self):
+ """Test error info extraction for regex matches."""
+ msg = 'Get "https://example.com": dial tcp: lookup example.com on 8.8.8.8:53: no such host'
+ info = self.detector.extract_error_info(msg)
+ assert info is not None
+ assert info["error"] == msg
+ assert info["pattern_type"] == "regex"
+ assert info["detected_pattern"] is not None
+ # Should match one of the DNS error patterns
+ assert "no such host" in info["detected_pattern"]
+
+
+class TestOutputFormatterErrorDetection:
+ """Tests for error detection integration in OutputFormatter."""
+
+ def setup_method(self):
+ """Set up test fixtures."""
+ self.formatter = OutputFormatter(output_format='dict')
+
+ def test_format_error_in_raw_data(self):
+ """Test detection of errors in raw data strings."""
+ error_data = "http response status code: 404, response body: Not Found"
+ result = self.formatter._format_data(error_data)
+
+ assert isinstance(result, list)
+ assert len(result) > 0
+ assert "error" in result[0]
+
+ def test_format_error_in_json_data(self):
+ """Test detection of errors in JSON-formatted data."""
+ # Simulate data returned by StackQL with an error message
+ data = [
+ {
+ "message": "http response status code: 404",
+ "status": "failed"
+ }
+ ]
+ json_data = json.dumps(data)
+ result = self.formatter._format_data(json_data)
+
+ assert isinstance(result, list)
+ assert len(result) > 0
+ assert "error" in result[0]
+
+ def test_format_valid_data_not_detected_as_error(self):
+ """Test that valid data is not detected as error."""
+ data = [
+ {
+ "formula_name": "python",
+ "version": "3.9.0",
+ "status": "installed"
+ }
+ ]
+ json_data = json.dumps(data)
+ result = self.formatter._format_data(json_data)
+
+ assert isinstance(result, list)
+ assert len(result) > 0
+ # Should return the data, not an error
+ if "error" not in result[0]:
+ assert "formula_name" in result[0] or "version" in result[0]
+
+ def test_check_data_for_errors_in_dict(self):
+ """Test error detection in dictionary data."""
+ data = {
+ "status": "failed",
+ "message": "error: operation failed"
+ }
+ error = self.formatter._check_data_for_errors(data)
+ assert error is not None
+ assert "error" in error.lower()
+
+ def test_check_data_for_errors_in_list(self):
+ """Test error detection in list data."""
+ data = [
+ {"name": "test1", "status": "ok"},
+ {"name": "test2", "message": "http response status code: 500"}
+ ]
+ error = self.formatter._check_data_for_errors(data)
+ assert error is not None
+ assert "http response status code" in error.lower()
+
+ def test_check_data_for_errors_nested(self):
+ """Test error detection in nested data structures."""
+ data = {
+ "results": [
+ {
+ "id": 1,
+ "details": {
+ "status": "error: connection timeout"
+ }
+ }
+ ]
+ }
+ error = self.formatter._check_data_for_errors(data)
+ assert error is not None
+
+ def test_check_data_for_errors_no_error(self):
+ """Test that valid data returns None."""
+ data = {
+ "status": "success",
+ "results": [
+ {"name": "item1", "value": 100},
+ {"name": "item2", "value": 200}
+ ]
+ }
+ error = self.formatter._check_data_for_errors(data)
+ assert error is None
+
+ def test_format_statement_with_error(self):
+ """Test statement result formatting with error detection."""
+ result = {
+ "error": "http response status code: 404"
+ }
+ formatted = self.formatter.format_statement_result(result)
+
+ # Should be formatted as error, not as message
+ if isinstance(formatted, dict):
+ # For dict output, check if it's an error list or message
+ if isinstance(formatted, list):
+ assert "error" in formatted[0]
+ elif "error" in formatted:
+ assert formatted["error"] is not None
+ elif isinstance(formatted, list):
+ assert "error" in formatted[0]
+
+ def test_format_statement_without_error(self):
+ """Test statement result formatting without errors."""
+ result = {
+ "error": "okta provider, version 'v0.5.0' successfully installed"
+ }
+ formatted = self.formatter.format_statement_result(result)
+
+ # Should be formatted as message since it's not an error
+ assert formatted is not None
+
+
+class TestHomebrewProviderErrorScenario:
+ """Tests for the specific homebrew provider error scenario."""
+
+ def setup_method(self):
+ """Set up test fixtures."""
+ self.formatter = OutputFormatter(output_format='dict')
+ self.detector = ErrorDetector()
+
+ def test_homebrew_404_error_detection(self):
+ """Test detection of homebrew 404 error message."""
+ # This is the actual error message from the user's example
+ error_msg = "http response status code: 404, response body: ..."
+
+ # Should be detected as error
+ assert self.detector.is_error(error_msg)
+
+ def test_homebrew_404_formatting(self):
+ """Test formatting of homebrew 404 error."""
+ # Simulate the raw data that would come from StackQL
+ error_data = "http response status code: 404, response body: ..."
+
+ result = self.formatter._format_data(error_data)
+
+ # Should be formatted as error
+ assert isinstance(result, list)
+ assert len(result) > 0
+ assert "error" in result[0]
+ assert "404" in str(result[0]["error"])
+
+ def test_homebrew_valid_formula_not_error(self):
+ """Test that valid homebrew formula data is not detected as error."""
+ # Simulate valid formula data
+ valid_data = [
+ {
+ "formula_name": "python",
+ "full_name": "python@3.9",
+ "homepage": "https://www.python.org",
+ "latest_version": "3.9.7",
+ "license": "Python-2.0"
+ }
+ ]
+ json_data = json.dumps(valid_data)
+
+ result = self.formatter._format_data(json_data)
+
+ # Should return data, not error
+ assert isinstance(result, list)
+ assert len(result) > 0
+ if "error" not in result[0]:
+ assert "formula_name" in result[0]
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_kwargs_override.py b/ref-python-packages/pystackql/tests/test_kwargs_override.py
new file mode 100644
index 0000000..d8fab98
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_kwargs_override.py
@@ -0,0 +1,160 @@
+# tests/test_kwargs_override.py
+
+"""
+Tests for kwargs override functionality in execute and executeStmt methods.
+
+This module tests the ability to override constructor parameters via kwargs
+passed to execute() and executeStmt() methods.
+"""
+
+import os
+import sys
+import pytest
+import pandas as pd
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import (
+ LITERAL_INT_QUERY,
+ LITERAL_STRING_QUERY,
+ print_test_result,
+ pystackql_test_setup
+)
+
+class TestKwargsOverride:
+ """Tests for kwargs override in execute and executeStmt methods."""
+
+ StackQL = StackQL # For use with pystackql_test_setup decorator
+
+ @pystackql_test_setup(output='csv')
+ def test_execute_output_override_csv_to_dict(self):
+ """Test that output format can be overridden from csv to dict in execute()."""
+ # Instance is configured with CSV output
+ assert self.stackql.output == 'csv', "Instance should be configured with CSV output"
+
+ # Execute with dict output override
+ result = self.stackql.execute(LITERAL_INT_QUERY, output='dict')
+
+ # Check result structure - should be dict format, not csv
+ assert isinstance(result, list), "Result should be a list (dict format)"
+ assert len(result) > 0, "Result should not be empty"
+ assert isinstance(result[0], dict), "Result items should be dicts"
+
+ print_test_result(f"Execute output override csv to dict test\nRESULT TYPE: {type(result)}",
+ isinstance(result, list) and isinstance(result[0], dict))
+
+ @pystackql_test_setup(output='dict')
+ def test_execute_output_override_dict_to_pandas(self):
+ """Test that output format can be overridden from dict to pandas in execute()."""
+ # Instance is configured with dict output
+ assert self.stackql.output == 'dict', "Instance should be configured with dict output"
+
+ # Execute with pandas output override
+ result = self.stackql.execute(LITERAL_STRING_QUERY, output='pandas')
+
+ # Check result structure - should be pandas DataFrame, not dict
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+
+ print_test_result(f"Execute output override dict to pandas test\nRESULT TYPE: {type(result)}",
+ isinstance(result, pd.DataFrame))
+
+ @pystackql_test_setup(output='pandas')
+ def test_execute_output_override_pandas_to_csv(self):
+ """Test that output format can be overridden from pandas to csv in execute()."""
+ # Instance is configured with pandas output
+ assert self.stackql.output == 'pandas', "Instance should be configured with pandas output"
+
+ # Execute with csv output override
+ result = self.stackql.execute(LITERAL_INT_QUERY, output='csv')
+
+ # Check result structure - should be csv string, not pandas
+ assert isinstance(result, str), "Result should be a string (csv format)"
+ assert "1" in result, "Result should contain the value '1'"
+
+ print_test_result(f"Execute output override pandas to csv test\nRESULT: {result}",
+ isinstance(result, str))
+
+ @pystackql_test_setup(output='dict')
+ def test_execute_multiple_overrides_in_sequence(self):
+ """Test that multiple execute calls with different overrides work correctly."""
+ # Instance is configured with dict output
+ assert self.stackql.output == 'dict', "Instance should be configured with dict output"
+
+ # First execution with dict (default)
+ result1 = self.stackql.execute(LITERAL_INT_QUERY)
+ assert isinstance(result1, list), "First result should be dict format"
+
+ # Second execution with pandas override
+ result2 = self.stackql.execute(LITERAL_STRING_QUERY, output='pandas')
+ assert isinstance(result2, pd.DataFrame), "Second result should be pandas format"
+
+ # Third execution with csv override
+ result3 = self.stackql.execute(LITERAL_INT_QUERY, output='csv')
+ assert isinstance(result3, str), "Third result should be csv format"
+
+ # Fourth execution should still use dict (instance default)
+ result4 = self.stackql.execute(LITERAL_INT_QUERY)
+ assert isinstance(result4, list), "Fourth result should be dict format again"
+
+ print_test_result(f"Multiple overrides in sequence test\nTypes: {[type(r).__name__ for r in [result1, result2, result3, result4]]}",
+ isinstance(result1, list) and
+ isinstance(result2, pd.DataFrame) and
+ isinstance(result3, str) and
+ isinstance(result4, list))
+
+ @pystackql_test_setup(output='csv', header=False)
+ def test_execute_csv_header_override(self):
+ """Test that CSV header setting can be overridden in execute()."""
+ # Instance is configured with CSV output and no header
+ assert self.stackql.output == 'csv', "Instance should be configured with CSV output"
+ assert self.stackql.header is False, "Instance should be configured with header=False"
+
+ # Execute with header override
+ result = self.stackql.execute(LITERAL_INT_QUERY, header=True)
+
+ # Check result structure - should be csv string
+ assert isinstance(result, str), "Result should be a string (csv format)"
+
+ print_test_result(f"CSV header override test\nRESULT: {result}",
+ isinstance(result, str))
+
+ @pystackql_test_setup(output='csv', sep=',')
+ def test_execute_csv_separator_override(self):
+ """Test that CSV separator can be overridden in execute()."""
+ # Instance is configured with CSV output and comma separator
+ assert self.stackql.output == 'csv', "Instance should be configured with CSV output"
+ assert self.stackql.sep == ',', "Instance should be configured with comma separator"
+
+ # Execute with pipe separator override
+ result = self.stackql.execute(LITERAL_INT_QUERY, sep='|')
+
+ # Check result structure - should be csv string
+ assert isinstance(result, str), "Result should be a string (csv format)"
+
+ print_test_result(f"CSV separator override test\nRESULT: {result}",
+ isinstance(result, str))
+
+ @pystackql_test_setup(output='dict')
+ def test_executeStmt_output_override(self):
+ """Test that output format can be overridden in executeStmt()."""
+ # Instance is configured with dict output
+ assert self.stackql.output == 'dict', "Instance should be configured with dict output"
+
+ # Execute a statement with pandas override
+ # Using a simple SELECT that works as a statement
+ result = self.stackql.executeStmt(LITERAL_INT_QUERY, output='pandas')
+
+ # Check result structure - should be pandas DataFrame
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+
+ print_test_result(f"ExecuteStmt output override test\nRESULT TYPE: {type(result)}",
+ isinstance(result, pd.DataFrame))
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_magic.py b/ref-python-packages/pystackql/tests/test_magic.py
new file mode 100644
index 0000000..edf1bdf
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_magic.py
@@ -0,0 +1,46 @@
+# tests/test_magic.py
+
+"""
+Non-server magic extension tests for PyStackQL.
+
+This module tests the Jupyter magic extensions for StackQL in non-server mode.
+"""
+
+import os
+import sys
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Import the base test class
+from tests.test_magic_base import BaseStackQLMagicTest
+
+# Import directly from the original modules - this is what notebooks would do
+from pystackql import magic
+from pystackql import StackqlMagic
+
+from tests.test_constants import print_test_result
+
+class TestStackQLMagic(BaseStackQLMagicTest):
+ """Tests for the non-server mode magic extension."""
+
+ # Set the class attributes for the base test class
+ magic_module = magic
+ magic_class = StackqlMagic
+ is_server_mode = False
+
+def test_magic_extension_loading(mock_interactive_shell):
+ """Test that non-server magic extension can be loaded."""
+ # Test loading non-server magic
+ magic.load_ipython_extension(mock_interactive_shell)
+ assert hasattr(mock_interactive_shell, 'magics'), "Magic should be registered"
+ assert isinstance(mock_interactive_shell.magics, StackqlMagic), "Registered magic should be StackqlMagic"
+
+ print_test_result("Magic extension loading test",
+ hasattr(mock_interactive_shell, 'magics') and
+ isinstance(mock_interactive_shell.magics, StackqlMagic),
+ False, True)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_magic_base.py b/ref-python-packages/pystackql/tests/test_magic_base.py
new file mode 100644
index 0000000..e5e7b71
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_magic_base.py
@@ -0,0 +1,221 @@
+# tests/test_magic_base.py
+
+"""
+Base test class for Jupyter magic extensions for PyStackQL.
+
+This module provides a base test class for testing both local and server mode
+magic extensions.
+"""
+
+import os
+import sys
+import re
+import pytest
+import pandas as pd
+from unittest.mock import MagicMock, patch
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from tests.test_constants import (
+ LITERAL_INT_QUERY,
+ REGISTRY_PULL_HOMEBREW_QUERY,
+ registry_pull_resp_pattern,
+ print_test_result
+)
+
+class BaseStackQLMagicTest:
+ """Base class for testing StackQL magic extensions."""
+
+ # Each derived class should define:
+ # - magic_module: the module to import
+ # - magic_class: the class to use
+ # - is_server_mode: True for server mode tests, False for local mode tests
+ magic_module = None
+ magic_class = None
+ is_server_mode = None
+
+ @pytest.fixture(autouse=True)
+ def setup_method(self, mock_interactive_shell):
+ """Set up the test environment."""
+ self.shell = mock_interactive_shell
+
+ # Load the magic extension
+ self.magic_module.load_ipython_extension(self.shell)
+
+ # Create the magic instance
+ self.stackql_magic = self.magic_class(shell=self.shell)
+
+ # Set up test data
+ self.query = LITERAL_INT_QUERY
+ self.expected_result = pd.DataFrame({"literal_int_value": [1]})
+ self.statement = REGISTRY_PULL_HOMEBREW_QUERY
+
+ def test_line_magic_query(self):
+ """Test line magic with a query."""
+ # Mock the run_query method to return a known DataFrame
+ self.stackql_magic.run_query = MagicMock(return_value=self.expected_result)
+
+ # Execute the magic with our query
+ result = self.stackql_magic.stackql(line=self.query, cell=None)
+
+ # Validate the outcome
+ assert result.equals(self.expected_result), "Result should match expected DataFrame"
+ assert 'stackql_df' in self.shell.user_ns, "stackql_df should be in user namespace"
+ assert self.shell.user_ns['stackql_df'].equals(self.expected_result), "stackql_df should match expected DataFrame"
+
+ print_test_result(f"Line magic query test{' (server mode)' if self.is_server_mode else ''}",
+ result.equals(self.expected_result) and
+ 'stackql_df' in self.shell.user_ns and
+ self.shell.user_ns['stackql_df'].equals(self.expected_result),
+ self.is_server_mode, True)
+
+ def test_cell_magic_query(self):
+ """Test cell magic with a query."""
+ # Mock the run_query method to return a known DataFrame
+ self.stackql_magic.run_query = MagicMock(return_value=self.expected_result)
+
+ # Execute the magic with our query
+ result = self.stackql_magic.stackql(line="", cell=self.query)
+
+ # Validate the outcome
+ assert result.equals(self.expected_result), "Result should match expected DataFrame"
+ assert 'stackql_df' in self.shell.user_ns, "stackql_df should be in user namespace"
+ assert self.shell.user_ns['stackql_df'].equals(self.expected_result), "stackql_df should match expected DataFrame"
+
+ print_test_result(f"Cell magic query test{' (server mode)' if self.is_server_mode else ''}",
+ result.equals(self.expected_result) and
+ 'stackql_df' in self.shell.user_ns and
+ self.shell.user_ns['stackql_df'].equals(self.expected_result),
+ self.is_server_mode, True)
+
+ def test_cell_magic_query_no_display(self):
+ """Test cell magic with a query and --no-display option."""
+ # Mock the run_query method to return a known DataFrame
+ self.stackql_magic.run_query = MagicMock(return_value=self.expected_result)
+
+ # Execute the magic with our query and --no-display option
+ result = self.stackql_magic.stackql(line="--no-display", cell=self.query)
+
+ # Validate the outcome
+ assert result is None, "Result should be None with --no-display option"
+ assert 'stackql_df' in self.shell.user_ns, "stackql_df should still be in user namespace"
+ assert self.shell.user_ns['stackql_df'].equals(self.expected_result), "stackql_df should match expected DataFrame"
+
+ print_test_result(f"Cell magic query test with --no-display{' (server mode)' if self.is_server_mode else ''}",
+ result is None and
+ 'stackql_df' in self.shell.user_ns and
+ self.shell.user_ns['stackql_df'].equals(self.expected_result),
+ self.is_server_mode, True)
+
+ def test_cell_magic_query_csv_download(self):
+ """Test cell magic with CSV download functionality."""
+ # Mock the run_query method to return a known DataFrame
+ self.stackql_magic.run_query = MagicMock(return_value=self.expected_result)
+
+ # Mock the _display_with_csv_download method to verify it's called
+ self.stackql_magic._display_with_csv_download = MagicMock()
+
+ # Execute the magic with --csv-download option
+ result = self.stackql_magic.stackql(line="--csv-download", cell=self.query)
+
+ # Validate the outcome
+ assert result.equals(self.expected_result), "Result should match expected DataFrame"
+ assert 'stackql_df' in self.shell.user_ns, "stackql_df should be in user namespace"
+ assert self.shell.user_ns['stackql_df'].equals(self.expected_result), "stackql_df should match expected DataFrame"
+
+ # Verify that _display_with_csv_download was called
+ self.stackql_magic._display_with_csv_download.assert_called_once_with(self.expected_result)
+
+ print_test_result(f"Cell magic query test with CSV download{' (server mode)' if self.is_server_mode else ''}",
+ result.equals(self.expected_result) and
+ 'stackql_df' in self.shell.user_ns and
+ self.stackql_magic._display_with_csv_download.called,
+ self.is_server_mode, True)
+
+ def test_cell_magic_query_csv_download_with_no_display(self):
+ """Test that --no-display takes precedence over --csv-download."""
+ # Mock the run_query method to return a known DataFrame
+ self.stackql_magic.run_query = MagicMock(return_value=self.expected_result)
+
+ # Mock the _display_with_csv_download method to verify it's not called
+ self.stackql_magic._display_with_csv_download = MagicMock()
+
+ # Execute the magic with both --csv-download and --no-display options
+ result = self.stackql_magic.stackql(line="--csv-download --no-display", cell=self.query)
+
+ # Validate the outcome
+ assert result is None, "Result should be None with --no-display option"
+ assert 'stackql_df' in self.shell.user_ns, "stackql_df should still be in user namespace"
+ assert self.shell.user_ns['stackql_df'].equals(self.expected_result), "stackql_df should match expected DataFrame"
+
+ # Verify that _display_with_csv_download was NOT called
+ self.stackql_magic._display_with_csv_download.assert_not_called()
+
+ print_test_result(f"Cell magic query test with CSV download and no-display{' (server mode)' if self.is_server_mode else ''}",
+ result is None and
+ 'stackql_df' in self.shell.user_ns and
+ not self.stackql_magic._display_with_csv_download.called,
+ self.is_server_mode, True)
+
+ def test_display_with_csv_download_method(self):
+ """Test the _display_with_csv_download method directly."""
+ import base64
+
+ # Create a test DataFrame
+ test_df = pd.DataFrame({"col1": [1, 2], "col2": ["a", "b"]})
+
+ # Mock IPython display functionality
+ with patch('IPython.display.display') as mock_display, \
+ patch('IPython.display.HTML') as mock_html:
+
+ # Call the method
+ self.stackql_magic._display_with_csv_download(test_df)
+
+ # Verify display was called once (only for HTML, not for DataFrame)
+ assert mock_display.call_count == 1, "Display should be called once"
+
+ # Verify HTML was called once
+ mock_html.assert_called_once()
+
+ # Check that the HTML call contains download link
+ html_call_args = mock_html.call_args[0][0]
+ assert 'download="stackql_results.csv"' in html_call_args
+ assert 'data:text/csv;base64,' in html_call_args
+
+ print_test_result(f"_display_with_csv_download method test{' (server mode)' if self.is_server_mode else ''}",
+ mock_display.call_count == 1 and mock_html.called,
+ self.is_server_mode, True)
+
+ def test_display_with_csv_download_error_handling(self):
+ """Test error handling in _display_with_csv_download method."""
+
+ # Create a mock DataFrame that will raise an exception during to_csv()
+ mock_df = MagicMock()
+ mock_df.to_csv.side_effect = Exception("Test CSV error")
+
+ # Mock IPython display functionality
+ with patch('IPython.display.display') as mock_display, \
+ patch('IPython.display.HTML') as mock_html, \
+ patch('builtins.print') as mock_print:
+
+ # Call the method with the problematic DataFrame
+ self.stackql_magic._display_with_csv_download(mock_df)
+
+ # Verify display was not called in the error case
+ mock_display.assert_not_called()
+
+ # Verify HTML was not called in the error case
+ mock_html.assert_not_called()
+
+ # Verify error message was printed
+ mock_print.assert_called_once()
+ error_message = mock_print.call_args[0][0]
+ assert "Error generating CSV download:" in error_message
+
+ print_test_result(f"_display_with_csv_download error handling test{' (server mode)' if self.is_server_mode else ''}",
+ not mock_display.called and not mock_html.called and mock_print.called,
+ self.is_server_mode, True)
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_markdownkv_format.py b/ref-python-packages/pystackql/tests/test_markdownkv_format.py
new file mode 100644
index 0000000..44c5955
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_markdownkv_format.py
@@ -0,0 +1,216 @@
+# tests/test_markdownkv_format.py
+
+"""
+Tests for Markdown-KV output format.
+
+This format is optimized for LLM understanding based on research showing
+it achieves 60.7% accuracy vs 44.3% for CSV when LLMs process tabular data.
+
+Reference: https://www.empiricalagents.com/blog/which-table-format-do-llms-understand-best
+"""
+
+import os
+import sys
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from pystackql.core.output import OutputFormatter
+
+
+class TestMarkdownKVFormat:
+ """Tests for Markdown-KV output formatting."""
+
+ def setup_method(self):
+ """Set up test fixtures."""
+ self.formatter = OutputFormatter(output_format='markdownkv')
+
+ def test_format_initialization(self):
+ """Test that markdownkv is accepted as a valid output format."""
+ assert self.formatter.output_format == 'markdownkv'
+
+ def test_invalid_format_rejected(self):
+ """Test that invalid formats are rejected."""
+ with pytest.raises(ValueError) as exc_info:
+ OutputFormatter(output_format='invalid')
+ assert "Invalid output format" in str(exc_info.value)
+
+ def test_format_simple_data(self):
+ """Test formatting simple data as Markdown-KV."""
+ import json
+
+ data = [
+ {"id": 1, "name": "Alice", "age": 30},
+ {"id": 2, "name": "Bob", "age": 25}
+ ]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ # Check structure
+ assert isinstance(result, str)
+ assert "# Query Results" in result
+ assert "## Record 1" in result
+ assert "## Record 2" in result
+ assert "id: 1" in result
+ assert "name: Alice" in result
+ assert "age: 30" in result
+ assert "id: 2" in result
+ assert "name: Bob" in result
+
+ def test_format_with_null_values(self):
+ """Test formatting data with null values."""
+ import json
+
+ data = [
+ {"id": 1, "name": "Alice", "city": None}
+ ]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ assert "city: null" in result
+
+ def test_format_empty_data(self):
+ """Test formatting empty data."""
+ result = self.formatter._format_empty()
+
+ assert isinstance(result, str)
+ assert "# Query Results" in result
+ assert "No records found" in result
+
+ def test_format_error(self):
+ """Test formatting error messages."""
+ error_msg = "http response status code: 404"
+
+ result = self.formatter._format_markdownkv_error(error_msg)
+
+ assert isinstance(result, str)
+ assert "# Query Results" in result
+ assert "## Error" in result
+ assert "error: http response status code: 404" in result
+
+ def test_format_statement_result(self):
+ """Test formatting statement results."""
+ result = {
+ "error": "okta provider, version 'v0.5.0' successfully installed"
+ }
+
+ formatted = self.formatter.format_statement_result(result)
+
+ assert isinstance(formatted, str)
+ assert "# Statement Result" in formatted
+ assert "message: okta provider" in formatted
+
+ def test_format_with_code_blocks(self):
+ """Test that code blocks are properly formatted."""
+ import json
+
+ data = [{"id": 1, "name": "Test"}]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ # Count code block markers
+ assert result.count("```") >= 2 # At least opening and closing
+
+ def test_llm_friendly_structure(self):
+ """Test that the output follows LLM-friendly Markdown-KV structure."""
+ import json
+
+ data = [
+ {"employee_id": 1, "department": "Engineering", "salary": 100000}
+ ]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ # Verify hierarchical structure
+ lines = result.split('\n')
+
+ # Should have main header
+ assert any('# Query Results' in line for line in lines)
+
+ # Should have record header
+ assert any('## Record' in line for line in lines)
+
+ # Should have code block with key: value pairs
+ assert 'employee_id: 1' in result
+ assert 'department: Engineering' in result
+ assert 'salary: 100000' in result
+
+ def test_multiple_records_formatting(self):
+ """Test formatting multiple records maintains structure."""
+ import json
+
+ data = [
+ {"id": i, "value": f"test{i}"}
+ for i in range(1, 6)
+ ]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ # Should have 5 record sections
+ for i in range(1, 6):
+ assert f"## Record {i}" in result
+ assert f"id: {i}" in result
+ assert f"value: test{i}" in result
+
+ def test_complex_data_types(self):
+ """Test handling of various data types."""
+ import json
+
+ data = [{
+ "string": "test",
+ "number": 42,
+ "float": 3.14,
+ "boolean": True,
+ "null": None,
+ "empty_string": ""
+ }]
+ json_data = json.dumps(data)
+
+ result = self.formatter._format_data(json_data)
+
+ assert "string: test" in result
+ assert "number: 42" in result
+ assert "float: 3.14" in result
+ assert "boolean: True" in result or "boolean: true" in result.lower()
+ assert "null: null" in result
+ assert "empty_string:" in result
+
+ def test_error_detection_integration(self):
+ """Test that error detection works with markdownkv format."""
+ # HTTP error should be detected
+ error_data = "http response status code: 404, response body: Not Found"
+
+ result = self.formatter._format_data(error_data)
+
+ assert "# Query Results" in result
+ assert "## Error" in result
+ assert "404" in result
+
+
+class TestMarkdownKVServerModeCompatibility:
+ """Tests for markdownkv in server mode scenarios."""
+
+ def test_server_mode_formatting(self):
+ """Test that markdownkv can format server mode results."""
+ formatter = OutputFormatter(output_format='markdownkv')
+
+ # Simulate server mode result (list of dicts from database)
+ data = [
+ {"formula_name": "python", "version": "3.9.0", "license": "Python-2.0"}
+ ]
+
+ result = formatter._format_markdownkv(data)
+
+ assert "# Query Results" in result
+ assert "formula_name: python" in result
+ assert "version: 3.9.0" in result
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_output_formats.py b/ref-python-packages/pystackql/tests/test_output_formats.py
new file mode 100644
index 0000000..6a37c96
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_output_formats.py
@@ -0,0 +1,196 @@
+# tests/test_output_formats.py
+
+"""
+Output format tests for PyStackQL.
+
+This module tests the different output formats of the StackQL class.
+"""
+
+import os
+import sys
+import pytest
+import pandas as pd
+from unittest.mock import patch
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import (
+ LITERAL_INT_QUERY,
+ LITERAL_STRING_QUERY,
+ HOMEBREW_METRICS_QUERY,
+ print_test_result,
+ pystackql_test_setup
+)
+
+class TestOutputFormats:
+ """Tests for PyStackQL output format functionality."""
+
+ StackQL = StackQL # For use with pystackql_test_setup decorator
+
+ @pystackql_test_setup()
+ def test_dict_output_format(self):
+ """Test that dict output format returns a list of dictionaries."""
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert all(isinstance(item, dict) for item in result), "Each item in result should be a dict"
+
+ print_test_result(f"Dict output format test\nRESULT: {result}",
+ isinstance(result, list) and all(isinstance(item, dict) for item in result))
+
+ @pystackql_test_setup(output='pandas')
+ def test_pandas_output_format(self):
+ """Test that pandas output format returns a pandas DataFrame."""
+ result = self.stackql.execute(LITERAL_STRING_QUERY)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "literal_string_value" in result.columns, "DataFrame should have 'literal_string_value' column"
+
+ # Extract the value
+ value = result["literal_string_value"].iloc[0]
+
+ assert value == "test" or value == '"test"', f"Value should be 'test', got {value}"
+
+ print_test_result(f"Pandas output format test\nRESULT: {result}",
+ isinstance(result, pd.DataFrame) and "literal_string_value" in result.columns)
+
+ @pystackql_test_setup(output='pandas')
+ def test_pandas_output_with_numeric_types(self):
+ """Test that pandas output format handles numeric types correctly."""
+ result = self.stackql.execute(HOMEBREW_METRICS_QUERY)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "formula_name" in result.columns, "DataFrame should have 'formula_name' column"
+
+ # Check numeric columns - either directly numeric or string representation
+ numeric_columns = [
+ "installs_30d", "installs_90d", "installs_365d",
+ "install_on_requests_30d", "install_on_requests_90d", "install_on_requests_365d"
+ ]
+
+ # Validate formula name
+ formula_name = result["formula_name"].iloc[0]
+ assert "stackql" in str(formula_name), f"Formula name should contain 'stackql', got {formula_name}"
+
+ # Verify numeric columns exist
+ for col in numeric_columns:
+ assert col in result.columns, f"DataFrame should have '{col}' column"
+
+ # Try to convert to numeric if possible
+ try:
+ pd.to_numeric(result[col])
+ numeric_conversion_success = True
+ except (ValueError, TypeError):
+ numeric_conversion_success = False
+
+ assert numeric_conversion_success, f"Column '{col}' should be convertible to numeric"
+
+ print_test_result(f"Pandas output with numeric types test\nCOLUMNS: {list(result.columns)}",
+ isinstance(result, pd.DataFrame) and
+ all(col in result.columns for col in numeric_columns))
+
+ @pystackql_test_setup(output='csv')
+ def test_csv_output_format(self):
+ """Test that csv output format returns a string."""
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, str), "Result should be a string"
+ # The CSV output might just contain the value (1) or might include the column name
+ # We'll check for either possibility
+ assert "1" in result, "Result should contain the value '1'"
+
+ print_test_result(f"CSV output format test\nRESULT: {result}",
+ isinstance(result, str) and "1" in result)
+
+ @pystackql_test_setup(output='csv')
+ def test_csv_output_with_pipe_separator(self):
+ """Test that csv output format with custom separator is configured correctly."""
+ # Create a new instance with pipe separator
+ stackql_with_pipe = StackQL(output='csv', sep='|')
+
+ # Verify that the separator setting is correct
+ assert stackql_with_pipe.sep == "|", "Separator should be '|'"
+ assert "--delimiter" in stackql_with_pipe.params, "Params should include '--delimiter'"
+ assert "|" in stackql_with_pipe.params, "Params should include '|'"
+
+ # Instead of checking the output (which might be affected by other factors),
+ # we'll focus on verifying that the parameters are set correctly
+ print_test_result(f"CSV output with pipe separator test\nPARAMS: {stackql_with_pipe.params}",
+ stackql_with_pipe.sep == "|" and
+ "--delimiter" in stackql_with_pipe.params and
+ "|" in stackql_with_pipe.params)
+
+ @pystackql_test_setup(output='csv', header=True)
+ def test_csv_output_with_header(self):
+ """Test that csv output format with header works correctly."""
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, str), "Result should be a string"
+
+ # Check that params are set correctly
+ assert self.stackql.header is True, "Header should be True"
+ assert "--hideheaders" not in self.stackql.params, "Params should not include '--hideheaders'"
+
+ print_test_result(f"CSV output with header test\nRESULT: {result}",
+ isinstance(result, str))
+
+ @pystackql_test_setup(output='csv', header=False)
+ def test_csv_output_without_header(self):
+ """Test that csv output format without header works correctly."""
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, str), "Result should be a string"
+
+ # Check that params are set correctly
+ assert self.stackql.header is False, "Header should be False"
+ assert "--hideheaders" in self.stackql.params, "Params should include '--hideheaders'"
+
+ print_test_result(f"CSV output without header test\nRESULT: {result}",
+ isinstance(result, str))
+
+ def test_invalid_output_format(self):
+ """Test that an invalid output format raises a ValueError."""
+ with pytest.raises(ValueError) as exc_info:
+ StackQL(output='invalid')
+
+ # Check that the exception message contains the expected elements
+ # rather than checking for an exact match, which is brittle
+ error_msg = str(exc_info.value)
+ assert "Invalid output" in error_msg, "Error message should mention 'Invalid output'"
+ assert "Expected one of" in error_msg, "Error message should mention 'Expected one of'"
+ assert "dict" in error_msg, "Error message should mention 'dict'"
+ assert "pandas" in error_msg, "Error message should mention 'pandas'"
+ assert "csv" in error_msg, "Error message should mention 'csv'"
+ assert "invalid" in error_msg, "Error message should mention 'invalid'"
+
+ print_test_result(f"Invalid output format test\nERROR: {error_msg}",
+ all(text in error_msg for text in ["Invalid output", "Expected one of", "dict", "pandas", "csv", "invalid"]))
+
+ def test_csv_output_in_server_mode(self):
+ """Test that csv output in server mode raises a ValueError."""
+ with pytest.raises(ValueError) as exc_info:
+ StackQL(server_mode=True, output='csv')
+
+ # Check that the exception message contains the expected elements
+ error_msg = str(exc_info.value)
+ assert "CSV output is not supported in server mode" in error_msg, "Error message should mention CSV not supported"
+ assert "use 'dict' or 'pandas' instead" in error_msg, "Error message should suggest alternatives"
+
+ print_test_result(f"CSV output in server mode test\nERROR: {error_msg}",
+ "CSV output is not supported in server mode" in error_msg)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_query_execution.py b/ref-python-packages/pystackql/tests/test_query_execution.py
new file mode 100644
index 0000000..15baf6c
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_query_execution.py
@@ -0,0 +1,253 @@
+# tests/test_query_execution.py
+
+"""
+Query execution tests for PyStackQL.
+
+This module tests the query execution functionality of the StackQL class.
+"""
+
+import os
+import re
+import sys
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Add the current directory to the path so we can import test_constants
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+from pystackql import StackQL
+from tests.test_constants import (
+ LITERAL_INT_QUERY,
+ LITERAL_FLOAT_QUERY,
+ LITERAL_STRING_QUERY,
+ EXPRESSION_TRUE_QUERY,
+ EXPRESSION_FALSE_QUERY,
+ EMPTY_RESULT_QUERY,
+ JSON_EXTRACT_QUERY,
+ HOMEBREW_FORMULA_QUERY,
+ HOMEBREW_METRICS_QUERY,
+ REGISTRY_PULL_HOMEBREW_QUERY,
+ registry_pull_resp_pattern,
+ print_test_result,
+ pystackql_test_setup
+)
+
+class TestQueryExecution:
+ """Tests for PyStackQL query execution functionality."""
+
+ StackQL = StackQL # For use with pystackql_test_setup decorator
+
+ # Helper method to check if a value is numeric
+ def _is_numeric(self, value):
+ """Check if a value is numeric."""
+ if isinstance(value, (int, float)):
+ return True
+ if isinstance(value, str):
+ try:
+ float(value)
+ return True
+ except (ValueError, TypeError):
+ return False
+ return False
+
+ @pystackql_test_setup()
+ def test_execute_literal_int(self):
+ """Test executing a query with a literal integer value."""
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "literal_int_value" in result[0], "Result should have 'literal_int_value' column"
+
+ # Check the value - could be int or string representation
+ value = result[0]["literal_int_value"]
+ assert value == "1" or value == 1, f"Result value should be 1, got {value}"
+
+ print_test_result(f"Execute literal int query test\nRESULT: {result}",
+ value == "1" or value == 1)
+
+ @pystackql_test_setup()
+ def test_execute_literal_float(self):
+ """Test executing a query with a literal float value."""
+ result = self.stackql.execute(LITERAL_FLOAT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "literal_float_value" in result[0], "Result should have 'literal_float_value' column"
+
+ # Check the value - could be float or string representation
+ value = result[0]["literal_float_value"]
+ assert value == "1.001" or value == 1.001, f"Result value should be 1.001, got {value}"
+
+ print_test_result(f"Execute literal float query test\nRESULT: {result}",
+ value == "1.001" or value == 1.001)
+
+ @pystackql_test_setup()
+ def test_execute_literal_string(self):
+ """Test executing a query with a literal string value."""
+ result = self.stackql.execute(LITERAL_STRING_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "literal_string_value" in result[0], "Result should have 'literal_string_value' column"
+
+ # Check the value
+ value = result[0]["literal_string_value"]
+ assert value == "test", f"Result value should be 'test', got {value}"
+
+ print_test_result(f"Execute literal string query test\nRESULT: {result}",
+ value == "test")
+
+ @pystackql_test_setup()
+ def test_execute_expression_true(self):
+ """Test executing a query with a true expression."""
+ result = self.stackql.execute(EXPRESSION_TRUE_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "expression" in result[0], "Result should have 'expression' column"
+
+ # Check the value - could be int or string
+ value = result[0]["expression"]
+ assert value == "1" or value == 1, f"Result value should be 1 (true), got {value}"
+
+ print_test_result(f"Execute true expression query test\nRESULT: {result}",
+ value == "1" or value == 1)
+
+ @pystackql_test_setup()
+ def test_execute_expression_false(self):
+ """Test executing a query with a false expression."""
+ result = self.stackql.execute(EXPRESSION_FALSE_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "expression" in result[0], "Result should have 'expression' column"
+
+ # Check the value - could be int or string
+ value = result[0]["expression"]
+ assert value == "0" or value == 0, f"Result value should be 0 (false), got {value}"
+
+ print_test_result(f"Execute false expression query test\nRESULT: {result}",
+ value == "0" or value == 0)
+
+ @pystackql_test_setup()
+ def test_execute_empty_result(self):
+ """Test executing a query that returns an empty result."""
+ result = self.stackql.execute(EMPTY_RESULT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 0, "Result should be empty"
+
+ print_test_result(f"Execute empty result query test\nRESULT: {result}", len(result) == 0)
+
+ @pystackql_test_setup()
+ def test_execute_json_extract(self):
+ """Test executing a query that uses the json_extract function."""
+ result = self.stackql.execute(JSON_EXTRACT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "key" in result[0], "Result should have 'key' column"
+ assert "value" in result[0], "Result should have 'value' column"
+
+ # Get the extracted values
+ key_value = result[0]["key"]
+ value_value = result[0]["value"]
+
+ # Check values - with new implementation they should be direct strings
+ assert "StackName" in str(key_value), "Key should contain 'StackName'"
+ assert "aws-stack" in str(value_value), "Value should contain 'aws-stack'"
+
+ print_test_result(f"Execute JSON extract query test\nRESULT: {result}",
+ "StackName" in str(key_value) and "aws-stack" in str(value_value))
+
+ @pystackql_test_setup()
+ def test_execute_homebrew_formula(self):
+ """Test executing a query against the homebrew.formula.formula table."""
+ result = self.stackql.execute(HOMEBREW_FORMULA_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+ assert "name" in result[0], "Result should have 'name' column"
+ assert "full_name" in result[0], "Result should have 'full_name' column"
+ assert "tap" in result[0], "Result should have 'tap' column"
+
+ # Check formula values - should be direct strings now
+ name_value = result[0]["name"]
+ full_name_value = result[0]["full_name"]
+ tap_value = result[0]["tap"]
+
+ assert "stackql" in str(name_value), f"Name should contain 'stackql', got {name_value}"
+ assert "stackql" in str(full_name_value), f"Full name should contain 'stackql', got {full_name_value}"
+ assert "homebrew/core" in str(tap_value), f"Tap should contain 'homebrew/core', got {tap_value}"
+
+ print_test_result(f"Execute homebrew formula query test\nRESULT: {result}",
+ "stackql" in str(name_value) and
+ "stackql" in str(full_name_value) and
+ "homebrew/core" in str(tap_value))
+
+ @pystackql_test_setup()
+ def test_execute_homebrew_metrics(self):
+ """Test executing a query against the homebrew.formula.vw_usage_metrics view."""
+ result = self.stackql.execute(HOMEBREW_METRICS_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one row"
+
+ # Check column names (not values as they change over time)
+ expected_columns = [
+ "formula_name", "installs_30d", "installs_90d", "installs_365d",
+ "install_on_requests_30d", "install_on_requests_90d", "install_on_requests_365d"
+ ]
+ for col in expected_columns:
+ assert col in result[0], f"Result should have '{col}' column"
+
+ # Check formula name - should be direct string now
+ formula_name = result[0]["formula_name"]
+ assert "stackql" in str(formula_name), f"Formula name should contain 'stackql', got {formula_name}"
+
+ # Check data types - should be numeric or string representations of numbers
+ for col in expected_columns[1:]: # Skip formula_name
+ assert self._is_numeric(result[0][col]), f"Column '{col}' should be numeric or string representation of a number"
+
+ print_test_result(f"Execute homebrew metrics query test\nCOLUMNS: {list(result[0].keys())}",
+ all(col in result[0] for col in expected_columns) and
+ "stackql" in str(formula_name))
+
+ @pystackql_test_setup()
+ def test_execute_stmt_registry_pull(self):
+ """Test executing a registry pull statement."""
+ result = self.stackql.executeStmt(REGISTRY_PULL_HOMEBREW_QUERY)
+
+ # Check result structure (depends on output format)
+ if self.stackql.output == 'dict':
+ assert 'message' in result, "Result should have 'message' key"
+ message = result['message']
+ elif self.stackql.output == 'pandas':
+ assert 'message' in result.columns, "Result should have 'message' column"
+ message = result['message'].iloc[0]
+ elif self.stackql.output == 'csv':
+ message = result
+ else:
+ message = str(result)
+
+ # Check that the message matches the expected pattern
+ expected_pattern = registry_pull_resp_pattern("homebrew")
+ assert re.search(expected_pattern, message), f"Message '{message}' does not match expected pattern"
+
+ print_test_result(f"Execute registry pull statement test\nRESULT: {result}",
+ re.search(expected_pattern, message) is not None)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
diff --git a/ref-python-packages/pystackql/tests/test_server.py b/ref-python-packages/pystackql/tests/test_server.py
new file mode 100644
index 0000000..80e91f6
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_server.py
@@ -0,0 +1,316 @@
+# tests/test_server.py
+
+"""
+Server mode tests for PyStackQL.
+
+This module tests the server mode functionality of the StackQL class.
+"""
+
+import re
+import os
+import pytest
+import pandas as pd
+from unittest.mock import patch
+from pystackql import StackQL
+from test_constants import (
+ LITERAL_INT_QUERY,
+ LITERAL_STRING_QUERY,
+ HOMEBREW_FORMULA_QUERY,
+ REGISTRY_PULL_HOMEBREW_QUERY,
+ print_test_result,
+ pystackql_test_setup
+)
+
+# @pytest.mark.usefixtures("stackql_server")
+class TestServerMode:
+ """Tests for PyStackQL server mode functionality."""
+
+ StackQL = StackQL # For use with pystackql_test_setup decorator
+ server_available = False # Class-level flag to track server availability
+
+ # @pystackql_test_setup(server_mode=True)
+ # def test_server_mode_connectivity(self):
+ # """Test that server mode connects successfully."""
+ # # Check server_mode flag is set correctly
+ # assert self.stackql.server_mode, "StackQL should be in server mode"
+
+ # # Check server connection object exists
+ # assert hasattr(self.stackql, 'server_connection'), "StackQL should have a server_connection attribute"
+ # assert self.stackql.server_connection is not None, "Server connection object should not be None"
+
+ # # IMPORTANT: Actually test the connection works
+ # connection_working = self.stackql.test_connection()
+
+ # # Print detailed results for debugging
+ # if not connection_working:
+    #         print("⚠️ Server connection test failed: unable to execute a simple query")
+ # print(f"Server address: {self.stackql.server_address}")
+ # print(f"Server port: {self.stackql.server_port}")
+    #         print("\n❌ SERVER CONNECTION FAILED - SKIPPING REMAINING SERVER TESTS")
+ # else:
+ # # Set flag indicating server is available
+ # TestServerMode.server_available = True
+
+ # print_test_result("Server mode connectivity test",
+ # self.stackql.server_mode and
+ # hasattr(self.stackql, 'server_connection') and
+ # self.stackql.server_connection is not None and
+ # connection_working, # Include connection check in the pass criteria
+ # True)
+
+ # # Add additional output about the actual connection status
+    #     print(f"   - Connection status: {'✅ WORKING' if connection_working else '❌ NOT WORKING'}")
+    #     print(f"   - Expected status: ✅ WORKING")  # Always expected to be working
+
+ # # Always assert that the connection is working
+ # assert connection_working, "Server connection should be working"
+
+ @pystackql_test_setup(server_mode=True)
+ def test_server_mode_connectivity(self):
+ """Test that server mode connects successfully."""
+ # Initialize class variable
+ TestServerMode.server_available = False
+
+ # Perform basic server connection test
+ connection_working = self.stackql.test_connection()
+
+ if not connection_working:
+ # Log minimal diagnostic info
+            print("\n⚠️ Server connection failed")
+ print(f"Address: {self.stackql.server_address}:{self.stackql.server_port}")
+            print("❌ Skipping remaining server tests")
+
+ # Fail with a concise message - this will be what shows in the error summary
+ pytest.fail("Server connection failed - please start stackql server")
+
+ # Connection succeeded
+ TestServerMode.server_available = True
+        print("✅ Server connection successful")
+
+ @pystackql_test_setup(server_mode=True)
+ def test_server_mode_execute_stmt(self):
+ """Test executeStmt in server mode."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ result = self.stackql.executeStmt(REGISTRY_PULL_HOMEBREW_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one item"
+ assert "message" in result[0], "Result should have a 'message' key"
+ assert result[0]["message"] == "OK", "Message should be 'OK'"
+
+ print_test_result(f"Server mode executeStmt test\nRESULT: {result}",
+ isinstance(result, list) and
+ len(result) == 1 and
+ result[0]["message"] == "OK",
+ True)
+
+ @pystackql_test_setup(server_mode=True, output='pandas')
+ def test_server_mode_execute_stmt_pandas(self):
+ """Test executeStmt in server mode with pandas output."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ result = self.stackql.executeStmt(REGISTRY_PULL_HOMEBREW_QUERY)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "message" in result.columns, "DataFrame should have a 'message' column"
+ assert result["message"].iloc[0] == "OK", "Message should be 'OK'"
+
+ print_test_result(f"Server mode executeStmt with pandas output test\nRESULT: {result}",
+ isinstance(result, pd.DataFrame) and
+ not result.empty and
+ result["message"].iloc[0] == "OK",
+ True)
+
+ @pystackql_test_setup(server_mode=True)
+ def test_server_mode_execute(self):
+ """Test execute in server mode."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ result = self.stackql.execute(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one item"
+ assert "literal_int_value" in result[0], "Result should have a 'literal_int_value' key"
+ # Update assertion to handle string value from server
+ literal_value = result[0]["literal_int_value"]
+ if isinstance(literal_value, str):
+ literal_value = int(literal_value)
+ assert literal_value == 1, "Value should be 1"
+
+ print_test_result(f"Server mode execute test\nRESULT: {result}",
+ isinstance(result, list) and
+ len(result) == 1 and
+ int(result[0]["literal_int_value"]) == 1,
+ True)
+
+ @pystackql_test_setup(server_mode=True, output='pandas')
+ def test_server_mode_execute_pandas(self):
+ """Test execute in server mode with pandas output."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ result = self.stackql.execute(LITERAL_STRING_QUERY)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "literal_string_value" in result.columns, "DataFrame should have a 'literal_string_value' column"
+ assert result["literal_string_value"].iloc[0] == "test", "Value should be 'test'"
+
+ print_test_result(f"Server mode execute with pandas output test\nRESULT: {result}",
+ isinstance(result, pd.DataFrame) and
+ not result.empty and
+ result["literal_string_value"].iloc[0] == "test",
+ True)
+
+ @pystackql_test_setup(server_mode=True)
+ def test_server_mode_provider_query(self):
+ """Test querying a provider in server mode."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ result = self.stackql.execute(HOMEBREW_FORMULA_QUERY)
+
+ # Check result structure
+ assert isinstance(result, list), "Result should be a list"
+ assert len(result) == 1, "Result should have exactly one item"
+ assert "name" in result[0], "Result should have a 'name' key"
+ assert "full_name" in result[0], "Result should have a 'full_name' key"
+ assert "tap" in result[0], "Result should have a 'tap' key"
+ assert result[0]["name"] == "stackql", "Name should be 'stackql'"
+
+ print_test_result(f"Server mode provider query test\nRESULT: {result}",
+ isinstance(result, list) and
+ len(result) == 1 and
+ result[0]["name"] == "stackql",
+ True)
+
+ # Update mocked tests to use execute_query instead of _run_server_query
+ @patch('pystackql.core.server.ServerConnection.execute_query')
+ def test_server_mode_execute_mocked(self, mock_execute_query):
+ """Test execute in server mode with mocked server response."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ # Create a StackQL instance in server mode
+ stackql = StackQL(server_mode=True)
+
+ # Mock the server response
+ mock_result = [{"literal_int_value": 1}]
+ mock_execute_query.return_value = mock_result
+
+ # Execute the query
+ result = stackql.execute(LITERAL_INT_QUERY)
+
+ # Check that the mock was called with the correct query
+ mock_execute_query.assert_called_once_with(LITERAL_INT_QUERY)
+
+ # Check result structure
+ assert result == mock_result, "Result should match the mocked result"
+
+ print_test_result(f"Server mode execute test (mocked)\nRESULT: {result}",
+ result == mock_result,
+ True)
+
+ @patch('pystackql.core.server.ServerConnection.execute_query')
+ def test_server_mode_execute_pandas_mocked(self, mock_execute_query):
+ """Test execute in server mode with pandas output and mocked server response."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ # Create a StackQL instance in server mode with pandas output
+ stackql = StackQL(server_mode=True, output='pandas')
+
+ # Mock the server response
+ mock_result = [{"literal_string_value": "test"}]
+ mock_execute_query.return_value = mock_result
+
+ # Execute the query
+ result = stackql.execute(LITERAL_STRING_QUERY)
+
+ # Check that the mock was called with the correct query
+ mock_execute_query.assert_called_once_with(LITERAL_STRING_QUERY)
+
+ # Check result structure
+ assert isinstance(result, pd.DataFrame), "Result should be a pandas DataFrame"
+ assert not result.empty, "DataFrame should not be empty"
+ assert "literal_string_value" in result.columns, "DataFrame should have a 'literal_string_value' column"
+ assert result["literal_string_value"].iloc[0] == "test", "Value should be 'test'"
+
+ print_test_result(f"Server mode execute with pandas output test (mocked)\nRESULT: {result}",
+ isinstance(result, pd.DataFrame) and
+ not result.empty and
+ result["literal_string_value"].iloc[0] == "test",
+ True)
+
+ @patch('pystackql.core.server.ServerConnection.execute_query')
+ def test_server_mode_execute_stmt_mocked(self, mock_execute_query):
+ """Test executeStmt in server mode with mocked server response."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ # Create a StackQL instance in server mode
+ stackql = StackQL(server_mode=True)
+
+ # Mock the server response
+ mock_result = [{"message": "OK"}]
+ mock_execute_query.return_value = mock_result
+
+ # Execute the statement
+ result = stackql.executeStmt(REGISTRY_PULL_HOMEBREW_QUERY)
+
+ # Check that the mock was called with the correct query and is_statement=True
+ mock_execute_query.assert_called_once_with(REGISTRY_PULL_HOMEBREW_QUERY, is_statement=True)
+
+ # Check result structure
+ assert result == mock_result, "Result should match the mocked result"
+
+ print_test_result(f"Server mode executeStmt test (mocked)\nRESULT: {result}",
+ result == mock_result,
+ True)
+
+ def test_server_mode_csv_output_error(self):
+ """Test that server mode with csv output raises an error."""
+
+ # Skip if server is not available
+ if not TestServerMode.server_available:
+ pytest.skip("Server is not available, skipping test")
+
+ with pytest.raises(ValueError) as exc_info:
+ StackQL(server_mode=True, output='csv')
+
+ # Check exception message
+ expected_message = "CSV output is not supported in server mode, use 'dict' or 'pandas' instead."
+ assert str(exc_info.value) == expected_message, f"Exception message '{str(exc_info.value)}' does not match expected"
+
+ print_test_result(f"Server mode with csv output error test",
+ str(exc_info.value) == expected_message,
+ True)
+
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
\ No newline at end of file
diff --git a/ref-python-packages/pystackql/tests/test_server_magic.py b/ref-python-packages/pystackql/tests/test_server_magic.py
new file mode 100644
index 0000000..d39084d
--- /dev/null
+++ b/ref-python-packages/pystackql/tests/test_server_magic.py
@@ -0,0 +1,46 @@
+# tests/test_server_magic.py
+
+"""
+Server-mode magic extension tests for PyStackQL.
+
+This module tests the Jupyter magic extensions for StackQL in server mode.
+"""
+
+import os
+import sys
+import pytest
+
+# Add the parent directory to the path so we can import from pystackql
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+# Import the base test class
+from tests.test_magic_base import BaseStackQLMagicTest
+
+# Import directly from the original modules - this is what notebooks would do
+from pystackql import magics
+from pystackql import StackqlServerMagic
+
+from tests.test_constants import print_test_result
+
+class TestStackQLServerMagic(BaseStackQLMagicTest):
+ """Tests for the server mode magic extension."""
+
+ # Set the class attributes for the base test class
+ magic_module = magics
+ magic_class = StackqlServerMagic
+ is_server_mode = True
+
+def test_server_magic_extension_loading(mock_interactive_shell):
+ """Test that server magic extension can be loaded."""
+ # Test loading server magic
+ magics.load_ipython_extension(mock_interactive_shell)
+ assert hasattr(mock_interactive_shell, 'magics'), "Magic should be registered"
+ assert isinstance(mock_interactive_shell.magics, StackqlServerMagic), "Registered magic should be StackqlServerMagic"
+
+ print_test_result("Server magic extension loading test",
+ hasattr(mock_interactive_shell, 'magics') and
+ isinstance(mock_interactive_shell.magics, StackqlServerMagic),
+ True, True)
+
+if __name__ == "__main__":
+ pytest.main(["-v", __file__])
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/bug_report.md b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..0d88430
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,38 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: '[BUG]'
+labels: 'bug'
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Browser [e.g. chrome, safari]
+ - Version [e.g. 22]
+
+**Smartphone (please complete the following information):**
+ - Device: [e.g. iPhone6]
+ - OS: [e.g. iOS8.1]
+ - Browser [e.g. stock browser, safari]
+ - Version [e.g. 22]
+
+**Additional context**
+Add any other context about the problem here.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/feature_request.md b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..9d33cbe
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[FEATURE]"
+labels: enhancement
+assignees: ''
+
+---
+
+**Feature Description**
+A clear and concise description of what you want to happen.
+
+**Example(s)**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Possible Approaches or Libraries to Consider**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/question.md b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 0000000..b72dd78
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,15 @@
+---
+name: Question
+about: Pose a question to the StackQL team
+title: "[QUESTION]"
+labels: question
+assignees: ''
+
+---
+
+
+## Question
+
+This channel is an opportunity to ask ad-hoc questions to the `stackql` team. This channel is in lieu of an official platform for ongoing discussions and questions. Please ask your question :)
+
+**Note**: Questions over github issues will be deprecated and retired once we settle on a platform / process ongoing.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/PULL_REQUEST_TEMPLATE.md b/ref-python-packages/stackql-deploy/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..ef26eae
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,18 @@
+## Description
+
+Please include a summary of the changes and the related issue(s). Please also include relevant motivation and context.
+
+Fixes #
+
+## Checklist
+
+Please make sure that the following criteria are met:
+
+- [ ] The PR title is descriptive.
+- [ ] For example stacks, I have included a descriptive `README.md` in the example project directory, which describes the stack and includes instructions to deploy or test.
+- [ ] For example stacks, add your stack to the website template library at `website/docs/template-library/..` which gets published to [stackql-deploy.io](https://stackql-deploy.io/docs/template-library) (optional)
+- [ ] I have ⭐'ed the [stackql](https://github.com/stackql/stackql) and [stackql-deploy](https://github.com/stackql/stackql-deploy) repos.
+
+## Additional Notes
+
+Add any additional information or context that might help the reviewers.
diff --git a/ref-python-packages/stackql-deploy/.github/workflows/claude.yaml b/ref-python-packages/stackql-deploy/.github/workflows/claude.yaml
new file mode 100644
index 0000000..d07f4be
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/workflows/claude.yaml
@@ -0,0 +1,36 @@
+name: Claude PR Assistant
+
+on:
+ issue_comment:
+ types: [created]
+ pull_request_review_comment:
+ types: [created]
+ issues:
+ types: [opened, assigned]
+ pull_request_review:
+ types: [submitted]
+
+jobs:
+ claude-code-action:
+ if: |
+ (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
+ (github.event_name == 'issues' && contains(github.event.issue.body, '@claude'))
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+ id-token: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Run Claude PR Action
+ uses: anthropics/claude-code-action@beta
+ with:
+ anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
+ timeout_minutes: "60"
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/workflows/lint-check.yml b/ref-python-packages/stackql-deploy/.github/workflows/lint-check.yml
new file mode 100644
index 0000000..979a2f8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/workflows/lint-check.yml
@@ -0,0 +1,33 @@
+name: Lint Check
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+jobs:
+ check_lint:
+ defaults:
+ run:
+ working-directory: "stackql_deploy"
+ runs-on: ubuntu-latest
+ if: github.event_name == 'pull_request'
+ strategy:
+ max-parallel: 1
+ matrix:
+ python-version: ["3.11"]
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install dependencies
+ run: pip install ruff
+
+ - name: Lint check with ruff
+ run: ruff check .
diff --git a/ref-python-packages/stackql-deploy/.github/workflows/prod-web-deploy.yml b/ref-python-packages/stackql-deploy/.github/workflows/prod-web-deploy.yml
new file mode 100644
index 0000000..ab53507
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/workflows/prod-web-deploy.yml
@@ -0,0 +1,58 @@
+name: Deploy to GitHub Pages
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - 'website/**'
+
+jobs:
+ build:
+ name: Build Docusaurus
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: yarn
+ cache-dependency-path: website/yarn.lock
+
+ - name: Install dependencies
+ run: yarn install --frozen-lockfile
+ working-directory: website
+
+ - name: Build website
+ run: yarn build
+ working-directory: website
+
+ - name: Upload Build Artifact
+ uses: actions/upload-pages-artifact@v3
+ with:
+ path: website/build # Ensure the path is correctly set to the Docusaurus build output
+
+ deploy:
+ name: Deploy to GitHub Pages
+ needs: build
+
+ # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
+ permissions:
+ pages: write # to deploy to Pages
+ id-token: write # to verify the deployment originates from an appropriate source
+
+ # Deploy to the github-pages environment
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
+
+ runs-on: ubuntu-latest
+ steps:
+ - name: Deploy to GitHub Pages
+ id: deployment
+ uses: actions/deploy-pages@v4
+ with:
+ working-directory: website/build # Ensures the correct directory is used for deployment
diff --git a/ref-python-packages/stackql-deploy/.github/workflows/star-check.yml b/ref-python-packages/stackql-deploy/.github/workflows/star-check.yml
new file mode 100644
index 0000000..efaaaf4
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/workflows/star-check.yml
@@ -0,0 +1,45 @@
+name: Check if PR author has starred the repository
+on:
+ pull_request:
+ types: [opened, synchronize, reopened]
+
+jobs:
+ check-starred:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4.1.7
+
+ - name: Get PR author username
+ id: get-author
+ run: echo "username=${{ github.event.pull_request.user.login }}" >> $GITHUB_ENV
+
+ - name: Pull github provider
+ uses: stackql/stackql-exec@v2.2.1
+ with:
+ is_command: 'true'
+ query: "REGISTRY PULL github;"
+
+ - name: Run stackql query
+ id: check-star
+ uses: stackql/stackql-assert@v2.2.1
+ with:
+ test_query: |
+ SELECT repo, count(*) as has_starred
+ FROM github.activity.repo_stargazers
+ WHERE owner = 'stackql' and repo in ('stackql', 'stackql-deploy') and login = '${{ env.username }}'
+ GROUP BY repo;
+ expected_results_str: '[{"has_starred":"1","repo":"stackql"},{"has_starred":"1","repo":"stackql-deploy"}]'
+ continue-on-error: true
+
+ - name: Check if starred
+ if: always() # Ensures this runs regardless of check-star outcome
+ run: |
+ if [ "${{ steps.check-star.outcome }}" = "success" ]; then
+ echo "::notice::Thanks for your support!"
+ else
+ echo "::error::It seems you haven't starred the StackQL repositories. Please star the following repos before proceeding: https://github.com/stackql/stackql-deploy (this repo) and https://github.com/stackql/stackql (our core repo)"
+ exit 1
+ fi
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.github/workflows/test-web-deploy.yml b/ref-python-packages/stackql-deploy/.github/workflows/test-web-deploy.yml
new file mode 100644
index 0000000..1e2ef91
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.github/workflows/test-web-deploy.yml
@@ -0,0 +1,31 @@
+name: Test deployment
+
+on:
+ pull_request:
+ branches:
+ - main
+ paths:
+ - 'website/**'
+
+jobs:
+ test-deploy:
+ name: Test deployment
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: yarn
+ cache-dependency-path: website/yarn.lock
+
+ - name: Install dependencies
+ run: yarn install --frozen-lockfile
+ working-directory: website
+
+ - name: Test build website
+ run: yarn build
+ working-directory: website
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.gitignore b/ref-python-packages/stackql-deploy/.gitignore
new file mode 100644
index 0000000..5028b94
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.gitignore
@@ -0,0 +1,93 @@
+stackql-zip
+stackql-aws-cloud-shell.sh
+stackql-azure-cloud-shell.sh
+stackql-google-cloud-shell.sh
+stackql
+/.stackql
+**/.env
+.pypirc
+stack/
+oss-activity-monitor/
+testcreds/
+*.log
+venv/
+.venv/
+nohup.out
+
+/.ruff_cache
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# Distribution / packaging
+*.egg-info/
+*.egg
+dist/
+build/
+develop-eggs/
+downloads/
+eggs/
+.eggs/
+# lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+.envrc
+
+venv/
+.DS_Store
+myenv/
diff --git a/ref-python-packages/stackql-deploy/.readthedocs.yml b/ref-python-packages/stackql-deploy/.readthedocs.yml
new file mode 100644
index 0000000..d93a9dd
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.readthedocs.yml
@@ -0,0 +1,35 @@
+# Read the Docs configuration file for Sphinx projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+ # You can also specify other tool versions:
+ # nodejs: "20"
+ # rust: "1.70"
+ # golang: "1.20"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+ configuration: docs/source/conf.py
+ # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
+ # builder: "dirhtml"
+ # Fail on all warnings to avoid broken references
+ # fail_on_warning: true
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+# - pdf
+# - epub
+
+# Optional but recommended, declare the Python requirements required
+# to build your documentation
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+python:
+ install:
+ - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/.vscode/settings.json b/ref-python-packages/stackql-deploy/.vscode/settings.json
new file mode 100644
index 0000000..2a3bc49
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "files.associations": {
+ "*.iql": "sql"
+ }
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/CHANGELOG.md b/ref-python-packages/stackql-deploy/CHANGELOG.md
new file mode 100644
index 0000000..81989fd
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/CHANGELOG.md
@@ -0,0 +1,122 @@
+# Changelog
+
+## 1.9.4 (2025-10-16)
+
+- added `--output-file` argument
+- added stack level `exports` - pre defined `stack_name`, `stack_env`, `elapsed_time` and user defined
+- Added performance enhancement query strategy
+- Added tab completion
+- Added enhanced logging decorators
+
+## 1.8.6 (2025-07-22)
+
+- Added support for inline `sql` for `command` and `query` resource types
+- Added `sql_escape` filter
+
+## 1.8.5 (2025-06-30)
+
+- Added support for resource scoped variables
+- Added developer credits in `info`
+
+## 1.8.3 (2025-02-08)
+
+- Added walkthrough for databricks bootstrap on aws.
+- Bugfix for export variables on dry run.
+
+## 1.8.2 (2025-01-16)
+
+- Added timing output for `build`, `test` and `teardown` operations
+
+## 1.8.1 (2025-01-11)
+
+- Added `uuid()` templating function
+- Exports evaluation optimization for teardown operations
+
+## 1.8.0 (2024-11-09)
+
+- Added option for command specific authentication
+
+## 1.7.7 (2024-10-09)
+
+- Supported version pinning for providers (aws, gcp, azure, etc.) in the `manifest` file
+
+## 1.7.6 (2024-10-07)
+
+- Added support for named `exports` (assigning an alias to the column name in the resource query file) - allows for more generalization and reuse of query files
+
+## 1.7.5 (2024-09-28)
+
+- Renamed the variable `vars` to `env_vars` for clarity and consistency
+
+## 1.7.4 (2024-09-19)
+
+- Colorizing the headings in `stack-deploy info` to green
+
+## 1.7.3 (2024-09-18)
+
+- Grouping information into logical sections: `StackQL Deploy CLI`, `StackQL Library`, and `Installed Providers` for `info` command.
+
+## 1.7.2 (2024-09-14)
+
+- Fixed issue with missing `stackql_manifest.yml.template` by updating `MANIFEST.in` to include template files
+
+## v1.7.1 (2024-09-03)
+
+- fixed `teardown` issue
+
+## v1.7.0 (2024-09-02)
+
+- changed `preflight` to `exists` and `postdeploy` to `statecheck`, maintaining backwards compatibility
+- enhanced `multi` resource support
+
+## v1.6.5 (2024-08-31)
+
+- added `multi` type
+- added support for `create` retries
+
+## v1.6.4 (2024-08-29)
+
+- added `from_json` filter
+- additional error handling for method signature mismatches
+
+## v1.6.3 (2024-08-21)
+
+- `createorupdate` skipped if checks pass
+
+## v1.6.2 (2024-08-18)
+
+- added `shell` command to launch a `stackql shell`
+
+## v1.6.1 (2024-08-17)
+
+- removed un-needed env vars from the global context
+
+## v1.6.0 (2024-07-23)
+
+- added support for AWS Cloud Control `PatchDocument` creation for `UPDATE` statements
+
+## v1.5.3 (2024-06-05)
+
+- templating fixes
+
+## v1.5.0 (2024-04-30)
+
+- added `script` resource type
+
+## v1.2.0 (2024-04-23)
+
+- added `exports` anchor
+- support for runtime stack variables
+
+## v1.0.26 (2024-04-18)
+
+- added `init` function
+- added templates
+- improved exception handling
+
+## v1.0.0 (2024-04-16)
+
+### Initial Release
+
+- basic support for `build`, `test` and `teardown` functions
+- added `info` diagnostic functions
diff --git a/ref-python-packages/stackql-deploy/LICENSE b/ref-python-packages/stackql-deploy/LICENSE
new file mode 100644
index 0000000..f0e0b3c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022-2025 StackQL Studios
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/MANIFEST.in b/ref-python-packages/stackql-deploy/MANIFEST.in
new file mode 100644
index 0000000..6e3cd24
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/MANIFEST.in
@@ -0,0 +1,9 @@
+# MANIFEST.in
+include LICENSE
+include README.rst
+recursive-include stackql_deploy/templates *.template
+include stackql_deploy/inc/contributors.csv
+include shell_completions/*.bash
+include shell_completions/*.zsh
+include shell_completions/*.fish
+include shell_completions/*.ps1
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/README.md b/ref-python-packages/stackql-deploy/README.md
new file mode 100644
index 0000000..fe97e84
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/README.md
@@ -0,0 +1,372 @@
+
+
+[logo]: https://stackql.io/img/stackql-logo-bold.png "stackql logo"
+[deploylogo]: https://stackql.io/img/stackql-deploy-logo.png "stackql-deploy logo"
+[stackqlrepo]: https://github.com/stackql/stackql
+[homepage]: https://stackql.io/
+[docs]: https://stackql.io/docs
+[blog]: https://stackql.io/blog
+[registry]: https://github.com/stackql/stackql-provider-registry
+
+
+
+[pypi]: https://pypi.org/project/stackql-deploy/
+
+
+
+[badge1]: https://img.shields.io/badge/platform-windows%20macos%20linux-brightgreen "Platforms"
+[badge2]: https://img.shields.io/pypi/v/stackql-deploy "PyPi Version"
+[badge3]: https://img.shields.io/pypi/dm/stackql-deploy "PyPi Downloads"
+[badge4]: https://img.shields.io/github/license/stackql/stackql "License"
+
+
+
+[discussions]: https://github.com/orgs/stackql/discussions
+[issues]: https://github.com/stackql/stackql-deploy/issues/new/choose
+
+
+
+[twitter]: https://twitter.com/stackql
+
+
+
+
+[![logo]][stackqlrepo]
+![badge1]
+![badge2]
+![badge3]
+![badge4]
+
+
+
+
+### Model driven resource provisioning and deployment framework using StackQL.
+
+
+
+
+
+[**PyPi**][pypi]
+[**Raise an Issue**][issues]
+
+
+
+
+## About The Project
+
+[**`stackql-deploy`**][pypi] is an open-source command line utility which implements a declarative, model driven framework to deploy and manage multi cloud stacks using [**`stackql`**][stackqlrepo]. [**`stackql-deploy`**][pypi] is distributed as a Python script to be used as a CLI tool, do the following to get started:
+
+
+```bash
+pip install stackql-deploy
+```
+
+> **Note for macOS users**
+> to install `stackql-deploy` in a virtual environment (which may be necessary on **macOS**), use the following:
+>
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## About StackQL
+
+StackQL is a utility which allows you to query and interact with cloud and SaaS resources in real time using SQL grammar. StackQL supports a full set of SQL query/DML grammar, including `JOIN`, `UNION` and subquery functionality and supports mutation operations on cloud and SaaS resources such as `create`, `update` and `delete`, implemented as `INSERT`, `UPDATE` and `DELETE` respectively. StackQL also supports grammar for performing lifecycle operations such as starting or stopping a VM using the `EXEC` statement.
+
+StackQL provider definitions are defined in plaintext OpenAPI extensions to the providers specification. These definitions are then used to generate the SQL schema and the API client. The source for the provider definitions are stored in the [**StackQL Registry**][registry].
+
+## How it works
+
+
+
+A **`stackql-deploy`** project is a directory containing StackQL scripts with a manifest file at the root of the directory, for example:
+
+```
+├── example_stack
+│   ├── resources
+│   │   └── monitor_resource_group.iql
+│   └── stackql_manifest.yml
+```
+
+the `stackql_manifest.yml` defines the resources in the stackql with their properties which can be scoped by environments, for example:
+
+```yaml
+version: 1
+name: example_stack
+description: oss activity monitor stack
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ vars.AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ value: eastus
+ - name: resource_group_name_base
+ value: "activity-monitor"
+resources:
+ - name: monitor_resource_group
+ description: azure resource group for activity monitor
+ props:
+ - name: resource_group_name
+ description: azure resource group name
+ value: "{{ globals.resource_group_name_base }}-{{ globals.stack_env }}"
+ # OR YOU CAN DO...
+ # values:
+ # prd:
+ # value: "activity-monitor-prd"
+ # sit:
+ # value: "activity-monitor-sit"
+ # dev:
+ # value: "activity-monitor-dev"
+```
+
+> use `stackql-deploy init {stack_name}` to create a project directory with sample files
+
+Deployment orchestration using `stackql-deploy` includes:
+
+- **_pre-flight_** checks, which are StackQL queries that check for the existence or current configuration state of a resource
+- **_deployment_** scripts, which are StackQL queries to create or update resources (or delete in the case of de-provisioning)
+- **_post-deployment_** tests, which are StackQL queries to confirm that resources were deployed and have the desired state
+
+**Performance Optimization**: `stackql-deploy` uses an intelligent query optimization strategy which is described here:
+
+```mermaid
+graph TB
+ A[Start] --> B{foreach\nresource}
+ B --> C{exports query\navailable?}
+ C -- Yes --> D[try exports first\n🚀 optimal path]
+ C -- No --> E[exists\ncheck]
+ D --> F{exports\nsuccess?}
+ F -- Yes --> G[✅ validated with\n1 query only]
+ F -- No --> E
+ E --> H{resource\nexists?}
+ H -- Yes --> I[run update\nor createorupdate query]
+ H -- No --> J[run create\nor createorupdate query]
+ I --> K[run statecheck check]
+ J --> K
+ G --> L[reuse exports result]
+ K --> M{End}
+ L --> M
+```
+
+### `INSERT`, `UPDATE`, `DELETE` queries
+
+Mutation operations are defined as `.iql` files in the `resources` directory, these are templates that are rendered with properties or environment context variables at run time, for example:
+
+```sql
+/*+ create */
+INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}'
+
+/*+ update */
+UPDATE azure.resources.resource_groups
+SET data__location = '{{ location }}'
+WHERE resourceGroupName = '{{ resource_group_name }}'
+ AND subscriptionId = '{{ subscription_id }}'
+
+/*+ delete */
+DELETE FROM azure.resources.resource_groups
+WHERE resourceGroupName = '{{ resource_group_name }}' AND subscriptionId = '{{ subscription_id }}'
+```
+
+### Test Queries
+
+Test files are defined as `.iql` files in the `resources` directory, these files define the pre-flight and post-deploy checks to be performed, for example:
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND location = '{{ location }}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+/*+ exports */
+SELECT resourceGroupName, location, JSON_EXTRACT(properties, '$.provisioningState') as state
+FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+```
+
+### Query Optimization
+
+`stackql-deploy` implements intelligent query optimization that significantly improves performance:
+
+**Traditional Flow (3 queries):**
+1. `exists` - check if resource exists
+2. `statecheck` - validate resource configuration
+3. `exports` - extract variables for dependent resources
+
+**Optimized Flow (1 query in happy path):**
+1. **Try `exports` first** - if this succeeds, it validates existence, state, and extracts variables in one operation
+2. **Fallback to traditional flow** only if exports fails
+
+**Performance Benefits:**
+- Up to **66% reduction** in API calls for existing, correctly configured resources
+- **2-3x faster** deployments in typical scenarios
+- Maintains full validation integrity and backward compatibility
+
+**Best Practice:** Design your `exports` queries to include the validation logic from `statecheck` queries to maximize the benefits of this optimization.
+
+## Usage
+
+
+
+Once installed, use the `build`, `test`, or `teardown` commands as shown here:
+
+```
+stackql-deploy build prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000 --dry-run
+stackql-deploy build prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+stackql-deploy test prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+stackql-deploy teardown prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+```
+
+> **Note:** `teardown` deprovisions resources in reverse order to creation
+
+Additional options include:
+
+- `--dry-run`: perform a dry run of the stack operations.
+- `--on-failure=rollback`: action on failure: rollback, ignore or error.
+- `--env-file=.env`: specify an environment variable file.
+- `-e KEY=value`: pass additional environment variables.
+- `--log-level`: logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL), defaults to INFO.
+
+Use `stackql-deploy info` to show information about the package and environment, for example:
+
+```bash
+$ stackql-deploy info
+stackql-deploy CLI
+ Version: 1.7.7
+
+StackQL Library
+ Version: v0.5.748
+ pystackql Version: 3.7.0
+ Platform: Linux x86_64 (Linux-5.15.133.1-microsoft-standard-WSL2-x86_64-with-glibc2.35), Python 3.10.12
+ Binary Path: `/mnt/c/LocalGitRepos/stackql/stackql-deploy/stackql`
+
+Installed Providers
+ aws: v24.07.00246
+ azure: v23.03.00121
+ google: v24.09.00251
+```
+
+Use the `--help` option to see more information about the commands and options available:
+
+```
+stackql-deploy --help
+```
+
+### Tab Completion
+
+**stackql-deploy** supports tab completion for commands and options across multiple shells. To enable tab completion:
+
+```bash
+eval "$(stackql-deploy completion bash)" # activate now
+stackql-deploy completion bash --install # install permanently
+stackql-deploy completion # auto-detect shell
+```
+
+## Building and Testing Locally
+
+To get started with **stackql-deploy**, install it locally using pip:
+
+```bash
+python3 -m venv venv
+source venv/bin/activate
+pip install -e .
+# ...
+deactivate
+rm -rf venv/
+```
+
+### To Remove the Locally Installed Package
+
+```
+pip uninstall stackql-deploy
+pip cache purge
+```
+
+## Building and Deploying to PyPI
+
+To distribute **stackql-deploy** on PyPI, you'll need to ensure that you have all required files set up correctly in your project directory. This typically includes your `setup.py`, `README.rst`, `LICENSE`, and any other necessary files.
+
+### Building the Package
+
+First, ensure you have the latest versions of `setuptools` and `wheel` installed:
+
+```bash
+python3 -m venv venv
+source venv/bin/activate
+# pip install --upgrade setuptools wheel
+pip install --upgrade build
+```
+
+Then, navigate to your project root directory and build the distribution files:
+
+```bash
+rm dist/stackql_deploy*
+python3 -m build
+# or
+# python3 setup.py sdist bdist_wheel
+```
+
+This command generates distribution packages in the `dist/` directory.
+
+### Uploading the Package to PyPI
+
+To upload the package to PyPI, you'll need to use `twine`, a utility for publishing Python packages. First, install `twine`:
+
+```
+pip install twine
+```
+
+Then, use `twine` to upload all of the archives under `dist/`:
+
+```
+twine upload --config-file .pypirc dist/*
+```
+
+### Building the Docs
+
+Navigate to your `docs` directory and build the Sphinx documentation:
+
+```
+cd docs
+make html
+```
+
+## Code Linting
+
+To maintain code quality and consistency, we use `ruff` as the linter for this project. `ruff` offers fast performance and a comprehensive set of linting rules suitable for `stackql-deploy`. You can run the lint check as follows:
+
+```bash
+ruff check .
+```
+
+Note: If you need to install ruff, you can do so with `pip install ruff`.
+
+## Contributing
+
+Contributions are welcome and encouraged.
+
+## License
+
+Distributed under the MIT License. See [`LICENSE`](https://github.com/stackql/stackql-deploy/blob/main/LICENSE) for more information.
+
+## Contact
+
+Get in touch with us via Twitter at [**@stackql**][twitter], email us at [**info@stackql.io**](mailto:info@stackql.io) or start a conversation using [**discussions**][discussions].
diff --git a/ref-python-packages/stackql-deploy/README.rst b/ref-python-packages/stackql-deploy/README.rst
new file mode 100644
index 0000000..f74d8aa
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/README.rst
@@ -0,0 +1,324 @@
+.. image:: https://stackql.io/img/stackql-deploy-logo.png
+ :alt: "stackql-deploy logo"
+ :target: https://github.com/stackql/stackql
+ :align: center
+
+==========================================================================
+Model driven resource provisioning and deployment framework using StackQL.
+==========================================================================
+
+.. image:: https://img.shields.io/pypi/v/stackql-deploy
+ :target: https://pypi.org/project/stackql-deploy/
+ :alt: PyPI
+
+.. image:: https://img.shields.io/pypi/dm/stackql-deploy
+ :target: https://pypi.org/project/stackql-deploy/
+ :alt: PyPI - Downloads
+
+.. image:: https://img.shields.io/badge/documentation-%F0%9F%93%96-brightgreen.svg
+ :target: https://stackql-deploy.io/docs
+ :alt: Documentation
+
+==============
+
+**stackql-deploy** is a multi-cloud Infrastructure as Code (IaC) framework using `stackql`_, inspired by dbt (data build tool), which manages data transformation workflows in analytics engineering by treating SQL scripts as models that can be built, tested, and materialized incrementally. You can create a similar framework for infrastructure provisioning with StackQL. The goal is to treat infrastructure-as-code (IaC) queries as models that can be deployed, managed, and interconnected.
+
+This ELT/model-based framework to IaC allows you to provision, test, update and teardown multi-cloud stacks similar to how dbt manages data transformation projects, with the benefits of version control, peer review, and automation. This approach enables you to deploy complex, dependent infrastructure components in a reliable and repeatable manner.
+
+The use of StackQL simplifies the interaction with cloud resources by using SQL-like syntax, making it easier to define and execute complex cloud management operations. Resources are provisioned with ``INSERT`` statements and tests are structured around ``SELECT`` statements.
+
+Features include:
+
+- Dynamic state determination (eliminating the need for state files)
+- Simple flow control with rollback capabilities
+- Single code base for multiple target environments
+- SQL-based definitions for resources and tests
+
+How stackql-deploy Works
+------------------------
+
+**stackql-deploy** orchestrates cloud resource provisioning by parsing SQL-like definitions. It determines the necessity of creating or updating resources based on exists checks, and ensures the creation and correct desired configuration through post-deployment verifications.
+
+.. image:: https://stackql.io/img/blog/stackql-deploy.png
+ :alt: "stackql-deploy"
+ :target: https://github.com/stackql/stackql
+
+Installing from PyPI
+--------------------
+
+To install **stackql-deploy** directly from PyPI, run the following command:
+
+.. code-block:: bash
+
+ pip install stackql-deploy
+
+This will install the latest version of **stackql-deploy** and its dependencies from the Python Package Index.
+
+.. note::
+
+ **Note for macOS users**: to install `stackql-deploy` in a virtual environment (which may be necessary on **macOS**), use the following:
+
+ .. code-block:: bash
+
+ python3 -m venv myenv
+ source myenv/bin/activate
+ pip install stackql-deploy
+
+Running stackql-deploy
+----------------------
+
+Once installed, use the `build`, `test`, or `teardown` commands as shown here:
+
+.. code-block:: none
+
+ stackql-deploy build prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000 --dry-run
+ stackql-deploy build prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+ stackql-deploy test prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+ stackql-deploy teardown prd example_stack -e AZURE_SUBSCRIPTION_ID 00000000-0000-0000-0000-000000000000
+
+.. note::
+ ``teardown`` deprovisions resources in reverse order to creation
+
+additional options include:
+
+- ``--dry-run``: perform a dry run of the stack operations.
+- ``--on-failure=rollback``: action on failure: rollback, ignore or error.
+- ``--env-file=.env``: specify an environment variable file.
+- ``-e KEY=value``: pass additional environment variables.
+- ``--log-level`` : logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL), defaults to INFO.
+
+use ``stackql-deploy info`` to show information about the package and environment, for example
+
+.. code-block:: none
+
+ $ stackql-deploy info
+ stackql-deploy version: 1.0.0
+ pystackql version : 3.5.4
+ stackql version : v0.5.612
+ stackql binary path : /mnt/c/LocalGitRepos/stackql/stackql-deploy/stackql
+ platform : Linux x86_64 (Linux-5.15.133.1-microsoft-standard-WSL2-x86_64-with-glibc2.35), Python 3.10.12
+
+Use the ``--help`` option to see more information about the commands and options available:
+
+.. code-block:: none
+
+ stackql-deploy --help
+
+Project Structure
+-----------------
+
+**stackql-deploy** uses a modular structure where each component of the infrastructure is defined in separate files, allowing for clear separation of concerns and easy management. This example is based on a stack named ``example_stack``, with a resource named ``monitor_resource_group``.
+
+::
+
+   ├── example_stack
+   │   ├── stackql_manifest.yml
+   │   └── resources
+   │       └── monitor_resource_group.iql
+
+.. note::
+ use the ``init`` command to create a new project structure with sample files, for example ``stackql-deploy init example_stack``
+
+Manifest File
+-------------
+
+- **Manifest File**: The ``stackql_manifest.yml`` is used to define your stack and manage dependencies between infrastructure components. This file defines which resources need to be provisioned before others and parameterizes resources based on environment variables or other configurations.
+
+- **Providers**: List the cloud service providers that your stack will interact with. Each provider specified in the list will be initialized and made ready for use with the stack.
+
+ .. code-block:: yaml
+
+ providers:
+ - azure
+ - github
+
+- **Globals**: Defines a set of global variables that can be used across the entire stack configuration. These variables can hold values related to environment settings, default configurations, or any commonly used data.
+
+ .. code-block:: yaml
+
+ globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ vars.AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ value: eastus
+ ... (additional globals)
+
+- **Resources**: Describes all the infrastructure components, such as networks, compute instances, databases, etc., that make up your stack. Here you can define the resources, their properties, and any dependencies between them.
+
+ .. code-block:: yaml
+
+ resources:
+ - name: resource_group
+ description: azure resource group for activity monitor app
+ - name: storage_account
+ description: azure storage account for activity monitor app
+ ... (additional properties and exports)
+ ...
+
+ Each resource can have the following attributes:
+
+ - **Name**: A unique identifier for the resource within the stack.
+ - **Description**: A brief explanation of the resource's purpose and functionality.
+ - **Type**: (Optional) Specifies the kind of resource (e.g., 'resource', 'query', 'script').
+ - **Props**: (Optional) Lists the properties of the resource that define its configuration.
+ - **Exports**: (Optional) Variables that are exported by this resource which can be used by other resources.
+ - **Protected**: (Optional) A list of sensitive information that should not be logged or exposed outside secure contexts.
+
+- **Scripts**: If your stack involves the execution of scripts for setup, data manipulation, or deployment actions, they are defined under the resources with a type of 'script'.
+
+ .. code-block:: yaml
+
+ - name: install_dependencies
+ type: script
+ run: |
+ pip install pynacl
+ ...
+
+ The script's execution output can be captured and used within the stack or for further processing.
+
+- **Integration with External Systems**: For stacks that interact with external services like GitHub, special resource types like 'query' can be used to fetch data from these services and use it within your deployment.
+
+ .. code-block:: yaml
+
+ - name: get_github_public_key
+ type: query
+ ... (additional properties and exports)
+
+ This can be useful for dynamic configurations based on external state or metadata.
+
+Resource and Test SQL Files
+----------------------------
+
+These files define the SQL-like commands for creating, updating, and testing the deployment of resources.
+
+.. note::
+ The SQL files use special **anchors** to indicate operations such as create, update, delete for resources,
+ and exists or post-deployment checks for queries. For detailed explanations of these anchors, refer to the
+ `Resource SQL Anchors`_ and `Query SQL Anchors`_ sections.
+
+**Resource SQL (resources/monitor_resource_group.iql):**
+
+.. code-block:: sql
+
+ /*+ create */
+ INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location
+ )
+ SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}'
+
+ /*+ update */
+ UPDATE azure.resources.resource_groups
+ SET data__location = '{{ location }}'
+ WHERE resourceGroupName = '{{ resource_group_name }}'
+ AND subscriptionId = '{{ subscription_id }}'
+
+ /*+ delete */
+ DELETE FROM azure.resources.resource_groups
+ WHERE resourceGroupName = '{{ resource_group_name }}' AND subscriptionId = '{{ subscription_id }}'
+
+**Test SQL (resources/monitor_storage_account.iql):**
+
+.. code-block:: sql
+
+ /*+ exists */
+ SELECT COUNT(*) as count FROM azure.storage.accounts
+ WHERE SPLIT_PART(SPLIT_PART(JSON_EXTRACT(properties, '$.primaryEndpoints.blob'), '//', 2), '.', 1) = '{{ storage_account_name }}'
+ AND subscriptionId = '{{ subscription_id }}'
+ AND resourceGroupName = '{{ resource_group_name }}'
+
+ /*+ statecheck, retries=5, retry_delay=5 */
+ SELECT
+ COUNT(*) as count
+ FROM azure.storage.accounts
+ WHERE SPLIT_PART(SPLIT_PART(JSON_EXTRACT(properties, '$.primaryEndpoints.blob'), '//', 2), '.', 1) = '{{ storage_account_name }}'
+ AND subscriptionId = '{{ subscription_id }}'
+ AND resourceGroupName = '{{ resource_group_name }}'
+ AND kind = '{{ storage_kind }}'
+ AND JSON_EXTRACT(sku, '$.name') = 'Standard_LRS'
+ AND JSON_EXTRACT(sku, '$.tier') = 'Standard'
+
+ /*+ exports, retries=5, retry_delay=5 */
+ select json_extract(keys, '$[0].value') as storage_account_key
+ from azure.storage.accounts_keys
+ WHERE resourceGroupName = '{{ resource_group_name }}'
+ AND subscriptionId = '{{ subscription_id }}'
+ AND accountName = '{{ storage_account_name }}'
+
+
+Resource SQL Anchors
+--------------------
+
+Resource SQL files use special anchor comments as directives for the ``stackql-deploy`` tool to indicate the intended operations:
+
+- **/*+ create */**
+ This anchor precedes SQL ``INSERT`` statements for creating new resources.
+
+ .. code-block:: sql
+
+ /*+ create */
+ INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location
+ )
+ SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}'
+
+- **/*+ createorupdate */**
+ Specifies an operation to either create a new resource or update an existing one.
+
+- **/*+ update */**
+ Marks SQL ``UPDATE`` statements intended to modify existing resources.
+
+- **/*+ delete */**
+ Tags SQL ``DELETE`` statements for removing resources from the environment.
+
+Query SQL Anchors
+-----------------
+
+Query SQL files contain SQL statements for testing and validation with the following anchors:
+
+- **/*+ exists */**
+ Used to perform initial checks before a deployment.
+
+ .. code-block:: sql
+
+ /*+ exists */
+ SELECT COUNT(*) as count FROM azure.resources.resource_groups
+ WHERE subscriptionId = '{{ subscription_id }}'
+ AND resourceGroupName = '{{ resource_group_name }}'
+
+- **/*+ statecheck, retries=5, retry_delay=5 */**
+ Post-deployment checks to confirm the success of the operation, with optional ``retries`` and ``retry_delay`` parameters.
+
+ .. code-block:: sql
+
+ /*+ statecheck, retries=5, retry_delay=5 */
+ SELECT COUNT(*) as count FROM azure.resources.resource_groups
+ WHERE subscriptionId = '{{ subscription_id }}'
+ AND resourceGroupName = '{{ resource_group_name }}'
+ AND location = '{{ location }}'
+ AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+- **/*+ exports, retries=5, retry_delay=5 */**
+ Extracts and exports information after a deployment. Similar to post-deploy checks but specifically for exporting data.
+
+
+.. note::
+ The following parameters are used to control the behavior of retry mechanisms in SQL operations:
+
+ - ``retries`` (optional, integer): Defines the number of times a query should be retried upon failure.
+ - ``retry_delay`` (optional, integer): Sets the delay in seconds between each retry attempt.
+
+
+**stackql-deploy** simplifies cloud resource management by treating infrastructure as flexible, dynamically assessed code.
+
+.. _stackql: https://github.com/stackql/stackql
diff --git a/ref-python-packages/stackql-deploy/cicd/setup/setup-env.sh b/ref-python-packages/stackql-deploy/cicd/setup/setup-env.sh
new file mode 100644
index 0000000..d66967a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/cicd/setup/setup-env.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+export REPOSITORY_ROOT="$(realpath ${CURRENT_DIR}/../..)"
+
+python -m venv ${REPOSITORY_ROOT}/.venv
+
+source ${REPOSITORY_ROOT}/.venv/bin/activate
+
+pip install -r ${REPOSITORY_ROOT}/requirements.txt
+
+cd ${REPOSITORY_ROOT} && python setup.py install
+
+chmod +x examples/databricks/all-purpose-cluster/sec/*.sh
+
+pip freeze
+
diff --git a/ref-python-packages/stackql-deploy/docs/Makefile b/ref-python-packages/stackql-deploy/docs/Makefile
new file mode 100644
index 0000000..8bba488
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS ?=
+SPHINXBUILD = sphinx-build
+SPHINXPROJ = pystackql
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/docs/make.bat b/ref-python-packages/stackql-deploy/docs/make.bat
new file mode 100644
index 0000000..bb21b16
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/make.bat
@@ -0,0 +1,36 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+set SPHINXPROJ=simpleble
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/docs/requirements.txt b/ref-python-packages/stackql-deploy/docs/requirements.txt
new file mode 100644
index 0000000..bc89b94
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/requirements.txt
@@ -0,0 +1,5 @@
+sphinx_rtd_theme
+click
+python-dotenv
+jinja2
+pystackql
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/docs/source/build.rst b/ref-python-packages/stackql-deploy/docs/source/build.rst
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/docs/source/conf.py b/ref-python-packages/stackql-deploy/docs/source/conf.py
new file mode 100644
index 0000000..07240ca
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/source/conf.py
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+#
+# Configuration file for the Sphinx documentation builder.
+#
+# This file does only contain a selection of the most common options. For a
+# full list see the documentation:
+# http://www.sphinx-doc.org/en/stable/config
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+sys.path.insert(0, os.path.abspath('../..'))
+
+
+# -- Project information -----------------------------------------------------
+
+project = 'pystackql'
+copyright = '2021-2024, StackQL Studios'
+author = 'StackQL Studios'
+
+# The short X.Y version
+version = ''
+# The full version, including alpha/beta/rc tags
+release = '3.5.4'
+
+
+# -- General configuration ---------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.viewcode',
+ 'sphinx.ext.todo',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['../_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path .
+exclude_patterns = []
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'default'
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['../_static']
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# The default sidebars (for documents that don't match any pattern) are
+# defined by theme itself. Builtin themes are using these templates by
+# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+# 'searchbox.html']``.
+#
+# html_sidebars = {}
+
+
+# -- Options for HTMLHelp output ---------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'stackqldoc'
+
+
+# -- Options for LaTeX output ------------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+
+ 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+
+ 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+
+ 'preamble': '',
+
+ # Latex figure (float) alignment
+
+ 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'stackql.tex', 'stackql Documentation',
+ 'StackQL Studios', 'manual'),
+]
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'stackql', 'stackql Documentation',
+ [author], 1)
+]
+
+
+# -- Options for Texinfo output ----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'stackql', 'stackql Documentation',
+ author, 'stackql', 'Query and interact with cloud providers using SQL.',
+ 'Miscellaneous'),
+]
+
+
+# -- Extension configuration -------------------------------------------------
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/docs/source/global_options.rst b/ref-python-packages/stackql-deploy/docs/source/global_options.rst
new file mode 100644
index 0000000..9393a51
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/source/global_options.rst
@@ -0,0 +1,123 @@
+Global Options for stackql-deploy
+=================================
+
+**stackql-deploy** provides several global options that can be used with any of the commands (`build`, `test`, `teardown`). These options allow you to customize the behavior of the tool according to your needs, such as setting the logging level or specifying an environment file.
+
+Available Global Options
+------------------------
+
+The following are the global options available in **stackql-deploy**:
+
+- **--log-level**
+- **--env-file**
+- **-e, --env**
+- **--dry-run**
+- **--on-failure**
+
+.. note::
+ These options can be combined with any command to alter the behavior of **stackql-deploy**.
+
+**--log-level**
+---------------
+
+Sets the logging level for the operation. This determines the verbosity of the output during command execution.
+
+.. code-block:: bash
+
+ --log-level DEBUG
+
+Valid options include:
+
+- ``DEBUG``: Provides detailed logging for troubleshooting.
+- ``INFO``: Gives informational messages about the process.
+- ``WARNING``: Outputs only warnings and errors.
+- ``ERROR``: Shows only error messages.
+- ``CRITICAL``: Logs critical errors only.
+
+**Example**:
+
+.. code-block:: bash
+
+ stackql-deploy build prod ./my_project --log-level INFO
+
+**--env-file**
+---------------
+
+Specifies a custom environment file that contains environment variables to be loaded before executing a command.
+
+.. code-block:: bash
+
+ --env-file path/to/custom.env
+
+**Example**:
+
+.. code-block:: bash
+
+ stackql-deploy test dev ./my_project --env-file .env.production
+
+**-e, --env**
+-------------
+
+Allows you to specify additional environment variables directly on the command line. This is useful for overriding values in the environment file or providing variables that are only needed occasionally.
+
+.. code-block:: bash
+
+ -e KEY=value -e ANOTHER_KEY=another_value
+
+**Example**:
+
+.. code-block:: bash
+
+ stackql-deploy teardown prod ./my_project -e API_KEY=12345 -e FEATURE_FLAG=enabled
+
+**--dry-run**
+-------------
+
+Executes the command without making any changes to the actual resources. This is particularly useful for testing to see what actions the tool would take without applying them.
+
+.. code-block:: bash
+
+ --dry-run
+
+**Example**:
+
+.. code-block:: bash
+
+ stackql-deploy build prod ./my_project --dry-run
+
+**--on-failure**
+----------------
+
+Defines the action to take if the command encounters an error. This option helps manage the failure behavior, particularly in automated scripts or pipelines.
+
+Valid options are:
+
+- ``rollback``: Attempts to revert changes to the previous state.
+- ``ignore``: Continues execution, ignoring the error.
+- ``error``: Stops execution and exits with an error status.
+
+.. code-block:: bash
+
+ --on-failure rollback
+
+**Example**:
+
+.. code-block:: bash
+
+ stackql-deploy build prod ./my_project --on-failure ignore
+
+Using Global Options
+--------------------
+
+Combine these options as needed to customize the execution of **stackql-deploy** commands. For example:
+
+.. code-block:: bash
+
+ stackql-deploy build prod ./my_project --env-file .env.production --log-level DEBUG --dry-run
+
+This command would initiate a dry run of deploying the `./my_project` with a production environment file, with detailed debug logging enabled.
+
+Summary
+-------
+
+Understanding and utilizing the global options in **stackql-deploy** can significantly enhance your control and flexibility when managing deployments. These options allow you to tailor the tool's operation to fit your specific workflow and environmental requirements.
diff --git a/ref-python-packages/stackql-deploy/docs/source/index.rst b/ref-python-packages/stackql-deploy/docs/source/index.rst
new file mode 100644
index 0000000..998d74c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/source/index.rst
@@ -0,0 +1,10 @@
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ intro
+ global_options
+ build
+ test
+ teardown
+ manifest_file
diff --git a/ref-python-packages/stackql-deploy/docs/source/intro.rst b/ref-python-packages/stackql-deploy/docs/source/intro.rst
new file mode 100644
index 0000000..bb8035c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/source/intro.rst
@@ -0,0 +1,72 @@
+Welcome to stackql-deploy
+=========================
+
+**stackql-deploy** is an innovative command-line interface (CLI) tool designed to manage multi-cloud Infrastructure as Code (IaC) using a model akin to the Data Build Tool (dbt) which is prevalent in the field of analytics engineering. This tool leverages the powerful SQL-like query capabilities of StackQL, allowing you to manage and provision cloud infrastructure with unprecedented ease and clarity.
+
+Why stackql-deploy?
+-------------------
+
+**stackql-deploy** transforms the way teams approach Infrastructure as Code. By treating IaC queries as models, this tool enables incremental, manageable, and scalable deployments, testing, and teardowns of cloud infrastructure across multiple environments. Here are a few reasons why **stackql-deploy** stands out:
+
+- **Model-based Approach**: Inspired by dbt's successful model for data transformations, stackql-deploy applies similar principles to infrastructure management, enabling a structured, version-controlled, and testable approach to deploying cloud resources.
+
+- **Unified Multi-Cloud Management**: With stackql-deploy, manage resources across different cloud providers using a single, unified interface. This eliminates the need to juggle multiple tools and APIs, simplifying the complexity traditionally associated with multi-cloud environments.
+
+- **SQL-like Syntax**: StackQL's native SQL-like syntax makes defining and managing cloud resources as simple as writing a SQL query. This reduces the learning curve for teams familiar with SQL and allows them to leverage existing skills and tools.
+
+- **Dynamic State Management**: Unlike traditional IaC tools that require manual handling of state files, stackql-deploy dynamically determines the state of resources, ensuring deployments are always up-to-date with minimal overhead.
+
+Getting Started
+---------------
+
+**stackql-deploy** is easy to install and configure, making it straightforward to integrate into your existing workflows. Whether you are managing a few cloud resources or orchestrating complex multi-cloud environments, starting with **stackql-deploy** is just a few steps away:
+
+1. **Installation**: Install **stackql-deploy** directly from PyPI using pip:
+
+ .. code-block:: bash
+
+ pip install stackql-deploy
+
+ This will install the latest version of **stackql-deploy** and its dependencies from the Python Package Index.
+
+ .. note::
+
+ **Note for macOS users**: If you encounter an `externally-managed-environment` error or prefer to avoid installing packages globally, it is recommended to use a virtual environment. To create and activate a virtual environment on macOS, run the following commands:
+
+ .. code-block:: bash
+
+ python3 -m venv myenv
+ source myenv/bin/activate
+ pip install stackql-deploy
+
+2. **Quick Example**: Here's a quick example to show how you can deploy a sample resource:
+
+ .. code-block:: bash
+
+ stackql-deploy build prod ./my_project --env-file .env
+
+ This command will deploy resources defined in `./my_project` under the production environment, using environment variables specified in the `.env` file.
+
+3. **Learn More**: Dive deeper into the capabilities of stackql-deploy by exploring the subsequent sections of this documentation, covering everything from detailed command usage to advanced configuration options.
+
+4. **Getting Help**: To see all available commands and options, use the `--help` option:
+
+ .. code-block:: bash
+
+ stackql-deploy --help
+
+5. **Diagnostic Information**: For diagnosing and troubleshooting, use the `info` command to display environment and version information:
+
+ .. code-block:: bash
+
+ stackql-deploy info
+
+ This command will display the version of stackql-deploy, the version of StackQL used, and other pertinent system information, aiding in diagnostics and support.
+
+What's Next?
+------------
+
+- Explore :ref:`manifest-file` to learn how to use a manifest file to define and manage your resources.
+- Check out :doc:`build`, :doc:`test`, and :doc:`teardown` to learn how to use stackql-deploy for deploying, testing, and safely removing your cloud infrastructure.
+
+.. _stackql: https://github.com/stackql/stackql
diff --git a/ref-python-packages/stackql-deploy/docs/source/manifest_file.rst b/ref-python-packages/stackql-deploy/docs/source/manifest_file.rst
new file mode 100644
index 0000000..2af054f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/docs/source/manifest_file.rst
@@ -0,0 +1,90 @@
+.. _manifest-file:
+
+Understanding the Manifest File
+===============================
+
+The manifest file in **stackql-deploy** is used for defining and orchestrating the deployment of cloud resources. Modeled after successful data engineering practices, the manifest file allows you to declaratively specify what resources need to be provisioned, in what order, and with what configuration.
+
+Structure of the Manifest File
+------------------------------
+
+The manifest file, named ``stackql_manifest.yml`` and located in the root of your project directory, is a YAML document which defines resources with properties and dependencies for your cloud/SaaS stack. Here's a breakdown of the key sections:
+
+- **version**: Specifies the version of the manifest format. (*optional*)
+- **name**: A unique name for the stack or project. (*optional - if not specified the project directory name is used*)
+- **description**: A brief description of what the stack is for. (*optional*)
+- **providers**: Lists the cloud providers and their configurations used in the stack.
+- **globals**: Defines global variables that can be reused across the resource definitions.
+- **resources**: Lists the cloud resources to be managed, along with their specific configurations and dependencies.
+
+Here's an example of a simple manifest file:
+
+.. code-block:: yaml
+
+ version: 1
+ name: activity_monitor
+ description: OSS activity monitor stack
+ providers:
+ - azure
+ globals:
+ - name: subscription_id
+ description: Azure subscription ID
+ value: "{{ vars.AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ value: eastus
+ - name: resource_group_name_base
+ value: "activity-monitor"
+ resources:
+ - name: monitor_resource_group
+ description: Azure resource group for activity monitor
+ props:
+ - name: resource_group_name
+ description: Azure resource group name
+ value: "{{ globals.resource_group_name_base }}-{{ globals.stack_env }}"
+
+Using the Manifest File
+-----------------------
+
+**globals**:
+Globals are variables defined at the top level of the manifest file and can be used across multiple resource definitions. They support dynamic values that can be interpolated at runtime using environment variables or other global values.
+
+**resources**:
+Each resource in the ``resources`` section represents a cloud resource that **stackql-deploy** will manage. The resource section includes:
+
+- **name**: Identifier of the resource.
+- **description**: What the resource represents.
+- **props**: Properties or configurations specific to the resource. These can also include conditional logic or environment-specific values.
+
+.. note::
+ The ``stack_env`` is a special global variable for the user-specified environment label — e.g., ``prod``, ``sit``, or ``dev`` — supplied as the environment argument to the ``build``, ``test``, and ``teardown`` commands.
+
+Conditional Logic and Environment-Specific Configurations
+----------------------------------------------------------
+
+You can define environment-specific configurations within the resource properties using nested `values` blocks keyed by the environment name. This allows you to tailor the deployment parameters according to the deployment environment (e.g., production, staging, development):
+
+.. code-block:: yaml
+
+ resources:
+ - name: monitor_resource_group
+ props:
+ - name: resource_group_name
+ values:
+ prd:
+ value: "activity-monitor-prd"
+ dev:
+ value: "activity-monitor-dev"
+
+This configuration enables **stackql-deploy** to dynamically select the appropriate setting based on the `stack_env` provided during the deployment command.
+
+Best Practices for Managing Manifest Files
+------------------------------------------
+
+- **Version Control**: Store your manifest files in a version control system to track changes and manage deployments across different stages of your development lifecycle.
+- **Environment Separation**: Keep separate manifest files or sections for different environments to avoid conflicts and unintended deployments.
+- **Security**: Be mindful of sensitive information in your manifest files. Use environment variables or secure vaults to manage credentials or sensitive configurations.
+
+Summary
+-------
+
+The manifest file is a powerful tool in your **stackql-deploy** arsenal, allowing for precise and declarative infrastructure management. By understanding and utilizing the capabilities of the manifest file, you can significantly enhance the efficiency, repeatability, and maintainability of your cloud resource deployments.
diff --git a/ref-python-packages/stackql-deploy/docs/source/teardown.rst b/ref-python-packages/stackql-deploy/docs/source/teardown.rst
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/docs/source/test.rst b/ref-python-packages/stackql-deploy/docs/source/test.rst
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/README.md b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/README.md
new file mode 100644
index 0000000..f05f129
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/README.md
@@ -0,0 +1,75 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy my_stack --provider=azure` or `stackql-deploy my_stack --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments; these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/aws/aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gateway.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gateway.iql
new file mode 100644
index 0000000..473b4c0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gateway.iql
@@ -0,0 +1,52 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.internet_gateways (
+ Tags,
+ region
+)
+SELECT
+'{{ inet_gateway_tags }}',
+'{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT internet_gateway_id FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.internet_gateways
+WHERE data__Identifier = '{{ internet_gateway_id }}'
+AND region = '{{ region }}';
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql
new file mode 100644
index 0000000..28138a8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_gw_attachment.iql
@@ -0,0 +1,39 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_gateway_attachments (
+ InternetGatewayId,
+ VpcId,
+ region
+)
+SELECT
+ '{{ internet_gateway_id }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_gateway_attachments
+WHERE data__Identifier = 'IGW|{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_route.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_route.iql
new file mode 100644
index 0000000..105b06b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ GatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ internet_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t;
+
+/*+ exports */
+SELECT data__Identifier as inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_route_table.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_route_table.iql
new file mode 100644
index 0000000..6a56af8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_route_table.iql
@@ -0,0 +1,57 @@
+/*+ exists */
+SELECT count(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.route_tables (
+ Tags,
+ VpcId,
+ region
+)
+SELECT
+ '{{ route_table_tags }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT count(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT route_table_id FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.route_tables
+WHERE data__Identifier = '{{ route_table_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_security_group.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_security_group.iql
new file mode 100644
index 0000000..485a761
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_security_group.iql
@@ -0,0 +1,72 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT group_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.security_groups (
+ GroupName,
+ GroupDescription,
+ VpcId,
+ SecurityGroupIngress,
+ SecurityGroupEgress,
+ Tags,
+ region
+)
+SELECT
+ '{{ group_name }}',
+ '{{ group_description }}',
+ '{{ vpc_id }}',
+ '{{ security_group_ingress }}',
+ '{{ security_group_egress }}',
+ '{{ sg_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT group_id,
+security_group_ingress,
+security_group_egress,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports */
+SELECT group_id as 'security_group_id' FROM
+(
+SELECT group_id,
+security_group_ingress,
+security_group_egress,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.security_group_tags
+WHERE region = '{{ region }}'
+AND group_name = '{{ group_name }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY group_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.security_groups
+WHERE data__Identifier = '{{ security_group_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet.iql
new file mode 100644
index 0000000..5f62cb0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet.iql
@@ -0,0 +1,66 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.subnets (
+ VpcId,
+ CidrBlock,
+ MapPublicIpOnLaunch,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ subnet_cidr_block }}',
+ true,
+ '{{ subnet_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ exports */
+SELECT subnet_id, availability_zone FROM
+(
+SELECT subnet_id,
+availability_zone,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnets
+WHERE data__Identifier = '{{ subnet_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql
new file mode 100644
index 0000000..58c80f4
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_subnet_rt_assn.iql
@@ -0,0 +1,42 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.subnet_route_table_associations (
+ RouteTableId,
+ SubnetId,
+ region
+)
+SELECT
+ '{{ route_table_id }}',
+ '{{ subnet_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}'
+) t;
+
+/*+ exports */
+SELECT id as route_table_assn_id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnet_route_table_associations
+WHERE data__Identifier = '{{ route_table_assn_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_vpc.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_vpc.iql
new file mode 100644
index 0000000..35b2733
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_vpc.iql
@@ -0,0 +1,63 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_web_server.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_web_server.iql
new file mode 100644
index 0000000..e479969
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/example_web_server.iql
@@ -0,0 +1,71 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT instance_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.instances (
+ ImageId,
+ InstanceType,
+ SubnetId,
+ SecurityGroupIds,
+ UserData,
+ Tags,
+ region
+)
+SELECT
+ '{{ ami_id }}',
+ '{{ instance_type }}',
+ '{{ instance_subnet_id }}',
+ '{{ sg_ids }}',
+ '{{ user_data | base64_encode }}',
+ '{{ instance_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT instance_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ exports */
+SELECT instance_id, public_dns_name FROM
+(
+SELECT instance_id, public_dns_name,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.instance_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_id = '{{ subnet_id }}'
+GROUP BY instance_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ instance_name }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.instances
+WHERE data__Identifier = '{{ instance_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/get_web_server_url.iql b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/get_web_server_url.iql
new file mode 100644
index 0000000..047bcd5
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/resources/get_web_server_url.iql
@@ -0,0 +1,2 @@
+/*+ exports */
+SELECT 'http://' || '{{ public_dns_name }}' as web_server_url
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/aws-stack/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/stackql_manifest.yml
new file mode 100644
index 0000000..19f6251
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/aws-stack/stackql_manifest.yml
@@ -0,0 +1,153 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "aws-stack"
+description: description for "aws-stack"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge:
+ - global_tags
+ exports:
+ - vpc_id
+ - vpc_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_cidr_block
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ - name: subnet_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-subnet"
+ merge: ['global_tags']
+ exports:
+ - subnet_id
+ - availability_zone
+ - name: example_inet_gateway
+ props:
+ - name: inet_gateway_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-inet-gateway"
+ merge: ['global_tags']
+ exports:
+ - internet_gateway_id
+ - name: example_inet_gw_attachment
+ props: []
+ - name: example_route_table
+ props:
+ - name: route_table_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-route-table"
+ merge: ['global_tags']
+ exports:
+ - route_table_id
+ - name: example_subnet_rt_assn
+ props: []
+ exports:
+ - route_table_assn_id
+ - name: example_inet_route
+ props: []
+ exports:
+ - inet_route_indentifer
+ - name: example_security_group
+ props:
+ - name: group_description
+ value: "web security group for {{ stack_name }} ({{ stack_env }} environment)"
+ - name: group_name
+ value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ - name: sg_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ merge: ['global_tags']
+ - name: security_group_ingress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow HTTP traffic
+ FromPort: 80
+ ToPort: 80
+ IpProtocol: "tcp"
+ - CidrIp: "{{ vpc_cidr_block }}"
+ Description: Allow SSH traffic from the internal network
+ FromPort: 22
+ ToPort: 22
+ IpProtocol: "tcp"
+ - name: security_group_egress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow all outbound traffic
+ FromPort: 0
+ ToPort: 0
+ IpProtocol: "-1"
+ exports:
+ - security_group_id
+ - name: example_web_server
+ props:
+ - name: instance_name
+ value: "{{ stack_name }}-{{ stack_env }}-instance"
+ - name: ami_id
+ value: ami-030a5acd7c996ef60
+ - name: instance_type
+ value: t2.micro
+ - name: instance_subnet_id
+ value: "{{ subnet_id }}"
+ - name: sg_ids
+ value:
+ - "{{ security_group_id }}"
+ - name: user_data
+ value: |
+ #!/bin/bash
+ yum update -y
+ yum install -y httpd
+ systemctl start httpd
+ systemctl enable httpd
+ echo 'StackQL on AWS ' > /var/www/html/index.html
+ echo '' >> /var/www/html/index.html
+ - name: instance_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-instance"
+ merge: ['global_tags']
+ exports:
+ - instance_id
+ - public_dns_name
+ - name: get_web_server_url
+ type: query
+ props: []
+ exports:
+ - web_server_url
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/README.md b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/README.md
new file mode 100644
index 0000000..0b72a5a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/README.md
@@ -0,0 +1,80 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy patch-doc-test --provider=azure` or `stackql-deploy patch-doc-test --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named patch-doc-test to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2 \
+--show-queries
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/aws/patch-doc-test \
+sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/resources/bucket1.iql b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/resources/bucket1.iql
new file mode 100644
index 0000000..b11970b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/resources/bucket1.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT
+COUNT(*) as count
+FROM aws.s3.buckets
+WHERE region = '{{ region }}' AND data__Identifier = '{{ bucket1_name }}'
+
+/*+ create */
+INSERT INTO aws.s3.buckets (
+ BucketName,
+ VersioningConfiguration,
+ Tags,
+ region
+)
+SELECT
+ '{{ bucket1_name }}',
+ '{{ bucket1_versioning_config }}',
+ '{{ bucket1_tags }}',
+ '{{ region }}'
+
+/*+ statecheck, retries=2, retry_delay=1 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+JSON_EQUAL(versioning_configuration, '{{ bucket1_versioning_config }}') as test_versioning_config
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}'
+) t
+WHERE test_versioning_config = 1;
+
+/*+ exports, retries=2, retry_delay=1 */
+SELECT bucket_name as bucket1_name, arn as bucket1_arn FROM
+(
+SELECT
+bucket_name,
+arn
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}'
+) t
+
+/*+ update */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "VersioningConfiguration": bucket1_versioning_config,
+ "Tags": bucket1_tags
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket1_name }}';
+
+/*+ delete */
+DELETE FROM aws.s3.buckets
+WHERE data__Identifier = '{{ bucket1_name }}'
+AND region = '{{ region }}'
diff --git a/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/stackql_manifest.yml
new file mode 100644
index 0000000..0244891
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/aws/patch-doc-test/stackql_manifest.yml
@@ -0,0 +1,34 @@
+version: 1
+name: "patch-doc-test"
+description: description for "patch-doc-test"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: bucket1
+ props:
+ - name: bucket1_name
+ value: "{{ stack_name }}-{{ stack_env }}-bucket1"
+ - name: bucket1_versioning_config
+ value:
+ Status: Enabled
+ - name: bucket1_tags
+ merge:
+ - global_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-bucket1"
+ exports:
+ - bucket1_name
+ - bucket1_arn
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/README.md b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/README.md
new file mode 100644
index 0000000..dc2feac
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/README.md
@@ -0,0 +1,79 @@
+# `stackql-deploy` starter project for `azure`
+
+> for starter projects using other providers, try `stackql-deploy my_stack --provider=aws` or `stackql-deploy my_stack --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `azure` provider:
+
+- [`azure` provider docs](https://stackql.io/registry/azure)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `azure` and `aws` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `azure` provider, `AZURE_TENANT_ID`, `AZURE_CLIENT_ID` and `AZURE_CLIENT_SECRET` must be set, for more information on authentication to `azure` see the [`azure` provider documentation](https://azure.stackql.io/providers/azure).
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+export AZURE_VM_ADMIN_PASSWORD="Your_password_here1"
+stackql-deploy build \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown \
+examples/azure/azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-2a65-43e7-93c2-688bfe4e1468 \
+-e AZURE_VM_ADMIN_PASSWORD=$AZURE_VM_ADMIN_PASSWORD
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nic.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nic.iql
new file mode 100644
index 0000000..27be6fc
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nic.iql
@@ -0,0 +1,35 @@
+/*+ createorupdate */
+INSERT INTO azure.network.interfaces(
+ networkInterfaceName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ nic_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"ipConfigurations": [ {{ nic_ip_config }} ], "networkSecurityGroup": { "id": "{{ network_security_group_id }}"}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
+
+/*+ exports */
+SELECT id as network_interface_id
+FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
+
+/*+ delete */
+DELETE FROM azure.network.interfaces
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkInterfaceName = '{{ nic_name }}';
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nsg.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nsg.iql
new file mode 100644
index 0000000..5d37386
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_nsg.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.network.security_groups(
+ networkSecurityGroupName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ nsg_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"securityRules":{{ security_rules }}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
+AND JSON_EXTRACT(properties, '$.securityRules') IS NOT NULL
+
+/*+ exports */
+SELECT id as network_security_group_id
+FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.security_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND networkSecurityGroupName = '{{ nsg_name }}'
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_public_ip.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_public_ip.iql
new file mode 100644
index 0000000..5636482
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_public_ip.iql
@@ -0,0 +1,37 @@
+/*+ createorupdate */
+INSERT INTO azure.network.public_ip_addresses(
+ publicIpAddressName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ public_ip_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"publicIPAllocationMethod":"Static"}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
+
+/*+ exports */
+SELECT '{{ public_ip_name }}' as public_ip_name,
+JSON_EXTRACT(properties, '$.ipAddress') as public_ip_address,
+id as public_ip_id
+FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.public_ip_addresses
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND publicIpAddressName = '{{ public_ip_name }}'
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_resource_group.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_resource_group.iql
new file mode 100644
index 0000000..dc9c4b6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_resource_group.iql
@@ -0,0 +1,31 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+
+/*+ create */
+INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND location = '{{ location }}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+/*+ exports */
+SELECT '{{ resource_group_name }}' as resource_group_name
+
+/*+ delete */
+DELETE FROM azure.resources.resource_groups
+WHERE resourceGroupName = '{{ resource_group_name }}' AND subscriptionId = '{{ subscription_id }}'
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_subnet.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_subnet.iql
new file mode 100644
index 0000000..fffb317
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_subnet.iql
@@ -0,0 +1,38 @@
+/*+ createorupdate */
+INSERT INTO azure.network.subnets(
+ subnetName,
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__properties
+)
+SELECT
+ '{{ subnet_name }}',
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{"addressPrefix": "{{ subnet_cidr }}"}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressPrefix') = '{{ subnet_cidr }}'
+
+/*+ exports */
+SELECT '{{ subnet_name }}' as subnet_name,
+id as subnet_id
+FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vm_ext.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vm_ext.iql
new file mode 100644
index 0000000..6291d15
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vm_ext.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.compute.virtual_machine_extensions(
+ resourceGroupName,
+ subscriptionId,
+ vmExtensionName,
+ vmName,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ vm_ext_name }}',
+ '{{ vm_name }}',
+ '{{ location }}',
+ '{ "publisher": "Microsoft.Azure.Extensions", "type": "CustomScript", "typeHandlerVersion": "2.1", "settings": { "commandToExecute": "{{ command_to_execute }}"} }',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM azure.compute.virtual_machine_extensions
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmExtensionName = '{{ vm_ext_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ exports */
+SELECT 'http://' || '{{ public_ip_address }}' || ':8080' as web_url
+
+/*+ delete */
+DELETE FROM azure.compute.virtual_machine_extensions
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmExtensionName = '{{ vm_ext_name }}'
+AND vmName = '{{ vm_name }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vnet.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vnet.iql
new file mode 100644
index 0000000..55fc558
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_vnet.iql
@@ -0,0 +1,33 @@
+/*+ createorupdate */
+INSERT INTO azure.network.virtual_networks(
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"addressSpace": {"addressPrefixes":["{{ vnet_cidr_block }}"]}}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressSpace.addressPrefixes[0]') = '{{ vnet_cidr_block }}'
+
+/*+ exports */
+SELECT '{{ vnet_name }}' as vnet_name,
+'{{ vnet_cidr_block }}' as vnet_cidr_block
+
+/*+ delete */
+DELETE FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_web_server.iql b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_web_server.iql
new file mode 100644
index 0000000..a069441
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/example_web_server.iql
@@ -0,0 +1,36 @@
+/*+ createorupdate */
+INSERT INTO azure.compute.virtual_machines(
+ resourceGroupName,
+ subscriptionId,
+ vmName,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ vm_name }}',
+ '{{ location }}',
+ '{"hardwareProfile": {{ hardwareProfile }}, "storageProfile": {{ storageProfile }}, "osProfile": {{ osProfile }}, "networkProfile": {{ networkProfile }}}',
+ '{{ global_tags }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ exports */
+SELECT id as vm_id, '{{ vm_name }}' as vm_name
+FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
+
+/*+ delete */
+DELETE FROM azure.compute.virtual_machines
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND vmName = '{{ vm_name }}'
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/hello-stackql.html b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/hello-stackql.html
new file mode 100644
index 0000000..5454a02
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/resources/hello-stackql.html
@@ -0,0 +1,41 @@
+
+
+
+
+
+ StackQL on Azure
+
+
+
+
+
+
diff --git a/ref-python-packages/stackql-deploy/examples/azure/azure-stack/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/stackql_manifest.yml
new file mode 100644
index 0000000..acba86c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/azure/azure-stack/stackql_manifest.yml
@@ -0,0 +1,154 @@
+#
+# azure starter project manifest file, add and update values as needed
+#
+version: 1
+name: "azure-stack"
+description: description for "azure-stack"
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ description: default location for resources
+ value: eastus
+ - name: admin_password
+ description: vm admin password
+ value: "{{ AZURE_VM_ADMIN_PASSWORD }}"
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stackName: "{{ stack_name }}"
+ stackEnv: "{{ stack_env }}"
+resources:
+ - name: example_resource_group
+ props:
+ - name: resource_group_name
+ value: "{{ stack_name }}-{{ stack_env }}-rg"
+ exports:
+ - resource_group_name
+ - name: example_vnet
+ props:
+ - name: vnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-vnet"
+ - name: vnet_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ exports:
+ - vnet_name
+ - vnet_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-subnet-1"
+ - name: subnet_cidr
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ exports:
+ - subnet_name
+ - subnet_id
+ - name: example_public_ip
+ props:
+ - name: public_ip_name
+ value: "{{ stack_name }}-{{ stack_env }}-public-ip"
+ exports:
+ - public_ip_name
+ - public_ip_id
+ - public_ip_address
+ - name: example_nsg
+ props:
+ - name: nsg_name
+ value: "{{ stack_name }}-{{ stack_env }}-nsg"
+ - name: security_rules
+ value:
+ - name: AllowHTTP
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 100
+ sourceAddressPrefix: "*"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "8080"
+ - name: AllowSSH
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 200
+ sourceAddressPrefix: "{{ vnet_cidr_block }}"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "22"
+ exports:
+ - network_security_group_id
+ - name: example_nic
+ props:
+ - name: nic_name
+ value: "{{ stack_name }}-{{ stack_env }}-nic"
+ - name: nic_ip_config
+ value:
+ name: ipconfig1
+ properties:
+ subnet:
+ id: "{{ subnet_id }}"
+ privateIPAllocationMethod: Dynamic
+ publicIPAddress:
+ id: "{{ public_ip_id }}"
+ exports:
+ - network_interface_id
+ - name: example_web_server
+ props:
+ - name: vm_name
+ value: "{{ stack_name }}-{{ stack_env }}-vm"
+ - name: hardwareProfile
+ value:
+ vmSize: Standard_DS1_v2
+ - name: storageProfile
+ value:
+ imageReference:
+ publisher: Canonical
+ offer: UbuntuServer
+ sku: 18.04-LTS
+ version: latest
+ osDisk:
+ name: "{{ stack_name }}-{{ stack_env }}-vm-disk1"
+ createOption: FromImage
+ managedDisk:
+ storageAccountType: Standard_LRS
+ diskSizeGB: 30
+ - name: osProfile
+ value:
+ computerName: myVM-{{ stack_name }}-{{ stack_env }}
+ adminUsername: azureuser
+ adminPassword: "{{ admin_password}}"
+ linuxConfiguration:
+ disablePasswordAuthentication: false
+ - name: networkProfile
+ value:
+ networkInterfaces:
+ - id: "{{ network_interface_id }}"
+ exports:
+ - vm_name
+ - vm_id
+ - name: example_vm_ext
+ props:
+ - name: vm_ext_name
+ value: "{{ stack_name }}-{{ stack_env }}-microsoft.custom-script-linux"
+ - name: command_to_execute
+ value: |
+ wget -O index.html https://raw.githubusercontent.com/stackql/stackql-deploy/main/examples/azure/azure-stack/resources/hello-stackql.html && nohup busybox httpd -f -p 8080 &
+ exports:
+ - web_url
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/README.md b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/README.md
new file mode 100644
index 0000000..e56f49d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/README.md
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy cmd-specific-auth --provider=azure` or `stackql-deploy cmd-specific-auth --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment} [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named cmd-specific-auth to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown cmd-specific-auth sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/resources/example_vpc.iql b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/resources/example_vpc.iql
new file mode 100644
index 0000000..463dbc1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/resources/example_vpc.iql
@@ -0,0 +1,67 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/stackql_manifest.yml
new file mode 100644
index 0000000..7450964
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/confluent/cmd-specific-auth/stackql_manifest.yml
@@ -0,0 +1,40 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "cmd-specific-auth"
+description: description for "cmd-specific-auth"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ description: example vpc resource
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge: ['global_tags']
+ exports:
+ - vpc_id
+ - vpc_cidr_block
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/README.md b/ref-python-packages/stackql-deploy/examples/databricks/classic/README.md
new file mode 100644
index 0000000..404f7bc
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/README.md
@@ -0,0 +1,245 @@
+# `stackql-deploy` example project for `databricks`
+
+This exercise is to bootstrap a databricks / aws tenancy using `stackql-deploy`. It is an important use case for platform bootstrap and we are excited to perform it with the `stackql` toolchain. We hope you enjoy and find this valuable. Please drop us a note with your forthright opinion on this and check out our issues on github.
+
+## A word of caution
+
+Please take the greatest care in performing this exercise; it will incur expenses, as it involves creating (and destroying) resources which cost money. Please be aware that you **must** cancel your databricks subscription after completing this exercise, otherwise you will incur ongoing expenses. That is, do **not** skip the section [Cancel databricks subscription](#cancel-databricks-subsription). We strongly advise that you verify all resources are destroyed at the conclusion of this exercise. Web pages and certain behaviours may change, so please be thorough in your verification. We will keep this page up-to-date on a best effort basis only. It is very much a case of owner onus applies.
+
+## Manual Setup
+
+Dependencies:
+
+- aws Account Created.
+- Required clickops to set up databricks on aws:
+ - Turn on aws Marketplace `databricks` offering, using [the aws manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), per Figure S1.
+ - Follow the suggested setup flow as directed, from this page. These clickops steps are necessary at this time for initial account setup. The way I followed this, it created a workspace for me at setup, per Figure S3. We shall not use this one and rather, later on we shall dispose of it; because we do not trust auto-created resources out of hand. In the process of creating the databricks subscription, a second aws account is created.
+ - Copy the databricks account id from basically any web page in the databricks console. This is done by clicking on the user icon at the top RHS and then the UI provides a copy shortcut, per Figure U1. Save this locally for later use, expanded below.
+ - We need the aws account id that was created for the databricks subscription. It is not exactly heralded by the web pages, nor is it actively hidden. It can be captured in a couple of places, including the databricks storage account created in the subscription flow, per Figure XA1. Copy and save this locally for later use, expanded below.
+ - Create a service principal to use as a "CICD agent", using the page shown in Figure S4.
+ - Grant the CICD agent account admin role, using the page shown in Figure S5.
+ - Create a secret for the CICD agent, using the page shown in Figure S6. At the time you create this, you will need to safely store the client secret and client id, as prompted by the web page. These will be used below.
+- Setup your virtual environment, from the root of this repository `cicd/setup/setup-env.sh`.
+
+Now, it is convenient to use environment variables for context. Note that for our example, there is only one aws account apropos, however this is not always the case for an active professional, so while `DATABRICKS_AWS_ACCOUNT_ID` is the same as `AWS_ACCOUNT_ID` here, it need not always be the case. Create a file in the path `examples/databricks/all-purpose-cluster/sec/env.sh` (relative to the root of this repository) with contents of the form:
+
+```bash
+#!/usr/bin/env bash
+
+export AWS_REGION='us-east-1' # or wherever you want
+export AWS_ACCOUNT_ID=''
+export DATABRICKS_ACCOUNT_ID=''
+export DATABRICKS_AWS_ACCOUNT_ID=''
+
+# These need to be created by clickops under [the account level user management page](https://accounts.cloud.databricks.com/user-management).
+export DATABRICKS_CLIENT_ID=''
+export DATABRICKS_CLIENT_SECRET=''
+
+## These can be skipped if you run on [aws cloud shell](https://docs.aws.amazon.com/cloudshell/latest/userguide/welcome.html).
+export AWS_SECRET_ACCESS_KEY=''
+export AWS_ACCESS_KEY_ID=''
+
+```
+
+## Optional step: sanity checks with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+source examples/databricks/all-purpose-cluster/convenience.sh
+stackql shell
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here, that will be shared in a corresponding video):
+
+
+```sql
+registry pull databricks_account v24.12.00279;
+registry pull databricks_workspace v24.12.00279;
+
+-- This will fail if accounts, subscription, or credentials are in error.
+select account_id FROM databricks_account.provisioning.credentials WHERE account_id = '';
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+```
+
+For extra credit, you can (asynchronously) delete the unnecessary workspace with `delete from databricks_account.provisioning.workspaces where account_id = '' and workspace_id = '';`, where you obtain the workspace id from the above query. I have noted that due to some response caching it takes a while to disappear from select queries (much longer than disappearance from the web page), and you may want to bounce the `stackql` session to hurry things along. This is not happening on the `stackql` side, but session bouncing forces a token refresh which can help cache busting.
+
+## Lifecycle management
+
+Time to get down to business. From the root of this repository:
+
+```bash
+python3 -m venv myenv
+source examples/databricks/all-purpose-cluster/convenience.sh
+source venv/bin/activate
+pip install stackql-deploy
+```
+
+> alternatively set the `AWS_REGION`, `AWS_ACCOUNT_ID`, `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_AWS_ACCOUNT_ID` along with provider credentials `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `DATABRICKS_CLIENT_ID`, `DATABRICKS_CLIENT_SECRET`
+
+Then, do a dry run (good for catching **some** environmental issues):
+
+```bash
+stackql-deploy build \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--dry-run
+```
+
+You will see a verbose rendition of what `stackql-deploy` intends to do.
+
+
+Now, let us do it for real:
+
+```bash
+stackql-deploy build \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+The output is quite verbose, concludes in:
+
+```
+2025-02-08 12:51:25,914 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - ✅
successfully deployed databricks_workspace
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - deployment completed in 0:04:09.603631
+š build complete
+```
+
+Success!!!
+
+We can also use `stackql-deploy` to assess if our infra is shipshape:
+
+```bash
+stackql-deploy test \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Again, the output is quite verbose, concludes in:
+
+```
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - ✅
test passed for databricks_workspace
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - deployment completed in 0:02:30.255860
+š tests complete (dry run: False)
+```
+
+Success!!!
+
+Now, let us teardown our `stackql-deploy` managed infra:
+
+```bash
+stackql-deploy teardown \
+examples/databricks/all-purpose-cluster dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Takes its time, again verbose, concludes in:
+
+```
+2025-02-08 13:24:17,941 - stackql-deploy - INFO - ✅
successfully deleted AWS_iam_cross_account_role
+2025-02-08 13:24:17,942 - stackql-deploy - INFO - deployment completed in 0:03:21.191788
+🚧 teardown complete (dry run: False)
+```
+
+Success!!!
+
+## Optional step: verify destruction with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+
+source examples/databricks/all-purpose-cluster/convenience.sh
+
+stackql shell
+
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here):
+
+
+```sql
+
+registry pull databricks_account v24.12.00279;
+
+registry pull databricks_workspace v24.12.00279;
+
+
+
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+
+```
+
+## Cancel databricks subsription
+
+This is **very** important.
+
+Go to [the aws Marketplace manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), navigate to databricks and then cancel the subscription.
+
+## Figures
+
+
+
+
+**Figure S1**: Create aws databricks subscription.
+
+---
+
+
+
+**Figure S2**: Awaiting aws databricks subscription resources.
+
+---
+
+
+
+**Figure S3**: Auto provisioned workspace.
+
+---
+
+
+
+**Figure U1**: Capture databricks account id.
+
+---
+
+
+
+**Figure XA1**: Capture cross databricks aws account id.
+
+---
+
+
+
+**Figure S4**: Create CICD agent.
+
+---
+
+
+
+**Figure S5**: Grant account admin to CICD agent.
+
+---
+
+
+
+**Figure S6**: Generate secret for CICD agent.
+
+---
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/auto-provisioned-worskpace.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/auto-provisioned-worskpace.png
new file mode 100644
index 0000000..a9fbcb6
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/auto-provisioned-worskpace.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/awaiting-subscription-resources.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/awaiting-subscription-resources.png
new file mode 100644
index 0000000..9505100
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/awaiting-subscription-resources.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-cross-databricks-aws-account-id.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-cross-databricks-aws-account-id.png
new file mode 100644
index 0000000..6fdb3c4
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-cross-databricks-aws-account-id.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-databricks-account-id.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-databricks-account-id.png
new file mode 100644
index 0000000..c890299
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/capture-databricks-account-id.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-aws-databricks-subscription.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-aws-databricks-subscription.png
new file mode 100644
index 0000000..b5c9e7f
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-aws-databricks-subscription.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-cicd-agent.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-cicd-agent.png
new file mode 100644
index 0000000..faf1643
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/create-cicd-agent.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/generate-secret-ui.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/generate-secret-ui.png
new file mode 100644
index 0000000..daf4f23
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/generate-secret-ui.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/grant-account-admin-cicd-agent.png b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/grant-account-admin-cicd-agent.png
new file mode 100644
index 0000000..f50e0c0
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/classic/assets/grant-account-admin-cicd-agent.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/convenience.sh b/ref-python-packages/stackql-deploy/examples/databricks/classic/convenience.sh
new file mode 100644
index 0000000..d4913f6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/convenience.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+
+CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+export REPOSITORY_ROOT="$(realpath "$CURRENT_DIR/../../..")"
+
+
+if [ -f "${REPOSITORY_ROOT}/examples/databricks/classic/sec/env.sh" ];
+then
+   source "${REPOSITORY_ROOT}/examples/databricks/classic/sec/env.sh"
+fi
+
+if [ "${AWS_REGION}" = "" ];
+then
+ AWS_REGION='us-east-1'
+fi
+
+if [ "${AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "AWS_ACCOUNT_ID must be set" >&2
+   exit 1
+fi
+
+if [ "${DATABRICKS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_AWS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_ID}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_SECRET}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_SECRET must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_SECRET_ACCESS_KEY}" = "" ];
+then
+ echo "AWS_SECRET_ACCESS_KEY must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_ACCESS_KEY_ID}" = "" ];
+then
+ echo "AWS_ACCESS_KEY_ID must be set" >&2
+ exit 1
+fi
+
+export AWS_REGION
+export AWS_ACCOUNT_ID
+export DATABRICKS_ACCOUNT_ID
+export DATABRICKS_AWS_ACCOUNT_ID
+
+export DATABRICKS_CLIENT_ID
+export DATABRICKS_CLIENT_SECRET
+
+
+export AWS_SECRET_ACCESS_KEY
+export AWS_ACCESS_KEY_ID
+
+
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/iam/iam_role.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/iam/iam_role.iql
new file mode 100644
index 0000000..eebcfcd
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/iam/iam_role.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ create */
+INSERT INTO aws.iam.roles (
+ RoleName,
+ Description,
+ Path,
+ AssumeRolePolicyDocument,
+ Policies,
+ Tags,
+ region
+)
+SELECT
+'{{ role_name }}',
+'{{ description }}',
+'{{ path }}',
+'{{ assume_role_policy_document }}',
+'{{ policies }}',
+'{{ global_tags }}',
+'us-east-1'
+
+/*+ update */
+update aws.iam.roles
+set data__PatchDocument = string('{{ {
+ "Description": description,
+ "Path": path,
+ "AssumeRolePolicyDocument": assume_role_policy_document,
+ "Policies": policies,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ max_session_duration,
+ path,
+ JSON_EQUAL(assume_role_policy_document, '{{ assume_role_policy_document }}') as test_assume_role_policy_doc,
+ JSON_EQUAL(policies, '{{ policies }}') as test_policies
+ FROM aws.iam.roles
+ WHERE data__Identifier = '{{ role_name }}')t
+WHERE test_assume_role_policy_doc = 1
+AND test_policies = 1
+AND path = '{{ path }}';
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+'{{ role_name }}' as aws_iam_role_name,
+arn as aws_iam_role_arn
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ delete */
+DELETE FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket.iql
new file mode 100644
index 0000000..a20c908
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket.iql
@@ -0,0 +1,61 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ create */
+INSERT INTO aws.s3.buckets (
+ BucketName,
+ OwnershipControls,
+ BucketEncryption,
+ PublicAccessBlockConfiguration,
+ VersioningConfiguration,
+ Tags,
+ region
+)
+SELECT
+ '{{ bucket_name }}',
+ '{{ ownership_controls }}',
+ '{{ bucket_encryption }}',
+ '{{ public_access_block_configuration }}',
+ '{{ versioning_configuration }}',
+ '{{ global_tags }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "OwnershipControls": ownership_controls,
+ "BucketEncryption": bucket_encryption,
+ "PublicAccessBlockConfiguration": public_access_block_configuration,
+ "VersioningConfiguration": versioning_configuration,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ JSON_EQUAL(ownership_controls, '{{ ownership_controls }}') as test_ownership_controls,
+ JSON_EQUAL(bucket_encryption, '{{ bucket_encryption }}') as test_encryption,
+ JSON_EQUAL(public_access_block_configuration, '{{ public_access_block_configuration }}') as test_public_access_block_configuration,
+ JSON_EQUAL(versioning_configuration, '{{ versioning_configuration }}') as test_versioning_configuration
+ FROM aws.s3.buckets
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ bucket_name }}'
+)t
+WHERE test_ownership_controls = 1
+AND test_encryption = 1
+AND test_public_access_block_configuration = 1
+AND test_versioning_configuration = 1
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+arn as aws_s3_workspace_bucket_arn,
+bucket_name as aws_s3_workspace_bucket_name
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket_policy.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket_policy.iql
new file mode 100644
index 0000000..cead151
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/s3/workspace_bucket_policy.iql
@@ -0,0 +1,36 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.bucket_policies
+WHERE region = '{{ region }}'
+AND bucket = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ create */
+INSERT INTO aws.s3.bucket_policies (
+ Bucket,
+ PolicyDocument,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ aws_s3_workspace_bucket_name }}',
+ '{{ policy_document }}',
+ '{{ uuid() }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.bucket_policies
+set data__PatchDocument = string('{{ {
+ "PolicyDocument": policy_document
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ JSON_EQUAL(policy_document, '{{ policy_document }}') as test_policy_document
+ FROM aws.s3.bucket_policies
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}')t
+WHERE test_policy_document = 1;
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/elastic_ip.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/elastic_ip.iql
new file mode 100644
index 0000000..d4dd982
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/elastic_ip.iql
@@ -0,0 +1,56 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT allocation_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.eips (
+ NetworkBorderGroup,
+ Tags,
+ ClientToken,
+ region
+)
+SELECT
+'{{ region }}',
+'{{ tags }}',
+'{{ idempotency_token }}',
+'{{ region }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT allocation_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT allocation_id as eip_allocation_id, public_ip as eip_public_id FROM
+(
+SELECT allocation_id, public_ip,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.eip_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ delete */
+DELETE FROM aws.ec2.eips
+WHERE data__Identifier = '{{ eip_public_id }}|{{ eip_allocation_id}}'
+AND region = '{{ region }}'
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/get_main_route_table_id.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/get_main_route_table_id.iql
new file mode 100644
index 0000000..7679dd2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/get_main_route_table_id.iql
@@ -0,0 +1,6 @@
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+route_table_id as main_route_table_id
+FROM aws.ec2.route_tables
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gateway.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gateway.iql
new file mode 100644
index 0000000..dc42032
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gateway.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.internet_gateways (
+ Tags,
+ ClientToken,
+ region
+)
+SELECT
+'{{ tags }}',
+'{{ idempotency_token }}',
+'{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT internet_gateway_id FROM
+(
+SELECT internet_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.internet_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY internet_gateway_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.internet_gateways
+WHERE data__Identifier = '{{ internet_gateway_id }}'
+AND region = '{{ region }}';
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gw_attachment.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gw_attachment.iql
new file mode 100644
index 0000000..31b9d25
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_gw_attachment.iql
@@ -0,0 +1,39 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_gateway_attachments (
+ InternetGatewayId,
+ VpcId,
+ region
+)
+SELECT
+ '{{ internet_gateway_id }}',
+ '{{ vpc_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+attachment_type,
+vpc_id
+FROM aws.ec2.vpc_gateway_attachments
+WHERE region = '{{ region }}'
+AND internet_gateway_id = '{{ internet_gateway_id }}'
+AND vpc_id = '{{ vpc_id }}'
+) t
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_gateway_attachments
+WHERE data__Identifier = 'IGW|{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_route.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_route.iql
new file mode 100644
index 0000000..b46cc0f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ GatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ internet_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT data__Identifier as inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_gateway.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_gateway.iql
new file mode 100644
index 0000000..081fbd2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_gateway.iql
@@ -0,0 +1,53 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.nat_gateways (
+ AllocationId,
+ SubnetId,
+ Tags,
+ region
+)
+SELECT
+ '{{ eip_allocation_id }}',
+ '{{ nat_subnet_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT nat_gateway_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.nat_gateway_tags
+WHERE region = '{{ region }}'
+GROUP BY allocation_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.nat_gateways
+WHERE data__Identifier = '{{ nat_gateway_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_inet_route.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_inet_route.iql
new file mode 100644
index 0000000..9e750f6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/nat_inet_route.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.routes (
+ DestinationCidrBlock,
+ NatGatewayId,
+ RouteTableId,
+ region
+)
+SELECT
+ '0.0.0.0/0',
+ '{{ nat_gateway_id }}',
+ '{{ route_table_id }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT data__Identifier
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT data__Identifier as nat_inet_route_indentifer
+FROM aws.ec2.routes
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ route_table_id }}|0.0.0.0/0';
+
+/*+ delete */
+DELETE FROM aws.ec2.routes
+WHERE data__Identifier = '{{ nat_inet_route_indentifer }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/route_table.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/route_table.iql
new file mode 100644
index 0000000..7b0aa76
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/route_table.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.route_tables (
+ VpcId,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT route_table_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.route_table_tags
+WHERE region = '{{ region }}'
+GROUP BY route_table_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+AND json_extract(tags, '$.Name') = '{{ route_table_name }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.route_tables
+WHERE data__Identifier = '{{ route_table_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group.iql
new file mode 100644
index 0000000..15e9061
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+
+/*+ create */
+INSERT INTO aws.ec2.security_groups (
+ GroupName,
+ GroupDescription,
+ VpcId,
+ Tags,
+ region
+)
+SELECT
+ '{{ group_name }}',
+ '{{ group_description }}',
+ '{{ vpc_id }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+AND group_description = '{{ group_description }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT group_id as security_group_id
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND group_name = '{{ group_name }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.security_groups
+WHERE data__Identifier = '{{ security_group_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group_rules.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group_rules.iql
new file mode 100644
index 0000000..62f79eb
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/security_group_rules.iql
@@ -0,0 +1,27 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.security_groups
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ security_group_id }}'
+
+/*+ createorupdate */
+update aws.ec2.security_groups
+set data__PatchDocument = string('{{ {
+ "SecurityGroupIngress": security_group_ingress,
+ "SecurityGroupEgress": security_group_egress
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ security_group_id }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+    SELECT
+    JSON_EQUAL(security_group_ingress, '{{ security_group_ingress }}') as ingress_test,
+    JSON_EQUAL(security_group_egress, '{{ security_group_egress }}') as egress_test
+    FROM aws.ec2.security_groups
+    WHERE region = '{{ region }}'
+    AND data__Identifier = '{{ security_group_id }}'
+) t
+WHERE ingress_test = 1
+AND egress_test = 1;
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet.iql
new file mode 100644
index 0000000..83667f5
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet.iql
@@ -0,0 +1,43 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}'
+
+/*+ create */
+INSERT INTO aws.ec2.subnets (
+ VpcId,
+ CidrBlock,
+ AvailabilityZone,
+ MapPublicIpOnLaunch,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ cidr_block }}',
+ '{{ availability_zone }}',
+ false,
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}'
+AND availability_zone = '{{ availability_zone }}';
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT subnet_id
+FROM aws.ec2.subnets
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+AND cidr_block = '{{ cidr_block }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnets
+WHERE data__Identifier = '{{ subnet_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet_rt_assn.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet_rt_assn.iql
new file mode 100644
index 0000000..d0c8b33
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/subnet_rt_assn.iql
@@ -0,0 +1,34 @@
+/*+ exists */
+select regexp_like(associationSet, '.*{{ subnet_id }}.*') as count from
+aws.ec2_native.route_tables where region = '{{ region }}'
+and routeTableId = '{{ route_table_id }}'
+
+/*+ create */
+INSERT INTO aws.ec2.subnet_route_table_associations (
+ RouteTableId,
+ SubnetId,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ route_table_id }}',
+ '{{ subnet_id }}',
+ '{{ idempotency_token }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+select regexp_like(associationSet, '.*{{ subnet_id }}.*') as count from
+aws.ec2_native.route_tables where region = '{{ region }}'
+and routeTableId = '{{ route_table_id }}'
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT id as route_table_assn_id
+FROM aws.ec2.subnet_route_table_associations
+WHERE region = '{{ region }}'
+AND route_table_id = '{{ route_table_id }}'
+AND subnet_id = '{{ subnet_id }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnet_route_table_associations
+WHERE data__Identifier = '{{ route_table_assn_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/tag_main_vpc_route_table.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/tag_main_vpc_route_table.iql
new file mode 100644
index 0000000..cc03c2a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/tag_main_vpc_route_table.iql
@@ -0,0 +1,7 @@
+/*+ command */
+update aws.ec2.route_tables
+set data__PatchDocument = string('{{ {
+ "Tags": tags
+ } | generate_patch_document }}')
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ main_route_table_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc.iql
new file mode 100644
index 0000000..56e1c54
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+ SELECT vpc_id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_tags
+ WHERE region = '{{ region }}'
+ AND cidr_block = '{{ cidr_block }}'
+ GROUP BY vpc_id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ cidr_block }}',
+ '{{ tags }}',
+ true,
+ true,
+ '{{ idempotency_token }}',
+ '{{ region }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+ SELECT vpc_id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_tags
+ WHERE region = '{{ region }}'
+ AND cidr_block = '{{ cidr_block }}'
+ GROUP BY vpc_id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id}}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc_endpoint.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc_endpoint.iql
new file mode 100644
index 0000000..d40f522
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/aws/vpc/vpc_endpoint.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+ SELECT id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_endpoint_tags
+ WHERE region = '{{ region }}'
+ AND service_name = '{{ service_name }}'
+ GROUP BY id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpc_endpoints (
+ ServiceName,
+ VpcEndpointType,
+ VpcId,
+ RouteTableIds,
+ Tags,
+ region
+)
+SELECT
+ '{{ service_name }}',
+ '{{ vpc_endpoint_type }}',
+ '{{ vpc_id }}',
+ '{{ route_table_ids }}',
+ '{{ tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+ SELECT id,
+ json_group_object(tag_key, tag_value) as tags
+ FROM aws.ec2.vpc_endpoint_tags
+ WHERE region = '{{ region }}'
+ AND service_name = '{{ service_name }}'
+ GROUP BY id
+ HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+ AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+ AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT id as s3_gateway_endpoint_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_endpoint_tags
+WHERE region = '{{ region }}'
+AND service_name = '{{ service_name }}'
+GROUP BY id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.vpc_endpoints
+WHERE data__Identifier = '{{ s3_gateway_endpoint_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/credentials.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/credentials.iql
new file mode 100644
index 0000000..c0d8327
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/credentials.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.credentials (
+account_id,
+data__credentials_name,
+data__aws_credentials
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ credentials_name }}',
+'{{ aws_credentials }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+credentials_id
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+AND JSON_EXTRACT(aws_credentials, '$.sts_role.role_arn') = '{{ aws_iam_cross_account_role_arn }}'
+) t
+
+/*+ exports */
+SELECT
+'{{ credentials_name }}' as databricks_credentials_name,
+credentials_id as databricks_credentials_id,
+JSON_EXTRACT(aws_credentials, '$.sts_role.external_id') as databricks_role_external_id
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}' AND
+credentials_id = '{{ databricks_credentials_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/get_users.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/get_users.iql
new file mode 100644
index 0000000..2a978d7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/get_users.iql
@@ -0,0 +1,6 @@
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+JSON_GROUP_ARRAY(JSON_OBJECT('value', id)) as databricks_workspace_group_members
+FROM databricks_account.iam.users
+WHERE account_id = '{{ databricks_account_id }}'
+AND userName in {{ users | sql_list }};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/network.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/network.iql
new file mode 100644
index 0000000..45e0b0a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/network.iql
@@ -0,0 +1,46 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.networks (
+account_id,
+data__network_name,
+data__vpc_id,
+data__subnet_ids,
+data__security_group_ids
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_network_name }}',
+'{{ vpc_id }}',
+'{{ subnet_ids }}',
+'{{ security_group_ids }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+JSON_EQUAL(subnet_ids, '{{ subnet_ids }}') as subnet_test,
+JSON_EQUAL(security_group_ids, '{{ security_group_ids }}') as sg_test
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+AND vpc_id = '{{ vpc_id }}'
+AND subnet_test = 1
+AND sg_test = 1
+) t
+
+/*+ exports */
+SELECT
+network_id as databricks_network_id
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}' AND
+network_name = '{{ databricks_network_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}' AND
+network_id = '{{ databricks_network_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/storage_configuration.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/storage_configuration.iql
new file mode 100644
index 0000000..4e60cfc
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/storage_configuration.iql
@@ -0,0 +1,35 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.storage (
+account_id,
+data__storage_configuration_name,
+data__root_bucket_info
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ storage_configuration_name }}',
+'{{ root_bucket_info }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+AND JSON_EXTRACT(root_bucket_info, '$.bucket_name') = '{{ aws_s3_workspace_bucket_name }}'
+
+/*+ exports */
+SELECT
+storage_configuration_id as databricks_storage_configuration_id
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}' AND
+storage_configuration_id = '{{ databricks_storage_configuration_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/update_group_membership.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/update_group_membership.iql
new file mode 100644
index 0000000..375d926
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/update_group_membership.iql
@@ -0,0 +1,6 @@
+/*+ command */
+update databricks_account.iam.groups
+set data__schemas = '["urn:ietf:params:scim:api:messages:2.0:PatchOp"]',
+data__Operations = '[{"op": "replace", "path": "members", "value": {{ databricks_workspace_group_members }} }]'
+WHERE account_id = '{{ databricks_account_id }}'
+AND id = '{{ databricks_group_id }}';
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace.iql
new file mode 100644
index 0000000..9da2dea
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace.iql
@@ -0,0 +1,44 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.workspaces (
+account_id,
+data__workspace_name,
+data__aws_region,
+data__credentials_id,
+data__storage_configuration_id,
+data__pricing_tier
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ workspace_name }}',
+'{{ aws_region }}',
+'{{ credentials_id }}',
+'{{ storage_configuration_id }}',
+'{{ pricing_tier }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+AND aws_region = '{{ aws_region }}'
+AND credentials_id = '{{ credentials_id }}'
+AND storage_configuration_id = '{{ storage_configuration_id }}'
+AND pricing_tier = '{{ pricing_tier }}'
+
+/*+ exports */
+SELECT workspace_id AS databricks_workspace_id,
+deployment_name AS databricks_deployment_name
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_group.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_group.iql
new file mode 100644
index 0000000..4d3494a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_group.iql
@@ -0,0 +1,31 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.iam.groups (
+account_id,
+data__displayName
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ display_name }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ exports */
+SELECT id AS databricks_group_id
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}' AND
+id = '{{ databricks_group_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_permission_assignments.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_permission_assignments.iql
new file mode 100644
index 0000000..00387e3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_account/workspace_permission_assignments.iql
@@ -0,0 +1,32 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ createorupdate */
+INSERT INTO databricks_account.iam.workspace_permission_assignments (
+account_id,
+principal_id,
+workspace_id,
+data__permissions
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_group_id }}',
+'{{ databricks_workspace_id }}',
+'["ADMIN"]'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ delete */
+DELETE FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+principal_id = '{{ databricks_group_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_workspace/all_purpose_cluster.iql b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_workspace/all_purpose_cluster.iql
new file mode 100644
index 0000000..44b3703
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/resources/databricks_workspace/all_purpose_cluster.iql
@@ -0,0 +1,52 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ create */
+INSERT INTO databricks_workspace.compute.clusters (
+deployment_name,
+data__cluster_name,
+data__num_workers,
+data__is_single_node,
+data__kind,
+data__spark_version,
+data__node_type_id,
+data__data_security_mode,
+data__runtime_engine,
+data__single_user_name,
+data__aws_attributes,
+data__custom_tags
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ cluster_name }}',
+ {{ num_workers }},
+ {{ is_single_node }},
+'{{ kind }}',
+'{{ spark_version }}',
+'{{ node_type_id }}',
+'{{ data_security_mode }}',
+'{{ runtime_engine }}',
+'{{ single_user_name }}',
+'{{ aws_attributes }}',
+'{{ custom_tags }}'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ exports */
+SELECT cluster_id AS databricks_cluster_id,
+state AS databricks_cluster_state
+FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_name = '{{ cluster_name }}'
+
+/*+ delete */
+DELETE FROM databricks_workspace.compute.clusters
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND cluster_id = '{{ databricks_cluster_id }}'
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/sec/.gitignore b/ref-python-packages/stackql-deploy/examples/databricks/classic/sec/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/sec/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/classic/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/databricks/classic/stackql_manifest.yml
new file mode 100644
index 0000000..f9be4d1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/classic/stackql_manifest.yml
@@ -0,0 +1,1059 @@
+version: 1
+name: "databricks-all-purpose-cluster"
+description: creates a databricks workspace and all-purpose cluster
+providers:
+ - aws
+ - databricks_account
+ - databricks_workspace
+globals:
+ - name: databricks_account_id
+ description: databricks account id
+ value: "{{ DATABRICKS_ACCOUNT_ID }}"
+ - name: databricks_aws_account_id
+ description: databricks AWS account id
+ value: "{{ DATABRICKS_AWS_ACCOUNT_ID }}"
+ - name: aws_account
+ description: aws_account id
+ value: "{{ AWS_ACCOUNT_ID }}"
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+# ====================================================================================
+# AWS IAM
+# ====================================================================================
+
+ - name: aws/iam/cross_account_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: ""
+ Effect: "Allow"
+ Principal:
+ AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ databricks_account_id }}"
+ - name: description
+ value: 'allows Databricks to access resources in ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyDocument:
+ Statement:
+ - Sid: Stmt1403287045000
+ Effect: Allow
+ Action:
+ - "ec2:AllocateAddress"
+ - "ec2:AssociateDhcpOptions"
+ - "ec2:AssociateIamInstanceProfile"
+ - "ec2:AssociateRouteTable"
+ - "ec2:AttachInternetGateway"
+ - "ec2:AttachVolume"
+ - "ec2:AuthorizeSecurityGroupEgress"
+ - "ec2:AuthorizeSecurityGroupIngress"
+ - "ec2:CancelSpotInstanceRequests"
+ - "ec2:CreateDhcpOptions"
+ - "ec2:CreateInternetGateway"
+ - "ec2:CreateKeyPair"
+ - "ec2:CreateNatGateway"
+ - "ec2:CreatePlacementGroup"
+ - "ec2:CreateRoute"
+ - "ec2:CreateRouteTable"
+ - "ec2:CreateSecurityGroup"
+ - "ec2:CreateSubnet"
+ - "ec2:CreateTags"
+ - "ec2:CreateVolume"
+ - "ec2:CreateVpc"
+ - "ec2:CreateVpcEndpoint"
+ - "ec2:DeleteDhcpOptions"
+ - "ec2:DeleteInternetGateway"
+ - "ec2:DeleteKeyPair"
+ - "ec2:DeleteNatGateway"
+ - "ec2:DeletePlacementGroup"
+ - "ec2:DeleteRoute"
+ - "ec2:DeleteRouteTable"
+ - "ec2:DeleteSecurityGroup"
+ - "ec2:DeleteSubnet"
+ - "ec2:DeleteTags"
+ - "ec2:DeleteVolume"
+ - "ec2:DeleteVpc"
+ - "ec2:DeleteVpcEndpoints"
+ - "ec2:DescribeAvailabilityZones"
+ - "ec2:DescribeIamInstanceProfileAssociations"
+ - "ec2:DescribeInstanceStatus"
+ - "ec2:DescribeInstances"
+ - "ec2:DescribeInternetGateways"
+ - "ec2:DescribeNatGateways"
+ - "ec2:DescribePlacementGroups"
+ - "ec2:DescribePrefixLists"
+ - "ec2:DescribeReservedInstancesOfferings"
+ - "ec2:DescribeRouteTables"
+ - "ec2:DescribeSecurityGroups"
+ - "ec2:DescribeSpotInstanceRequests"
+ - "ec2:DescribeSpotPriceHistory"
+ - "ec2:DescribeSubnets"
+ - "ec2:DescribeVolumes"
+ - "ec2:DescribeVpcs"
+ - "ec2:DescribeVpcAttribute"
+ - "ec2:DescribeNetworkAcls"
+ - "ec2:DetachInternetGateway"
+ - "ec2:DisassociateIamInstanceProfile"
+ - "ec2:DisassociateRouteTable"
+ - "ec2:ModifyVpcAttribute"
+ - "ec2:ReleaseAddress"
+ - "ec2:ReplaceIamInstanceProfileAssociation"
+ - "ec2:ReplaceRoute"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RevokeSecurityGroupEgress"
+ - "ec2:RevokeSecurityGroupIngress"
+ - "ec2:RunInstances"
+ - "ec2:TerminateInstances"
+ Resource:
+ - "*"
+ - Effect: Allow
+ Action:
+ - "iam:CreateServiceLinkedRole"
+ - "iam:PutRolePolicy"
+ Resource:
+ - arn:aws:iam::*:role/aws-service-role/spot.amazonaws.com/AWSServiceRoleForEC2Spot
+ Condition:
+ StringLike:
+ "iam:AWSServiceName": spot.amazonaws.com
+ Version: '2012-10-17'
+ PolicyName: "{{ stack_name }}-{{ stack_env }}-policy"
+ exports:
+ - aws_iam_role_name: aws_iam_cross_account_role_name
+ - aws_iam_role_arn: aws_iam_cross_account_role_arn
+
+ - name: databricks_account/credentials
+ props:
+ - name: credentials_name
+ value: "{{ stack_name }}-{{ stack_env }}-credentials"
+ - name: aws_credentials
+ value:
+ sts_role:
+ role_arn: "{{ aws_iam_cross_account_role_arn }}"
+ exports:
+ - databricks_credentials_name
+ - databricks_credentials_id
+ - databricks_role_external_id
+
+ - name: aws/iam/databricks_compute_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-compute-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Action: "sts:AssumeRole"
+ Effect: "Allow"
+ Principal:
+ AWS: "{{ 'arn:aws:iam::314146311478:root' if trustInternalAccount == 'true' else 'arn:aws:iam::414351767826:root' }}"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ databricks_account_id }}"
+ - name: description
+ value: 'allows Databricks to access compute resources in ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyName: "Base"
+ PolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: "CreateEC2ResourcesWithRequestTag"
+ Effect: "Allow"
+ Action:
+ - "ec2:CreateFleet"
+ - "ec2:CreateLaunchTemplate"
+ - "ec2:CreateVolume"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RunInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ aws:RequestTag/Vendor: "Databricks"
+ - Sid: "AllowDatabricksTagOnCreate"
+ Effect: "Allow"
+ Action: ["ec2:CreateTags"]
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:CreateAction:
+ - "CreateFleet"
+ - "CreateLaunchTemplate"
+ - "CreateVolume"
+ - "RequestSpotInstances"
+ - "RunInstances"
+ - Sid: "UpdateByResourceTags"
+ Effect: "Allow"
+ Action:
+ - "ec2:AssignPrivateIpAddresses"
+ - "ec2:AssociateIamInstanceProfile"
+ - "ec2:AttachVolume"
+ - "ec2:AuthorizeSecurityGroupEgress"
+ - "ec2:AuthorizeSecurityGroupIngress"
+ - "ec2:CancelSpotInstanceRequests"
+ - "ec2:CreateFleet"
+ - "ec2:CreateLaunchTemplate"
+ - "ec2:CreateLaunchTemplateVersion"
+ - "ec2:CreateVolume"
+ - "ec2:DetachVolume"
+ - "ec2:DisassociateIamInstanceProfile"
+ - "ec2:ModifyFleet"
+ - "ec2:ModifyLaunchTemplate"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RevokeSecurityGroupEgress"
+ - "ec2:RevokeSecurityGroupIngress"
+ - "ec2:RunInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "GetByResourceTags"
+ Effect: "Allow"
+ Action: ["ec2:GetLaunchTemplateData"]
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "DeleteByResourceTags"
+ Effect: "Allow"
+ Action:
+ - "ec2:DeleteFleets"
+ - "ec2:DeleteLaunchTemplate"
+ - "ec2:DeleteLaunchTemplateVersions"
+ - "ec2:DeleteTags"
+ - "ec2:DeleteVolume"
+ - "ec2:TerminateInstances"
+ Resource: ["*"]
+ Condition:
+ StringEquals:
+ ec2:ResourceTag/Vendor: "Databricks"
+ - Sid: "DescribeResources"
+ Effect: "Allow"
+ Action:
+ - "ec2:DescribeAvailabilityZones"
+ - "ec2:DescribeFleets"
+ - "ec2:DescribeIamInstanceProfileAssociations"
+ - "ec2:DescribeInstances"
+ - "ec2:DescribeInstanceStatus"
+ - "ec2:DescribeInternetGateways"
+ - "ec2:DescribeLaunchTemplates"
+ - "ec2:DescribeLaunchTemplateVersions"
+ - "ec2:DescribeNatGateways"
+ - "ec2:DescribeNetworkAcls"
+ - "ec2:DescribePrefixLists"
+ - "ec2:DescribeReservedInstancesOfferings"
+ - "ec2:DescribeRouteTables"
+ - "ec2:DescribeSecurityGroups"
+ - "ec2:DescribeSpotInstanceRequests"
+ - "ec2:DescribeSpotPriceHistory"
+ - "ec2:DescribeSubnets"
+ - "ec2:DescribeVolumes"
+ - "ec2:DescribeVpcs"
+ - "ec2:GetSpotPlacementScores"
+ Resource: ["*"]
+ exports:
+ - aws_iam_role_name: databricks_compute_role_name
+ - aws_iam_role_arn: databricks_compute_role_arn
+
+# ====================================================================================
+# UC Metastore with KMS Encryption
+# ====================================================================================
+
+ # - name: aws/kms/metastore_key
+ # props:
+ # - name: description
+ # value: "KMS key for {{ stack_name }}-{{ stack_env }} Unity Catalog metastore encryption"
+ # - name: key_policy
+ # value:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Sid: "Enable IAM User Permissions"
+ # Effect: "Allow"
+ # Principal:
+ # AWS: "arn:aws:iam::{{ aws_account }}:root"
+ # Action: "kms:*"
+ # Resource: "*"
+ # - Sid: "Allow Metastore Role to Use Key"
+ # Effect: "Allow"
+ # Principal:
+ # AWS: "arn:aws:iam::{{ aws_account }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"
+ # Action:
+ # - "kms:Decrypt"
+ # - "kms:Encrypt"
+ # - "kms:GenerateDataKey"
+ # - "kms:DescribeKey"
+ # - "kms:CreateGrant"
+ # - "kms:RetireGrant"
+ # Resource: "*"
+ # - Sid: "Allow Databricks to Use Key"
+ # Effect: "Allow"
+ # Principal:
+ # AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ # Action:
+ # - "kms:Decrypt"
+ # - "kms:GenerateDataKey"
+ # - "kms:DescribeKey"
+ # Resource: "*"
+ # Condition:
+ # StringEquals:
+ # "kms:ViaService": "s3.{{ region }}.amazonaws.com"
+ # - name: tags
+ # value:
+ # - Key: Purpose
+ # Value: "UC Metastore Encryption"
+ # merge:
+ # - global_tags
+ # exports:
+ # - key_id: metastore_kms_key_id
+ # - key_arn: metastore_kms_key_arn
+
+ # - name: aws/kms/metastore_key_alias
+ # props:
+ # - name: alias_name
+ # value: "alias/{{ stack_name }}-{{ stack_env }}-metastore"
+ # - name: target_key_id
+ # value: "{{ metastore_kms_key_id }}"
+
+ # - name: aws/s3/metastore_bucket
+ # props:
+ # - name: bucket_name
+ # value: "{{ stack_name }}-{{ stack_env }}-metastore"
+ # - name: ownership_controls
+ # value:
+ # Rules:
+ # - ObjectOwnership: "BucketOwnerPreferred"
+ # - name: bucket_encryption
+ # value:
+ # ServerSideEncryptionConfiguration:
+ # - BucketKeyEnabled: true
+ # ServerSideEncryptionByDefault:
+ # SSEAlgorithm: "aws:kms"
+ # KMSMasterKeyID: "{{ metastore_kms_key_arn }}"
+ # - name: public_access_block_configuration
+ # value:
+ # BlockPublicAcls: true
+ # IgnorePublicAcls: true
+ # BlockPublicPolicy: true
+ # RestrictPublicBuckets: true
+ # - name: versioning_configuration
+ # value:
+ # Status: "Enabled"
+ # - name: lifecycle_configuration
+ # value:
+ # Rules:
+ # - Id: "DeleteOldVersions"
+ # Status: "Enabled"
+ # NoncurrentVersionExpiration:
+ # NoncurrentDays: 30
+ # AbortIncompleteMultipartUpload:
+ # DaysAfterInitiation: 7
+ # - name: logging_configuration
+ # value:
+ # TargetBucket: "{{ stack_name }}-{{ stack_env }}-logs"
+ # TargetPrefix: "s3-access-logs/metastore/"
+ # - name: tags
+ # value:
+ # - Key: Purpose
+ # Value: "Unity Catalog Metastore"
+ # - Key: DataClassification
+ # Value: "Metadata"
+ # merge:
+ # - global_tags
+ # exports:
+ # - bucket_name: aws_s3_metastore_bucket_name
+ # - bucket_arn: aws_s3_metastore_bucket_arn
+
+ # - name: aws/iam/metastore_access_role
+ # file: aws/iam/iam_role.iql
+ # props:
+ # - name: role_name
+ # value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+ # - name: assume_role_policy_document
+ # value:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Effect: "Allow"
+ # Principal:
+ # AWS:
+ # - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+ # Action: "sts:AssumeRole"
+ # Condition:
+ # StringEquals:
+ # sts:ExternalId: "0000" # Placeholder
+ # - name: description
+ # value: 'Unity Catalog metastore access role for ({{ stack_name }}-{{ stack_env }})'
+ # - name: path
+ # value: '/'
+ # - name: policies
+ # value:
+ # - PolicyName: "MetastoreS3Access"
+ # PolicyDocument:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Sid: "S3MetastoreBucketAccess"
+ # Effect: "Allow"
+ # Action:
+ # - "s3:GetObject"
+ # - "s3:PutObject"
+ # - "s3:DeleteObject"
+ # - "s3:ListBucket"
+ # - "s3:GetBucketLocation"
+ # - "s3:GetLifecycleConfiguration"
+ # - "s3:PutLifecycleConfiguration"
+ # - "s3:ListBucketMultipartUploads"
+ # - "s3:ListMultipartUploadParts"
+ # - "s3:AbortMultipartUpload"
+ # Resource:
+ # - "{{ aws_s3_metastore_bucket_arn }}/*"
+ # - "{{ aws_s3_metastore_bucket_arn }}"
+ # - Sid: "AssumeRoleSelfTrust"
+ # Effect: "Allow"
+ # Action: ["sts:AssumeRole"]
+ # Resource: ["arn:aws:iam::{{ aws_account }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"]
+ # - PolicyName: "MetastoreKMSAccess"
+ # PolicyDocument:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Sid: "KMSKeyAccess"
+ # Effect: "Allow"
+ # Action:
+ # - "kms:Decrypt"
+ # - "kms:Encrypt"
+ # - "kms:GenerateDataKey"
+ # - "kms:DescribeKey"
+ # - "kms:CreateGrant"
+ # - "kms:RetireGrant"
+ # Resource:
+ # - "{{ metastore_kms_key_arn }}"
+ # Condition:
+ # StringEquals:
+ # "kms:ViaService": "s3.{{ region }}.amazonaws.com"
+ # - PolicyName: "MetastoreFileEvents"
+ # PolicyDocument:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Sid: "ManagedFileEventsSetupStatement"
+ # Effect: "Allow"
+ # Action:
+ # - "s3:GetBucketNotification"
+ # - "s3:PutBucketNotification"
+ # - "sns:ListSubscriptionsByTopic"
+ # - "sns:GetTopicAttributes"
+ # - "sns:SetTopicAttributes"
+ # - "sns:CreateTopic"
+ # - "sns:TagResource"
+ # - "sns:Publish"
+ # - "sns:Subscribe"
+ # - "sqs:CreateQueue"
+ # - "sqs:DeleteMessage"
+ # - "sqs:ReceiveMessage"
+ # - "sqs:SendMessage"
+ # - "sqs:GetQueueUrl"
+ # - "sqs:GetQueueAttributes"
+ # - "sqs:SetQueueAttributes"
+ # - "sqs:TagQueue"
+ # - "sqs:ChangeMessageVisibility"
+ # - "sqs:PurgeQueue"
+ # Resource:
+ # - "{{ aws_s3_metastore_bucket_arn }}"
+ # - "arn:aws:sqs:{{ region }}:{{ aws_account }}:csms-*"
+ # - "arn:aws:sns:{{ region }}:{{ aws_account }}:csms-*"
+ # - Sid: "ManagedFileEventsListStatement"
+ # Effect: "Allow"
+ # Action: ["sqs:ListQueues", "sqs:ListQueueTags", "sns:ListTopics"]
+ # Resource: "*"
+ # - Sid: "ManagedFileEventsTeardownStatement"
+ # Effect: "Allow"
+ # Action: ["sns:Unsubscribe", "sns:DeleteTopic", "sqs:DeleteQueue"]
+ # Resource:
+ # - "arn:aws:sqs:{{ region }}:{{ aws_account }}:csms-*"
+ # - "arn:aws:sns:{{ region }}:{{ aws_account }}:csms-*"
+ # - name: tags
+ # value:
+ # - Key: Purpose
+ # Value: "Unity Catalog Storage Credential"
+ # merge:
+ # - global_tags
+ # exports:
+ # - aws_iam_role_arn: metastore_access_role_arn
+
+ # - name: databricks_account/metastore
+ # props:
+ # - name: name
+ # value: "{{ stack_name }}-{{ stack_env }}-metastore"
+ # - name: storage_root
+ # value: "s3://{{ aws_s3_metastore_bucket_name }}"
+ # - name: region
+ # value: "{{ region }}"
+ # exports:
+ # - metastore_id: databricks_metastore_id
+
+ # - name: databricks_account/uc_storage_credentials
+ # props:
+ # - name: metastore_id
+ # value: "{{ databricks_metastore_id }}"
+ # - name: credential_info
+ # value:
+ # name: "{{ stack_name }}-{{ stack_env }}-storage-credential"
+ # comment: "Storage credential for {{ stack_name }} {{ stack_env }} metastore S3 access"
+ # read_only: false
+ # aws_iam_role:
+ # role_arn: "{{ metastore_access_role_arn }}"
+ # skip_validation: false
+ # exports:
+ # - credential_id: storage_credential_id
+ # - external_id: storage_credential_external_id
+
+ # - name: aws/iam/update_metastore_role_trust_policy
+ # type: command
+ # props:
+ # - name: role_name
+ # value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+ # - name: assume_role_policy_document
+ # value:
+ # Version: "2012-10-17"
+ # Statement:
+ # - Effect: "Allow"
+ # Principal:
+ # AWS:
+ # - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+ # - "arn:aws:iam::{{ aws_account }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"
+ # Action: "sts:AssumeRole"
+ # Condition:
+ # StringEquals:
+ # sts:ExternalId: "{{ storage_credential_external_id }}"
+
+ # - name: databricks_account/validate_storage_credential
+ # type: command
+ # props:
+ # - name: credential_id
+ # value: "{{ storage_credential_id }}"
+ # - name: metastore_id
+ # value: "{{ databricks_metastore_id }}"
+
+ # - name: databricks_account/external_location
+ # props:
+ # - name: metastore_id
+ # value: "{{ databricks_metastore_id }}"
+ # - name: name
+ # value: "{{ stack_name }}-{{ stack_env }}-metastore-location"
+ # - name: url
+ # value: "s3://{{ aws_s3_metastore_bucket_name }}/"
+ # - name: credential_name
+ # value: "{{ stack_name }}-{{ stack_env }}-storage-credential"
+ # - name: comment
+ # value: "External location for {{ stack_name }} {{ stack_env }} metastore root"
+ # exports:
+ # - external_location_id
+
+ # - name: databricks_account/catalog
+ # props:
+ # - name: metastore_id
+ # value: "{{ databricks_metastore_id }}"
+ # - name: name
+ # value: "{{ stack_name }}_{{ stack_env }}"
+ # - name: comment
+ # value: "Main catalog for {{ stack_name }} {{ stack_env }} environment"
+ # - name: storage_root
+ # value: "s3://{{ aws_s3_metastore_bucket_name }}/catalogs/{{ stack_name }}_{{ stack_env }}"
+ # exports:
+ # - catalog_id
+
+ # - name: databricks_account/metastore_assignment
+ # props:
+ # - name: workspace_id
+ # value: "{{ databricks_workspace_id }}"
+ # - name: metastore_id
+ # value: "{{ databricks_metastore_id }}"
+ # - name: default_catalog_name
+ # value: "{{ stack_name }}_{{ stack_env }}"
+
+ # - name: databricks_account/catalog_workspace_binding
+ # props:
+ # - name: catalog_name
+ # value: "{{ stack_name }}_{{ stack_env }}"
+ # - name: workspace_id
+ # value: "{{ databricks_workspace_id }}"
+
+ # - name: databricks_account/catalog_permissions
+ # props:
+ # - name: catalog_name
+ # value: "{{ stack_name }}_{{ stack_env }}"
+ # - name: principal
+ # value: "{{ databricks_group_id }}"
+ # - name: privileges
+ # value:
+ # - "USE_CATALOG"
+ # - "CREATE_SCHEMA"
+ # - "CREATE_TABLE"
+ # - "CREATE_FUNCTION"
+
+# ====================================================================================
+# AWS VPC Networking
+# ====================================================================================
+
+ # - name: aws/vpc/vpc
+ # props:
+ # - name: cidr_block
+ # values:
+ # prd:
+ # value: "10.53.0.0/16"
+ # sit:
+ # value: "10.1.0.0/16"
+ # dev:
+ # value: "10.2.0.0/16"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ # merge:
+ # - global_tags
+ # - name: idempotency_token
+ # value: 019447a0-b84a-7b7f-bca5-2ee320207e51
+ # exports:
+ # - vpc_id
+
+ # - name: aws/vpc/nat_subnet
+ # file: aws/vpc/subnet.iql
+ # props:
+ # - name: availability_zone
+ # value: "us-east-1a"
+ # - name: cidr_block
+ # values:
+ # prd:
+ # value: "10.53.0.0/24"
+ # sit:
+ # value: "10.1.0.0/19"
+ # dev:
+ # value: "10.2.0.0/19"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-nat-subnet"
+ # merge:
+ # - global_tags
+ # exports:
+ # - subnet_id: nat_subnet_id
+
+ # - name: aws/vpc/cluster_subnet1
+ # file: aws/vpc/subnet.iql
+ # props:
+ # - name: availability_zone
+ # value: "us-east-1b"
+ # - name: cidr_block
+ # values:
+ # prd:
+ # value: "10.53.160.0/19"
+ # sit:
+ # value: "10.1.0.0/19"
+ # dev:
+ # value: "10.2.0.0/19"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-subnet-1"
+ # merge:
+ # - global_tags
+ # exports:
+ # - subnet_id: cluster_subnet1_id
+
+ # - name: aws/vpc/cluster_subnet2
+ # file: aws/vpc/subnet.iql
+ # props:
+ # - name: availability_zone
+ # value: "us-east-1c"
+ # - name: cidr_block
+ # values:
+ # prd:
+ # value: "10.53.192.0/19"
+ # sit:
+ # value: "10.1.32.0/19"
+ # dev:
+ # value: "10.2.32.0/19"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-subnet-2"
+ # merge:
+ # - global_tags
+ # exports:
+ # - subnet_id: cluster_subnet2_id
+
+ # - name: aws/vpc/inet_gateway
+ # props:
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-inet-gateway"
+ # merge: ['global_tags']
+ # - name: idempotency_token
+ # value: 019447a5-f076-75f8-9173-092df5a66d35
+ # exports:
+ # - internet_gateway_id
+
+ # - name: aws/vpc/inet_gw_attachment
+ # props: []
+
+ # - name: aws/vpc/nat_route_table
+ # file: aws/vpc/route_table.iql
+ # props:
+ # - name: route_table_name
+ # value: "{{ stack_name }}-{{ stack_env }}-nat-route-table"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-nat-route-table"
+ # merge: ['global_tags']
+ # exports:
+ # - route_table_id: nat_route_table_id
+
+ # - name: aws/vpc/nat_route_to_inet
+ # file: aws/vpc/inet_route.iql
+ # props:
+ # - name: route_table_id
+ # value: "{{ nat_route_table_id }}"
+ # exports:
+ # - inet_route_indentifer: nat_inet_route_indentifer
+
+ # - name: aws/vpc/nat_subnet_rt_assn
+ # file: aws/vpc/subnet_rt_assn.iql
+ # props:
+ # - name: subnet_id
+ # value: "{{ nat_subnet_id }}"
+ # - name: route_table_id
+ # value: "{{ nat_route_table_id }}"
+ # - name: idempotency_token
+ # value: 3eaf3040-1c8e-41a6-8be6-512ccaf5ff4e
+ # exports:
+ # - route_table_assn_id: nat_subnet_rt_assn_id
+
+ # - name: aws/vpc/private_route_table
+ # file: aws/vpc/route_table.iql
+ # props:
+ # - name: route_table_name
+ # value: "{{ stack_name }}-{{ stack_env }}-private-route-table"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-private-route-table"
+ # merge: ['global_tags']
+ # exports:
+ # - route_table_id: private_route_table_id
+
+ # - name: aws/vpc/subnet_rt_assn1
+ # file: aws/vpc/subnet_rt_assn.iql
+ # props:
+ # - name: route_table_id
+ # value: "{{ private_route_table_id }}"
+ # - name: subnet_id
+ # value: "{{ cluster_subnet1_id }}"
+ # - name: idempotency_token
+ # value: 019447aa-1c7a-775b-91dc-04db7c49f4a7
+ # exports:
+ # - route_table_assn_id: cluster_subnet1_rt_assn_id
+
+ # - name: aws/vpc/subnet_rt_assn2
+ # file: aws/vpc/subnet_rt_assn.iql
+ # props:
+ # - name: route_table_id
+ # value: "{{ private_route_table_id }}"
+ # - name: subnet_id
+ # value: "{{ cluster_subnet2_id }}"
+ # - name: idempotency_token
+ # value: c19c9077-c25d-46a4-a299-7bd93d773e58
+ # exports:
+ # - route_table_assn_id: cluster_subnet2_rt_assn_id
+
+ # - name: aws/vpc/elastic_ip
+ # props:
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-eip"
+ # merge: ['global_tags']
+ # - name: idempotency_token
+ # value: 01945908-b80d-7e51-b52c-5e93dea9cbdb
+ # exports:
+ # - eip_allocation_id
+ # - eip_public_id
+
+ # - name: aws/vpc/nat_gateway
+ # props:
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-nat-gateway"
+ # merge: ['global_tags']
+ # - name: idempotency_token
+ # value: 019447a5-f076-75f8-9173-092df5a66d35
+ # exports:
+ # - nat_gateway_id
+
+ # - name: aws/vpc/nat_inet_route
+ # props:
+ # - name: route_table_id
+ # value: "{{ private_route_table_id }}"
+ # - name: nat_gateway_id
+ # value: "{{ nat_gateway_id }}"
+ # exports:
+ # - nat_inet_route_indentifer
+
+ # - name: aws/vpc/security_group
+ # props:
+ # - name: group_name
+ # value: "{{ stack_name }}-{{ stack_env }}-sg"
+ # - name: group_description
+ # value: "security group for {{ stack_name }} ({{ stack_env }} environment)"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-sg"
+ # merge: ['global_tags']
+ # exports:
+ # - security_group_id
+
+ # - name: aws/vpc/security_group_rules
+ # props:
+ # - name: security_group_ingress
+ # value:
+ # - FromPort: 0
+ # ToPort: 65535
+ # SourceSecurityGroupOwnerId: "{{ aws_account }}"
+ # IpProtocol: tcp
+ # SourceSecurityGroupId: "{{ security_group_id }}"
+ # - FromPort: 0
+ # ToPort: 65535
+ # SourceSecurityGroupOwnerId: "{{ aws_account }}"
+ # IpProtocol: "udp"
+ # SourceSecurityGroupId: "{{ security_group_id }}"
+ # - CidrIp: "3.237.73.224/28"
+ # FromPort: 443
+ # ToPort: 443
+ # IpProtocol: "tcp"
+ # - CidrIp: "54.156.226.103/32"
+ # FromPort: 443
+ # ToPort: 443
+ # IpProtocol: "tcp"
+ # - name: security_group_egress
+ # value:
+ # - FromPort: 0
+ # ToPort: 65535
+ # IpProtocol: "tcp"
+ # DestinationSecurityGroupId: "{{ security_group_id }}"
+ # Description: "Allow all TCP outbound access to the same security group"
+ # - CidrIp: "0.0.0.0/0"
+ # Description: Allow all outbound traffic
+ # FromPort: -1
+ # ToPort: -1
+ # IpProtocol: "-1"
+ # - CidrIp: "0.0.0.0/0"
+ # FromPort: 3306
+ # ToPort: 3306
+ # IpProtocol: "tcp"
+ # Description: "Allow accessing the Databricks metastore"
+ # - FromPort: 0
+ # ToPort: 65535
+ # IpProtocol: "udp"
+ # DestinationSecurityGroupId: "{{ security_group_id }}"
+ # Description: "Allow all UDP outbound access to the same security group"
+ # - CidrIp: "0.0.0.0/0"
+ # FromPort: 443
+ # ToPort: 443
+ # IpProtocol: "tcp"
+ # Description: "Allow accessing Databricks infrastructure, cloud data sources, and library repositories"
+
+ # - name: databricks_account/network
+ # props:
+ # - name: databricks_network_name
+ # value: "{{ stack_name }}-{{ stack_env }}-network"
+ # - name: subnet_ids
+ # value:
+ # - "{{ cluster_subnet1_id }}"
+ # - "{{ cluster_subnet2_id }}"
+ # - name: security_group_ids
+ # value:
+ # - "{{ security_group_id }}"
+ # exports:
+ # - databricks_network_id
+
+# ====================================================================================
+# AWS Storage
+# ====================================================================================
+
+ # - name: aws/s3/workspace_bucket
+ # props:
+ # - name: bucket_name
+ # value: "{{ stack_name }}-{{ stack_env }}-root-bucket"
+ # - name: ownership_controls
+ # value:
+ # Rules:
+ # - ObjectOwnership: "BucketOwnerPreferred"
+ # - name: bucket_encryption
+ # value:
+ # ServerSideEncryptionConfiguration:
+ # - BucketKeyEnabled: true
+ # ServerSideEncryptionByDefault:
+ # SSEAlgorithm: "AES256"
+ # - name: public_access_block_configuration
+ # value:
+ # BlockPublicAcls: true
+ # IgnorePublicAcls: true
+ # BlockPublicPolicy: true
+ # RestrictPublicBuckets: true
+ # - name: versioning_configuration
+ # value:
+ # Status: "Suspended"
+ # exports:
+ # - aws_s3_workspace_bucket_name
+ # - aws_s3_workspace_bucket_arn
+
+ # - name: aws/s3/workspace_bucket_policy
+ # props:
+ # - name: policy_document
+ # value:
+ # Version: "2012-10-17"
+ # Statement:
+  #           - Sid: GrantDatabricksAccess
+ # Effect: Allow
+ # Principal:
+ # AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ # Action:
+ # - "s3:GetObject"
+ # - "s3:GetObjectVersion"
+ # - "s3:PutObject"
+ # - "s3:DeleteObject"
+ # - "s3:ListBucket"
+ # - "s3:GetBucketLocation"
+ # Resource:
+ # - "{{ aws_s3_workspace_bucket_arn }}/*"
+ # - "{{ aws_s3_workspace_bucket_arn }}"
+
+ # - name: aws/vpc/vpc_endpoint
+ # props:
+ # - name: service_name
+ # value: "com.amazonaws.{{ region }}.s3"
+ # - name: vpc_endpoint_type
+ # value: "Gateway"
+ # - name: route_table_ids
+ # value:
+ # - "{{ private_route_table_id }}"
+ # - name: tags
+ # value:
+ # - Key: Name
+ # Value: "{{ stack_name }}-{{ stack_env }}-s3-vpc-endpoint"
+ # merge:
+ # - global_tags
+ # exports:
+ # - s3_gateway_endpoint_id
+
+ # - name: databricks_account/storage_configuration
+ # props:
+ # - name: storage_configuration_name
+ # value: "{{ stack_name }}-{{ stack_env }}-storage"
+ # - name: root_bucket_info
+ # value:
+ # bucket_name: "{{ aws_s3_workspace_bucket_name }}"
+ # exports:
+ # - databricks_storage_configuration_id
+
+# ====================================================================================
+# DBX Workspace
+# ====================================================================================
+
+ # - name: databricks_account/workspace
+ # props:
+ # - name: workspace_name
+ # value: "{{ stack_name }}-{{ stack_env }}-workspace"
+ # - name: network_id
+ # value: "{{ databricks_network_id }}"
+ # - name: aws_region
+ # value: "{{ region }}"
+ # - name: credentials_id
+ # value: "{{ databricks_credentials_id }}"
+ # - name: storage_configuration_id
+ # value: "{{ databricks_storage_configuration_id }}"
+ # - name: pricing_tier
+ # value: PREMIUM
+ # exports:
+ # - databricks_workspace_id
+ # - databricks_deployment_name
+
+ # - name: databricks_account/workspace_group
+ # props:
+ # - name: display_name
+ # value: "{{ stack_name }}-{{ stack_env }}-workspace-admins"
+ # exports:
+ # - databricks_group_id
+
+ # - name: databricks_account/get_users
+ # type: query
+ # props:
+ # - name: users
+ # value:
+ # - "javen@stackql.io"
+ # - "krimmer@stackql.io"
+ # exports:
+ # - databricks_workspace_group_members
+
+ # - name: databricks_account/update_group_membership
+ # type: command
+ # props: []
+
+ # - name: databricks_account/workspace_permission_assignments
+ # props: []
+
+ # - name: databricks_workspace/all_purpose_cluster
+ # props:
+ # - name: cluster_name
+ # value: single-user-single-node-cluster
+ # - name: num_workers
+ # value: 0
+ # - name: is_single_node
+ # value: true
+ # - name: kind
+ # value: CLASSIC_PREVIEW
+ # - name: spark_version
+ # value: 15.4.x-scala2.12
+ # - name: node_type_id
+ # value: m7g.large
+ # - name: data_security_mode
+ # value: SINGLE_USER
+ # - name: runtime_engine
+ # value: PHOTON
+ # - name: single_user_name
+ # value: javen@stackql.io
+ # - name: aws_attributes
+ # value:
+ # ebs_volume_count: 1
+ # ebs_volume_size: 100
+ # - name: custom_tags
+ # description: Additional tags for cluster resources (max 45 tags)
+ # value:
+ # Provisioner: stackql
+ # StackName: "{{ stack_name }}"
+ # StackEnv: "{{ stack_env }}"
+ # exports:
+ # - databricks_cluster_id
+ # - databricks_cluster_state
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/README.md b/ref-python-packages/stackql-deploy/examples/databricks/serverless/README.md
new file mode 100644
index 0000000..409894d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/README.md
@@ -0,0 +1,245 @@
+# `stackql-deploy` example project for `databricks`
+
+This exercise is to bootstrap a databricks / aws tenancy using `stackql-deploy`. It is an important use case for platform bootstrap and we are excited to perform it with the `stackql` toolchain. We hope you enjoy and find this valuable. Please drop us a note with your forthright opinion on this and check out our issues on github.
+
+## A word of caution
+
+Please take the greatest care in performing this exercise; it will incur expenses, as it involves creating (and destroying) resources which cost money. Please be aware that you **must** cancel your databricks subscription after completing this exercise, otherwise you will incur ongoing expenses. That is, do **not** skip the section [Cancel databricks subscription](#cancel-databricks-subsription). We strongly advise that you verify all resources are destroyed at the conclusion of this exercise. Web pages and certain behaviours may change, so please be thorough in your verification. We will keep this page up-to-date on a best effort basis only. It is very much a case of owner onus applies.
+
+## Manual Setup
+
+Dependencies:
+
+- aws Account Created.
+- Required clickops to set up databricks on aws:
+ - Turn on aws Marketplace `databricks` offering, using [the aws manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), per Figure S1.
+ - Follow the suggested setup flow as directed, from this page. These clickops steps are necessary at this time for initial account setup. The way I followed this, it created a workspace for me at setup, per Figure S3. We shall not use this one and rather, later on we shall dispose of it; because we do not trust auto-created resources out of hand. In the process of creating the databricks subscription, a second aws account is created.
+  - Copy the databricks account id from basically any web page in the databricks console. This is done by clicking on the user icon at the top RHS and then the UI provides a copy shortcut, per Figure U1. Save this locally for later use, expanded below.
+  - We need the aws account id that was created for the databricks subscription. It is not exactly heralded by the web pages, nor is it actively hidden. It can be captured in a couple of places, including the databricks storage account created in the subscription flow, per Figure XA1. Copy and save this locally for later use, expanded below.
+ - Create a service principal to use as a "CICD agent", using the page shown in Figure S4.
+ - Grant the CICD agent account admin role, using the page shown in Figure S5.
+ - Create a secret for the CICD agent, using the page shown in Figure S6. At the time you create this, you will need to safely store the client secret and client id, as prompted by the web page. These will be used below.
+- Setup your virtual environment, from the root of this repository `cicd/setup/setup-env.sh`.
+
+Now, it is convenient to use environment variables for context. Note that for our example, there is only one aws account apropos, however this is not always the case for an active professional, so while `DATABRICKS_AWS_ACCOUNT_ID` is the same as `AWS_ACCOUNT_ID` here, it need not always be the case. Create a file in the path `examples/databricks/serverless/sec/env.sh` (relative to the root of this repository) with contents of the form:
+
+```bash
+#!/usr/bin/env bash
+
+export AWS_REGION='us-east-1' # or wherever you want
+export AWS_ACCOUNT_ID=''
+export DATABRICKS_ACCOUNT_ID=''
+export DATABRICKS_AWS_ACCOUNT_ID=''
+
+# These need to be created by clickops under [the account level user management page](https://accounts.cloud.databricks.com/user-management).
+export DATABRICKS_CLIENT_ID=''
+export DATABRICKS_CLIENT_SECRET=''
+
+## These can be skipped if you run on [aws cloud shell](https://docs.aws.amazon.com/cloudshell/latest/userguide/welcome.html).
+export AWS_SECRET_ACCESS_KEY=''
+export AWS_ACCESS_KEY_ID=''
+
+```
+
+## Optional step: sanity checks with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+source examples/databricks/serverless/convenience.sh
+stackql shell
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here, that will be shared in a corresponding video):
+
+
+```sql
+registry pull databricks_account v24.12.00279;
+registry pull databricks_workspace v24.12.00279;
+
+-- This will fail if accounts, subscription, or credentials are in error.
+select account_id FROM databricks_account.provisioning.credentials WHERE account_id = '';
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+```
+
+For extra credit, you can (asynchronously) delete the unnecessary workspace with `delete from databricks_account.provisioning.workspaces where account_id = '' and workspace_id = '';`, where you obtain the workspace id from the above query. I have noted that due to some response caching it takes a while to disappear from select queries (much longer than disappearance from the web page), and you may want to bounce the `stackql` session to hurry things along. This is not happening on the `stackql` side, but session bouncing forces a token refresh which can help cache busting.
+
+## Lifecycle management
+
+Time to get down to business. From the root of this repository:
+
+```bash
+python3 -m venv venv
+source examples/databricks/serverless/convenience.sh
+source venv/bin/activate
+pip install stackql-deploy
+```
+
+> alternatively set the `AWS_REGION`, `AWS_ACCOUNT_ID`, `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_AWS_ACCOUNT_ID` along with provider credentials `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `DATABRICKS_CLIENT_ID`, `DATABRICKS_CLIENT_SECRET`
+
+Then, do a dry run (good for catching **some** environmental issues):
+
+```bash
+stackql-deploy build \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--dry-run
+```
+
+You will see a verbose rendition of what `stackql-deploy` intends to do.
+
+
+Now, let us do it for real:
+
+```bash
+stackql-deploy build \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+The output is quite verbose, concludes in:
+
+```
+2025-02-08 12:51:25,914 - stackql-deploy - INFO - 📤 set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - ✅ successfully deployed databricks_workspace
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - deployment completed in 0:04:09.603631
+🚀 build complete
+```
+
+Success!!!
+
+We can also use `stackql-deploy` to assess if our infra is shipshape:
+
+```bash
+stackql-deploy test \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Again, the output is quite verbose, concludes in:
+
+```
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - 📤 set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - ✅ test passed for databricks_workspace
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - deployment completed in 0:02:30.255860
+🚀 tests complete (dry run: False)
+```
+
+Success!!!
+
+Now, let us teardown our `stackql-deploy` managed infra:
+
+```bash
+stackql-deploy teardown \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Takes its time, again verbose, concludes in:
+
+```
+2025-02-08 13:24:17,941 - stackql-deploy - INFO - ✅ successfully deleted AWS_iam_cross_account_role
+2025-02-08 13:24:17,942 - stackql-deploy - INFO - deployment completed in 0:03:21.191788
+🚧 teardown complete (dry run: False)
+```
+
+Success!!!
+
+## Optional step: verify destruction with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+
+source examples/databricks/serverless/convenience.sh
+
+stackql shell
+
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here):
+
+
+```sql
+
+registry pull databricks_account v24.12.00279;
+
+registry pull databricks_workspace v24.12.00279;
+
+
+
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+
+```
+
+## Cancel databricks subsription
+
+This is **very** important.
+
+Go to [the aws Marketplace manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), navigate to databricks and then cancel the subscription.
+
+## Figures
+
+
+
+
+**Figure S1**: Create aws databricks subscription.
+
+---
+
+
+
+**Figure S2**: Awaiting aws databricks subscription resources.
+
+---
+
+
+
+**Figure S3**: Auto provisioned workspace.
+
+---
+
+
+
+**Figure U1**: Capture databricks account id.
+
+---
+
+
+
+**Figure XA1**: Capture cross databricks aws account id.
+
+---
+
+
+
+**Figure S4**: Create CICD agent.
+
+---
+
+
+
+**Figure S5**: Grant account admin to CICD agent.
+
+---
+
+
+
+**Figure S6**: Generate secret for CICD agent.
+
+---
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/auto-provisioned-worskpace.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/auto-provisioned-worskpace.png
new file mode 100644
index 0000000..a9fbcb6
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/auto-provisioned-worskpace.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/awaiting-subscription-resources.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/awaiting-subscription-resources.png
new file mode 100644
index 0000000..9505100
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/awaiting-subscription-resources.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-cross-databricks-aws-account-id.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-cross-databricks-aws-account-id.png
new file mode 100644
index 0000000..6fdb3c4
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-cross-databricks-aws-account-id.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-databricks-account-id.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-databricks-account-id.png
new file mode 100644
index 0000000..c890299
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/capture-databricks-account-id.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-aws-databricks-subscription.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-aws-databricks-subscription.png
new file mode 100644
index 0000000..b5c9e7f
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-aws-databricks-subscription.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-cicd-agent.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-cicd-agent.png
new file mode 100644
index 0000000..faf1643
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/create-cicd-agent.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/generate-secret-ui.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/generate-secret-ui.png
new file mode 100644
index 0000000..daf4f23
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/generate-secret-ui.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/grant-account-admin-cicd-agent.png b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/grant-account-admin-cicd-agent.png
new file mode 100644
index 0000000..f50e0c0
Binary files /dev/null and b/ref-python-packages/stackql-deploy/examples/databricks/serverless/assets/grant-account-admin-cicd-agent.png differ
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/convenience.sh b/ref-python-packages/stackql-deploy/examples/databricks/serverless/convenience.sh
new file mode 100644
index 0000000..290ef42
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/convenience.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+
+CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+export REPOSITORY_ROOT="$(realpath $CURRENT_DIR/../../..)"
+
+
+if [ -f "${REPOSITORY_ROOT}/examples/databricks/serverless/sec/env.sh" ];
+then
+ source "${REPOSITORY_ROOT}/examples/databricks/serverless/sec/env.sh"
+fi
+
+if [ "${AWS_REGION}" = "" ];
+then
+ AWS_REGION='us-east-1'
+fi
+
+if [ "${AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "AWS_ACCOUNT_ID must be set" >&2
+  exit 1
+fi
+
+if [ "${DATABRICKS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_AWS_ACCOUNT_ID}" = "" ];
+then
+ echo "DATABRICKS_AWS_ACCOUNT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_ID}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_ID must be set" >&2
+ exit 1
+fi
+
+if [ "${DATABRICKS_CLIENT_SECRET}" = "" ];
+then
+ echo "DATABRICKS_CLIENT_SECRET must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_SECRET_ACCESS_KEY}" = "" ];
+then
+ echo "AWS_SECRET_ACCESS_KEY must be set" >&2
+ exit 1
+fi
+
+if [ "${AWS_ACCESS_KEY_ID}" = "" ];
+then
+ echo "AWS_ACCESS_KEY_ID must be set" >&2
+ exit 1
+fi
+
+export AWS_REGION
+export AWS_ACCOUNT_ID
+export DATABRICKS_ACCOUNT_ID
+export DATABRICKS_AWS_ACCOUNT_ID
+
+export DATABRICKS_CLIENT_ID
+export DATABRICKS_CLIENT_SECRET
+
+
+export AWS_SECRET_ACCESS_KEY
+export AWS_ACCESS_KEY_ID
+
+
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/deployment.json b/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/deployment.json
new file mode 100644
index 0000000..2e6250c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/deployment.json
@@ -0,0 +1,10 @@
+{
+ "stack_name": "stackql-serverless",
+ "stack_env": "prd",
+ "databricks_workspace_name": "stackql-serverless-prd-workspace",
+ "databricks_workspace_id": "4014389171618363",
+ "databricks_deployment_name": "dbc-5a3a87f7-6914",
+ "databricks_workspace_status": "RUNNING",
+ "databricks_workspace_url": "https://dbc-5a3a87f7-6914.cloud.databricks.com",
+ "elapsed_time": "0:00:31.470746"
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/test.json b/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/test.json
new file mode 100644
index 0000000..0bb1932
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/outputs/test.json
@@ -0,0 +1,10 @@
+{
+ "stack_name": "stackql-serverless",
+ "stack_env": "prd",
+ "databricks_workspace_name": "stackql-serverless-prd-workspace",
+ "databricks_workspace_id": "4014389171618363",
+ "databricks_deployment_name": "dbc-5a3a87f7-6914",
+ "databricks_workspace_status": "RUNNING",
+ "databricks_workspace_url": "https://dbc-5a3a87f7-6914.cloud.databricks.com",
+ "elapsed_time": "0:00:18.247444"
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/iam_role.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/iam_role.iql
new file mode 100644
index 0000000..4e4f6fa
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/iam_role.iql
@@ -0,0 +1,60 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ create */
+INSERT INTO aws.iam.roles (
+ RoleName,
+ Description,
+ Path,
+ AssumeRolePolicyDocument,
+ Policies,
+ Tags,
+ region
+)
+SELECT
+'{{ role_name }}',
+'{{ description }}',
+'{{ path }}',
+'{{ assume_role_policy_document }}',
+'{{ policies }}',
+'{{ global_tags }}',
+'us-east-1'
+
+/*+ update */
+update aws.iam.roles
+set data__PatchDocument = string('{{ {
+ "Description": description,
+ "Path": path,
+ "AssumeRolePolicyDocument": assume_role_policy_document,
+ "Policies": policies,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ max_session_duration,
+ path,
+ AWS_POLICY_EQUAL(assume_role_policy_document, '{{ assume_role_policy_document }}') as test_assume_role_policy_doc,
+ AWS_POLICY_EQUAL(policies, '{{ policies }}') as test_policies
+ FROM aws.iam.roles
+ WHERE data__Identifier = '{{ role_name }}')t
+WHERE test_assume_role_policy_doc = 1
+AND test_policies = 1
+AND path = '{{ path }}';
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+'{{ role_name }}' as aws_iam_role_name,
+arn as aws_iam_role_arn
+FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+
+/*+ delete */
+DELETE FROM aws.iam.roles
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/update_metastore_access_role.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/update_metastore_access_role.iql
new file mode 100644
index 0000000..2339232
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/iam/update_metastore_access_role.iql
@@ -0,0 +1,7 @@
+/*+ command */
+update aws.iam.roles
+set data__PatchDocument = string('{{ {
+ "AssumeRolePolicyDocument": assume_role_policy_document
+ } | generate_patch_document }}')
+WHERE data__Identifier = '{{ role_name }}'
+AND region = 'us-east-1';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket.iql
new file mode 100644
index 0000000..42741fd
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket.iql
@@ -0,0 +1,58 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.buckets
+WHERE region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ create */
+INSERT INTO aws.s3.buckets (
+ BucketName,
+ OwnershipControls,
+ BucketEncryption,
+ PublicAccessBlockConfiguration,
+ VersioningConfiguration,
+ Tags,
+ region
+)
+SELECT
+ '{{ bucket_name }}',
+ '{{ ownership_controls }}',
+ '{{ bucket_encryption }}',
+ '{{ public_access_block_configuration }}',
+ '{{ versioning_configuration }}',
+ '{{ global_tags }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "OwnershipControls": ownership_controls,
+ "BucketEncryption": bucket_encryption,
+ "PublicAccessBlockConfiguration": public_access_block_configuration,
+ "VersioningConfiguration": versioning_configuration,
+ "Tags": global_tags
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+arn,
+bucket_name
+FROM (
+ SELECT
+ arn,
+ bucket_name,
+ JSON_EQUAL(ownership_controls, '{{ ownership_controls }}') as test_ownership_controls,
+ JSON_EQUAL(bucket_encryption, '{{ bucket_encryption }}') as test_encryption,
+ JSON_EQUAL(public_access_block_configuration, '{{ public_access_block_configuration }}') as test_public_access_block_configuration,
+ JSON_EQUAL(versioning_configuration, '{{ versioning_configuration }}') as test_versioning_configuration
+ FROM aws.s3.buckets
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ bucket_name }}'
+)t
+WHERE test_ownership_controls = 1
+AND test_encryption = 1
+AND test_public_access_block_configuration = 1
+AND test_versioning_configuration = 1
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket_policy.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket_policy.iql
new file mode 100644
index 0000000..cead151
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/aws/s3/s3_bucket_policy.iql
@@ -0,0 +1,36 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM aws.s3.bucket_policies
+WHERE region = '{{ region }}'
+AND bucket = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ create */
+INSERT INTO aws.s3.bucket_policies (
+ Bucket,
+ PolicyDocument,
+ ClientToken,
+ region
+)
+SELECT
+ '{{ aws_s3_workspace_bucket_name }}',
+ '{{ policy_document }}',
+ '{{ uuid() }}',
+ '{{ region }}'
+
+/*+ update */
+update aws.s3.bucket_policies
+set data__PatchDocument = string('{{ {
+ "PolicyDocument": policy_document
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}';
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count FROM (
+ SELECT
+ JSON_EQUAL(policy_document, '{{ policy_document }}') as test_policy_document
+ FROM aws.s3.bucket_policies
+ WHERE region = '{{ region }}'
+ AND data__Identifier = '{{ aws_s3_workspace_bucket_name }}')t
+WHERE test_policy_document = 1;
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/credentials.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/credentials.iql
new file mode 100644
index 0000000..687b3f1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/credentials.iql
@@ -0,0 +1,31 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.credentials (
+account_id,
+data__credentials_name,
+data__aws_credentials
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ credentials_name }}',
+'{{ aws_credentials }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+'{{ credentials_name }}' as databricks_credentials_name,
+credentials_id as databricks_credentials_id,
+JSON_EXTRACT(aws_credentials, '$.sts_role.external_id') as databricks_role_external_id
+FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}'
+AND credentials_name = '{{ credentials_name }}'
+AND JSON_EXTRACT(aws_credentials, '$.sts_role.role_arn') = '{{ aws_iam_cross_account_role_arn }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.credentials
+WHERE account_id = '{{ databricks_account_id }}' AND
+credentials_id = '{{ databricks_credentials_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/get_users.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/get_users.iql
new file mode 100644
index 0000000..e94c2d7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/get_users.iql
@@ -0,0 +1,6 @@
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+JSON_GROUP_ARRAY(JSON_OBJECT('value', id)) as databricks_workspace_group_members
+FROM databricks_account.iam.users
+WHERE account_id = '{{ databricks_account_id }}'
+AND userName in {{ users | sql_list }};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/network.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/network.iql
new file mode 100644
index 0000000..fca4c98
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/network.iql
@@ -0,0 +1,41 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.networks (
+account_id,
+data__network_name,
+data__vpc_id,
+data__subnet_ids,
+data__security_group_ids
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_network_name }}',
+'{{ vpc_id }}',
+'{{ subnet_ids }}',
+'{{ security_group_ids }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+network_id as databricks_network_id
+FROM (
+SELECT
+network_id,
+JSON_EQUAL(subnet_ids, '{{ subnet_ids }}') as subnet_test,
+JSON_EQUAL(security_group_ids, '{{ security_group_ids }}') as sg_test
+FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}'
+AND network_name = '{{ databricks_network_name }}'
+AND vpc_id = '{{ vpc_id }}'
+)t
+WHERE subnet_test = 1
+AND sg_test = 1
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.networks
+WHERE account_id = '{{ databricks_account_id }}' AND
+network_id = '{{ databricks_network_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/storage_configuration.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/storage_configuration.iql
new file mode 100644
index 0000000..8318df1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/storage_configuration.iql
@@ -0,0 +1,29 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.storage (
+account_id,
+data__storage_configuration_name,
+data__root_bucket_info
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ storage_configuration_name }}',
+'{{ root_bucket_info }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+storage_configuration_id as databricks_storage_configuration_id
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+AND JSON_EXTRACT(root_bucket_info, '$.bucket_name') = '{{ aws_s3_workspace_bucket_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ databricks_account_id }}' AND
+storage_configuration_id = '{{ databricks_storage_configuration_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/update_group_membership.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/update_group_membership.iql
new file mode 100644
index 0000000..375d926
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/update_group_membership.iql
@@ -0,0 +1,6 @@
+/*+ command */
+update databricks_account.iam.groups
+set data__schemas = '["urn:ietf:params:scim:api:messages:2.0:PatchOp"]',
+data__Operations = '[{"op": "replace", "path": "members", "value": {{ databricks_workspace_group_members }} }]'
+WHERE account_id = '{{ databricks_account_id }}'
+AND id = '{{ databricks_group_id }}';
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace.iql
new file mode 100644
index 0000000..1a7efc1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace.iql
@@ -0,0 +1,42 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.workspaces (
+account_id,
+data__workspace_name,
+data__aws_region,
+data__credentials_id,
+data__storage_configuration_id,
+data__pricing_tier
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ workspace_name }}',
+'{{ aws_region }}',
+'{{ credentials_id }}',
+'{{ storage_configuration_id }}',
+'{{ pricing_tier }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+'{{ workspace_name }}' AS databricks_workspace_name,
+workspace_id AS databricks_workspace_id,
+deployment_name AS databricks_deployment_name,
+workspace_status AS databricks_workspace_status,
+'https://' || deployment_name || '.cloud.databricks.com' AS databricks_workspace_url
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+AND aws_region = '{{ aws_region }}'
+AND credentials_id = '{{ credentials_id }}'
+AND storage_configuration_id = '{{ storage_configuration_id }}'
+AND pricing_tier = '{{ pricing_tier }}'
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_group.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_group.iql
new file mode 100644
index 0000000..d2d0d13
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_group.iql
@@ -0,0 +1,26 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ create */
+INSERT INTO databricks_account.iam.groups (
+account_id,
+data__displayName
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ display_name }}'
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT id AS databricks_group_id,
+displayName AS databricks_group_name
+FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}'
+AND displayName = '{{ display_name }}'
+
+/*+ delete */
+DELETE FROM databricks_account.iam.groups
+WHERE account_id = '{{ databricks_account_id }}' AND
+id = '{{ databricks_group_id }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_permission_assignments.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_permission_assignments.iql
new file mode 100644
index 0000000..00387e3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_account/workspace_permission_assignments.iql
@@ -0,0 +1,32 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ createorupdate */
+INSERT INTO databricks_account.iam.workspace_permission_assignments (
+account_id,
+principal_id,
+workspace_id,
+data__permissions
+)
+SELECT
+'{{ databricks_account_id }}',
+'{{ databricks_group_id }}',
+'{{ databricks_workspace_id }}',
+'["ADMIN"]'
+
+/*+ statecheck, retries=3, retry_delay=5 */
+SELECT COUNT(*) as count
+FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
+AND JSON_EXTRACT(principal, '$.principal_id') = {{ databricks_group_id }}
+
+/*+ delete */
+DELETE FROM databricks_account.iam.workspace_permission_assignments
+WHERE account_id = '{{ databricks_account_id }}' AND
+principal_id = '{{ databricks_group_id }}' AND
+workspace_id = '{{ databricks_workspace_id }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/external_location.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/external_location.iql
new file mode 100644
index 0000000..4d993d0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/external_location.iql
@@ -0,0 +1,40 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_workspace.unitycatalog.external_locations
+WHERE name = '{{ name | replace('-', '_') }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
+
+/*+ create */
+INSERT INTO databricks_workspace.unitycatalog.external_locations (
+deployment_name,
+data__name,
+data__url,
+data__credential_name,
+data__read_only,
+data__comment,
+data__skip_validation
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ name | replace('-', '_') }}',
+'{{ url }}',
+'{{ credential_name | replace('-', '_') }}',
+{{ read_only }},
+'{{ comment }}',
+{{ skip_validation }}
+;
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT name as external_location_name
+FROM databricks_workspace.unitycatalog.external_locations
+WHERE name = '{{ name | replace('-', '_') }}' AND
+deployment_name = '{{ databricks_deployment_name }}'
+AND url = '{{ url }}' AND
+credential_name = '{{ credential_name | replace('-', '_') }}' AND
+read_only = {{ read_only }} AND
+comment = '{{ comment }}';
+
+/*+ delete */
+DELETE FROM databricks_workspace.unitycatalog.external_locations
+WHERE name = '{{ name | replace('-', '_') }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/storage_credential.iql b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/storage_credential.iql
new file mode 100644
index 0000000..65dd110
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/resources/databricks_workspace/storage_credential.iql
@@ -0,0 +1,37 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM databricks_workspace.unitycatalog.storage_credentials
+WHERE name = '{{ name | replace('-', '_') | upper }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
+
+/*+ create */
+INSERT INTO databricks_workspace.unitycatalog.storage_credentials (
+deployment_name,
+data__name,
+data__comment,
+data__read_only,
+data__aws_iam_role,
+data__skip_validation
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ name | replace('-', '_') | upper }}',
+'{{ comment }}',
+'{{ read_only }}',
+'{{ aws_iam_role }}',
+'{{ skip_validation }}'
+;
+
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+name as storage_credential_name,
+JSON_EXTRACT(aws_iam_role, '$.external_id') as storage_credential_external_id
+FROM databricks_workspace.unitycatalog.storage_credentials
+WHERE name = '{{ name | replace('-', '_') | upper }}' AND
+deployment_name = '{{ databricks_deployment_name }}' AND
+JSON_EXTRACT(aws_iam_role, '$.role_arn') = '{{ metastore_access_role_arn }}';
+
+/*+ delete */
+DELETE FROM databricks_workspace.unitycatalog.storage_credentials
+WHERE name = '{{ name | replace('-', '_') | upper }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/sec/.gitignore b/ref-python-packages/stackql-deploy/examples/databricks/serverless/sec/.gitignore
new file mode 100644
index 0000000..d6b7ef3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/sec/.gitignore
@@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/serverless/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/databricks/serverless/stackql_manifest.yml
new file mode 100644
index 0000000..b9f540e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/serverless/stackql_manifest.yml
@@ -0,0 +1,499 @@
+version: 1
+name: "stackql-serverless"
+description: creates a serverless databricks workspace
+providers:
+ - aws
+ - databricks_account
+ - databricks_workspace
+globals:
+ - name: databricks_account_id
+ description: databricks account id
+ value: "{{ DATABRICKS_ACCOUNT_ID }}"
+ - name: databricks_aws_account_id
+ description: databricks AWS account id
+ value: "{{ DATABRICKS_AWS_ACCOUNT_ID }}"
+ - name: aws_account
+ description: aws_account id
+ value: "{{ AWS_ACCOUNT_ID }}"
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+
+# ====================================================================================
+# IAM and Cloud Credentials
+# ====================================================================================
+
+ - name: aws/iam/cross_account_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: ""
+ Effect: "Allow"
+ Principal:
+ AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ databricks_account_id }}"
+ - name: description
+ value: 'allows Databricks to access resources in ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyDocument:
+ Statement:
+ - Sid: Stmt1403287045000
+ Effect: Allow
+ Action:
+ - "ec2:AllocateAddress"
+ - "ec2:AssociateDhcpOptions"
+ - "ec2:AssociateIamInstanceProfile"
+ - "ec2:AssociateRouteTable"
+ - "ec2:AttachInternetGateway"
+ - "ec2:AttachVolume"
+ - "ec2:AuthorizeSecurityGroupEgress"
+ - "ec2:AuthorizeSecurityGroupIngress"
+ - "ec2:CancelSpotInstanceRequests"
+ - "ec2:CreateDhcpOptions"
+ - "ec2:CreateInternetGateway"
+ - "ec2:CreateKeyPair"
+ - "ec2:CreateNatGateway"
+ - "ec2:CreatePlacementGroup"
+ - "ec2:CreateRoute"
+ - "ec2:CreateRouteTable"
+ - "ec2:CreateSecurityGroup"
+ - "ec2:CreateSubnet"
+ - "ec2:CreateTags"
+ - "ec2:CreateVolume"
+ - "ec2:CreateVpc"
+ - "ec2:CreateVpcEndpoint"
+ - "ec2:DeleteDhcpOptions"
+ - "ec2:DeleteInternetGateway"
+ - "ec2:DeleteKeyPair"
+ - "ec2:DeleteNatGateway"
+ - "ec2:DeletePlacementGroup"
+ - "ec2:DeleteRoute"
+ - "ec2:DeleteRouteTable"
+ - "ec2:DeleteSecurityGroup"
+ - "ec2:DeleteSubnet"
+ - "ec2:DeleteTags"
+ - "ec2:DeleteVolume"
+ - "ec2:DeleteVpc"
+ - "ec2:DeleteVpcEndpoints"
+ - "ec2:DescribeAvailabilityZones"
+ - "ec2:DescribeIamInstanceProfileAssociations"
+ - "ec2:DescribeInstanceStatus"
+ - "ec2:DescribeInstances"
+ - "ec2:DescribeInternetGateways"
+ - "ec2:DescribeNatGateways"
+ - "ec2:DescribePlacementGroups"
+ - "ec2:DescribePrefixLists"
+ - "ec2:DescribeReservedInstancesOfferings"
+ - "ec2:DescribeRouteTables"
+ - "ec2:DescribeSecurityGroups"
+ - "ec2:DescribeSpotInstanceRequests"
+ - "ec2:DescribeSpotPriceHistory"
+ - "ec2:DescribeSubnets"
+ - "ec2:DescribeVolumes"
+ - "ec2:DescribeVpcs"
+ - "ec2:DescribeVpcAttribute"
+ - "ec2:DescribeNetworkAcls"
+ - "ec2:DetachInternetGateway"
+ - "ec2:DisassociateIamInstanceProfile"
+ - "ec2:DisassociateRouteTable"
+ - "ec2:ModifyVpcAttribute"
+ - "ec2:ReleaseAddress"
+ - "ec2:ReplaceIamInstanceProfileAssociation"
+ - "ec2:ReplaceRoute"
+ - "ec2:RequestSpotInstances"
+ - "ec2:RevokeSecurityGroupEgress"
+ - "ec2:RevokeSecurityGroupIngress"
+ - "ec2:RunInstances"
+ - "ec2:TerminateInstances"
+ Resource:
+ - "*"
+ - Effect: Allow
+ Action:
+ - "iam:CreateServiceLinkedRole"
+ - "iam:PutRolePolicy"
+ Resource:
+ - arn:aws:iam::*:role/aws-service-role/spot.amazonaws.com/AWSServiceRoleForEC2Spot
+ Condition:
+ StringLike:
+ "iam:AWSServiceName": spot.amazonaws.com
+ Version: '2012-10-17'
+ PolicyName: "{{ stack_name }}-{{ stack_env }}-policy"
+ exports:
+ - aws_iam_role_name: aws_iam_cross_account_role_name
+ - aws_iam_role_arn: aws_iam_cross_account_role_arn
+
+ - name: databricks_account/credentials
+ props:
+ - name: credentials_name
+ value: "{{ stack_name }}-{{ stack_env }}-credentials"
+ - name: aws_credentials
+ value:
+ sts_role:
+ role_arn: "{{ aws_iam_cross_account_role_arn }}"
+ exports:
+ - databricks_credentials_name
+ - databricks_credentials_id
+ - databricks_role_external_id
+
+# ====================================================================================
+# Storage
+# ====================================================================================
+
+ - name: aws/s3/workspace_bucket
+ file: aws/s3/s3_bucket.iql
+ props:
+ - name: bucket_name
+ value: "{{ stack_name }}-{{ stack_env }}-root-bucket"
+ - name: ownership_controls
+ value:
+ Rules:
+ - ObjectOwnership: "BucketOwnerPreferred"
+ - name: bucket_encryption
+ value:
+ ServerSideEncryptionConfiguration:
+ - BucketKeyEnabled: true
+ ServerSideEncryptionByDefault:
+ SSEAlgorithm: "AES256"
+ - name: public_access_block_configuration
+ value:
+ BlockPublicAcls: true
+ IgnorePublicAcls: true
+ BlockPublicPolicy: true
+ RestrictPublicBuckets: true
+ - name: versioning_configuration
+ value:
+ Status: "Suspended"
+ exports:
+ - arn: aws_s3_workspace_bucket_arn
+ - bucket_name: aws_s3_workspace_bucket_name
+
+ - name: aws/s3/workspace_bucket_policy
+ file: aws/s3/s3_bucket_policy.iql
+ props:
+ - name: policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Sid: Grant Databricks Access
+ Effect: Allow
+ Principal:
+ AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+ Action:
+ - "s3:GetObject"
+ - "s3:GetObjectVersion"
+ - "s3:PutObject"
+ - "s3:DeleteObject"
+ - "s3:ListBucket"
+ - "s3:GetBucketLocation"
+ Resource:
+ - "{{ aws_s3_workspace_bucket_arn }}/*"
+ - "{{ aws_s3_workspace_bucket_arn }}"
+
+ - name: databricks_account/storage_configuration
+ props:
+ - name: storage_configuration_name
+ value: "{{ stack_name }}-{{ stack_env }}-storage"
+ - name: root_bucket_info
+ value:
+ bucket_name: "{{ aws_s3_workspace_bucket_name }}"
+ exports:
+ - databricks_storage_configuration_id
+
+# ====================================================================================
+# UC Storage Credential and Metastore Catalog Bucket
+# ====================================================================================
+
+ - name: aws/s3/metastore_bucket
+ file: aws/s3/s3_bucket.iql
+ props:
+ - name: bucket_name
+ value: "{{ stack_name }}-{{ stack_env }}-metastore"
+ - name: ownership_controls
+ value:
+ Rules:
+ - ObjectOwnership: "BucketOwnerPreferred"
+ - name: bucket_encryption
+ value:
+ ServerSideEncryptionConfiguration:
+ - BucketKeyEnabled: true
+ ServerSideEncryptionByDefault:
+ SSEAlgorithm: "AES256"
+ - name: public_access_block_configuration
+ value:
+ BlockPublicAcls: true
+ IgnorePublicAcls: true
+ BlockPublicPolicy: true
+ RestrictPublicBuckets: true
+ - name: versioning_configuration
+ value:
+ Status: "Suspended"
+ exports:
+ - arn: aws_s3_metastore_bucket_arn
+ - bucket_name: aws_s3_metastore_bucket_name
+
+ - name: aws/iam/metastore_access_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: "Allow"
+ Principal:
+ AWS:
+ - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "0000" # Placeholder
+ - name: description
+ value: 'Unity Catalog metastore access role for ({{ stack_name }}-{{ stack_env }})'
+ - name: path
+ value: '/'
+ - name: policies
+ value:
+ - PolicyName: "MetastoreS3Policy"
+ PolicyDocument:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: "Allow"
+ Action:
+ - "s3:GetObject"
+ - "s3:PutObject"
+ - "s3:DeleteObject"
+ - "s3:ListBucket"
+ - "s3:GetBucketLocation"
+ - "s3:ListBucketMultipartUploads"
+ - "s3:ListMultipartUploadParts"
+ - "s3:AbortMultipartUpload"
+ Resource:
+ - "{{ aws_s3_metastore_bucket_arn }}/*"
+ - "{{ aws_s3_metastore_bucket_arn }}"
+
+ # - Effect: "Allow"
+ # Action:
+ # - "kms:Decrypt"
+ # - "kms:Encrypt"
+ # - "kms:GenerateDataKey*"
+ # Resource:
+ # - "arn:aws:kms:"
+
+ - Effect: "Allow"
+ Action:
+ - "sts:AssumeRole"
+ Resource:
+ - "arn:aws:iam::{{ databricks_aws_account_id }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"
+
+ - Sid: "ManagedFileEventsSetupStatement"
+ Effect: "Allow"
+ Action:
+ - "s3:GetBucketNotification"
+ - "s3:PutBucketNotification"
+ - "sns:ListSubscriptionsByTopic"
+ - "sns:GetTopicAttributes"
+ - "sns:SetTopicAttributes"
+ - "sns:CreateTopic"
+ - "sns:TagResource"
+ - "sns:Publish"
+ - "sns:Subscribe"
+ - "sqs:CreateQueue"
+ - "sqs:DeleteMessage"
+ - "sqs:ReceiveMessage"
+ - "sqs:SendMessage"
+ - "sqs:GetQueueUrl"
+ - "sqs:GetQueueAttributes"
+ - "sqs:SetQueueAttributes"
+ - "sqs:TagQueue"
+ - "sqs:ChangeMessageVisibility"
+ - "sqs:PurgeQueue"
+ Resource:
+ - "{{ aws_s3_metastore_bucket_arn }}"
+ - "arn:aws:sqs:*:*:csms-*"
+ - "arn:aws:sns:*:*:csms-*"
+
+ - Sid: "ManagedFileEventsListStatement"
+ Effect: "Allow"
+ Action:
+ - "sqs:ListQueues"
+ - "sqs:ListQueueTags"
+ - "sns:ListTopics"
+ Resource:
+ - "arn:aws:sqs:*:*:csms-*"
+ - "arn:aws:sns:*:*:csms-*"
+
+ - Sid: "ManagedFileEventsTeardownStatement"
+ Effect: "Allow"
+ Action:
+ - "sns:Unsubscribe"
+ - "sns:DeleteTopic"
+ - "sqs:DeleteQueue"
+ Resource:
+ - "arn:aws:sqs:*:*:csms-*"
+ - "arn:aws:sns:*:*:csms-*"
+ - name: tags
+ value:
+ - Key: Purpose
+ Value: "Unity Catalog Storage Credential"
+ merge:
+ - global_tags
+ skip_validation: true
+ exports:
+ - aws_iam_role_arn: metastore_access_role_arn
+
+# ====================================================================================
+# DBX Workspace
+# ====================================================================================
+
+ - name: databricks_account/workspace
+ props:
+ - name: workspace_name
+ value: "{{ stack_name }}-{{ stack_env }}-workspace"
+ - name: aws_region
+ value: "{{ region }}"
+ - name: credentials_id
+ value: "{{ databricks_credentials_id }}"
+ - name: storage_configuration_id
+ value: "{{ databricks_storage_configuration_id }}"
+ - name: pricing_tier
+ value: PREMIUM
+ exports:
+ - databricks_workspace_name
+ - databricks_workspace_id
+ - databricks_deployment_name
+ - databricks_workspace_status
+ - databricks_workspace_url
+
+ - name: databricks_account/workspace_group
+ props:
+ - name: display_name
+ value: "{{ stack_name }}-{{ stack_env }}-workspace-admins"
+ exports:
+ - databricks_group_id
+ - databricks_group_name
+
+ - name: databricks_account/get_users
+ type: query
+ props:
+ - name: users
+ value:
+ - "javen@stackql.io"
+ - "krimmer@stackql.io"
+ exports:
+ - databricks_workspace_group_members
+
+ - name: databricks_account/update_group_membership
+ type: command
+ props: []
+
+ - name: databricks_account/workspace_permission_assignments
+ props: []
+
+ - name: databricks_workspace/storage_credential
+ props:
+ - name: name
+ value: "{{ stack_name }}_{{ stack_env }}_storage_credential"
+ - name: comment
+ value: "Storage credential for {{ stack_name }} {{ stack_env }} metastore S3 access"
+ - name: read_only
+ value: false
+ - name: aws_iam_role
+ value:
+ role_arn: "{{ metastore_access_role_arn }}"
+ - name: skip_validation
+ value: false
+ exports:
+ - storage_credential_name
+ - storage_credential_external_id
+
+ - name: aws/iam/update_metastore_access_role
+ type: command
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: "Allow"
+ Principal:
+ AWS:
+ - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+ - "{{ metastore_access_role_arn }}"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "{{ storage_credential_external_id }}"
+
+ - name: databricks_workspace/unitycatalog/credential_grants
+ type: command
+ props:
+ - name: privileges
+ value:
+ - "ALL_PRIVILEGES"
+ - "MANAGE"
+ sql: |
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET data__changes = '[{"add": {{ privileges }},"principal": "{{ databricks_group_name }}"}]'
+ WHERE full_name = '{{ storage_credential_name }}' AND
+ securable_type = 'storage_credential' AND
+ deployment_name = '{{ databricks_deployment_name }}';
+
+ - name: databricks_workspace/external_location
+ props:
+ - name: name
+ value: "{{ stack_name }}_{{ stack_env }}_external_location"
+ - name: comment
+ value: "External location for {{ stack_name }} {{ stack_env }} metastore S3 access"
+ - name: url
+ value: "s3://{{ aws_s3_metastore_bucket_name }}/unitycatalog/demo"
+ - name: credential_name
+ value: "{{ storage_credential_name }}"
+ - name: read_only
+ value: false
+ - name: skip_validation
+ value: false
+ exports:
+ - external_location_name
+
+ - name: databricks_workspace/unitycatalog/location_grants
+ type: command
+ props:
+ - name: privileges
+ value:
+ - "ALL_PRIVILEGES"
+ - "MANAGE"
+ sql: |
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET data__changes = '[{"add": {{ privileges }},"principal": "{{ databricks_group_name }}"}]'
+ WHERE full_name = '{{ external_location_name }}' AND
+ securable_type = 'external_location' AND
+ deployment_name = '{{ databricks_deployment_name }}';
+
+exports:
+ - databricks_workspace_name
+ - databricks_workspace_id
+ - databricks_deployment_name
+ - databricks_workspace_status
+ - databricks_workspace_url
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/README.md b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/README.md
new file mode 100644
index 0000000..409894d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/README.md
@@ -0,0 +1,245 @@
+# `stackql-deploy` example project for `databricks`
+
+This exercise is to bootstrap a databricks / aws tenancy using `stackql-deploy`. It is an important use case for platform bootstrap and we are excited to perform it with the `stackql` toolchain. We hope you enjoy and find this valuable. Please drop us a note with your forthright opinion on this and check out our issues on github.
+
+## A word of caution
+
+Please take the greatest care in performing this exercise; it will incur expenses, as it involves creating (and destroying) resources which cost money. Please be aware that you **must** cancel your databricks subscription after completing this exercise, otherwise you will incur ongoing expenses. That is, do **not** skip the section [Cancel databricks subscription](#cancel-databricks-subsription). We strongly advise that you verify all resources are destroyed at the conclusion of this exercise. Web pages and certain behaviours may change, so please be thorough in your verification. We will keep this page up-to-date on a best effort basis only. It is very much a case of owner onus applies.
+
+## Manual Setup
+
+Dependencies:
+
+- aws Account Created.
+- Required clickops to set up databricks on aws:
+ - Turn on aws Marketplace `databricks` offering, using [the aws manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), per Figure S1.
+ - Follow the suggested setup flow as directed, from this page. These clickops steps are necessary at this time for initial account setup. The way I followed this, it created a workspace for me at setup, per Figure S3. We shall not use this one and rather, later on we shall dispose of it; because we do not trust auto-created resources out of hand. In the process of creating the databricks subscription, a second aws account is created.
+ - Copy the databricks account id from basically any web page in the databricks console. This is done by clicking on the user icon at the top RHS and then the UI provides a copy shortcut, per Figure U1. Save this locally for later use, expanded below.
+ - We need the aws account id that was created for the databricks subscription. It is not exactly heralded by the web pages, nor is it actively hidden. It can be captured in a couple of places, including the databricks storage account created in the subscription flow, per Figure XA1. Copy and save this locally for later use, expanded below.
+ - Create a service principal to use as a "CICD agent", using the page shown in Figure S4.
+ - Grant the CICD agent account admin role, using the page shown in Figure S5.
+ - Create a secret for the CICD agent, using the page shown in Figure S6. At the time you create this, you will need to safely store the client secret and client id, as prompted by the web page. These will be used below.
+- Setup your virtual environment, from the root of this repository `cicd/setup/setup-env.sh`.
+
+Now, it is convenient to use environment variables for context. Note that for our example, there is only one aws account apropos, however this is not always the case for an active professional, so while `DATABRICKS_AWS_ACCOUNT_ID` is the same as `AWS_ACCOUNT_ID` here, it need not always be the case. Create a file in the path `examples/databricks/serverless/sec/env.sh` (relative to the root of this repository) with contents of the form:
+
+```bash
+#!/usr/bin/env bash
+
+export AWS_REGION='us-east-1' # or wherever you want
+export AWS_ACCOUNT_ID=''
+export DATABRICKS_ACCOUNT_ID=''
+export DATABRICKS_AWS_ACCOUNT_ID=''
+
+# These need to be created by clickops under [the account level user management page](https://accounts.cloud.databricks.com/user-management).
+export DATABRICKS_CLIENT_ID=''
+export DATABRICKS_CLIENT_SECRET=''
+
+## These can be skipped if you run on [aws cloud shell](https://docs.aws.amazon.com/cloudshell/latest/userguide/welcome.html).
+export AWS_SECRET_ACCESS_KEY=''
+export AWS_ACCESS_KEY_ID=''
+
+```
+
+## Optional step: sanity checks with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+source examples/databricks/serverless/convenience.sh
+stackql shell
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here, that will be shared in a corresponding video):
+
+
+```sql
+registry pull databricks_account v24.12.00279;
+registry pull databricks_workspace v24.12.00279;
+
+-- This will fail if accounts, subscription, or credentials are in error.
+select account_id FROM databricks_account.provisioning.credentials WHERE account_id = '';
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+```
+
+For extra credit, you can (asynchronously) delete the unnecessary workspace with `delete from databricks_account.provisioning.workspaces where account_id = '' and workspace_id = '';`, where you obtain the workspace id from the above query. I have noted that due to some response caching it takes a while to disappear from select queries (much longer than disappearance from the web page), and you may want to bounce the `stackql` session to hurry things along. This is not happening on the `stackql` side, but session bouncing forces a token refresh which can help cache busting.
+
+## Lifecycle management
+
+Time to get down to business. From the root of this repository:
+
+```bash
+python3 -m venv myenv
+source examples/databricks/serverless/convenience.sh
+source venv/bin/activate
+pip install stackql-deploy
+```
+
+> alternatively set the `AWS_REGION`, `AWS_ACCOUNT_ID`, `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_AWS_ACCOUNT_ID` along with provider credentials `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `DATABRICKS_CLIENT_ID`, `DATABRICKS_CLIENT_SECRET`
+
+Then, do a dry run (good for catching **some** environmental issues):
+
+```bash
+stackql-deploy build \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--dry-run
+```
+
+You will see a verbose rendition of what `stackql-deploy` intends to do.
+
+
+Now, let us do it for real:
+
+```bash
+stackql-deploy build \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+The output is quite verbose, concludes in:
+
+```
+2025-02-08 12:51:25,914 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - ā
successfully deployed databricks_workspace
+2025-02-08 12:51:25,915 - stackql-deploy - INFO - deployment completed in 0:04:09.603631
+š build complete
+```
+
+Success!!!
+
+We can also use `stackql-deploy` to assess if our infra is shipshape:
+
+```bash
+stackql-deploy test \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Again, the output is quite verbose, concludes in:
+
+```
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - š¤ set [databricks_workspace_id] to [482604062392118] in exports
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - ā
test passed for databricks_workspace
+2025-02-08 13:15:45,821 - stackql-deploy - INFO - deployment completed in 0:02:30.255860
+š tests complete (dry run: False)
+```
+
+Success!!!
+
+Now, let us teardown our `stackql-deploy` managed infra:
+
+```bash
+stackql-deploy teardown \
+examples/databricks/serverless dev \
+-e AWS_REGION=${AWS_REGION} \
+-e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
+-e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
+-e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries
+```
+
+Takes its time, again verbose, concludes in:
+
+```
+2025-02-08 13:24:17,941 - stackql-deploy - INFO - ā
successfully deleted AWS_iam_cross_account_role
+2025-02-08 13:24:17,942 - stackql-deploy - INFO - deployment completed in 0:03:21.191788
+š§ teardown complete (dry run: False)
+```
+
+Success!!!
+
+## Optional step: verify destruction with stackql
+
+Now, let us do some sanity checks and housekeeping with `stackql`. This is purely optional. From the root of this repository:
+
+```
+
+source examples/databricks/serverless/convenience.sh
+
+stackql shell
+
+```
+
+This will start a `stackql` interactive shell. Here are some commands you can run (I will not place output here):
+
+
+```sql
+
+registry pull databricks_account v24.12.00279;
+
+registry pull databricks_workspace v24.12.00279;
+
+
+
+select account_id, workspace_name, workspace_id, workspace_status from databricks_account.provisioning.workspaces where account_id = '';
+
+```
+
+## Cancel databricks subsription
+
+This is **very** important.
+
+Go to [the aws Marketplace manage subscriptions page](https://console.aws.amazon.com/marketplace/home#/subscriptions), navigate to databricks and then cancel the subscription.
+
+## Figures
+
+
+
+
+**Figure S1**: Create aws databricks subscription.
+
+---
+
+
+
+**Figure S2**: Awaiting aws databricks subscription resources.
+
+---
+
+
+
+**Figure S3**: Auto provisioned workspace.
+
+---
+
+
+
+**Figure U1**: Capture databricks account id.
+
+---
+
+
+
+**Figure XA1**: Capture cross databricks aws account id.
+
+---
+
+
+
+**Figure S4**: Create CICD agent.
+
+---
+
+
+
+**Figure S5**: Grant account admin to CICD agent.
+
+---
+
+
+
+**Figure S6**: Generate secret for CICD agent.
+
+---
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/catalog.iql b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/catalog.iql
new file mode 100644
index 0000000..5d7df7e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/catalog.iql
@@ -0,0 +1,27 @@
+/*+ create */
+INSERT INTO databricks_workspace.unitycatalog.catalogs (
+deployment_name,
+data__name,
+data__comment,
+data__storage_root
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ name }}',
+'{{ comment }}',
+'{{ storage_root }}'
+;
+
+/*+ statecheck */
+SELECT COUNT(*) as count
+FROM databricks_workspace.unitycatalog.catalogs
+WHERE name = '{{ name }}' AND
+deployment_name = '{{ databricks_deployment_name }}'
+AND storage_root = '{{ storage_root }}' AND
+comment = '{{ comment }}';
+
+/*+ exports */
+SELECT name as catalog_name
+FROM databricks_workspace.unitycatalog.catalogs
+WHERE name = '{{ name }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/schema.iql b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/schema.iql
new file mode 100644
index 0000000..b662259
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/schema.iql
@@ -0,0 +1,27 @@
+/*+ create */
+INSERT INTO databricks_workspace.unitycatalog.schemas (
+deployment_name,
+data__name,
+data__catalog_name,
+data__comment
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ name }}',
+'{{ catalog_name }}',
+'{{ comment }}'
+;
+
+/*+ statecheck */
+SELECT COUNT(*) as count
+FROM databricks_workspace.unitycatalog.schemas
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND catalog_name = '{{ catalog_name }}'
+AND name = '{{ name }}';
+
+/*+ exports */
+SELECT name as schema_name
+FROM databricks_workspace.unitycatalog.schemas
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND catalog_name = '{{ catalog_name }}'
+AND name = '{{ name }}';
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/service_principal.iql b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/service_principal.iql
new file mode 100644
index 0000000..355adee
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/databricks_workspace/service_principal.iql
@@ -0,0 +1,31 @@
+/*+ create */
+INSERT INTO databricks_workspace.iam.service_principals (
+deployment_name,
+data__displayName,
+data__active
+)
+SELECT
+'{{ databricks_deployment_name }}',
+'{{ name }}',
+true
+;
+
+/*+ statecheck */
+SELECT COUNT(*) as count
+FROM databricks_workspace.iam.service_principals
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND displayName = '{{ name }}'
+AND active = true;
+
+/*+ exports */
+SELECT id as service_principal_id,
+applicationId as service_principal_application_id,
+displayName as service_principal_name
+FROM databricks_workspace.iam.service_principals
+WHERE deployment_name = '{{ databricks_deployment_name }}'
+AND displayName = '{{ name }}';
+
+/*+ delete */
+DELETE FROM databricks_workspace.iam.service_principals
+WHERE id = '{{ service_principal_id }}' AND
+deployment_name = '{{ databricks_deployment_name }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/snowflake/statement.iql b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/snowflake/statement.iql
new file mode 100644
index 0000000..decce8c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/resources/snowflake/statement.iql
@@ -0,0 +1,19 @@
+/*+ command */
+INSERT INTO snowflake.sqlapi.statements (
+data__statement,
+data__timeout,
+data__database,
+data__schema,
+data__warehouse,
+"User-Agent",
+endpoint
+)
+SELECT
+'{{ statement | sql_escape }}',
+{{ timeout }},
+'{{ database }}',
+'{{ schema }}',
+'{{ warehouse }}',
+'{{ "User-Agent" }}',
+'{{ snowflake_endpoint }}'
+;
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/stackql_manifest.yml
new file mode 100644
index 0000000..c5d0e2c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/databricks/snowflake-interoperability/stackql_manifest.yml
@@ -0,0 +1,287 @@
+version: 1
+name: "snowflake-interoperability"
+description: "Databricks Snowflake Interoperability Example"
+providers:
+ - snowflake
+ - databricks_account
+ - databricks_workspace
+globals:
+ - name: databricks_workspace_name
+ description: databricks workspace name
+ value: "{{ DATABRICKS_WORKSPACE_NAME }}"
+ - name: databricks_metastore_name
+ description: databricks metastore name
+ value: "{{ DATABRICKS_METASTORE_NAME }}"
+ - name: databricks_external_location
+ description: databricks external location
+ value: "{{ DATABRICKS_EXTERNAL_LOCATION }}"
+ - name: databricks_admin_group
+ description: databricks admin group
+ value: "{{ DATABRICKS_ADMIN_GROUP }}"
+ - name: databricks_account_id
+ description: databricks account id
+ value: "{{ DATABRICKS_ACCOUNT_ID }}"
+ - name: snowflake_endpoint
+ description: snowflake endpoint (org-account)
+ value: "{{ SNOWFLAKE_ORG }}-{{ SNOWFLAKE_ACCOUNT }}"
+ - name: snowflake_db
+ description: snowflake database
+ value: "{{ SNOWFLAKE_DB }}"
+ - name: snowflake_schema
+ description: snowflake schema
+ value: "{{ SNOWFLAKE_SCHEMA }}"
+ - name: snowflake_whse
+ description: snowflake warehouse (org-account)
+ value: "{{ SNOWFLAKE_WAREHOUSE }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+
+# ====================================================================================
+# Source Required Variables
+# ====================================================================================
+
+ - name: get_workspace_deployment_name
+ type: query
+ props: []
+ sql: |
+ SELECT
+ deployment_name as databricks_deployment_name,
+ workspace_status,
+ workspace_status_message
+ FROM databricks_account.provisioning.workspaces
+ WHERE account_id = '{{ databricks_account_id }}';
+ exports:
+ - databricks_deployment_name
+ - workspace_status
+ - workspace_status_message
+
+ - name: get_metastore_id
+ type: query
+ props: []
+ sql: |
+ SELECT
+ metastore_id
+ FROM databricks_workspace.unitycatalog.metastores
+ WHERE deployment_name = '{{ databricks_deployment_name }}'
+ AND name = '{{ databricks_metastore_name }}';
+ exports:
+ - metastore_id
+
+# ====================================================================================
+# Enable External Access
+# ====================================================================================
+
+ - name: enable_external_access
+ type: command
+ props: []
+ sql: |
+ UPDATE databricks_workspace.unitycatalog.metastores
+ SET data__external_access_enabled = 'true'
+ WHERE id = '{{ metastore_id }}' AND
+ deployment_name = '{{ databricks_deployment_name }}';
+
+# ====================================================================================
+# DBX UC Catalog and Schema
+# ====================================================================================
+
+ - name: interoperability_catalog
+ file: databricks_workspace/catalog.iql
+ props:
+ - name: name
+ value: uc_interoperability
+ - name: comment
+ value: "Interoperability demonstration catalog for Databricks and Snowflake"
+ - name: storage_root
+ value: "{{ databricks_external_location }}"
+ exports:
+ - catalog_name
+
+ - name: catalog_grants
+ type: command
+ props:
+ - name: privileges
+ value:
+ - "ALL_PRIVILEGES"
+ - "MANAGE"
+ sql: |
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET data__changes = '[{"add": {{ privileges }},"principal": "{{ databricks_admin_group }}"}]'
+ WHERE full_name = '{{ catalog_name }}' AND
+ securable_type = 'catalog' AND
+ deployment_name = '{{ databricks_deployment_name }}';
+
+ - name: interoperability_schema
+ file: databricks_workspace/schema.iql
+ props:
+ - name: name
+ value: demo_schema
+ - name: catalog_name
+        value: "{{ catalog_name }}"
+ - name: comment
+ value: "Demo schema for interoperability"
+ exports:
+ - schema_name
+
+# ====================================================================================
+# Create Bronze Iceberg Table, Silver and Gold Delta Tables - Do this in the Workspace
+# ===================================================================================
+
+# ====================================================================================
+# Create Service Principal and Secret for Catalog Integration
+# ====================================================================================
+
+ - name: service_principal
+ file: databricks_workspace/service_principal.iql
+ props:
+ - name: name
+ value: interoperability_service_principal
+ exports:
+ - service_principal_name
+ - service_principal_application_id
+ - service_principal_id
+
+ - name: service_principal_grant_external_use
+ type: command
+ props:
+ - name: privileges
+ value:
+ - "EXTERNAL_USE_SCHEMA"
+ - "SELECT"
+ - "USE_CATALOG"
+ - "USE_SCHEMA"
+ sql: |
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET data__changes = '[{"add": {{ privileges }},"principal": "{{ service_principal_application_id }}"}]'
+ WHERE full_name = '{{ catalog_name }}' AND
+ securable_type = 'catalog' AND
+ deployment_name = '{{ databricks_deployment_name }}';
+
+ - name: service_principal_secret
+ type: query
+ props:
+ - name: name
+ value: interoperability_service_principal
+ sql: |
+ INSERT INTO databricks_account.oauth.service_principal_secrets (
+ account_id,
+ service_principal_id
+ )
+ SELECT
+ '{{ databricks_account_id }}',
+ '{{ service_principal_id }}'
+ RETURNING secret
+ ;
+ exports:
+ - secret
+ # protected:
+ # - secret
+
+# ====================================================================================
+# (Snowflake) Create Catalog Integration and Related Objects
+# ====================================================================================
+
+ - name: snowflake_catalog_integration
+ type: command
+ file: snowflake/statement.iql
+ props:
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ CREATE CATALOG INTEGRATION IF NOT EXISTS unity_catalog_demo_int
+ CATALOG_SOURCE = ICEBERG_REST
+ TABLE_FORMAT = ICEBERG
+ CATALOG_NAMESPACE = '{{ schema_name }}'
+ REST_CONFIG = (
+ CATALOG_URI = 'https://{{ databricks_deployment_name }}.cloud.databricks.com/api/2.1/unity-catalog/iceberg-rest'
+ WAREHOUSE = '{{ catalog_name }}'
+ ACCESS_DELEGATION_MODE = VENDED_CREDENTIALS
+ )
+ REST_AUTHENTICATION = (
+ TYPE = OAUTH
+ OAUTH_TOKEN_URI = 'https://{{ databricks_deployment_name }}.cloud.databricks.com/oidc/v1/token'
+ OAUTH_CLIENT_ID = '{{ service_principal_application_id }}'
+ OAUTH_CLIENT_SECRET = '{{ secret }}'
+ OAUTH_ALLOWED_SCOPES = ('all-apis', 'sql')
+ )
+ ENABLED = TRUE
+ REFRESH_INTERVAL_SECONDS = 30
+ - name: timeout
+ value: 10
+ - name: database
+ value: '{{ snowflake_db }}'
+ - name: schema
+ value: '{{ snowflake_schema }}'
+ - name: warehouse
+ value: '{{ snowflake_whse }}'
+
+ - name: snowflake_iceberg_bronze_table
+ file: snowflake/statement.iql
+ type: command
+ props:
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ CREATE OR REPLACE ICEBERG TABLE retail_sales_bronze
+ CATALOG = 'unity_catalog_demo_int'
+ CATALOG_TABLE_NAME = 'retail_sales_bronze'
+ AUTO_REFRESH = TRUE
+ - name: timeout
+ value: 30
+ - name: database
+ value: '{{ snowflake_db }}'
+ - name: schema
+ value: '{{ snowflake_schema }}'
+ - name: warehouse
+ value: '{{ snowflake_whse }}'
+
+ - name: snowflake_delta_silver_table
+ file: snowflake/statement.iql
+ type: command
+ props:
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ CREATE OR REPLACE ICEBERG TABLE retail_sales_silver
+ CATALOG = 'unity_catalog_demo_int'
+ CATALOG_TABLE_NAME = 'retail_sales_silver'
+ AUTO_REFRESH = TRUE
+ - name: timeout
+ value: 30
+ - name: database
+ value: '{{ snowflake_db }}'
+ - name: schema
+ value: '{{ snowflake_schema }}'
+ - name: warehouse
+ value: '{{ snowflake_whse }}'
+
+ - name: snowflake_delta_gold_table
+ file: snowflake/statement.iql
+ type: command
+ props:
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ CREATE OR REPLACE ICEBERG TABLE retail_sales_gold
+ CATALOG = 'unity_catalog_demo_int'
+ CATALOG_TABLE_NAME = 'retail_sales_gold'
+ AUTO_REFRESH = TRUE
+ - name: timeout
+ value: 30
+ - name: database
+ value: '{{ snowflake_db }}'
+ - name: schema
+ value: '{{ snowflake_schema }}'
+ - name: warehouse
+ value: '{{ snowflake_whse }}'
+
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/README.md b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/README.md
new file mode 100644
index 0000000..4ef7189
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/README.md
@@ -0,0 +1,66 @@
+# example `stackql-deploy` stack
+
+Based upon the [Kubernetes the Hard Way](https://github.com/kelseyhightower/kubernetes-the-hard-way) project.
+
+## about `stackql-deploy`
+
+[`stackql-deploy`](https://pypi.org/project/stackql-deploy/) is a multi cloud deployment automation and testing framework which is an alternative to Terraform or similar IaC tools. `stackql-deploy` uses a declarative model/ELT based approach to cloud resource deployment (inspired by [`dbt`](https://www.getdbt.com/)). Advantages of `stackql-deploy` include:
+
+- declarative framework
+- no state file (state is determined from the target environment)
+- multi-cloud/omni-cloud ready
+- includes resource tests which can include secure config tests
+
+## installing `stackql-deploy`
+
+`stackql-deploy` is installed as a python based CLI using...
+
+```bash
+pip install stackql-deploy
+# or
+pip3 install stackql-deploy
+```
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## getting started with `stackql-deploy`
+
+Once installed, use the `init` command to scaffold a sample project directory to get started:
+
+```bash
+stackql-deploy init k8s-the-hard-way
+```
+
+this will create a directory named `k8s-the-hard-way` which can be updated for your stack, as you can see in this project.
+
+## deploying using `stackql-deploy`
+
+```bash
+export GOOGLE_CREDENTIALS=$(cat ./testcreds/k8s-the-hard-way-project-demo-service-account.json)
+# deploy a stack
+stackql-deploy build \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run \
+--log-level DEBUG
+
+# test a stack
+stackql-deploy test \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+
+# teardown a stack
+stackql-deploy teardown \
+examples/google/k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+```
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/firewalls.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/firewalls.iql
new file mode 100644
index 0000000..d69607b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/firewalls.iql
@@ -0,0 +1,54 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+
+/*+ create */
+INSERT INTO google.compute.firewalls
+(
+ project,
+ data__name,
+ data__network,
+ data__direction,
+ data__sourceRanges,
+ data__allowed
+)
+SELECT
+ '{{ project }}',
+ '{{ fw_name}}',
+ '{{ vpc_link }}',
+ '{{ fw_direction }}',
+ '{{ fw_source_ranges }}',
+ '{{ fw_allowed }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+network = '{{ vpc_link }}' as test_network,
+direction = '{{ fw_direction }}' as test_direction,
+JSON_EQUAL(allowed, '{{ fw_allowed }}') as test_allowed,
+JSON_EQUAL(sourceRanges, '{{ fw_source_ranges }}') as test_source_ranges
+FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+) t
+WHERE test_network = 1
+AND test_direction = 1
+AND test_allowed = 1
+AND test_source_ranges = 1;
+
+/*+ update */
+UPDATE google.compute.firewalls
+SET
+ data__network = '{{ vpc_link }}',
+ data__direction = '{{ fw_direction }}',
+ data__sourceRanges = '{{ fw_source_ranges }}',
+ data__allowed = '{{ fw_allowed }}'
+WHERE firewall = '{{ fw_name}}'
+AND project = '{{ project }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND firewall = '{{ fw_name }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql
new file mode 100644
index 0000000..2f25e4e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/forwarding_rule.iql
@@ -0,0 +1,36 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.forwarding_rules
+WHERE region = '{{ region }}'
+AND project = '{{ project }}'
+AND forwardingRule = '{{ forwarding_rule_name }}'
+
+/*+ create */
+INSERT INTO google.compute.forwarding_rules(
+ project,
+ region,
+ data__name,
+ data__IPAddress,
+ data__loadBalancingScheme,
+ data__portRange,
+ data__target
+)
+SELECT
+ '{{ project }}',
+ '{{ region }}',
+ '{{ forwarding_rule_name }}',
+ '{{ address }}',
+ '{{ forwarding_rule_load_balancing_scheme }}',
+ '{{ forwarding_rule_port_range }}',
+ '{{ target_pool_link }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.forwarding_rules
+WHERE region = '{{ region }}'
+AND project = '{{ project }}'
+AND forwardingRule = '{{ forwarding_rule_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.forwarding_rules
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND forwardingRule = '{{ forwarding_rule_name }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql
new file mode 100644
index 0000000..36d7aef
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/get_controller_instances.iql
@@ -0,0 +1,6 @@
+/*+ exports */
+SELECT JSON_GROUP_ARRAY(json_object('instance', selfLink)) as controller_instances
+FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name like '%-{{ stack_env }}-controller-%'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/health_checks.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/health_checks.iql
new file mode 100644
index 0000000..7154450
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/health_checks.iql
@@ -0,0 +1,45 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ create */
+INSERT INTO google.compute.http_health_checks(
+ project,
+ data__name,
+ data__checkIntervalSec,
+ data__description,
+ data__healthyThreshold,
+ data__host,
+ data__port,
+ data__requestPath,
+ data__timeoutSec,
+ data__unhealthyThreshold
+)
+SELECT
+ '{{ project }}',
+ '{{ health_check_name }}',
+ {{ health_check_interval_sec }},
+ '{{ health_check_description }}',
+ {{ health_check_healthy_threshold }},
+ '{{ health_check_host }}',
+ {{ health_check_port }},
+ '{{ health_check_path }}',
+ {{ health_check_timeout_sec }},
+ {{ health_check_unhealthy_threshold }}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
+
+/*+ exports */
+SELECT selfLink as health_check_link
+FROM google.compute.http_health_checks
+WHERE project = '{{ project }}'
+AND httpHealthCheck = '{{ health_check_name }}'
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/instances.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/instances.iql
new file mode 100644
index 0000000..bf482fa
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/instances.iql
@@ -0,0 +1,61 @@
+/*+ exists */
+SELECT
+CASE
+ WHEN COUNT(*) = {{ num_instances | int }} THEN 1
+ ELSE 0
+END AS count
+FROM google.compute.instances
+WHERE
+project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name IN ({% for i in range(num_instances | int) %}'{{ instance_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ create */
+{% for network_interface in network_interfaces | from_json %}
+INSERT INTO google.compute.instances
+ (
+ zone,
+ project,
+ data__name,
+ data__machineType,
+ data__canIpForward,
+ data__deletionProtection,
+ data__scheduling,
+ data__networkInterfaces,
+ data__disks,
+ data__serviceAccounts,
+ data__tags
+ )
+ SELECT
+'{{ default_zone }}',
+'{{ project }}',
+'{{ instance_name_prefix }}-{{ loop.index }}',
+'{{ machine_type }}',
+true,
+false,
+'{{ scheduling }}',
+'[ {{ network_interface | tojson }} ]',
+'{{ disks }}',
+'{{ service_accounts }}',
+'{{ tags }}';
+{% endfor %}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT
+CASE
+ WHEN COUNT(*) = {{ num_instances | int }} THEN 1
+ ELSE 0
+END AS count
+FROM google.compute.instances
+WHERE
+project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND name IN ({% for i in range(num_instances | int) %}'{{ instance_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ delete */
+{% for network_interface in network_interfaces | from_json %}
+DELETE FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND instance = '{{ instance_name_prefix }}-{{ loop.index }}';
+{% endfor %}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/network.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/network.iql
new file mode 100644
index 0000000..c1b39d7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/network.iql
@@ -0,0 +1,43 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+
+/*+ create */
+INSERT INTO google.compute.networks
+(
+ project,
+ data__name,
+ data__autoCreateSubnetworks,
+ data__routingConfig
+)
+SELECT
+'{{ project }}',
+'{{ vpc_name }}',
+false,
+'{"routingMode": "REGIONAL"}'
+
+/*+ update */
+UPDATE google.compute.networks
+SET data__autoCreateSubnetworks = false,
+data__routingConfig = '{"routingMode": "REGIONAL"}'
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+AND autoCreateSubnetworks = false
+AND JSON_EXTRACT(routingConfig, '$.routingMode') = 'REGIONAL'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.networks
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ exports */
+SELECT
+'{{ vpc_name }}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/public_address.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/public_address.iql
new file mode 100644
index 0000000..022db98
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/public_address.iql
@@ -0,0 +1,35 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ create */
+INSERT INTO google.compute.addresses
+(
+ project,
+ region,
+ data__name
+)
+SELECT
+'{{ project }}',
+'{{ region }}',
+'{{ address_name }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ delete */
+DELETE FROM google.compute.addresses
+WHERE address = '{{ address_name }}' AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ exports */
+SELECT address
+FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/routes.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/routes.iql
new file mode 100644
index 0000000..e40be78
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/routes.iql
@@ -0,0 +1,45 @@
+/*+ exists */
+SELECT
+ CASE
+ WHEN COUNT(*) = {{ num_routes | int }} THEN 1
+ ELSE 0
+ END AS count
+FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND name IN ({% for i in range(num_routes | int) %}'{{ route_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ create */
+{% for route in route_data | from_json %}
+INSERT INTO google.compute.routes(
+ project,
+ data__destRange,
+ data__name,
+ data__network,
+ data__nextHopIp,
+ data__priority
+)
+SELECT
+ '{{ project }}',
+ '{{ route.dest_range }}',
+ '{{ route_name_prefix }}-{{ loop.index }}',
+ '{{ vpc_link }}',
+ '{{ route.next_hop_ip }}',
+ {{ route_priority }};
+{% endfor %}
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT
+ CASE
+ WHEN COUNT(*) = {{ num_routes | int }} THEN 1
+ ELSE 0
+ END AS count
+FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND name IN ({% for i in range(num_routes | int) %}'{{ route_name_prefix }}-{{ loop.index }}'{% if not loop.last %}, {% endif %}{% endfor %})
+
+/*+ delete, retries=20, retry_delay=10 */
+{% for route in route_data | from_json %}
+DELETE FROM google.compute.routes
+WHERE project = '{{ project }}'
+AND route = '{{ route_name_prefix }}-{{ loop.index }}';
+{% endfor %}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/subnetwork.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/subnetwork.iql
new file mode 100644
index 0000000..7d55eb7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/subnetwork.iql
@@ -0,0 +1,56 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+AND network = '{{ vpc_link }}'
+
+/*+ create, retries=5, retry_delay=10 */
+INSERT INTO google.compute.subnetworks
+(
+ project,
+ region,
+ data__name,
+ data__network,
+ data__ipCidrRange,
+ data__privateIpGoogleAccess
+)
+SELECT
+'{{ project }}',
+'{{ region }}',
+'{{ subnet_name }}',
+'{{ vpc_link }}',
+'{{ ip_cidr_range }}',
+true
+
+/*+ update */
+UPDATE google.compute.subnetworks
+SET
+data__name = '{{ subnet_name }}',
+data__network = '{{ vpc_link }}',
+data__ipCidrRange = '{{ ip_cidr_range }}',
+data__privateIpGoogleAccess = true
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.subnetworks
+WHERE project = '{{ project }}'
+AND region = '{{ region }}'
+AND subnetwork = '{{ subnet_name }}'
+AND network = '{{ vpc_link }}'
+
+/*+ delete */
+DELETE FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}' AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ exports */
+SELECT
+name as subnet_name,
+selfLink as subnet_link
+FROM google.compute.subnetworks
+WHERE subnetwork = '{{ subnet_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/target_pool.iql b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/target_pool.iql
new file mode 100644
index 0000000..66db671
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/resources/target_pool.iql
@@ -0,0 +1,42 @@
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ create */
+INSERT INTO google.compute.target_pools(
+ project,
+ region,
+ data__name,
+ data__healthChecks,
+ data__instances,
+ data__sessionAffinity
+)
+SELECT
+ '{{ project }}',
+ '{{ region }}',
+ '{{ target_pool_name }}',
+ '{{ target_pool_health_checks }}',
+ '{{ target_pool_instances }}',
+ '{{ target_pool_session_affinity }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.target_pools
+WHERE project = '{{ project }}'
+ AND region = '{{ region }}'
+ AND targetPool = '{{ target_pool_name }}'
+
+/*+ exports */
+SELECT
+selfLink as target_pool_link
+FROM google.compute.target_pools
+WHERE targetPool = '{{ target_pool_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
diff --git a/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/stackql_manifest.yml
new file mode 100644
index 0000000..e3f0d0e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/k8s-the-hard-way/stackql_manifest.yml
@@ -0,0 +1,254 @@
+version: 1
+name: kubernetes-the-hard-way
+description: stackql-deploy example for kubernetes-the-hard-way
+providers:
+ - google
+globals:
+- name: project
+ description: google project name
+ value: "{{ GOOGLE_PROJECT }}"
+- name: region
+ value: australia-southeast1
+- name: default_zone
+ value: australia-southeast1-a
+resources:
+- name: network
+ description: vpc network for k8s-the-hard-way sample app
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ exports:
+ - vpc_name
+ - vpc_link
+- name: subnetwork
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-subnet"
+ - name: ip_cidr_range
+ values:
+ prd:
+ value: 192.168.0.0/16
+ sit:
+ value: 10.10.0.0/16
+ dev:
+ value: 10.240.0.0/24
+ exports:
+ - subnet_name
+ - subnet_link
+- name: public_address
+ props:
+ - name: address_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-ip-addr"
+ exports:
+ - address
+- name: controller_instances
+ file: instances.iql
+ type: multi
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-controller"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "controller"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ sit:
+ value:
+ - {networkIP: "10.10.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ prd:
+ value:
+ - {networkIP: "192.168.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: worker_instances
+ file: instances.iql
+ type: multi
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-worker"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "worker"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ sit:
+ value:
+ - {networkIP: "10.10.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.10.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ prd:
+ value:
+ - {networkIP: "192.168.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "192.168.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: health_checks
+ props:
+ - name: health_check_name
+ value: "{{ stack_name }}-{{ stack_env }}-kubernetes-health-check"
+ - name: health_check_interval_sec
+ value: 5
+ - name: health_check_description
+ value: Kubernetes Health Check
+ - name: health_check_timeout_sec
+ value: 5
+ - name: health_check_healthy_threshold
+ value: 2
+ - name: health_check_unhealthy_threshold
+ value: 2
+ - name: health_check_host
+ value: kubernetes.default.svc.cluster.local
+ - name: health_check_port
+ value: 80
+ - name: health_check_path
+ value: /healthz
+ exports:
+ - health_check_link
+- name: internal_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-internal-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["10.240.0.0/24", "10.200.0.0/16"]
+ prd:
+ value: ["192.168.0.0/16"]
+ sit:
+ value: ["10.10.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}, {IPProtocol: udp}, {IPProtocol: icmp}]
+- name: external_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-external-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ value: ["0.0.0.0/0"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp, ports: ["22"]}, {IPProtocol: tcp, ports: ["6443"]},{IPProtocol: icmp}]
+- name: health_check_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-health-check-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ value: ["209.85.152.0/22", "209.85.204.0/22", "35.191.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}]
+- name: get_controller_instances
+ type: query
+ exports:
+ - controller_instances
+- name: target_pool
+ props:
+ - name: target_pool_name
+ value: "{{ stack_name }}-{{ stack_env }}-target-pool"
+ - name: target_pool_session_affinity
+ value: NONE
+ - name: target_pool_health_checks
+ value: ["{{ health_check_link }}"]
+ - name: target_pool_instances
+ value: "{{ controller_instances }}"
+ exports:
+ - target_pool_link
+- name: forwarding_rule
+ props:
+ - name: forwarding_rule_name
+ value: "{{ stack_name }}-{{ stack_env }}-forwarding-rule"
+ - name: forwarding_rule_load_balancing_scheme
+ value: EXTERNAL
+ - name: forwarding_rule_port_range
+ value: 6443
+- name: routes
+ props:
+ - name: num_routes
+ value: 3
+ - name: route_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-route"
+ - name: route_priority
+ value: 1000
+ - name: route_data
+ values:
+ dev:
+ value:
+ - {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
+ - {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
+ - {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
+ sit:
+ value:
+ - {dest_range: "10.12.0.0/24", next_hop_ip: "10.10.0.20"}
+ - {dest_range: "10.12.1.0/24", next_hop_ip: "10.10.0.21"}
+ - {dest_range: "10.12.2.0/24", next_hop_ip: "10.10.0.22"}
+ prd:
+ value:
+ - {dest_range: "172.16.1.0/24", next_hop_ip: "192.168.0.20"}
+ - {dest_range: "172.16.2.0/24", next_hop_ip: "192.168.0.21"}
+ - {dest_range: "172.16.3.0/24", next_hop_ip: "192.168.0.22"}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/README.md b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/README.md
new file mode 100644
index 0000000..486de76
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/README.md
@@ -0,0 +1,72 @@
+# example `stackql-deploy` stack
+
+Based upon the [__terraform-google-load-balanced-vms__](https://github.com/GoogleCloudPlatform/terraform-google-load-balanced-vms) project.
+
+
+
+## about `stackql-deploy`
+
+[`stackql-deploy`](https://pypi.org/project/stackql-deploy/) is a multi cloud deployment automation and testing framework which is an alternative to Terraform or similar IaC tools. `stackql-deploy` uses a declarative model/ELT based approach to cloud resource deployment (inspired by [`dbt`](https://www.getdbt.com/)). Advantages of `stackql-deploy` include:
+
+- declarative framework
+- no state file (state is determined from the target environment)
+- multi-cloud/omni-cloud ready
+- includes resource tests which can include secure config tests
+
+## installing `stackql-deploy`
+
+`stackql-deploy` is installed as a python based CLI using...
+
+```bash
+pip install stackql-deploy
+# or
+pip3 install stackql-deploy
+```
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## getting started with `stackql-deploy`
+
+Once installed, use the `init` command to scaffold a sample project directory to get started:
+
+```bash
+stackql-deploy init load-balanced-vms
+```
+
+this will create a directory named `load-balanced-vms` which can be updated for your stack, as you can see in this project.
+
+## deploying using `stackql-deploy`
+
+```bash
+export GOOGLE_CREDENTIALS=$(cat ./testcreds/stackql-deploy-project-demo-service-account.json)
+# deploy a stack
+stackql-deploy build \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run \
+--log-level DEBUG
+
+# test a stack
+stackql-deploy test \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+
+# teardown a stack
+stackql-deploy teardown \
+examples/google/load-balanced-vms \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+```
+
+
+
+stackql-deploy-project
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/example.tf b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/example.tf
new file mode 100644
index 0000000..24e7b24
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/example.tf
@@ -0,0 +1,107 @@
+
+# Create a Network Security Group and rule
+resource "azurerm_network_security_group" "tfexample" {
+ name = "my-terraform-nsg"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+
+ security_rule {
+ name = "HTTP"
+ priority = 1001
+ direction = "Inbound"
+ access = "Allow"
+ protocol = "Tcp"
+ source_port_range = "*"
+ destination_port_range = "8080"
+ source_address_prefix = "*"
+ destination_address_prefix = "*"
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Create a Network Interface
+resource "azurerm_network_interface" "tfexample" {
+ name = "my-terraform-nic"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+
+ ip_configuration {
+ name = "my-terraform-nic-ip-config"
+ subnet_id = azurerm_subnet.tfexample.id
+ private_ip_address_allocation = "Dynamic"
+ public_ip_address_id = azurerm_public_ip.tfexample.id
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Create a Network Interface Security Group association
+resource "azurerm_network_interface_security_group_association" "tfexample" {
+ network_interface_id = azurerm_network_interface.tfexample.id
+ network_security_group_id = azurerm_network_security_group.tfexample.id
+}
+
+# Create a Virtual Machine
+resource "azurerm_linux_virtual_machine" "tfexample" {
+ name = "my-terraform-vm"
+ location = azurerm_resource_group.tfexample.location
+ resource_group_name = azurerm_resource_group.tfexample.name
+ network_interface_ids = [azurerm_network_interface.tfexample.id]
+ size = "Standard_DS1_v2"
+ computer_name = "myvm"
+ admin_username = "azureuser"
+ admin_password = "Password1234!"
+ disable_password_authentication = false
+
+ source_image_reference {
+ publisher = "Canonical"
+ offer = "UbuntuServer"
+ sku = "18.04-LTS"
+ version = "latest"
+ }
+
+ os_disk {
+ name = "my-terraform-os-disk"
+ storage_account_type = "Standard_LRS"
+ caching = "ReadWrite"
+ }
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Configurate to run automated tasks in the VM start-up
+resource "azurerm_virtual_machine_extension" "tfexample" {
+ name = "hostname"
+ virtual_machine_id = azurerm_linux_virtual_machine.tfexample.id
+ publisher = "Microsoft.Azure.Extensions"
+ type = "CustomScript"
+ type_handler_version = "2.1"
+
+ settings = < index.html ; nohup busybox httpd -f -p 8080 &"
+ }
+ SETTINGS
+
+ tags = {
+ environment = "my-terraform-env"
+ }
+}
+
+# Data source to access the properties of an existing Azure Public IP Address
+data "azurerm_public_ip" "tfexample" {
+ name = azurerm_public_ip.tfexample.name
+ resource_group_name = azurerm_linux_virtual_machine.tfexample.resource_group_name
+}
+
+# Output variable: Public IP address
+output "public_ip" {
+ value = data.azurerm_public_ip.tfexample.ip_address
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/resources/project_services.iql b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/resources/project_services.iql
new file mode 100644
index 0000000..d6a1fcb
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/resources/project_services.iql
@@ -0,0 +1,47 @@
+/*+ exists */
+SELECT name FROM google.serviceusage.services
+WHERE parent = '219788095364'
+AND parentType = 'projects'
+AND filter = 'state:ENABLED'
+AND name = 'compute.googleapis.com';
+
+
+projects//services/cloudtrace.googleapis.com
+
+SELECT * FROM google.serviceusage.services
+WHERE name = 'projects/123/services/serviceusage.googleapis.com'
+
+parent, parentType
+
+
+name string The resource name of the consumer and service. A valid name would be: - projects/123/services/serviceusage.googleapis.com
+config object The configuration of the service.
+parent string The resource name of the consumer. A valid name would be: - projects/123
+state string Whether or not the service has been enabled for use by the consumer.
+
+
+
+/*+ createorupdate */
+{% for network_interface in network_interfaces | from_json %}
+DELETE FROM google.compute.instances
+WHERE project = '{{ project }}'
+AND zone = '{{ default_zone }}'
+AND instance = '{{ instance_name_prefix }}-{{ loop.index }}';
+{% endfor %}
+
+
+
+
+{{ range .root_projects }}
+{{ $project := . }}
+{{ range .apis }}
+EXEC google.serviceusage.services.enable
+@name = (
+ SELECT
+ 'projects/' || name || '/services/{{ . }}'
+ FROM google.cloudresourcemanager.projects
+ WHERE parent='{{ $global.organization_id }}'
+ and displayName= '{{ $project.displayName }}'
+);
+{{end}}
+{{end}}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/stackql_manifest.yml
new file mode 100644
index 0000000..3b0feb2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/google/load-balanced-vms/stackql_manifest.yml
@@ -0,0 +1,153 @@
+version: 1
+name: "gcp-stack"
+description: StackQL-Deploy example for GCP infrastructure setup
+providers:
+ - google
+globals:
+ - name: project_id
+ description: Google Cloud Project ID
+ value: "{{ GOOGLE_PROJECT_ID }}"
+ - name: region
+ description: GCP region
+ value: "us-central1"
+ - name: zone
+ description: GCP zone
+ value: "us-central1-a"
+resources:
+ - name: project_services
+ props:
+ - name: apis
+ value:
+ - compute.googleapis.com
+ # - name: vpc_network
+ # props:
+ # - name: network_name
+ # value: "{{ stack_name }}-network"
+ # - name: subnets
+ # value:
+ # - name: "{{ stack_name }}-subnet"
+ # region: "{{ region }}"
+ # cidr_block: "10.10.10.0/24"
+ # exports:
+ # - network_id
+ # - subnet_id
+ # - name: firewall_rules
+ # props:
+ # - name: allow_ssh
+ # value:
+ # - name: "{{ stack_name }}-allow-ssh"
+ # network: "{{ network_id }}"
+ # allow:
+ # - protocol: "tcp"
+ # ports: ["22"]
+ # source_ranges: ["0.0.0.0/0"]
+ # - name: allow_healthchecks
+ # value:
+ # - name: "{{ stack_name }}-allow-healthchecks"
+ # network: "{{ network_id }}"
+ # allow:
+ # - protocol: "tcp"
+ # source_ranges: ["35.191.0.0/16", "209.85.152.0/22", "209.85.204.0/22"]
+ # exports:
+ # - firewall_rule_ids
+ # - name: compute_instance
+ # props:
+ # - name: instance_name
+ # value: "{{ stack_name }}-exemplar"
+ # - name: machine_type
+ # value: "e2-medium"
+ # - name: boot_disk
+ # value:
+ # - image: "debian-10"
+ # size: 200
+ # - name: network_interface
+ # value:
+ # - subnet: "{{ subnet_id }}"
+ # access_config: []
+ # - name: metadata_startup_script
+ # value: |
+ # apt-get update -y
+ # apt-get install nginx -y
+ # echo 'Hello, StackQL!' > /var/www/html/index.html
+ # exports:
+ # - instance_id
+ # - instance_self_link
+ # - name: instance_snapshot
+ # props:
+ # - name: snapshot_name
+ # value: "{{ stack_name }}-snapshot"
+ # - name: source_disk
+ # value: "{{ instance_self_link }}"
+ # - name: storage_locations
+ # value: ["{{ region }}"]
+ # exports:
+ # - snapshot_id
+ # - name: compute_image
+ # props:
+ # - name: image_name
+ # value: "{{ stack_name }}-image"
+ # - name: source_snapshot
+ # value: "{{ snapshot_id }}"
+ # exports:
+ # - image_id
+ # - name: instance_template
+ # props:
+ # - name: template_name
+ # value: "{{ stack_name }}-template"
+ # - name: machine_type
+ # value: "e2-micro"
+ # - name: disk
+ # value:
+ # - source_image: "{{ image_id }}"
+ # auto_delete: true
+ # - name: network_interface
+ # value:
+ # - subnet: "{{ subnet_id }}"
+ # exports:
+ # - template_id
+ # - name: managed_instance_group
+ # props:
+ # - name: mig_name
+ # value: "{{ stack_name }}-mig"
+ # - name: zone
+ # value: "{{ zone }}"
+ # - name: target_size
+ # value: 3
+ # - name: instance_template
+ # value: "{{ template_id }}"
+ # exports:
+ # - mig_id
+ # - name: load_balancer
+ # props:
+ # - name: lb_name
+ # value: "{{ stack_name }}-lb"
+ # - name: backend_services
+ # value:
+ # - backend:
+ # group: "{{ mig_id }}"
+ # balancing_mode: UTILIZATION
+ # capacity_scaler: 1
+ # - name: health_checks
+ # value:
+ # - name: "{{ stack_name }}-health-check"
+ # port: 80
+ # request_path: "/"
+ # exports:
+ # - lb_ip
+ # - name: health_check_firewall
+ # props:
+ # - name: fw_name
+ # value: "{{ stack_name }}-allow-health-check-fw"
+ # - name: fw_direction
+ # value: "INGRESS"
+ # - name: fw_source_ranges
+ # value: ["35.191.0.0/16", "209.85.152.0/22", "209.85.204.0/22"]
+ # - name: fw_allowed
+ # value:
+ # - protocol: "tcp"
+ # exports:
+ # - fw_id
+ # - name: health_check_test
+  #   type: query
+  #   exports:
+  #     - health_check_result
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/README.md b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/README.md
new file mode 100644
index 0000000..78215d4
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/README.md
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `snowflake`
+
+> for starter projects using other providers, try `stackql-deploy init examples/snowflake/entitlements --provider=aws` or `stackql-deploy init examples/snowflake/entitlements --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `snowflake` provider:
+
+- [`snowflake` provider docs](https://stackql.io/registry/snowflake)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `azure` and `aws` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `snowflake` provider, `SNOWFLAKE_PAT` must be set, for more information on authentication to `snowflake` see the [`snowflake` provider documentation](https://snowflake.stackql.io/providers/snowflake).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment-environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named examples/snowflake/entitlements to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build examples/snowflake/entitlements sit \
+-e SNOWFLAKE_ORG=OKXVNMC -e SNOWFLAKE_ACCOUNT=VH34026
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build examples/snowflake/entitlements sit \
+-e SNOWFLAKE_ORG=OKXVNMC -e SNOWFLAKE_ACCOUNT=VH34026 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test examples/snowflake/entitlements sit \
+-e SNOWFLAKE_ORG=OKXVNMC -e SNOWFLAKE_ACCOUNT=VH34026
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown examples/snowflake/entitlements sit \
+-e SNOWFLAKE_ORG=OKXVNMC -e SNOWFLAKE_ACCOUNT=VH34026
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/databases.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/databases.iql
new file mode 100644
index 0000000..3f39f52
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/databases.iql
@@ -0,0 +1,83 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM snowflake.database.databases
+WHERE name = '{{ database_name }}'
+AND endpoint = '{{ endpoint }}';
+
+/*+ statecheck, retries=1, retry_delay=2 */
+SELECT COUNT(*) as count
+FROM snowflake.database.databases
+WHERE name = '{{ database_name }}'
+AND endpoint = '{{ endpoint }}'
+AND kind = '{{ kind }}'
+AND comment = '{{ comment }}'
+AND data_retention_time_in_days = {{ data_retention_time_in_days }}
+AND log_level = '{{ log_level }}'
+AND max_data_extension_time_in_days = {{ max_data_extension_time_in_days }}
+AND suspend_task_after_num_failures = {{ suspend_task_after_num_failures }}
+AND trace_level = '{{ trace_level }}'
+AND user_task_managed_initial_warehouse_size = '{{ user_task_managed_initial_warehouse_size }}'
+AND serverless_task_min_statement_size = '{{ serverless_task_min_statement_size }}'
+AND serverless_task_max_statement_size = '{{ serverless_task_max_statement_size }}'
+AND user_task_timeout_ms = {{ user_task_timeout_ms }}
+;
+
+/*+ create */
+INSERT INTO snowflake.database.databases (
+data__name,
+data__kind,
+data__comment,
+data__data_retention_time_in_days,
+data__log_level,
+data__max_data_extension_time_in_days,
+data__suspend_task_after_num_failures,
+data__trace_level,
+data__user_task_managed_initial_warehouse_size,
+data__serverless_task_min_statement_size,
+data__serverless_task_max_statement_size,
+data__user_task_timeout_ms,
+endpoint
+)
+SELECT
+'{{ database_name }}',
+'{{ kind }}',
+'{{ comment }}',
+ {{ data_retention_time_in_days }},
+'{{ log_level }}',
+ {{ max_data_extension_time_in_days }},
+ {{ suspend_task_after_num_failures }},
+'{{ trace_level }}',
+'{{ user_task_managed_initial_warehouse_size }}',
+'{{ serverless_task_min_statement_size }}',
+'{{ serverless_task_max_statement_size }}',
+ {{ user_task_timeout_ms }},
+'{{ endpoint }}'
+;
+
+/*+ update */
+REPLACE snowflake.database.databases
+SET
+kind = '{{ kind }}',
+comment = '{{ comment }}',
+data_retention_time_in_days = '{{ data_retention_time_in_days }}',
+default_ddl_collation = '{{ default_ddl_collation }}',
+log_level = '{{ log_level }}',
+max_data_extension_time_in_days = '{{ max_data_extension_time_in_days }}',
+suspend_task_after_num_failures = '{{ suspend_task_after_num_failures }}',
+trace_level = '{{ trace_level }}',
+user_task_managed_initial_warehouse_size = '{{ user_task_managed_initial_warehouse_size }}',
+serverless_task_min_statement_size = '{{ serverless_task_min_statement_size }}',
+serverless_task_max_statement_size = '{{ serverless_task_max_statement_size }}',
+user_task_timeout_ms = '{{ user_task_timeout_ms }}'
+WHERE
+name = '{{ database_name }}'
+AND data__name = '{{ database_name }}'
+AND endpoint = '{{ endpoint }}';
+
+/*+ exports */
+SELECT '{{ database_name }}' as database_name;
+
+/*+ delete */
+DELETE FROM snowflake.database.databases
+WHERE name = '{{ database_name }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/grants.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/grants.iql
new file mode 100644
index 0000000..93c65b7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/grants.iql
@@ -0,0 +1,23 @@
+/*+ createorupdate */
+INSERT INTO snowflake.role.grants (
+data__securable,
+data__securable_type,
+data__grant_option,
+data__privileges,
+name,
+endpoint
+)
+SELECT
+'{{ securable }}',
+'{{ securable_type }}',
+{{ grant_option }},
+'{{ privileges }}',
+'{{ role_name }}',
+'{{ endpoint }}'
+;
+
+/*+ delete */
+DELETE FROM snowflake.role.grants
+WHERE name = '{{ role_name }}'
+AND data__securable_type = '{{ securable_type }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/masking_policies.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/masking_policies.iql
new file mode 100644
index 0000000..108d59b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/masking_policies.iql
@@ -0,0 +1,20 @@
+/*+ createorupdate */
+INSERT INTO snowflake.sqlapi.statements (
+data__statement,
+data__timeout,
+data__database,
+data__schema,
+"User-Agent",
+endpoint
+)
+SELECT
+'{{ statement }}',
+{{ timeout }},
+'{{ database }}',
+'{{ schema }}',
+'{{ "User-Agent" }}',
+'{{ endpoint }}'
+;
+
+/*+ exports */
+SELECT '{{ masking_policy_name }}' as masking_policy_name;
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/roles.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/roles.iql
new file mode 100644
index 0000000..cc8e2b0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/roles.iql
@@ -0,0 +1,21 @@
+/*+ createorupdate */
+INSERT INTO snowflake.role.roles (
+data__name,
+data__comment,
+endpoint,
+createMode
+)
+SELECT
+'{{ role_name }}',
+'{{ comment }}',
+'{{ endpoint }}',
+'orReplace'
+;
+
+/*+ exports */
+SELECT '{{ role_name }}' as role_name;
+
+/*+ delete */
+DELETE FROM snowflake.role.roles
+WHERE name = '{{ role_name }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/schemas.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/schemas.iql
new file mode 100644
index 0000000..55a72e6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/schemas.iql
@@ -0,0 +1,87 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM snowflake.schema.schemas
+WHERE database_name = '{{ database_name }}'
+AND name = '{{ schema_name }}'
+AND endpoint = '{{ endpoint }}';
+
+/*+ statecheck, retries=1, retry_delay=2 */
+SELECT COUNT(*) as count
+FROM snowflake.schema.schemas
+WHERE database_name = '{{ database_name }}'
+AND name = '{{ schema_name }}'
+AND endpoint = '{{ endpoint }}'
+AND kind = '{{ kind }}'
+AND managed_access = {{ managed_access }}
+AND data_retention_time_in_days = {{ data_retention_time_in_days }}
+AND log_level = '{{ log_level }}'
+AND max_data_extension_time_in_days = {{ max_data_extension_time_in_days }}
+AND suspend_task_after_num_failures = {{ suspend_task_after_num_failures }}
+AND trace_level = '{{ trace_level }}'
+AND user_task_managed_initial_warehouse_size = '{{ user_task_managed_initial_warehouse_size }}'
+AND serverless_task_min_statement_size = '{{ serverless_task_min_statement_size }}'
+AND serverless_task_max_statement_size = '{{ serverless_task_max_statement_size }}'
+AND user_task_timeout_ms = {{ user_task_timeout_ms }};
+
+/*+ create */
+INSERT INTO snowflake.schema.schemas (
+data__name,
+data__kind,
+data__managed_access,
+data__data_retention_time_in_days,
+data__log_level,
+data__max_data_extension_time_in_days,
+data__suspend_task_after_num_failures,
+data__trace_level,
+data__user_task_managed_initial_warehouse_size,
+data__serverless_task_min_statement_size,
+data__serverless_task_max_statement_size,
+data__user_task_timeout_ms,
+database_name,
+endpoint
+)
+SELECT
+'{{ schema_name }}',
+'{{ kind }}',
+{{ managed_access }},
+{{ data_retention_time_in_days }},
+'{{ log_level }}',
+{{ max_data_extension_time_in_days }},
+{{ suspend_task_after_num_failures }},
+'{{ trace_level }}',
+'{{ user_task_managed_initial_warehouse_size }}',
+'{{ serverless_task_min_statement_size }}',
+'{{ serverless_task_max_statement_size }}',
+{{ user_task_timeout_ms }},
+'{{ database_name }}',
+'{{ endpoint }}'
+;
+
+/*+ update */
+REPLACE snowflake.schema.schemas
+SET
+kind = '{{ kind }}',
+managed_access = {{ managed_access }},
+data_retention_time_in_days = {{ data_retention_time_in_days }},
+log_level = '{{ log_level }}',
+max_data_extension_time_in_days = {{ max_data_extension_time_in_days }},
+suspend_task_after_num_failures = {{ suspend_task_after_num_failures }},
+trace_level = '{{ trace_level }}',
+user_task_managed_initial_warehouse_size = '{{ user_task_managed_initial_warehouse_size }}',
+serverless_task_min_statement_size = '{{ serverless_task_min_statement_size }}',
+serverless_task_max_statement_size = '{{ serverless_task_max_statement_size }}',
+user_task_timeout_ms = {{ user_task_timeout_ms }}
+WHERE
+database_name = '{{ database_name }}'
+AND name = '{{ schema_name }}'
+AND data__name = '{{ schema_name }}'
+AND endpoint = '{{ endpoint }}';
+
+/*+ exports */
+SELECT '{{ schema_name }}' as schema_name;
+
+/*+ delete */
+DELETE FROM snowflake.schema.schemas
+WHERE database_name = '{{ database_name }}'
+AND name = '{{ schema_name }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/statements.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/statements.iql
new file mode 100644
index 0000000..18782c0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/statements.iql
@@ -0,0 +1,17 @@
+/*+ createorupdate */
+INSERT INTO snowflake.sqlapi.statements (
+data__statement,
+data__timeout,
+data__database,
+data__schema,
+"User-Agent",
+endpoint
+)
+SELECT
+'{{ statement }}',
+{{ timeout }},
+'{{ database }}',
+'{{ schema }}',
+'{{ "User-Agent" }}',
+'{{ endpoint }}'
+;
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/tables.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/tables.iql
new file mode 100644
index 0000000..5411d68
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/tables.iql
@@ -0,0 +1,49 @@
+/*+ exists */
+SELECT COUNT(*) as count
+FROM snowflake.table.tables
+WHERE database_name = '{{ database_name }}'
+AND schema_name = '{{ schema_name }}'
+AND name = '{{ name }}'
+AND endpoint = '{{ endpoint }}';
+
+/*+ statecheck, retries=1, retry_delay=2 */
+SELECT COUNT(*) as count
+FROM snowflake.table.tables
+WHERE database_name = '{{ database_name }}'
+AND schema_name = '{{ schema_name }}'
+AND name = '{{ name }}'
+AND endpoint = '{{ endpoint }}'
+AND kind = '{{ kind }}'
+AND comment = '{{ comment }}';
+
+/*+ create */
+INSERT INTO snowflake.table.tables (
+data__name,
+data__kind,
+data__columns,
+data__constraints,
+data__comment,
+database_name,
+schema_name,
+endpoint
+)
+SELECT
+'{{ name }}',
+'{{ kind }}',
+'{{ columns }}',
+'{{ constraints }}',
+'{{ comment }}',
+'{{ database_name }}',
+'{{ schema_name }}',
+'{{ endpoint }}'
+;
+
+/*+ exports */
+SELECT '{{ name }}' as table_name;
+
+/*+ delete */
+DELETE FROM snowflake.table.tables
+WHERE database_name = '{{ database_name }}'
+AND schema_name = '{{ schema_name }}'
+AND name = '{{ name }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/warehouses.iql b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/warehouses.iql
new file mode 100644
index 0000000..70858ec
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/resources/warehouses.iql
@@ -0,0 +1,33 @@
+/*+ createorupdate */
+INSERT INTO snowflake.warehouse.warehouses (
+data__name,
+data__warehouse_type,
+data__warehouse_size,
+data__scaling_policy,
+data__auto_suspend,
+data__auto_resume,
+data__initially_suspended,
+data__comment,
+createMode,
+endpoint
+)
+SELECT
+'{{ name }}',
+'{{ warehouse_type }}',
+'{{ warehouse_size }}',
+'{{ scaling_policy }}',
+{{ auto_suspend }},
+'{{ auto_resume }}',
+'{{ initially_suspended }}',
+'{{ comment }}',
+'orReplace',
+'{{ endpoint }}'
+;
+
+/*+ exports */
+SELECT '{{ name }}' as warehouse_name;
+
+/*+ delete */
+DELETE FROM snowflake.warehouse.warehouses
+WHERE name = '{{ name }}'
+AND endpoint = '{{ endpoint }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/stackql_manifest.yml b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/stackql_manifest.yml
new file mode 100644
index 0000000..8854b97
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/examples/snowflake/entitlements/stackql_manifest.yml
@@ -0,0 +1,337 @@
+version: 1
+name: snowflake_entitlements
+description: entitlements framework for snowflake
+providers:
+ - snowflake
+globals:
+ - name: endpoint
+ description: snowflake endpoint (org-account)
+ value: "{{ SNOWFLAKE_ORG }}-{{ SNOWFLAKE_ACCOUNT }}"
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stack_name: "{{ stack_name }}"
+ stack_env: "{{ stack_env }}"
+resources:
+ - name: main_database
+ file: databases.iql
+ props:
+ - name: database_name
+ value: "{{ stack_name|upper }}_{{ stack_env|upper }}"
+ - name: kind
+ value: PERMANENT
+ - name: comment
+ value: "Main database for {{ stack_name }} {{ stack_env }}"
+ - name: data_retention_time_in_days
+ value: 1
+ - name: log_level
+ value: 'OFF'
+ - name: max_data_extension_time_in_days
+ value: 14
+ - name: suspend_task_after_num_failures
+ value: 10
+ - name: trace_level
+ value: 'OFF'
+ - name: user_task_managed_initial_warehouse_size
+ value: MEDIUM
+ - name: serverless_task_min_statement_size
+ value: XSMALL
+ - name: serverless_task_max_statement_size
+ value: X2LARGE
+ - name: user_task_timeout_ms
+ value: 3600000
+ exports:
+ - database_name: main_database_name
+
+ # shared warehouse
+ - name: shared_warehouse
+ file: warehouses.iql
+ props:
+ - name: name
+ value: ANALYST_WH
+ - name: warehouse_type
+ value: STANDARD
+ - name: warehouse_size
+ value: XSMALL
+ - name: scaling_policy
+ value: ECONOMY
+ - name: auto_suspend
+ value: 300
+ - name: auto_resume
+ value: 'true'
+ - name: initially_suspended
+ value: 'true'
+ - name: comment
+ value: "Analyst warehouse for {{ stack_name }} {{ stack_env }}"
+ exports:
+ - warehouse_name: shared_warehouse_name
+
+ # schemas
+ - name: bronze_schema_src1
+ file: schemas.iql
+ props:
+ - name: database_name
+ value: "{{ main_database_name}}"
+ - name: schema_name
+ value: "BRONZE_SRC1"
+ - name: kind
+ value: PERMANENT
+ - name: managed_access
+ value: true
+ - name: data_retention_time_in_days
+ value: 1
+ - name: log_level
+ value: "OFF"
+ - name: max_data_extension_time_in_days
+ value: 14
+ - name: suspend_task_after_num_failures
+ value: 10
+ - name: trace_level
+ value: "OFF"
+ - name: user_task_managed_initial_warehouse_size
+ value: "MEDIUM"
+ - name: serverless_task_min_statement_size
+ value: "XSMALL"
+ - name: serverless_task_max_statement_size
+ value: "X2LARGE"
+ - name: user_task_timeout_ms
+ value: 3600000
+ exports:
+ - schema_name: bronze_schema_src1_name
+
+ # roles
+ - name: basic_access_role
+ file: roles.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name|upper }}_{{ stack_env|upper }}_BASIC_ACCESS_ROLE"
+ - name: comment
+ value: "Basic access role"
+ exports:
+ - role_name: basic_access_role_name
+
+ - name: pci_access_role
+ file: roles.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name|upper }}_{{ stack_env|upper }}_PCI_ACCESS_ROLE"
+ - name: comment
+ value: "PCI access role"
+ exports:
+ - role_name: pci_access_role_name
+
+ # grants
+ - name: basic_access_role_db_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ basic_access_role_name }}'
+ - name: securable
+ value:
+ name: '{{ main_database_name }}'
+ - name: securable_type
+ value: DATABASE
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["USAGE"]
+
+ - name: basic_access_role_schema_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ basic_access_role_name }}'
+ - name: securable
+ value:
+ database: '{{ main_database_name }}'
+ name: '{{ bronze_schema_src1_name }}'
+ - name: securable_type
+ value: SCHEMA
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["USAGE"]
+
+ - name: basic_access_role_warehouse_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ basic_access_role_name }}'
+ - name: securable
+ value:
+ name: '{{ shared_warehouse_name }}'
+ - name: securable_type
+ value: WAREHOUSE
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["USAGE"]
+
+ - name: pci_access_role_db_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ pci_access_role_name }}'
+ - name: securable
+ value:
+ name: '{{ main_database_name }}'
+ - name: securable_type
+ value: DATABASE
+ - name: grant_option
+ value: false
+ description: >-
+ If true, allows the recipient role to grant the privileges to other
+ roles.
+ - name: privileges
+ value: ["USAGE"]
+
+ - name: pci_access_role_schema_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ pci_access_role_name }}'
+ - name: securable
+ value:
+ database: '{{ main_database_name }}'
+ name: '{{ bronze_schema_src1_name }}'
+ - name: securable_type
+ value: SCHEMA
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["USAGE"]
+
+ - name: pci_access_role_warehouse_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ pci_access_role_name }}'
+ - name: securable
+ value:
+ name: '{{ shared_warehouse_name }}'
+ - name: securable_type
+ value: WAREHOUSE
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["USAGE"]
+
+ # masking policies
+ - name: create_pci_masking_policy
+ file: masking_policies.iql
+ props:
+ - name: masking_policy_name
+ value: '{{ stack_name|upper }}_{{ stack_env|upper }}_PCI_MASKING_POLICY'
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ CREATE MASKING POLICY IF NOT EXISTS {{ stack_name|upper }}_{{ stack_env|upper }}_PCI_MASKING_POLICY
+ AS (val STRING) RETURNS STRING ->
+ CASE
+ WHEN CURRENT_ROLE() IN (''{{ pci_access_role_name }}'') THEN val
+ ELSE ''***MASKED***''
+ END
+ - name: timeout
+ value: 10
+ - name: database
+ value: '{{ main_database_name }}'
+ - name: schema
+ value: '{{ bronze_schema_src1_name }}'
+ exports:
+ - masking_policy_name: pci_masking_policy_name
+
+ # customer data table
+ - name: customer_data_table
+ file: tables.iql
+ props:
+ - name: database_name
+ value: '{{ main_database_name }}'
+ - name: schema_name
+ value: '{{ bronze_schema_src1_name }}'
+ - name: name
+ value: 'CUSTOMER_DATA'
+ - name: kind
+ value: 'PERMANENT'
+ - name: comment
+ value: 'Table with PCI masked data'
+ - name: columns
+ value:
+ - name: customer_id
+ datatype: 'INTEGER'
+ nullable: false
+ - name: customer_name
+ datatype: 'VARCHAR(100)'
+ nullable: false
+ - name: email
+ datatype: 'VARCHAR(100)'
+ nullable: false
+ - name: credit_card_number
+ datatype: 'VARCHAR(19)'
+ nullable: true
+ comment: 'PCI sensitive data - masked for non-PCI roles'
+ - name: purchase_amount
+ datatype: 'NUMBER(10,2)'
+ nullable: false
+ - name: purchase_date
+ datatype: 'DATE'
+ nullable: false
+ - name: constraints
+ value:
+ - name: pk_customer_id
+ column_names: ["customer_id"]
+ constraint_type: "PRIMARY KEY"
+ exports:
+ - table_name: customer_data_table_name
+
+ - name: apply_pci_masking_policy
+ file: statements.iql
+ props:
+ - name: '"User-Agent"'
+ value: stackql
+ - name: statement
+ value: |
+ ALTER TABLE {{ main_database_name }}.{{ bronze_schema_src1_name }}.{{ customer_data_table_name}}
+ MODIFY COLUMN credit_card_number
+ SET MASKING POLICY {{ pci_masking_policy_name }}
+ - name: timeout
+ value: 10
+ - name: database
+ value: '{{ main_database_name }}'
+ - name: schema
+ value: '{{ bronze_schema_src1_name }}'
+
+ - name: basic_access_customer_data_table_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ basic_access_role_name }}'
+ - name: securable
+ value:
+ database: '{{ main_database_name }}'
+ schema: '{{ bronze_schema_src1_name }}'
+ name: '{{ customer_data_table_name }}'
+ - name: securable_type
+ value: TABLE
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["SELECT"]
+
+ - name: pci_access_customer_data_table_usage
+ file: grants.iql
+ props:
+ - name: role_name
+ value: '{{ pci_access_role_name }}'
+ - name: securable
+ value:
+ database: '{{ main_database_name }}'
+ schema: '{{ bronze_schema_src1_name }}'
+ name: '{{ customer_data_table_name }}'
+ - name: securable_type
+ value: TABLE
+ - name: grant_option
+ value: false
+ - name: privileges
+ value: ["SELECT"]
+
diff --git a/ref-python-packages/stackql-deploy/get-contributors.iql b/ref-python-packages/stackql-deploy/get-contributors.iql
new file mode 100644
index 0000000..e673431
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/get-contributors.iql
@@ -0,0 +1,10 @@
+SELECT login FROM (
+SELECT login, SUM(contributions) total_contributions FROM
+(SELECT login, contributions
+FROM github.repos.contributors
+WHERE owner = 'stackql'
+AND repo IN ('stackql', 'stackql-deploy', 'pystackql', 'google-discovery-to-openapi')
+) t
+GROUP BY login
+ORDER BY total_contributions DESC
+) t1
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/get-contributors.sh b/ref-python-packages/stackql-deploy/get-contributors.sh
new file mode 100644
index 0000000..c7bd7c7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/get-contributors.sh
@@ -0,0 +1 @@
+./stackql exec --infile get-contributors.iql --output csv -f stackql_deploy/inc/contributors.csv -H
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/images/stackql-deploy-logo.png b/ref-python-packages/stackql-deploy/images/stackql-deploy-logo.png
new file mode 100644
index 0000000..c4ddf7f
Binary files /dev/null and b/ref-python-packages/stackql-deploy/images/stackql-deploy-logo.png differ
diff --git a/ref-python-packages/stackql-deploy/images/stackql-deploy.mermaid b/ref-python-packages/stackql-deploy/images/stackql-deploy.mermaid
new file mode 100644
index 0000000..25420a3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/images/stackql-deploy.mermaid
@@ -0,0 +1,10 @@
+graph TB
+ A[Start] --> B{foreach\nresource}
+ B --> C[exists\ncheck]
+ C --> D{resource\nexists?}
+ D -- Yes --> E[run update\nor createorupdate query]
+ D -- No --> F[run create\nor createorupdate query]
+ E --> G[run statecheck check]
+ F --> G
+ G --> H{End}
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/images/stackql-deploy.png b/ref-python-packages/stackql-deploy/images/stackql-deploy.png
new file mode 100644
index 0000000..d6ba5b9
Binary files /dev/null and b/ref-python-packages/stackql-deploy/images/stackql-deploy.png differ
diff --git a/ref-python-packages/stackql-deploy/pyproject.toml b/ref-python-packages/stackql-deploy/pyproject.toml
new file mode 100644
index 0000000..cbbb265
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/pyproject.toml
@@ -0,0 +1,38 @@
+[tool.poetry.group.dev.dependencies]
+ruff = "^0.6.9"
+
+# Tool-specific configurations
+
+# Ruff configuration (linter)
+[tool.ruff]
+line-length = 120 # Maximum allowed line length
+exclude = [
+ "docs",
+ "examples",
+ "images",
+ "website",
+]
+
+[tool.ruff.format]
+quote-style = "preserve" # Preserve existing quote style (single or double)
+
+[tool.ruff.lint]
+select = ["E", "F", "W"] # Select specific linting rules (E: Errors, F: Pyflakes, W: Warnings)
+ignore = [
+ "F405", # Ignore 'import *' warnings
+]
+
+[tool.ruff.lint.isort]
+force-single-line = true # Force single-line imports for better readability
+
+# Black configuration (code formatter)
+[tool.black]
+line-length = 120 # Same line length limit as Ruff for consistency
+target-version = ['py38'] # Target Python version for formatting
+
+# Flake8 configuration (for additional linting, if needed)
+[tool.flake8]
+max-line-length = 120 # Align with Ruff's line length setting
+ignore = [
+ "E501", # Ignore long line warnings
+]
diff --git a/ref-python-packages/stackql-deploy/requirements.txt b/ref-python-packages/stackql-deploy/requirements.txt
new file mode 100644
index 0000000..ea24142
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/requirements.txt
@@ -0,0 +1,5 @@
+click
+jinja2
+python-dotenv
+pystackql>=3.6.1
+setuptools==75.8.0
diff --git a/ref-python-packages/stackql-deploy/setup.py b/ref-python-packages/stackql-deploy/setup.py
new file mode 100644
index 0000000..c07fbeb
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/setup.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+from setuptools import setup, find_namespace_packages
+
+with open('README.rst', encoding='utf-8') as f:
+ readme = f.read()
+
+# with open('LICENSE', encoding='utf-8') as f:
+# license_text = f.read()
+
+setup(
+ name='stackql-deploy',
+ version='1.9.4',
+ description='Model driven resource provisioning and deployment framework using StackQL.',
+ long_description=readme,
+ long_description_content_type='text/x-rst',
+ author='Jeffrey Aven',
+ author_email='javen@stackql.io',
+ url='https://github.com/stackql/stackql-deploy',
+ license='MIT',
+ packages=find_namespace_packages(include=['stackql_deploy*']),
+ package_data={
+ 'stackql_deploy': [
+ 'templates/**/*.template', # Include template files recursively
+ 'inc/contributors.csv' # Fixed path for contributors
+ ],
+ },
+
+ # Install shell completion scripts to system share directory
+ data_files=[
+ ('share/stackql-deploy/completions', [
+ 'shell_completions/stackql-deploy-completion.bash',
+ 'shell_completions/stackql-deploy-completion.zsh',
+ 'shell_completions/stackql-deploy-completion.fish',
+ 'shell_completions/stackql-deploy-completion.ps1',
+ ])
+ ],
+
+ include_package_data=True,
+ install_requires=[
+ 'click',
+ 'python-dotenv',
+ 'jinja2',
+ 'pystackql>=3.8.1',
+ 'PyYAML'
+ ],
+ entry_points={
+ 'console_scripts': [
+ 'stackql-deploy = stackql_deploy.cli:cli',
+ ],
+ },
+ classifiers=[
+ 'Operating System :: Microsoft :: Windows',
+ 'Operating System :: MacOS',
+ 'Operating System :: POSIX :: Linux',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
+ 'Programming Language :: Python :: 3.12',
+ 'Programming Language :: Python :: 3.13',
+ ]
+)
diff --git a/ref-python-packages/stackql-deploy/shell_completions/README.md b/ref-python-packages/stackql-deploy/shell_completions/README.md
new file mode 100644
index 0000000..dc02ec6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/shell_completions/README.md
@@ -0,0 +1,65 @@
+# Shell Completions for stackql-deploy
+
+This directory contains tab completion scripts for various shells.
+
+## Automatic Installation
+
+The easiest way to install completions:
+
+```bash
+stackql-deploy completion bash --install # for bash
+stackql-deploy completion zsh --install # for zsh
+stackql-deploy completion fish --install # for fish
+stackql-deploy completion powershell --install # for PowerShell
+```
+
+### Activation
+
+To activate immediately (`bash` example shown, similar logic for other shells):
+
+```bash
+eval "$(stackql-deploy completion bash)"
+```
+
+## Manual Installation
+
+### Bash
+
+```bash
+# Add to ~/.bashrc
+echo 'eval "$(stackql-deploy completion bash)"' >> ~/.bashrc
+source ~/.bashrc
+```
+
+### Zsh
+
+```bash
+# Add to ~/.zshrc
+echo 'eval "$(stackql-deploy completion zsh)"' >> ~/.zshrc
+source ~/.zshrc
+```
+
+### Fish
+
+```fish
+# Add to ~/.config/fish/config.fish
+echo 'stackql-deploy completion fish | source' >> ~/.config/fish/config.fish
+source ~/.config/fish/config.fish
+```
+
+### PowerShell
+
+```powershell
+# Add to your PowerShell profile
+Add-Content $PROFILE "`n# stackql-deploy completion`n. (stackql-deploy completion powershell)"
+. $PROFILE
+```
+
+## Files
+
+- `stackql-deploy-completion.bash` - Bash completion script
+- `stackql-deploy-completion.zsh` - Zsh completion script
+- `stackql-deploy-completion.fish` - Fish completion script
+- `stackql-deploy-completion.ps1` - PowerShell completion script
+
+All scripts are static (no Python subprocess calls) for instant performance.
diff --git a/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.bash b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.bash
new file mode 100644
index 0000000..6b3a6d0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.bash
@@ -0,0 +1,108 @@
+_stackql_deploy_completion() {
+ local cur prev opts base
+ COMPREPLY=()
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ prev="${COMP_WORDS[COMP_CWORD-1]}"
+
+ # Main commands
+ local commands="build test teardown info init upgrade shell completion"
+
+ # Common options for build/test/teardown
+ local common_opts="--log-level --env-file -e --env --dry-run --show-queries --on-failure --custom-registry --download-dir --help"
+
+ # Get the command (first non-option argument)
+ local cmd=""
+ for ((i=1; i<${#COMP_WORDS[@]}-1; i++)); do
+ if [[ ${COMP_WORDS[i]} != -* ]]; then
+ cmd=${COMP_WORDS[i]}
+ break
+ fi
+ done
+
+ # Completion logic
+ case "${cmd}" in
+ build|test|teardown)
+ # After command, need stack_dir then stack_env
+ local args=()
+ for ((i=2; i<${#COMP_WORDS[@]}-1; i++)); do
+ if [[ ${COMP_WORDS[i]} != -* ]]; then
+ args+=("${COMP_WORDS[i]}")
+ fi
+ done
+
+ if [ ${#args[@]} -eq 0 ]; then
+ # Complete directory names for stack_dir
+ compopt -o dirnames
+ COMPREPLY=( $(compgen -d -- "${cur}") )
+ elif [ ${#args[@]} -eq 1 ]; then
+ # Complete common environment names
+ COMPREPLY=( $(compgen -W "dev staging prod test prd sit uat" -- "${cur}") )
+ else
+ # Complete options
+ COMPREPLY=( $(compgen -W "${common_opts}" -- "${cur}") )
+ fi
+ ;;
+
+ init)
+ # init [--provider]
+ case "${prev}" in
+ --provider)
+ COMPREPLY=( $(compgen -W "aws google azure" -- "${cur}") )
+ ;;
+ init)
+ # Just type the stack name, no completion
+ ;;
+ *)
+ COMPREPLY=( $(compgen -W "--provider --help" -- "${cur}") )
+ ;;
+ esac
+ ;;
+
+ completion)
+ COMPREPLY=( $(compgen -W "bash zsh fish powershell" -- "${cur}") )
+ ;;
+
+ info|upgrade|shell)
+ COMPREPLY=( $(compgen -W "--help --custom-registry --download-dir" -- "${cur}") )
+ ;;
+
+ *)
+ # No command yet, show main commands and global options
+ if [[ ${cur} == -* ]]; then
+ COMPREPLY=( $(compgen -W "--help --version" -- "${cur}") )
+ else
+ COMPREPLY=( $(compgen -W "${commands}" -- "${cur}") )
+ fi
+ ;;
+ esac
+
+ # Handle option arguments
+ case "${prev}" in
+ --log-level)
+ COMPREPLY=( $(compgen -W "DEBUG INFO WARNING ERROR CRITICAL" -- "${cur}") )
+ return 0
+ ;;
+ --env-file)
+ compopt -o default
+ COMPREPLY=( $(compgen -f -X '!*.env' -- "${cur}") $(compgen -d -- "${cur}") )
+ return 0
+ ;;
+ --on-failure)
+ COMPREPLY=( $(compgen -W "rollback ignore error" -- "${cur}") )
+ return 0
+ ;;
+ --custom-registry)
+ # URL completion - just let user type
+ return 0
+ ;;
+ --download-dir)
+ compopt -o dirnames
+ COMPREPLY=( $(compgen -d -- "${cur}") )
+ return 0
+ ;;
+ esac
+
+ return 0
+}
+
+complete -F _stackql_deploy_completion stackql-deploy
diff --git a/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.fish b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.fish
new file mode 100644
index 0000000..9e4cedf
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.fish
@@ -0,0 +1,83 @@
+# stackql-deploy completions for fish
+
+# Remove any existing completions
+complete -c stackql-deploy -e
+
+# Main commands
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "build" -d "Create or update resources"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "test" -d "Run test queries for the stack"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "teardown" -d "Teardown a provisioned stack"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "info" -d "Display version information"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "init" -d "Initialize a new project structure"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "upgrade" -d "Upgrade pystackql and stackql binary"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "shell" -d "Launch the stackql shell"
+complete -c stackql-deploy -n "__fish_use_subcommand" -a "completion" -d "Install tab completion"
+
+# Common options for build/test/teardown
+set -l common_cmds "build test teardown"
+
+# --log-level
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l log-level -d "Set logging level" -a "DEBUG INFO WARNING ERROR CRITICAL"
+
+# --env-file
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l env-file -d "Environment variables file" -r -F
+
+# -e/--env
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -s e -l env -d "Set additional environment variables"
+
+# --dry-run
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l dry-run -d "Perform a dry run"
+
+# --show-queries
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l show-queries -d "Show queries in output logs"
+
+# --on-failure
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l on-failure -d "Action on failure" -a "rollback ignore error"
+
+# --custom-registry
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l custom-registry -d "Custom registry URL"
+
+# --download-dir
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l download-dir -d "Download directory" -r -a "(__fish_complete_directories)"
+
+# --help
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds" -l help -d "Show help message"
+
+# build/test/teardown positional arguments
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds; and not __fish_seen_argument -l log-level -l env-file -s e -l env -l dry-run -l show-queries -l on-failure -l custom-registry -l download-dir" -a "(__fish_complete_directories)" -d "Stack directory"
+
+# Environment names (for second positional argument)
+function __stackql_deploy_needs_env
+ set -l cmd (commandline -opc)
+ set -l cmd_count (count $cmd)
+    # If we have at least: stackql-deploy <command> <stack_dir> (positional args only, no options yet)
+ if test $cmd_count -ge 3
+ set -l has_opts 0
+ for arg in $cmd[3..-1]
+ if string match -q -- '-*' $arg
+ set has_opts 1
+ break
+ end
+ end
+ if test $has_opts -eq 0
+ return 0
+ end
+ end
+ return 1
+end
+
+complete -c stackql-deploy -n "__fish_seen_subcommand_from $common_cmds; and __stackql_deploy_needs_env" -a "dev staging prod test prd sit uat" -d "Environment"
+
+# init command
+complete -c stackql-deploy -n "__fish_seen_subcommand_from init" -l provider -d "Specify provider" -a "aws google azure"
+complete -c stackql-deploy -n "__fish_seen_subcommand_from init" -l help -d "Show help message"
+
+# completion command
+complete -c stackql-deploy -n "__fish_seen_subcommand_from completion" -a "bash zsh fish powershell" -d "Shell type"
+complete -c stackql-deploy -n "__fish_seen_subcommand_from completion" -l install -d "Install completion"
+complete -c stackql-deploy -n "__fish_seen_subcommand_from completion" -l help -d "Show help message"
+
+# info/upgrade/shell commands
+complete -c stackql-deploy -n "__fish_seen_subcommand_from info upgrade shell" -l help -d "Show help message"
+complete -c stackql-deploy -n "__fish_seen_subcommand_from info upgrade shell" -l custom-registry -d "Custom registry URL"
+complete -c stackql-deploy -n "__fish_seen_subcommand_from info upgrade shell" -l download-dir -d "Download directory" -r -a "(__fish_complete_directories)"
diff --git a/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.ps1 b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.ps1
new file mode 100644
index 0000000..77de13e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.ps1
@@ -0,0 +1,146 @@
+# stackql-deploy PowerShell completion
+
+using namespace System.Management.Automation
+using namespace System.Management.Automation.Language
+
+Register-ArgumentCompleter -Native -CommandName stackql-deploy -ScriptBlock {
+ param($wordToComplete, $commandAst, $cursorPosition)
+
+ $commands = @{
+ 'build' = 'Create or update resources'
+ 'test' = 'Run test queries for the stack'
+ 'teardown' = 'Teardown a provisioned stack'
+ 'info' = 'Display version information'
+ 'init' = 'Initialize a new project structure'
+ 'upgrade' = 'Upgrade pystackql and stackql binary'
+ 'shell' = 'Launch the stackql shell'
+ 'completion' = 'Install tab completion'
+ }
+
+ $commonOptions = @{
+ '--log-level' = @('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
+ '--env-file' = @() # File completion
+ '-e' = @()
+ '--env' = @()
+ '--dry-run' = @()
+ '--show-queries' = @()
+ '--on-failure' = @('rollback', 'ignore', 'error')
+ '--custom-registry' = @()
+ '--download-dir' = @() # Directory completion
+ '--help' = @()
+ }
+
+ $environments = @('dev', 'staging', 'prod', 'test', 'prd', 'sit', 'uat')
+ $providers = @('aws', 'google', 'azure')
+ $shells = @('bash', 'zsh', 'fish', 'powershell')
+
+ # Parse command line
+ $tokens = $commandAst.ToString().Split(' ', [StringSplitOptions]::RemoveEmptyEntries)
+ $command = $null
+ $argCount = 0
+
+ for ($i = 1; $i -lt $tokens.Count; $i++) {
+ if ($tokens[$i] -notmatch '^-') {
+ if ($null -eq $command) {
+ $command = $tokens[$i]
+ } else {
+ $argCount++
+ }
+ }
+ }
+
+ # Complete based on position
+ if ($null -eq $command) {
+ # Complete main commands
+ $commands.GetEnumerator() | Where-Object { $_.Key -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_.Key, $_.Key, 'ParameterValue', $_.Value)
+ }
+ return
+ }
+
+ # Command-specific completion
+ switch ($command) {
+ { $_ -in 'build', 'test', 'teardown' } {
+ if ($argCount -eq 0) {
+ # Complete directories for stack_dir
+ Get-ChildItem -Directory -Path . -Filter "$wordToComplete*" | ForEach-Object {
+ [CompletionResult]::new($_.Name, $_.Name, 'ParameterValue', 'Stack directory')
+ }
+ }
+ elseif ($argCount -eq 1) {
+ # Complete environment names
+ $environments | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Environment')
+ }
+ }
+ else {
+ # Complete options
+ $commonOptions.GetEnumerator() | Where-Object { $_.Key -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_.Key, $_.Key, 'ParameterName', $_.Key)
+ }
+ }
+ }
+
+ 'init' {
+ if ($wordToComplete -like '--*') {
+ '--provider', '--help' | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterName', $_)
+ }
+ }
+ elseif ($tokens[-2] -eq '--provider') {
+ $providers | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Provider')
+ }
+ }
+ }
+
+ 'completion' {
+ if ($argCount -eq 0) {
+ $shells | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Shell type')
+ }
+ }
+ '--install', '--help' | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterName', $_)
+ }
+ }
+
+ { $_ -in 'info', 'upgrade', 'shell' } {
+ if ($wordToComplete -like '--*') {
+ '--help', '--custom-registry', '--download-dir' | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterName', $_)
+ }
+ }
+ }
+ }
+
+ # Handle option values
+ $lastToken = $tokens[-2]
+ switch ($lastToken) {
+ '--log-level' {
+ $commonOptions['--log-level'] | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Log level')
+ }
+ }
+ '--on-failure' {
+ $commonOptions['--on-failure'] | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Failure action')
+ }
+ }
+ '--env-file' {
+ Get-ChildItem -File -Path . -Filter "*$wordToComplete*.env" | ForEach-Object {
+ [CompletionResult]::new($_.Name, $_.Name, 'ParameterValue', 'Environment file')
+ }
+ }
+ '--download-dir' {
+ Get-ChildItem -Directory -Path . -Filter "$wordToComplete*" | ForEach-Object {
+ [CompletionResult]::new($_.Name, $_.Name, 'ParameterValue', 'Download directory')
+ }
+ }
+ '--provider' {
+ $providers | Where-Object { $_ -like "$wordToComplete*" } | ForEach-Object {
+ [CompletionResult]::new($_, $_, 'ParameterValue', 'Provider')
+ }
+ }
+ }
+}
diff --git a/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.zsh b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.zsh
new file mode 100644
index 0000000..94651c7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/shell_completions/stackql-deploy-completion.zsh
@@ -0,0 +1,71 @@
+#compdef stackql-deploy
+
+_stackql_deploy() {
+ local -a commands
+ commands=(
+ 'build:Create or update resources'
+ 'test:Run test queries for the stack'
+ 'teardown:Teardown a provisioned stack'
+ 'info:Display version information'
+ 'init:Initialize a new project structure'
+ 'upgrade:Upgrade pystackql and stackql binary'
+ 'shell:Launch the stackql shell'
+ 'completion:Install tab completion'
+ )
+
+ local -a common_opts
+ common_opts=(
+ '--log-level[Set logging level]:level:(DEBUG INFO WARNING ERROR CRITICAL)'
+ '--env-file[Environment variables file]:file:_files -g "*.env"'
+ '(-e --env)'{-e,--env}'[Set additional environment variables]:var:'
+ '--dry-run[Perform a dry run]'
+ '--show-queries[Show queries in output logs]'
+ '--on-failure[Action on failure]:action:(rollback ignore error)'
+ '--custom-registry[Custom registry URL]:url:'
+ '--download-dir[Download directory]:dir:_directories'
+ '--help[Show help message]'
+ )
+
+ _arguments -C \
+ '1: :->command' \
+ '*::arg:->args'
+
+ case $state in
+ command)
+ _describe -t commands 'stackql-deploy commands' commands
+ ;;
+ args)
+ case $words[1] in
+ build|test|teardown)
+ if (( CURRENT == 2 )); then
+ _arguments '2:stack directory:_directories'
+ elif (( CURRENT == 3 )); then
+ _arguments '3:environment:(dev staging prod test prd sit uat)'
+ else
+ _arguments $common_opts
+ fi
+ ;;
+ init)
+ _arguments \
+ '2:stack name:' \
+ '--provider[Specify provider]:provider:(aws google azure)' \
+ '--help[Show help message]'
+ ;;
+ completion)
+ _arguments \
+ '2:shell:(bash zsh fish powershell)' \
+ '--install[Install completion]' \
+ '--help[Show help message]'
+ ;;
+ info|upgrade|shell)
+ _arguments \
+ '--help[Show help message]' \
+ '--custom-registry[Custom registry URL]:url:' \
+ '--download-dir[Download directory]:dir:_directories'
+ ;;
+ esac
+ ;;
+ esac
+}
+
+_stackql_deploy "$@"
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/__init__.py b/ref-python-packages/stackql-deploy/stackql_deploy/__init__.py
new file mode 100644
index 0000000..b0bb384
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/__init__.py
@@ -0,0 +1 @@
+__version__ = '1.9.4'
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cli.py b/ref-python-packages/stackql-deploy/stackql_deploy/cli.py
new file mode 100644
index 0000000..d8c0016
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/cli.py
@@ -0,0 +1,615 @@
+# cli.py
+import click
+import os
+import sys
+import subprocess
+
+from . import __version__ as deploy_version
+
+from .lib.bootstrap import logger
+from .lib.utils import print_unicode_box, BorderColor
+# from .cmd.build import StackQLProvisioner
+# from .cmd.test import StackQLTestRunner
+# from .cmd.teardown import StackQLDeProvisioner
+from jinja2 import Environment, FileSystemLoader
+from dotenv import dotenv_values
+from pystackql import StackQL
+
+#
+# utility functions
+#
+
+def get_stackql_instance(custom_registry=None, download_dir=None):
+ """Initializes StackQL with the given options."""
+ stackql_kwargs = {}
+ if custom_registry:
+ stackql_kwargs['custom_registry'] = custom_registry
+ if download_dir:
+ stackql_kwargs['download_dir'] = download_dir
+
+ return StackQL(**stackql_kwargs)
+
+def find_stackql_binary(stackql_bin_path, download_dir):
+ """Find the stackql binary in the specified paths."""
+ # First, try to use the binary path from StackQL instance
+ if stackql_bin_path and os.path.isfile(stackql_bin_path):
+ return stackql_bin_path
+
+ # Next, try the download directory if provided
+ if download_dir:
+ binary_path = os.path.join(download_dir, 'stackql')
+ return binary_path if os.path.isfile(binary_path) else None
+
+ # If neither path works, return None
+ return None
+
+def load_env_vars(env_file, overrides):
+ """Load environment variables from a file and apply overrides."""
+ dotenv_path = os.path.join(os.getcwd(), env_file)
+ env_vars = {}
+
+ # Load environment variables from the specified file into a new dict
+ if os.path.exists(dotenv_path):
+ env_vars.update(dotenv_values(dotenv_path)) # Use update to load the .env file
+
+ # Apply overrides from the `-e` option
+ env_vars.update(overrides) # Directly update the dictionary with another dictionary
+
+ return env_vars
+
+def parse_env_var(ctx, param, value):
+ """Parse environment variable options given as 'KEY=VALUE'."""
+ env_vars = {}
+ if value:
+ for item in value:
+ try:
+ key, val = item.split('=', 1)
+ env_vars[key] = val
+ except ValueError:
+ raise click.BadParameter('environment variables must be formatted as KEY=VALUE')
+ return env_vars
+
+def setup_logger(command, args_dict):
+ log_level = args_dict.get('log_level', 'INFO').upper() # Normalize to uppercase
+ valid_levels = {'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'}
+
+ if log_level not in valid_levels:
+ raise click.ClickException(
+ f"Invalid log level: {log_level}. Valid levels are: {', '.join(valid_levels)}"
+ )
+
+ logger.setLevel(log_level)
+ logger.debug(f"'{command}' command called with args: {str(args_dict)}")
+
+def add_common_options(command):
+ common_options = [
+ click.option('--log-level', default='INFO', help='set the logging level.'),
+ click.option('--env-file', default='.env', help='environment variables file.'),
+ click.option(
+ '-e',
+ '--env',
+ multiple=True,
+ callback=parse_env_var,
+ help='set additional environment variables.'
+ ),
+ click.option('--dry-run', is_flag=True, help='perform a dry run of the operation.'),
+ click.option('--show-queries', is_flag=True, help='show queries run in the output logs.'),
+ click.option(
+ "--on-failure",
+ type=click.Choice(["rollback", "ignore", "error"]),
+ default="error",
+ help="action on failure.",
+ )
+ ]
+ for option in common_options:
+ command = option(command)
+ return command
+
+def add_stackql_kwarg_options(command):
+ """Add options that become kwargs for StackQL initialization."""
+ stackql_options = [
+ click.option('--custom-registry', default=None,
+ help='custom registry URL for StackQL.'),
+ click.option('--download-dir', default=None,
+ help='download directory for StackQL.')
+ ]
+ for option in stackql_options:
+ command = option(command)
+ return command
+
+#
+# main entry point
+#
+
+@click.group()
+@click.pass_context
+def cli(ctx):
+ """This is the main CLI entry point."""
+ ctx.ensure_object(dict)
+
+def setup_command_context(
+ ctx,
+ stack_dir,
+ stack_env,
+ log_level,
+ env_file,
+ env,
+ dry_run,
+ show_queries,
+ on_failure,
+ custom_registry,
+ download_dir,
+ command_name
+):
+ """Common initialization for commands."""
+ # Initialize the logger
+ setup_logger(command_name, locals())
+
+ # Initialize the StackQL instance and environment variables
+ stackql = get_stackql_instance(custom_registry, download_dir)
+
+ # Load environment variables from the file and apply overrides
+ env_vars = load_env_vars(env_file, env)
+ return stackql, env_vars
+
+
+#
+# build command
+#
+
+@cli.command()
+@click.argument('stack_dir')
+@click.argument('stack_env')
+@add_common_options
+@add_stackql_kwarg_options
+@click.option('--output-file', default=None,
+ help='File path to write deployment outputs as JSON.')
+@click.pass_context
+def build(ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure,
+ custom_registry, download_dir, output_file):
+ """Create or update resources."""
+
+ from .cmd.build import StackQLProvisioner
+
+ stackql, env_vars = setup_command_context(
+ ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure, custom_registry, download_dir, 'build'
+ )
+ provisioner = StackQLProvisioner(
+ stackql, env_vars, logger, stack_dir, stack_env)
+ stack_name_display = (
+ provisioner.stack_name if provisioner.stack_name
+ else stack_dir
+ )
+ message = (f"Deploying stack: [{stack_name_display}] "
+ f"to environment: [{stack_env}]")
+ print_unicode_box(message, BorderColor.YELLOW)
+
+ provisioner.run(dry_run, show_queries, on_failure, output_file)
+ click.echo("šÆ dry-run build complete" if dry_run
+ else "š build complete")
+
+#
+# teardown command
+#
+
+@cli.command()
+@click.argument('stack_dir')
+@click.argument('stack_env')
+@add_common_options
+@add_stackql_kwarg_options
+@click.pass_context
+def teardown(ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure,
+ custom_registry, download_dir ):
+ """Teardown a provisioned stack."""
+
+ from .cmd.teardown import StackQLDeProvisioner
+
+ stackql, env_vars = setup_command_context(
+ ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure, custom_registry, download_dir, 'teardown'
+ )
+ deprovisioner = StackQLDeProvisioner(
+ stackql, env_vars, logger, stack_dir, stack_env)
+ stack_name_display = (
+ deprovisioner.stack_name if deprovisioner.stack_name
+ else stack_dir
+ )
+ message = (f"Tearing down stack: [{stack_name_display}] "
+ f"in environment: [{stack_env}]")
+ print_unicode_box(message, BorderColor.YELLOW)
+
+ deprovisioner.run(dry_run, show_queries, on_failure)
+ click.echo(f"š§ teardown complete (dry run: {dry_run})")
+
+
+#
+# test command
+#
+
+@cli.command()
+@click.argument('stack_dir')
+@click.argument('stack_env')
+@add_common_options
+@add_stackql_kwarg_options
+@click.option('--output-file', default=None,
+ help='File path to write deployment outputs as JSON.')
+@click.pass_context
+def test(ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure, custom_registry, download_dir, output_file):
+ """Run test queries for the stack."""
+
+ from .cmd.test import StackQLTestRunner
+
+ stackql, env_vars = setup_command_context(
+ ctx, stack_dir, stack_env, log_level, env_file,
+ env, dry_run, show_queries, on_failure, custom_registry, download_dir, 'test'
+ )
+ test_runner = StackQLTestRunner(
+ stackql, env_vars, logger, stack_dir, stack_env)
+ stack_name_display = (
+ test_runner.stack_name if test_runner.stack_name
+ else stack_dir
+ )
+ message = (f"Testing stack: [{stack_name_display}] "
+ f"in environment: [{stack_env}]")
+ print_unicode_box(message, BorderColor.YELLOW)
+
+ test_runner.run(dry_run, show_queries, on_failure, output_file)
+ click.echo(f"š tests complete (dry run: {dry_run})")
+
+#
+# info command
+#
+
+# stackql-deploy --custom-registry="https://registry-dev.stackql.app/providers" --download-dir . info
+@cli.command()
+@click.pass_context
+def info(ctx):
+ """Display version information for stackql-deploy and the stackql library."""
+ stackql = get_stackql_instance(
+ custom_registry=ctx.obj.get('custom_registry'),
+ download_dir=ctx.obj.get('download_dir')
+ )
+
+ click.echo(click.style("stackql-deploy CLI", fg="green", bold=True))
+ click.echo(f" Version: {deploy_version}\n")
+
+ click.echo(click.style("StackQL Library", fg="green", bold=True))
+ click.echo(f" Version: {stackql.version}")
+ click.echo(f" pystackql Version: {stackql.package_version}")
+ click.echo(f" Platform: {stackql.platform}")
+ click.echo(f" Binary Path: {stackql.bin_path}")
+ if ctx.obj.get('custom_registry'):
+ click.echo(f" Custom Registry: {ctx.obj.get('custom_registry')}\n")
+ else:
+ click.echo("")
+
+ click.echo(click.style("Installed Providers", fg="green", bold=True))
+ providers = stackql.execute("SHOW PROVIDERS")
+ for provider in providers:
+ click.echo(f" {provider['name']}: {provider['version']}")
+
+ # Read and display contributors
+ contributors = read_contributors(logger)
+ if contributors:
+ click.echo("\n" + click.style("⨠Special Thanks to our Contributors āØ", fg="green", bold=True))
+
+ # Display 4 contributors per line
+ for i in range(0, len(contributors), 4):
+ # Get up to 4 contributors for this line
+ line_contributors = contributors[i:i+4]
+ # Join with commas
+ line = ", ".join(line_contributors)
+ # Display the line
+ click.echo(f" {line}")
+
+def read_contributors(logger):
+ """Read contributors from CSV file and return as list of dicts."""
+ try:
+ # Look for contributors.csv in package directory
+ package_dir = os.path.dirname(os.path.abspath(__file__))
+ contributors_path = os.path.join(package_dir, 'inc', 'contributors.csv')
+
+ with open(contributors_path, 'r', encoding='utf-8') as f:
+ contributors = [line.strip() for line in f if line.strip()]
+
+ return contributors
+
+ except Exception as e:
+ logger.debug(f"Failed to read contributors: {str(e)}")
+ return []
+
+#
+# shell command
+#
+
+@cli.command()
+@click.pass_context
+def shell(ctx):
+ """Launch the stackql shell."""
+ # Get an instance of StackQL with current context options
+ stackql = get_stackql_instance(
+ custom_registry=ctx.obj.get('custom_registry'),
+ download_dir=ctx.obj.get('download_dir')
+ )
+
+ # Find the stackql binary path
+ stackql_binary_path = find_stackql_binary(stackql.bin_path, ctx.obj.get('download_dir'))
+
+ # If stackql binary is not found, fail with an error
+ if not stackql_binary_path:
+ click.echo("Error: StackQL binary not found in the specified paths.", err=True)
+ sys.exit(1)
+
+ click.echo(f"Launching stackql shell from: {stackql_binary_path}")
+
+ # Launch the stackql shell as a subprocess
+ try:
+ subprocess.run([stackql_binary_path, "shell", "--colorscheme", "null"], check=True)
+ except subprocess.CalledProcessError as e:
+ click.echo(f"Error launching stackql shell: {e}", err=True)
+ sys.exit(1)
+
+
+#
+# upgrade command
+#
+
+@cli.command()
+@click.pass_context
+def upgrade(ctx):
+ """Upgrade the pystackql package and stackql binary to the latest version."""
+
+ stackql = get_stackql_instance()
+ orig_pkg_version = stackql.package_version
+ orig_stackql_version = stackql.version
+ stackql = None
+
+ click.echo("upgrading pystackql package...")
+ try:
+ # Run the pip install command to upgrade pystackql
+ subprocess.run(
+ [sys.executable, "-m", "pip", "install", "--upgrade", "--quiet", "pystackql"],
+ check=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
+ # click.echo(result.stdout.decode())
+ click.echo("pystackql package upgraded successfully.")
+ except subprocess.CalledProcessError as e:
+ click.echo(f"Failed to upgrade pystackql: {e.stderr.decode()}", err=True)
+ except Exception as e:
+ click.echo(f"An error occurred: {str(e)}", err=True)
+
+ stackql = get_stackql_instance()
+ new_pkg_version = stackql.package_version
+ if new_pkg_version == orig_pkg_version:
+ click.echo(f"pystackql package version {orig_pkg_version} is already up-to-date.")
+ else:
+ click.echo(f"pystackql package upgraded from {orig_pkg_version} to {new_pkg_version}.")
+
+ click.echo(f"upgrading stackql binary, current version {orig_stackql_version}...")
+ stackql.upgrade()
+
+
+#
+# init command
+#
+SUPPORTED_PROVIDERS = {'aws', 'google', 'azure'}
+DEFAULT_PROVIDER = 'azure'
+
+def create_project_structure(stack_name, provider=None):
+ stack_name = stack_name.replace('_', '-').lower()
+ base_path = os.path.join(os.getcwd(), stack_name)
+ if os.path.exists(base_path):
+ raise click.ClickException(f"directory '{stack_name}' already exists.")
+
+ directories = ['resources']
+ for directory in directories:
+ os.makedirs(os.path.join(base_path, directory), exist_ok=True)
+
+ # Check if provider is supported
+ if provider is None:
+ logger.debug(f"provider not supplied, defaulting to `{DEFAULT_PROVIDER}`")
+ provider = DEFAULT_PROVIDER
+ elif provider not in SUPPORTED_PROVIDERS:
+ provider = DEFAULT_PROVIDER
+ message = (
+ f"provider '{provider}' is not supported for `init`, "
+ f"supported providers are: {', '.join(SUPPORTED_PROVIDERS)}, "
+ f"defaulting to `{DEFAULT_PROVIDER}`"
+ )
+ click.secho(message, fg='yellow', err=False)
+
+ # set template files
+ if provider == 'google':
+ sample_res_name = 'example_vpc'
+ elif provider == 'azure':
+ sample_res_name = 'example_res_grp'
+ elif provider == 'aws':
+ sample_res_name = 'example_vpc'
+
+ template_base_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', provider)
+ env = Environment(loader=FileSystemLoader(template_base_path))
+
+ logger.debug(f"template base path: {template_base_path}")
+
+ template_files = {
+ 'stackql_manifest.yml.template': os.path.join(base_path, 'stackql_manifest.yml'),
+ 'README.md.template': os.path.join(base_path, 'README.md'),
+ f'resources/{sample_res_name}.iql.template': os.path.join(base_path,'resources', f'{sample_res_name}.iql'),
+ }
+
+ for template_name, output_name in template_files.items():
+ logger.debug(f"template name: {template_name}")
+ logger.debug(f"template output name: {output_name}")
+ template = env.get_template(template_name)
+ rendered_content = template.render(stack_name=stack_name)
+ with open(os.path.join(base_path, output_name), 'w') as f:
+ f.write(rendered_content)
+
+@cli.command()
+@click.argument('stack_name')
+@click.option(
+ "--provider",
+ default=None,
+ help="[OPTIONAL] specify a provider to start your project, supported values: aws, azure, google",
+)
+def init(stack_name, provider):
+ """Initialize a new stackql-deploy project structure."""
+ setup_logger("init", locals())
+ create_project_structure(stack_name, provider=provider)
+ click.echo(f"project {stack_name} initialized successfully.")
+
+#
+# completion command
+#
+
+@cli.command("completion")
+@click.argument(
+ "shell",
+ type=click.Choice(["bash", "zsh", "fish", "powershell"], case_sensitive=False),
+ required=False,
+)
+@click.option("--install", is_flag=True, help="Install completion to shell profile")
+def completion(shell, install):
+ """
+ Shell tab completion for stackql-deploy.
+ Examples:
+ eval "$(stackql-deploy completion bash)" # activate now
+ stackql-deploy completion bash --install # install permanently
+ stackql-deploy completion # auto-detect shell
+ """
+
+ # Auto-detect shell if not provided
+ if not shell:
+ shell = os.environ.get("SHELL", "").split("/")[-1] or "bash"
+ shell = shell.lower()
+
+ # Map shells to completion script files
+ completion_scripts = {
+ "bash": "stackql-deploy-completion.bash",
+ "zsh": "stackql-deploy-completion.zsh",
+ "fish": "stackql-deploy-completion.fish",
+ "powershell": "stackql-deploy-completion.ps1"
+ }
+
+ script_name = completion_scripts.get(shell)
+ if not script_name:
+ click.echo(f"ā Shell '{shell}' not supported. Supported: bash, zsh, fish, powershell", err=True)
+ sys.exit(1)
+
+ # Find the completion script
+ script_path = _find_completion_script(script_name)
+ if not script_path:
+ click.echo(f"ā Completion script not found: {script_name}", err=True)
+ sys.exit(1)
+
+ # Output script for eval/source (default behavior)
+ if not install:
+ with open(script_path, 'r') as f:
+ click.echo(f.read())
+ return
+
+ # Install to shell profile
+ _install_completion_for_shell(shell, script_path)
+
+def _find_completion_script(script_name):
+ """Find completion script in development or installed locations."""
+ from pathlib import Path
+
+ # Development mode: relative to project root
+ cli_file = Path(__file__).resolve()
+ project_root = cli_file.parent.parent
+ dev_path = project_root / "shell_completions" / script_name
+
+ if dev_path.exists():
+ logger.debug(f"Found completion script: {dev_path}")
+ return dev_path
+
+ # Installed mode: check common install locations
+ for prefix in [sys.prefix, sys.base_prefix, '/usr', '/usr/local']:
+ installed_path = Path(prefix) / "share" / "stackql-deploy" / "completions" / script_name
+ if installed_path.exists():
+ logger.debug(f"Found completion script: {installed_path}")
+ return installed_path
+
+ logger.error(f"Completion script {script_name} not found")
+ return None
+
+def _install_completion_for_shell(shell, script_path):
+ """Install completion to shell profile."""
+ from pathlib import Path
+
+ profiles = {
+ "bash": Path.home() / ".bashrc",
+ "zsh": Path.home() / ".zshrc",
+ "fish": Path.home() / ".config/fish/config.fish",
+ "powershell": Path.home() / "Documents/PowerShell/Microsoft.PowerShell_profile.ps1"
+ }
+
+ eval_commands = {
+ "bash": 'eval "$(stackql-deploy completion bash)"',
+ "zsh": 'eval "$(stackql-deploy completion zsh)"',
+ "fish": 'stackql-deploy completion fish | source',
+ "powershell": '. (stackql-deploy completion powershell)'
+ }
+
+ profile_path = profiles.get(shell)
+ eval_cmd = eval_commands.get(shell)
+
+ if not profile_path:
+ click.echo(f"ā Unknown profile for {shell}", err=True)
+ return
+
+ # Ensure profile directory and file exist
+ profile_path.parent.mkdir(parents=True, exist_ok=True)
+ if not profile_path.exists():
+ profile_path.touch()
+
+ # Check if already installed
+ try:
+ content = profile_path.read_text()
+ if "stackql-deploy completion" in content:
+ click.echo(f"ā
Completion already installed in {profile_path}")
+ _show_activation_instructions(shell)
+ return
+ except Exception as e:
+ click.echo(f"ā Error reading profile: {e}", err=True)
+ return
+
+ # Append completion line
+ try:
+ with open(profile_path, "a") as f:
+ f.write(f"\n# stackql-deploy completion\n{eval_cmd}\n")
+ click.echo(f"ā
Completion installed to {profile_path}")
+ _show_activation_instructions(shell)
+ except Exception as e:
+ click.echo(f"ā Error installing completion: {e}", err=True)
+
+def _show_activation_instructions(shell):
+ """Show shell-specific activation instructions."""
+ instructions = {
+ "bash": 'source ~/.bashrc',
+ "zsh": 'source ~/.zshrc',
+ "fish": 'source ~/.config/fish/config.fish',
+ "powershell": '. $PROFILE'
+ }
+
+ click.echo(f"š Activate now: {instructions.get(shell, 'restart your shell')}")
+ click.echo("⨠Or restart your terminal")
+
+cli.add_command(build)
+cli.add_command(test)
+cli.add_command(teardown)
+cli.add_command(info)
+cli.add_command(init)
+cli.add_command(upgrade)
+cli.add_command(shell)
+cli.add_command(completion)
+
+if __name__ == '__main__':
+ cli()
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cmd/__init__.py b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cmd/base.py b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/base.py
new file mode 100644
index 0000000..d63593d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/base.py
@@ -0,0 +1,540 @@
+# cmd/base.py
+import os
+import json
+from ..lib.utils import (
+ perform_retries,
+ run_stackql_command,
+ catch_error_and_exit,
+ run_stackql_query,
+ export_vars,
+ show_query,
+ check_all_dicts,
+ check_exports_as_statecheck_proxy,
+)
+from ..lib.config import load_manifest, get_global_context_and_providers
+from ..lib.filters import setup_environment
+
+class StackQLBase:
+ def __init__(self, stackql, vars, logger, stack_dir, stack_env):
+ self.stackql = stackql
+ self.vars = vars
+ self.logger = logger
+ self.stack_dir = stack_dir
+ self.stack_env = stack_env
+ self.env = setup_environment(self.stack_dir, self.logger)
+ self.manifest = load_manifest(self.stack_dir, self.logger)
+ self.stack_name = self.manifest.get('name', stack_dir)
+ self.global_context, self.providers = get_global_context_and_providers(
+ self.env,
+ self.manifest,
+ self.vars,
+ self.stack_env,
+ self.stack_name,
+ self.stackql,
+ self.logger
+ )
+
+ def process_custom_auth(
+ self,
+ resource,
+ full_context
+ ):
+ custom_auth = resource.get('auth', {})
+ env_vars = {}
+
+ if custom_auth:
+ self.logger.info(f"š custom auth is configured for [{resource['name']}]")
+
+ # Function to recursively search for keys of interest and populate env_vars
+ def extract_env_vars(auth_config):
+ for key, value in auth_config.items():
+ if key in {"username_var", "password_var", "credentialsenvvar", "keyIDenvvar"}:
+ # Retrieve the variable's value from full_context
+ env_var_name = value
+ env_var_value = full_context.get(env_var_name)
+ if env_var_value:
+ env_vars[env_var_name] = env_var_value
+ elif isinstance(value, dict):
+ # Recursively check nested dictionaries
+ extract_env_vars(value)
+
+ # Start extracting env vars from custom_auth
+ extract_env_vars(custom_auth)
+
+ # If no custom auth, return None for both custom_auth and env_vars
+ return (custom_auth if custom_auth else None, env_vars if env_vars else None)
+
+ def process_exports(
+ self,
+ resource,
+ full_context,
+ exports_query,
+ exports_retries,
+ exports_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_missing_exports=False
+ ):
+ expected_exports = resource.get('exports', [])
+
+ # Check if all items in expected_exports are dictionaries
+ all_dicts = check_all_dicts(expected_exports, self.logger)
+
+ if len(expected_exports) > 0:
+ protected_exports = resource.get('protected', [])
+ if dry_run:
+ export_data = {}
+ if all_dicts:
+ for item in expected_exports:
+ for _, val in item.items():
+ # when item is a dictionary,
+ # val(expected_exports) is the key to be exported
+ export_data[val] = ""
+ else:
+ # when item is not a dictionary,
+ # item is the key to be exported
+ for item in expected_exports:
+ export_data[item] = ""
+ export_vars(self, resource, export_data, expected_exports, all_dicts, protected_exports)
+ self.logger.info(
+ f"š¦ dry run exports query for [{resource['name']}]:\n\n/* exports query */\n{exports_query}\n"
+ )
+ else:
+ self.logger.info(f"š¦ exporting variables for [{resource['name']}]...")
+ show_query(show_queries, exports_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ exports = run_stackql_query(
+ exports_query,
+ self.stackql,
+ True,
+ self.logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars,
+ retries=exports_retries,
+ delay=exports_retry_delay
+ )
+ self.logger.debug(f"exports: {exports}")
+
+ if (exports is None or len(exports) == 0):
+ if ignore_missing_exports:
+ return
+ else:
+ show_query(True, exports_query, self.logger)
+ catch_error_and_exit(f"exports query failed for {resource['name']}", self.logger)
+
+ # Check if we received an error from the query execution
+ if (len(exports) >= 1 and isinstance(exports[0], dict)):
+ # Check for our custom error wrapper
+ if '_stackql_deploy_error' in exports[0]:
+ error_msg = exports[0]['_stackql_deploy_error']
+ show_query(True, exports_query, self.logger)
+ catch_error_and_exit(
+ f"exports query failed for {resource['name']}\n\nError details:\n{error_msg}",
+ self.logger
+ )
+ # Check for direct error in result
+ elif 'error' in exports[0]:
+ error_msg = exports[0]['error']
+ show_query(True, exports_query, self.logger)
+ catch_error_and_exit(
+ f"exports query failed for {resource['name']}\n\nError details:\n{error_msg}",
+ self.logger
+ )
+
+ if len(exports) > 1:
+ catch_error_and_exit(
+ f"exports should include one row only, received {str(len(exports))} rows",
+ self.logger
+ )
+
+ if len(exports) == 1 and not isinstance(exports[0], dict):
+ catch_error_and_exit(f"exports must be a dictionary, received {str(exports[0])}", self.logger)
+
+ export = exports[0]
+ if len(exports) == 0:
+ export_data = {}
+ if all_dicts:
+ for item in expected_exports:
+ for key, val in item.items():
+ export_data[val] = ''
+ else:
+ export_data[item] = ''
+ else:
+ export_data = {}
+ for item in expected_exports:
+ if all_dicts:
+ for key, val in item.items():
+ # when item is a dictionary,
+ # compare key(expected_exports) with key(export)
+ # set val(expected_exports) as key and export[key] as value in export_data
+ if isinstance(export.get(key), dict) and 'String' in export[key]:
+ export_data[val] = export[key]['String']
+ else:
+ export_data[val] = export.get(key, '')
+ else:
+ if isinstance(export.get(item), dict) and 'String' in export[item]:
+ export_data[item] = export[item]['String']
+ else:
+ export_data[item] = export.get(item, '')
+ export_vars(self, resource, export_data, expected_exports, all_dicts, protected_exports)
+
+ def process_exports_from_result(self, resource, exports_result, expected_exports):
+ """
+ Process exports data from a result that was already obtained (e.g., from exports proxy).
+ This avoids re-running the exports query when we already have the result.
+ """
+ if not exports_result or len(exports_result) == 0:
+ self.logger.debug(f"No exports data to process for [{resource['name']}] from cached result")
+ return
+
+ # Check if all items in expected_exports are dictionaries
+ all_dicts = check_all_dicts(expected_exports, self.logger)
+ protected_exports = resource.get('protected', [])
+
+ if len(exports_result) > 1:
+ catch_error_and_exit(
+ f"exports should include one row only, received {str(len(exports_result))} rows",
+ self.logger
+ )
+
+ if len(exports_result) == 1 and not isinstance(exports_result[0], dict):
+ catch_error_and_exit(f"exports must be a dictionary, received {str(exports_result[0])}", self.logger)
+
+ export = exports_result[0] if len(exports_result) > 0 else {}
+ export_data = {}
+
+ for item in expected_exports:
+ if all_dicts:
+ for key, val in item.items():
+ # when item is a dictionary,
+ # compare key(expected_exports) with key(export)
+ # set val(expected_exports) as key and export[key] as value in export_data
+ if isinstance(export.get(key), dict) and 'String' in export[key]:
+ export_data[val] = export[key]['String']
+ else:
+ export_data[val] = export.get(key, '')
+ else:
+ if isinstance(export.get(item), dict) and 'String' in export[item]:
+ export_data[item] = export[item]['String']
+ else:
+ export_data[item] = export.get(item, '')
+
+ export_vars(self, resource, export_data, expected_exports, all_dicts, protected_exports)
+
+ def check_if_resource_exists(
+ self,
+ resource_exists,
+ resource,
+ full_context,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ dry_run,
+ show_queries,
+ delete_test=False
+ ):
+ check_type = 'exists'
+ if delete_test:
+ check_type = 'post-delete'
+ if exists_query:
+ if dry_run:
+ self.logger.info(
+ f"š dry run {check_type} check for [{resource['name']}]:\n\n/* exists query */\n{exists_query}\n"
+ )
+ else:
+ self.logger.info(f"š running {check_type} check for [{resource['name']}]...")
+ show_query(show_queries, exists_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ resource_exists = perform_retries(
+ resource,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ self.stackql,
+ self.logger,
+ delete_test,
+ custom_auth=custom_auth,
+ env_vars=env_vars
+ )
+ else:
+ self.logger.info(f"{check_type} check not configured for [{resource['name']}]")
+ if delete_test:
+ resource_exists = False
+ return resource_exists
+
+ def check_if_resource_is_correct_state(
+ self,
+ is_correct_state,
+ resource,
+ full_context,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ dry_run,
+ show_queries
+ ):
+ if statecheck_query:
+ if dry_run:
+ self.logger.info(
+ f"š dry run state check for [{resource['name']}]:\n\n/* state check query */\n{statecheck_query}\n"
+ )
+ else:
+ self.logger.info(f"š running state check for [{resource['name']}]...")
+ show_query(show_queries, statecheck_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ is_correct_state = perform_retries(
+ resource,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ self.stackql,
+ self.logger,
+ False,
+ custom_auth=custom_auth,
+ env_vars=env_vars
+ )
+ if is_correct_state:
+ self.logger.info(f"š [{resource['name']}] is in the desired state")
+ else:
+ self.logger.info(f"š [{resource['name']}] is not in the desired state")
+ else:
+ self.logger.info(f"state check not configured for [{resource['name']}]")
+ is_correct_state = True
+ return is_correct_state
+
+ def check_state_using_exports_proxy(
+ self,
+ resource,
+ full_context,
+ exports_query,
+ exports_retries,
+ exports_retry_delay,
+ dry_run,
+ show_queries
+ ):
+ """
+ Use exports query as a proxy for statecheck. If exports returns empty result,
+ consider the statecheck failed. If exports returns valid data, consider statecheck passed.
+ """
+ if dry_run:
+ self.logger.info(
+ f"š dry run state check using exports proxy for [{resource['name']}]:\n\n"
+ f"/* exports as statecheck proxy */\n{exports_query}\n"
+ )
+ return True
+ else:
+ self.logger.info(f"š running state check using exports proxy for [{resource['name']}]...")
+ show_query(show_queries, exports_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+
+ # Run exports query with error suppression
+ exports_result = run_stackql_query(
+ exports_query,
+ self.stackql,
+ True, # suppress_errors=True
+ self.logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars,
+ retries=exports_retries,
+ delay=exports_retry_delay
+ )
+
+ # Use exports result as statecheck proxy
+ is_correct_state = check_exports_as_statecheck_proxy(exports_result, self.logger)
+
+ if is_correct_state:
+ self.logger.info(
+ f"š [{resource['name']}] exports proxy indicates resource is in the desired state"
+ )
+ else:
+ self.logger.info(
+ f"š [{resource['name']}] exports proxy indicates resource is not in the desired state"
+ )
+
+ return is_correct_state, exports_result
+
+ def create_resource(
+ self,
+ is_created_or_updated,
+ resource,
+ full_context,
+ create_query,
+ create_retries,
+ create_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors=False
+ ):
+ if dry_run:
+ self.logger.info(
+ f"š§ dry run create for [{resource['name']}]:\n\n/* insert (create) query */\n{create_query}\n"
+ )
+ else:
+ self.logger.info(f"[{resource['name']}] does not exist, creating š§...")
+ show_query(show_queries, create_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ msg = run_stackql_command(
+ create_query,
+ self.stackql,
+ self.logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars,
+ ignore_errors=ignore_errors,
+ retries=create_retries,
+ retry_delay=create_retry_delay
+ )
+ self.logger.debug(f"create response: {msg}")
+ is_created_or_updated = True
+ return is_created_or_updated
+
+ def update_resource(
+ self,
+ is_created_or_updated,
+ resource,
+ full_context,
+ update_query,
+ update_retries,
+ update_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors=False
+ ):
+ if update_query:
+ if dry_run:
+ self.logger.info(f"š§ dry run update for [{resource['name']}]:\n\n/* update query */\n{update_query}\n")
+ else:
+ self.logger.info(f"š§ updating [{resource['name']}]...")
+ show_query(show_queries, update_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ msg = run_stackql_command(
+ update_query,
+ self.stackql,
+ self.logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars,
+ ignore_errors=ignore_errors,
+ retries=update_retries,
+ retry_delay=update_retry_delay
+ )
+ self.logger.debug(f"update response: {msg}")
+ is_created_or_updated = True
+ else:
+ self.logger.info(f"update query not configured for [{resource['name']}], skipping update...")
+ return is_created_or_updated
+
+ def delete_resource(
+ self,
+ resource,
+ full_context,
+ delete_query,
+ delete_retries,
+ delete_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors=False,
+ ):
+ if delete_query:
+ if dry_run:
+ self.logger.info(f"š§ dry run delete for [{resource['name']}]:\n\n{delete_query}\n")
+ else:
+ self.logger.info(f"š§ deleting [{resource['name']}]...")
+ show_query(show_queries, delete_query, self.logger)
+ custom_auth, env_vars = self.process_custom_auth(resource, full_context)
+ msg = run_stackql_command(
+ delete_query,
+ self.stackql,
+ self.logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars,
+ ignore_errors=ignore_errors,
+ retries=delete_retries,
+ retry_delay=delete_retry_delay
+ )
+ self.logger.debug(f"delete response: {msg}")
+ else:
+ self.logger.info(f"delete query not configured for [{resource['name']}], skipping delete...")
+
+ def run_command(self, command_query, command_retries, command_retry_delay, dry_run, show_queries):
+ if command_query:
+ if dry_run:
+ self.logger.info(f"š§ dry run command:\n\n{command_query}\n")
+ else:
+ self.logger.info("š§ running command...")
+ show_query(show_queries, command_query, self.logger)
+ run_stackql_command(
+ command_query,
+ self.stackql,
+ self.logger,
+ retries=command_retries,
+ retry_delay=command_retry_delay
+ )
+ else:
+ self.logger.info("command query not configured, skipping command...")
+
+ def process_stack_exports(self, dry_run, output_file=None, elapsed_time=None):
+ """
+ Process root-level exports from manifest and write to JSON file
+ """
+ if not output_file:
+ return
+
+ self.logger.info("š¦ processing stack exports...")
+
+ manifest_exports = self.manifest.get('exports', [])
+
+ if dry_run:
+ total_vars = len(manifest_exports) + 3 # +3 for stack_name, stack_env, and elapsed_time
+ self.logger.info(
+ f"š dry run: would export {total_vars} variables to {output_file} "
+ f"(including automatic stack_name, stack_env, and elapsed_time)"
+ )
+ return
+
+ # Collect data in specific order: stack metadata first, user exports, then timing
+ export_data = {}
+ missing_vars = []
+
+ # Always include stack_name and stack_env automatically as first exports
+ export_data['stack_name'] = self.stack_name
+ export_data['stack_env'] = self.stack_env
+
+ for var_name in manifest_exports:
+ # Skip stack_name and stack_env if they're explicitly listed (already added above)
+ if var_name in ('stack_name', 'stack_env'):
+ continue
+
+ if var_name in self.global_context:
+ value = self.global_context[var_name]
+ # Parse JSON strings back to their original type if they were serialized
+ try:
+ if isinstance(value, str) and (value.startswith('[') or value.startswith('{')):
+ value = json.loads(value)
+ except (json.JSONDecodeError, ValueError):
+ # Keep as string if not valid JSON
+ pass
+ export_data[var_name] = value
+ else:
+ missing_vars.append(var_name)
+
+ if missing_vars:
+ catch_error_and_exit(
+ f"exports failed: variables not found in context: {missing_vars}",
+ self.logger
+ )
+
+ # Add elapsed_time as the final automatic export
+ if elapsed_time is not None:
+ export_data['elapsed_time'] = str(elapsed_time)
+
+ # Ensure destination directory exists
+ dest_dir = os.path.dirname(output_file)
+ if dest_dir and not os.path.exists(dest_dir):
+ os.makedirs(dest_dir, exist_ok=True)
+
+ # Write JSON file
+ try:
+ with open(output_file, 'w') as f:
+ json.dump(export_data, f, indent=2)
+        self.logger.info(f"✅ exported {len(export_data)} variables to {output_file}")
+ except Exception as e:
+ catch_error_and_exit(f"failed to write exports file {output_file}: {e}", self.logger)
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cmd/build.py b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/build.py
new file mode 100644
index 0000000..1e5e85d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/build.py
@@ -0,0 +1,426 @@
+# cmd/build.py
+import datetime
+from ..lib.utils import (
+ catch_error_and_exit,
+ export_vars,
+ run_ext_script,
+ get_type,
+ print_unicode_box,
+ BorderColor
+)
+from ..lib.config import get_full_context, render_value
+from ..lib.templating import get_queries, render_inline_template
+from .base import StackQLBase
+
+class StackQLProvisioner(StackQLBase):
+
+ def process_script_resource(self, resource, dry_run, full_context):
+ self.logger.info(f"running script for {resource['name']}...")
+ script_template = resource.get('run', None)
+ if not script_template:
+ catch_error_and_exit("script resource must include 'run' key", self.logger)
+
+ script = self.env.from_string(script_template).render(full_context)
+
+ if dry_run:
+ dry_run_script = script.replace('""', '""')
+ self.logger.info(f"dry run script for [{resource['name']}]:\n\n{dry_run_script}\n")
+ else:
+ self.logger.info(f"running script for [{resource['name']}]...")
+ try:
+ ret_vars = run_ext_script(script, self.logger, resource.get('exports', None))
+ if resource.get('exports', None):
+ self.logger.info(f"exported variables from script: {ret_vars}")
+ export_vars(self, resource, ret_vars, resource.get('exports', []), resource.get('protected', []))
+ except Exception as e:
+ catch_error_and_exit(f"script failed: {e}", self.logger)
+
+ def run(self, dry_run, show_queries, on_failure, output_file=None):
+
+ start_time = datetime.datetime.now()
+
+ self.logger.info(
+ f"deploying [{self.stack_name}] in [{self.stack_env}] environment {'(dry run)' if dry_run else ''}"
+ )
+
+ for resource in self.manifest.get('resources', []):
+
+ print_unicode_box(f"Processing resource: [{resource['name']}]", BorderColor.BLUE)
+
+ type = get_type(resource, self.logger)
+
+ self.logger.info(f"processing resource [{resource['name']}], type: {type}")
+
+ # get full context
+ full_context = get_full_context(self.env, self.global_context, resource, self.logger)
+
+ # Check if the resource has an 'if' condition and evaluate it
+ if 'if' in resource:
+ condition = resource['if']
+ try:
+ # Render the condition with the full context to resolve any template variables
+ rendered_condition = render_value(self.env, condition, full_context, self.logger)
+ # Evaluate the condition
+ condition_result = eval(rendered_condition)
+ if not condition_result:
+ self.logger.info(f"skipping resource [{resource['name']}] due to condition: {condition}")
+ continue
+ except Exception as e:
+ catch_error_and_exit(
+ f"error evaluating condition for resource [{resource['name']}]: {e}",
+ self.logger
+ )
+
+ if type == 'script':
+ self.process_script_resource(resource, dry_run, full_context)
+ continue
+
+ #
+ # get resource queries
+ #
+ if (type == 'command' or type == 'query') and 'sql' in resource:
+ # inline SQL specified in the resource
+ resource_queries = {}
+ inline_query = render_inline_template(self.env,
+ resource["name"],
+ resource["sql"],
+ full_context,
+ self.logger)
+ else:
+ resource_queries = get_queries(self.env,
+ self.stack_dir,
+ 'resources',
+ resource,
+ full_context,
+ self.logger)
+
+ # provisioning queries
+ if type in ('resource', 'multi'):
+ # createorupdate queries supercede create and update queries
+ createorupdate_query = resource_queries.get('createorupdate', {}).get('rendered')
+ createorupdate_retries = resource_queries.get('createorupdate', {}).get('options', {}).get('retries', 1)
+ createorupdate_retry_delay = resource_queries.get(
+ 'createorupdate', {}).get('options', {}).get('retry_delay', 0)
+
+ if not createorupdate_query:
+ create_query = resource_queries.get('create', {}).get('rendered')
+ create_retries = resource_queries.get('create', {}).get('options', {}).get('retries', 1)
+ create_retry_delay = resource_queries.get('create', {}).get('options', {}).get('retry_delay', 0)
+
+ update_query = resource_queries.get('update', {}).get('rendered')
+ update_retries = resource_queries.get('update', {}).get('options', {}).get('retries', 1)
+ update_retry_delay = resource_queries.get('update', {}).get('options', {}).get('retry_delay', 0)
+ else:
+ create_query = createorupdate_query
+ create_retries = createorupdate_retries
+ create_retry_delay = createorupdate_retry_delay
+ update_query = createorupdate_query
+ update_retries = createorupdate_retries
+ update_retry_delay = createorupdate_retry_delay
+
+ if not create_query:
+ catch_error_and_exit(
+ "iql file must include either 'create' or 'createorupdate' anchor.",
+ self.logger
+ )
+
+ # test queries
+ exists_query = resource_queries.get('exists', {}).get('rendered')
+ exists_retries = resource_queries.get('exists', {}).get('options', {}).get('retries', 1)
+ exists_retry_delay = resource_queries.get('exists', {}).get('options', {}).get('retry_delay', 0)
+
+ statecheck_query = resource_queries.get('statecheck', {}).get('rendered')
+ statecheck_retries = resource_queries.get('statecheck', {}).get('options', {}).get('retries', 1)
+ statecheck_retry_delay = resource_queries.get('statecheck', {}).get('options', {}).get('retry_delay', 0)
+
+ exports_query = resource_queries.get('exports', {}).get('rendered')
+ exports_retries = resource_queries.get('exports', {}).get('options', {}).get('retries', 1)
+ exports_retry_delay = resource_queries.get('exports', {}).get('options', {}).get('retry_delay', 0)
+
+ if type == 'query' and not exports_query:
+ if 'sql' in resource:
+ exports_query = inline_query
+ exports_retries = 1
+ exports_retry_delay = 0
+ else:
+ catch_error_and_exit(
+ "inline sql must be supplied or an iql file must be present with an "
+ "'exports' anchor for query type resources.",
+ self.logger
+ )
+
+ exports_result_from_proxy = None # Track exports result if used as proxy
+
+ if type in ('resource', 'multi'):
+
+ ignore_errors = False
+ resource_exists = False
+ is_correct_state = False
+ if type == 'multi':
+ # multi resources ignore errors on create or update
+ ignore_errors = True
+
+ #
+ # State checking logic
+ #
+
+ if createorupdate_query:
+ # Skip all existence and state checks for createorupdate
+ pass
+ else:
+ # Determine the validation strategy based on available queries
+ if statecheck_query:
+ #
+ # Flow 1: Traditional flow when statecheck exists
+ # exists ā create/update ā statecheck ā exports
+ #
+ if exists_query:
+ resource_exists = self.check_if_resource_exists(
+ resource_exists,
+ resource,
+ full_context,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ dry_run,
+ show_queries
+ )
+ else:
+ # Use statecheck as exists check
+ is_correct_state = self.check_if_resource_is_correct_state(
+ is_correct_state,
+ resource,
+ full_context,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ dry_run,
+ show_queries
+ )
+ resource_exists = is_correct_state
+
+ # Pre-deployment state check for existing resources
+ if resource_exists and not is_correct_state:
+ if resource.get('skip_validation', False):
+ self.logger.info(
+ f"skipping validation for [{resource['name']}] as skip_validation is set to true."
+ )
+ is_correct_state = True
+ else:
+ is_correct_state = self.check_if_resource_is_correct_state(
+ is_correct_state,
+ resource,
+ full_context,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ dry_run,
+ show_queries
+ )
+
+ elif exports_query:
+ #
+ # Flow 2: Optimized flow when only exports exists (no statecheck)
+ # Try exports first with FAST FAIL (no retries)
+ # If fails: exists ā create/update ā exports (with retries as statecheck)
+ #
+ self.logger.info(
+ f"š trying exports query first (fast-fail) for optimal validation "
+ f"for [{resource['name']}]"
+ )
+ is_correct_state, exports_result_from_proxy = self.check_state_using_exports_proxy(
+ resource,
+ full_context,
+ exports_query,
+ 1, # Fast fail: only 1 attempt
+ 0, # No delay
+ dry_run,
+ show_queries
+ )
+ resource_exists = is_correct_state
+
+ # If exports succeeded, we're done with validation (happy path)
+ if is_correct_state:
+ self.logger.info(
+                            f"✅ [{resource['name']}] validated successfully with fast exports query"
+ )
+ else:
+ # Exports failed, fall back to exists check
+ self.logger.info(
+ f"š fast exports validation failed, falling back to exists check "
+ f"for [{resource['name']}]"
+ )
+ # Clear the failed exports result
+ exports_result_from_proxy = None
+
+ if exists_query:
+ resource_exists = self.check_if_resource_exists(
+ False,
+ resource,
+ full_context,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ dry_run,
+ show_queries
+ )
+ else:
+ # No exists query, assume resource doesn't exist
+ resource_exists = False
+
+ elif exists_query:
+ #
+ # Flow 3: Basic flow with only exists query
+ #
+ resource_exists = self.check_if_resource_exists(
+ resource_exists,
+ resource,
+ full_context,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ dry_run,
+ show_queries
+ )
+ else:
+ catch_error_and_exit(
+ "iql file must include either 'exists', 'statecheck', or 'exports' anchor.",
+ self.logger
+ )
+
+ #
+ # resource does not exist
+ #
+ is_created_or_updated = False
+ if not resource_exists:
+ is_created_or_updated = self.create_resource(
+ is_created_or_updated,
+ resource,
+ full_context,
+ create_query,
+ create_retries,
+ create_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors
+ )
+
+ #
+ # resource exists but is not in the correct state
+ #
+ if resource_exists and not is_correct_state:
+ is_created_or_updated = self.update_resource(
+ is_created_or_updated,
+ resource,
+ full_context,
+ update_query,
+ update_retries,
+ update_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors
+ )
+
+ #
+ # check state again after create or update
+ #
+ if is_created_or_updated:
+ if statecheck_query:
+ # Use statecheck for post-deploy validation
+ is_correct_state = self.check_if_resource_is_correct_state(
+ is_correct_state,
+ resource,
+ full_context,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ dry_run,
+ show_queries,
+ )
+ elif exports_query:
+ # Use exports as statecheck proxy with proper retries
+ # This handles the case where statecheck doesn't exist
+ self.logger.info(
+ f"š using exports query as post-deploy statecheck "
+ f"for [{resource['name']}]"
+ )
+ # Need to determine retries: if we have statecheck config, use it
+ # Otherwise fall back to exports config
+ post_deploy_retries = statecheck_retries if statecheck_retries > 1 else exports_retries
+ post_deploy_delay = statecheck_retry_delay if statecheck_retries > 1 else exports_retry_delay
+
+ is_correct_state, exports_result_from_proxy = self.check_state_using_exports_proxy(
+ resource,
+ full_context,
+ exports_query,
+ post_deploy_retries,
+ post_deploy_delay,
+ dry_run,
+ show_queries
+ )
+
+ #
+ # statecheck check complete
+ #
+ if not is_correct_state:
+ if not dry_run:
+ catch_error_and_exit(
+                            f"❌ deployment failed for {resource['name']} after post-deploy checks.",
+ self.logger
+ )
+
+ if type == 'command':
+ # command queries
+ if 'sql' in resource:
+ command_query = inline_query
+ command_retries = 1
+ command_retry_delay = 0
+ else:
+ # SQL from file
+ command_query = resource_queries.get('command', {}).get('rendered')
+ command_retries = resource_queries.get('command', {}).get('options', {}).get('retries', 1)
+ command_retry_delay = resource_queries.get('command', {}).get('options', {}).get('retry_delay', 0)
+ if not command_query:
+ error_msg = (
+ "'sql' should be defined in the resource or the 'command' anchor "
+ "needs to be supplied in the corresponding iql file for command "
+ "type resources."
+ )
+ catch_error_and_exit(error_msg, self.logger)
+
+ self.run_command(command_query, command_retries, command_retry_delay, dry_run, show_queries)
+ #
+ # exports with optimization
+ #
+ if exports_query:
+ # OPTIMIZATION: Skip exports if we already ran it as a proxy and have the result
+ if exports_result_from_proxy is not None and type in ('resource', 'multi'):
+ self.logger.info(f"š¦ reusing exports result from proxy for [{resource['name']}]...")
+ # Process the exports result we already have
+ expected_exports = resource.get('exports', [])
+ if len(expected_exports) > 0:
+ # Use helper method to process the exports data directly
+ self.process_exports_from_result(resource, exports_result_from_proxy, expected_exports)
+ else:
+ # Run exports normally
+ self.process_exports(
+ resource,
+ full_context,
+ exports_query,
+ exports_retries,
+ exports_retry_delay,
+ dry_run,
+ show_queries
+ )
+
+ if not dry_run:
+ if type == 'resource':
+                self.logger.info(f"✅ successfully deployed {resource['name']}")
+ elif type == 'query':
+                self.logger.info(f"✅ successfully exported variables for query in {resource['name']}")
+
+ elapsed_time = datetime.datetime.now() - start_time
+ self.logger.info(f"deployment completed in {elapsed_time}")
+
+ # Process stack-level exports after all resources are deployed
+ self.process_stack_exports(dry_run, output_file, elapsed_time)
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cmd/teardown.py b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/teardown.py
new file mode 100644
index 0000000..657c3f6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/teardown.py
@@ -0,0 +1,216 @@
+# cmd/teardown.py
+import datetime
+from ..lib.utils import (
+ catch_error_and_exit,
+ get_type,
+ print_unicode_box,
+ BorderColor
+)
+from ..lib.config import get_full_context, render_value
+from ..lib.templating import get_queries, render_inline_template
+from .base import StackQLBase
+
+class StackQLDeProvisioner(StackQLBase):
+
+ def collect_exports(self, show_queries, dry_run):
+ self.logger.info(f"collecting exports for [{self.stack_name}] in [{self.stack_env}] environment")
+
+ for resource in self.manifest.get('resources', []):
+
+ type = get_type(resource, self.logger)
+
+ self.logger.info(f"getting exports for resource [{resource['name']}]")
+
+ # get full context
+ full_context = get_full_context(self.env, self.global_context, resource, self.logger)
+
+ # get resource queries
+ if type != 'command':
+ if type == 'query' and 'sql' in resource:
+ # inline SQL specified in the resource
+ test_queries = {}
+ exports_query = render_inline_template(self.env,
+ resource["name"],
+ resource["sql"],
+ full_context,
+ self.logger)
+ exports_retries = 1
+ exports_retry_delay = 0
+ else:
+ test_queries = get_queries(self.env,
+ self.stack_dir,
+ 'resources',
+ resource,
+ full_context,
+ self.logger)
+ exports_query = test_queries.get('exports', {}).get('rendered')
+ exports_retries = test_queries.get('exports', {}).get('options', {}).get('retries', 1)
+ exports_retry_delay = test_queries.get('exports', {}).get('options', {}).get('retry_delay', 0)
+
+ if exports_query:
+ self.process_exports(
+ resource,
+ full_context,
+ exports_query,
+ exports_retries,
+ exports_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_missing_exports=True
+ )
+
+ def run(self, dry_run, show_queries, on_failure):
+
+ start_time = datetime.datetime.now()
+
+ self.logger.info(
+ f"tearing down [{self.stack_name}] in [{self.stack_env}] "
+ f"environment {'(dry run)' if dry_run else ''}"
+ )
+
+ # Collect all exports
+ self.collect_exports(show_queries, dry_run)
+
+ for resource in reversed(self.manifest['resources']):
+
+ print_unicode_box(f"Processing resource: [{resource['name']}]", BorderColor.RED)
+
+ # process resources in reverse order
+ type = get_type(resource, self.logger)
+
+ if type not in ('resource', 'multi'):
+ self.logger.debug(f"skipping resource [{resource['name']}] (type: {type})")
+ continue
+ else:
+ self.logger.info(f"de-provisioning resource [{resource['name']}], type: {type}")
+
+ # get full context
+ full_context = get_full_context(self.env, self.global_context, resource, self.logger)
+
+ # Check if the resource has an 'if' condition and evaluate it
+ if 'if' in resource:
+ condition = resource['if']
+ try:
+ # Render the condition with the full context to resolve any template variables
+ rendered_condition = render_value(self.env, condition, full_context, self.logger)
+ # Evaluate the condition
+ condition_result = eval(rendered_condition)
+ if not condition_result:
+ self.logger.info(f"skipping resource [{resource['name']}] due to condition: {condition}")
+ continue
+ except Exception as e:
+ catch_error_and_exit(
+ f"error evaluating condition for resource [{resource['name']}]: {e}",
+ self.logger
+ )
+
+ # add reverse export map variable to full context
+ if 'exports' in resource:
+ for export in resource['exports']:
+ if isinstance(export, dict):
+ for key, lookup_key in export.items():
+ # Get the value from full_context using the lookup_key
+ if lookup_key in full_context:
+ # Add new mapping using the export key and looked up value
+ full_context[key] = full_context[lookup_key]
+
+ #
+ # get resource queries
+ #
+ resource_queries = get_queries(self.env, self.stack_dir, 'resources', resource, full_context, self.logger)
+
+ exists_query = resource_queries.get('exists', {}).get('rendered')
+ exists_retries = resource_queries.get('exists', {}).get('options', {}).get('retries', 1)
+ exists_retry_delay = resource_queries.get('exists', {}).get('options', {}).get('retry_delay', 0)
+
+ if not exists_query:
+ self.logger.info(
+ f"exists query not defined for [{resource['name']}], "
+ f"trying to use statecheck query as exists query."
+ )
+ exists_query = resource_queries.get('statecheck', {}).get('rendered')
+ exists_retries = resource_queries.get('statecheck', {}).get('options', {}).get('retries', 1)
+ exists_retry_delay = resource_queries.get('statecheck', {}).get('options', {}).get('retry_delay', 0)
+ postdelete_exists_retries = resource_queries.get('statecheck', {}).get(
+ 'options', {}
+ ).get('postdelete_retries', 10)
+ postdelete_exists_retry_delay = resource_queries.get('statecheck', {}).get(
+ 'options', {}
+ ).get('postdelete_retry_delay', 5)
+ else:
+ postdelete_exists_retries = resource_queries.get('exists', {}).get(
+ 'options', {}
+ ).get('postdelete_retries', 10)
+ postdelete_exists_retry_delay = resource_queries.get('exists', {}).get(
+ 'options', {}
+ ).get('postdelete_retry_delay', 5)
+
+ delete_query = resource_queries.get('delete', {}).get('rendered')
+ delete_retries = resource_queries.get('delete', {}).get('options', {}).get('retries', 1)
+ delete_retry_delay = resource_queries.get('delete', {}).get('options', {}).get('retry_delay', 0)
+
+ if not delete_query:
+ self.logger.info(f"delete query not defined for [{resource['name']}], skipping...")
+ continue
+
+ #
+ # pre-delete check
+ #
+ ignore_errors = False
+ resource_exists = True # assume exists
+ if type == 'multi':
+ self.logger.info("pre-delete check not supported for multi resources, skipping...")
+ ignore_errors = True # multi resources ignore errors on create or update
+ elif type == 'resource':
+ resource_exists = self.check_if_resource_exists(
+ resource_exists,
+ resource,
+ full_context,
+ exists_query,
+ exists_retries,
+ exists_retry_delay,
+ dry_run,
+ show_queries
+ )
+
+ #
+ # delete
+ #
+ if resource_exists:
+ self.delete_resource(
+ resource,
+ full_context,
+ delete_query,
+ delete_retries,
+ delete_retry_delay,
+ dry_run,
+ show_queries,
+ ignore_errors
+ )
+ else:
+ self.logger.info(f"resource [{resource['name']}] does not exist, skipping delete")
+ continue
+
+ #
+ # confirm deletion
+ #
+ resource_deleted = self.check_if_resource_exists(
+ False,
+ resource,
+ full_context,
+ exists_query,
+ postdelete_exists_retries,
+ postdelete_exists_retry_delay,
+ dry_run,
+ show_queries,
+ delete_test=True,
+ )
+
+ if resource_deleted:
+ self.logger.info(f"✅ successfully deleted {resource['name']}")
+ else:
+ if not dry_run:
+ catch_error_and_exit(f"❌ failed to delete {resource['name']}.", self.logger)
+
+ elapsed_time = datetime.datetime.now() - start_time
+ self.logger.info(f"deployment completed in {elapsed_time}")
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/cmd/test.py b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/test.py
new file mode 100644
index 0000000..526e6e7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/cmd/test.py
@@ -0,0 +1,152 @@
+# cmd/test.py
+import datetime
+from ..lib.utils import (
+ catch_error_and_exit,
+ get_type,
+ print_unicode_box,
+ BorderColor
+)
+from ..lib.config import get_full_context
+from ..lib.templating import get_queries, render_inline_template
+from .base import StackQLBase
+
+class StackQLTestRunner(StackQLBase):
+ def run(self, dry_run, show_queries, on_failure, output_file=None):
+
+ start_time = datetime.datetime.now()
+
+ self.logger.info(
+ f"testing [{self.stack_name}] in [{self.stack_env}] environment {'(dry run)' if dry_run else ''}"
+ )
+
+ for resource in self.manifest.get('resources', []):
+
+ print_unicode_box(f"Processing resource: [{resource['name']}]", BorderColor.BLUE)
+
+ type = get_type(resource, self.logger)
+
+ if type == 'query':
+ self.logger.info(f"exporting variables for [{resource['name']}]")
+ elif type in ('resource', 'multi'):
+ self.logger.info(f"testing resource [{resource['name']}], type: {type}")
+ elif type == 'command':
+ continue
+ else:
+ catch_error_and_exit(f"unknown resource type: {type}", self.logger)
+
+ # get full context
+ full_context = get_full_context(self.env, self.global_context, resource, self.logger)
+
+ #
+ # get test queries
+ #
+ if type == 'query' and 'sql' in resource:
+ # inline SQL specified in the resource
+ test_queries = {}
+ inline_query = render_inline_template(self.env,
+ resource["name"],
+ resource["sql"],
+ full_context,
+ self.logger)
+ else:
+ test_queries = get_queries(self.env,
+ self.stack_dir,
+ 'resources',
+ resource,
+ full_context,
+ self.logger)
+
+
+
+ statecheck_query = test_queries.get('statecheck', {}).get('rendered')
+ statecheck_retries = test_queries.get('statecheck', {}).get('options', {}).get('retries', 1)
+ statecheck_retry_delay = test_queries.get('statecheck', {}).get('options', {}).get('retry_delay', 0)
+
+ exports_query = test_queries.get('exports', {}).get('rendered')
+ exports_retries = test_queries.get('exports', {}).get('options', {}).get('retries', 1)
+ exports_retry_delay = test_queries.get('exports', {}).get('options', {}).get('retry_delay', 0)
+
+ if type == 'query' and not exports_query:
+ if 'sql' in resource:
+ exports_query = inline_query
+ exports_retries = 1
+ exports_retry_delay = 0
+ else:
+ catch_error_and_exit(
+ "inline sql must be supplied or an iql file must be present with an "
+ "'exports' anchor for query type resources.",
+ self.logger
+ )
+ #
+ # statecheck check with optimizations
+ #
+ exports_result_from_proxy = None # Track exports result if used as proxy
+
+ if type in ('resource', 'multi'):
+ if 'skip_validation' in resource:
+ self.logger.info(f"Skipping statecheck for {resource['name']}")
+ is_correct_state = True
+ else:
+ if statecheck_query:
+ is_correct_state = self.check_if_resource_is_correct_state(
+ False,
+ resource,
+ full_context,
+ statecheck_query,
+ statecheck_retries,
+ statecheck_retry_delay,
+ dry_run,
+ show_queries
+ )
+ elif exports_query:
+ # OPTIMIZATION: Use exports as statecheck proxy for test
+ self.logger.info(
+ f"🔄 using exports query as proxy for statecheck test "
+ f"for [{resource['name']}]"
+ )
+ is_correct_state, exports_result_from_proxy = self.check_state_using_exports_proxy(
+ resource,
+ full_context,
+ exports_query,
+ statecheck_retries, # Use statecheck retries when using as statecheck proxy
+ statecheck_retry_delay, # Use statecheck delay when using as statecheck proxy
+ dry_run,
+ show_queries
+ )
+ else:
+ catch_error_and_exit(
+ "iql file must include either 'statecheck' or 'exports' anchor for validation.",
+ self.logger
+ )
+
+ if not is_correct_state and not dry_run:
+ catch_error_and_exit(f"❌ test failed for {resource['name']}.", self.logger)
+
+ #
+ # exports with optimization
+ #
+ if exports_query:
+ # OPTIMIZATION: Skip exports if we already ran it as a proxy and have the result
+ if exports_result_from_proxy is not None and type in ('resource', 'multi'):
+ self.logger.info(f"📦 reusing exports result from proxy for [{resource['name']}]...")
+ # Process the exports result we already have
+ expected_exports = resource.get('exports', [])
+ if len(expected_exports) > 0:
+ # Use helper method to process the exports data directly
+ self.process_exports_from_result(resource, exports_result_from_proxy, expected_exports)
+ else:
+ # Run exports normally
+ self.process_exports(
+ resource, full_context, exports_query, exports_retries,
+ exports_retry_delay, dry_run, show_queries
+ )
+
+ if type == 'resource' and not dry_run:
+ self.logger.info(f"✅ test passed for {resource['name']}")
+
+ elapsed_time = datetime.datetime.now() - start_time
+ self.logger.info(f"deployment completed in {elapsed_time}")
+
+ # Process stack-level exports if specified
+ if output_file:
+ self.process_stack_exports(dry_run, output_file, elapsed_time)
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/inc/contributors.csv b/ref-python-packages/stackql-deploy/stackql_deploy/inc/contributors.csv
new file mode 100644
index 0000000..11a0e5f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/inc/contributors.csv
@@ -0,0 +1,23 @@
+jeffreyaven
+general-kroll-4-life
+derek10cloud
+yunchengyang515
+mxrch
+jthegedus
+Iqbalabdi
+tomekz
+soysaucewaso
+sanketmp
+ryukinoz
+ryuichi-maeda
+rameshgkwd05
+pinalbaldha
+kieranrimmer
+gsusI
+filipnavara
+drawdrop
+datwiz
+cuishuang
+claude[bot]
+Racso-3141
+BulkBeing
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/__init__.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/bootstrap.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/bootstrap.py
new file mode 100644
index 0000000..3ace615
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/lib/bootstrap.py
@@ -0,0 +1,8 @@
+# lib/bootstrap.py
+import logging
+
+# Set up logging at the root level
+logging.basicConfig(
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
+logger = logging.getLogger("stackql-deploy")
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/config.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/config.py
new file mode 100644
index 0000000..57cefcf
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/lib/config.py
@@ -0,0 +1,231 @@
+# lib/config.py
+import os
+import yaml
+import json
+import pprint
+import sys
+from .utils import pull_providers, catch_error_and_exit
+from jinja2 import TemplateError
+from .filters import merge_lists, merge_objects
+
+def to_sql_compatible_json(value):
+ """
+ Convert a Python object to a SQL-compatible format:
+ - string -> string
+ - int -> int
+ - float -> float
+ - dict -> json string
+ - list -> json string
+ - json string -> json string
+ - boolean -> boolean (true, false are returned as is)
+
+ Args:
+ value: The Python object to be converted.
+
+ Returns:
+ A SQL-compatible format.
+ """
+ if isinstance(value, (int, float, bool)):
+ # Return as-is if the value is an int, float, or boolean
+ return value
+
+ if isinstance(value, str):
+ try:
+ # Try to load the string as JSON to see if it's already a valid JSON string
+ json.loads(value)
+ return value # It's a valid JSON string, return as-is
+ except ValueError:
+ # It's not a valid JSON string, so return it as a string
+ return value
+
+ if isinstance(value, (dict, list)):
+ # Convert dicts and lists to JSON strings
+ return json.dumps(value)
+
+ # If the value doesn't match any of the above types, return it as-is
+ return value
+
+def render_value(env, value, context, logger):
+ if isinstance(value, str):
+ try:
+ template = env.from_string(value)
+ rendered = template.render(**context)
+ if rendered in ['True', 'False']:
+ return rendered.replace('True', 'true').replace('False', 'false')
+ return rendered
+ except TemplateError as e:
+ print(f"(config.render_value) error rendering template: {e}")
+ return value
+ elif isinstance(value, dict):
+ return {k: render_value(env, v, context, logger) for k, v in value.items()}
+ elif isinstance(value, list):
+ return [render_value(env, item, context, logger) for item in value]
+ else:
+ return value
+
+def render_globals(env, vars, global_vars, stack_env, stack_name, logger):
+ # Start with only the stack-specific variables in the context
+ global_context = {'stack_env': stack_env, 'stack_name': stack_name}
+
+ logger.debug("(config.render_globals) rendering global variables...")
+ # Now render each global variable using the combined context of env vars and the current global context
+ for global_var in global_vars:
+ # Merge global_context with vars to create a complete context for rendering
+ combined_context = {**vars, **global_context}
+
+ # Render using the combined context
+ rendered_value = render_value(env, global_var['value'], combined_context, logger)
+
+ if not rendered_value:
+ raise ValueError(f"(config.render_globals) global variable '{global_var['name']}' cannot be empty.")
+
+ # Update the context with the rendered global variable
+ logger.debug(
+ f"(config.render_globals) setting global variable [{global_var['name']}] to "
+ f"{to_sql_compatible_json(rendered_value)}"
+ )
+ global_context[global_var['name']] = to_sql_compatible_json(rendered_value)
+
+ return global_context
+
+def render_properties(env, resource_props, global_context, logger):
+ prop_context = {}
+ # Create a resource_context that starts with a copy of global_context
+ # This will be used for rendering and updated as we go, but not returned
+ resource_context = global_context.copy()
+
+ logger.debug("rendering properties...")
+ for prop in resource_props:
+ try:
+ if 'value' in prop:
+ # Use resource_context for rendering, which includes both global vars and
+ # properties that have already been processed
+ rendered_value = render_value(env, prop['value'], resource_context, logger)
+ logger.debug(
+ f"(config.render_properties) setting property [{prop['name']}] to "
+ f"{to_sql_compatible_json(rendered_value)}"
+ )
+ prop_context[prop['name']] = to_sql_compatible_json(rendered_value)
+ # Update resource_context with the new property
+ resource_context[prop['name']] = to_sql_compatible_json(rendered_value)
+ elif 'values' in prop:
+ env_value = prop['values'].get(global_context['stack_env'], {}).get('value')
+ if env_value is not None:
+ # Use resource_context for rendering
+ rendered_value = render_value(env, env_value, resource_context, logger)
+ logger.debug(
+ f"(config.render_properties) setting property [{prop['name']}] using value for "
+ f"{env_value} to {to_sql_compatible_json(rendered_value)}"
+ )
+ prop_context[prop['name']] = to_sql_compatible_json(rendered_value)
+ # Update resource_context with the new property
+ resource_context[prop['name']] = to_sql_compatible_json(rendered_value)
+ else:
+ catch_error_and_exit(
+ f"(config.render_properties) no value specified for property '{prop['name']}' "
+ f"in stack_env '{global_context['stack_env']}'.",
+ logger
+ )
+
+ if 'merge' in prop:
+ logger.debug(f"(config.render_properties) processing merge for [{prop['name']}]")
+ base_value_rendered = prop_context.get(prop['name'], None)
+ base_value = json.loads(base_value_rendered) if base_value_rendered else None
+ base_value_type = type(base_value)
+ logger.debug(
+ f"(config.render_properties) base value for [{prop['name']}]: "
+ f"{base_value_rendered} (type: {base_value_type})"
+ )
+ for merge_item in prop['merge']:
+ # Use resource_context for lookups during merge
+ if merge_item in resource_context:
+ merge_value_rendered = resource_context[merge_item]
+ merge_value = json.loads(merge_value_rendered)
+ merge_value_type = type(merge_value)
+ logger.debug(
+ f"(config.render_properties) [{prop['name']}] merge value [{merge_item}]: "
+ f"{merge_value_rendered} (type: {merge_value_type})"
+ )
+
+ # Determine if we're merging lists or objects
+ if isinstance(base_value, list) and isinstance(merge_value, list):
+ base_value = merge_lists(base_value, merge_value)
+ elif isinstance(base_value, dict) and isinstance(merge_value, dict):
+ base_value = merge_objects(base_value, merge_value)
+ elif base_value is None:
+ # Initialize base_value if it wasn't set before
+ if isinstance(merge_value, list):
+ base_value = merge_value
+ elif isinstance(merge_value, dict):
+ base_value = merge_value
+ else:
+ catch_error_and_exit(
+ f"(config.render_properties) unsupported merge type for '{prop['name']}'",
+ logger
+ )
+ else:
+ catch_error_and_exit(
+ f"(config.render_properties) type mismatch or unsupported merge operation "
+ f"on property '{prop['name']}'.",
+ logger
+ )
+ else:
+ catch_error_and_exit(
+ f"(config.render_properties) merge item '{merge_item}' not found in context.",
+ logger
+ )
+
+ processed_value = to_sql_compatible_json(base_value)
+ prop_context[prop['name']] = processed_value
+ # Update resource_context with the merged property
+ resource_context[prop['name']] = processed_value
+
+ except Exception as e:
+ catch_error_and_exit(f"(config.render_properties) failed to render property '{prop['name']}']: {e}", logger)
+
+ return prop_context
+
+#
+# exported functions
+#
+
+def load_manifest(stack_dir, logger):
+ logger.debug("(config.load_manifest) loading manifest...")
+ try:
+ # Load and parse the stackql_manifest.yml
+ with open(os.path.join(stack_dir, 'stackql_manifest.yml')) as f:
+ return yaml.safe_load(f)
+ except Exception as e:
+ catch_error_and_exit("(config.load_manifest) failed to load manifest: " + str(e), logger)
+
+def get_global_context_and_providers(env, manifest, vars, stack_env, stack_name, stackql, logger):
+ # Extract the global variables from the manifest and include stack_env
+ logger.debug("(config.get_global_context_and_providers) getting global context and pulling providers...")
+ try:
+ global_vars = manifest.get('globals', [])
+ global_context = render_globals(env, vars, global_vars, stack_env, stack_name, logger)
+ providers = manifest.get('providers', [])
+ pull_providers(providers, stackql, logger)
+ return global_context, providers
+ except Exception as e:
+ catch_error_and_exit(
+ "(config.get_global_context_and_providers) failed to prepare the context: " + str(e),
+ logger
+ )
+
+def get_full_context(env, global_context, resource, logger):
+ logger.debug(f"(config.get_full_context) getting full context for {resource['name']}...")
+ try:
+ resource_props = resource.get('props', {})
+ prop_context = render_properties(env, resource_props, global_context, logger)
+ full_context = {**global_context, **prop_context}
+
+ formatted_context = pprint.pformat(full_context, indent=1, width=sys.maxsize)
+ logger.debug(f"(config.get_full_context) full context:\n{formatted_context}")
+
+ return full_context
+ except Exception as e:
+ catch_error_and_exit(
+ f"(config.get_full_context) failed to render properties for {resource.get('name', 'unknown')}: " + str(e),
+ logger
+ )
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/filters.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/filters.py
new file mode 100644
index 0000000..47f0e3c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/lib/filters.py
@@ -0,0 +1,149 @@
+# lib/filters.py
+import os
+import json
+import base64
+import uuid
+from jinja2 import Environment, FileSystemLoader
+from .utils import catch_error_and_exit
+
+def from_json(value):
+ return json.loads(value)
+
+def base64_encode(value):
+ return base64.b64encode(value.encode()).decode()
+
+def merge_lists(list1, list2):
+ # Helper function to ensure we have Python lists, not JSON strings
+ def ensure_list(input_data):
+ if isinstance(input_data, str):
+ try:
+ # Attempt to decode a JSON string
+ decoded = json.loads(input_data)
+ if isinstance(decoded, list):
+ return decoded
+ except json.JSONDecodeError:
+ pass # If it's not a JSON string, keep it as a string
+ elif isinstance(input_data, list):
+ return input_data
+ raise ValueError("(config.merge_lists) input must be a list or a JSON-encoded list string")
+
+ # Ensure both inputs are lists
+ list1 = ensure_list(list1)
+ list2 = ensure_list(list2)
+
+ # Convert lists to sets of JSON strings to handle unhashable types
+ set1 = set(json.dumps(item, sort_keys=True) for item in list1)
+ set2 = set(json.dumps(item, sort_keys=True) for item in list2)
+
+ # Merge sets
+ merged_set = set1 | set2
+
+ # Convert back to list of dictionaries or original items
+ merged_list = [json.loads(item) for item in merged_set]
+ return merged_list
+
+def merge_objects(obj1, obj2):
+ # Helper function to ensure we have Python dicts, not JSON strings
+ def ensure_dict(input_data):
+ if isinstance(input_data, str):
+ try:
+ # Attempt to decode a JSON string
+ decoded = json.loads(input_data)
+ if isinstance(decoded, dict):
+ return decoded
+ except json.JSONDecodeError:
+ pass # If it's not a JSON string, keep it as a string
+ elif isinstance(input_data, dict):
+ return input_data
+ raise ValueError("(config.merge_objects) input must be a dict or a JSON-encoded dict string")
+
+ # Ensure both inputs are dicts
+ obj1 = ensure_dict(obj1)
+ obj2 = ensure_dict(obj2)
+
+ # Merge the two dictionaries
+ merged_obj = {**obj1, **obj2}
+
+ return merged_obj
+
+def generate_patch_document(properties):
+ """
+ Generates a patch document for the given resource. This is designed for the AWS Cloud Control API, which requires
+ a patch document to update resources.
+ """
+ patch_doc = []
+ for key, value in properties.items():
+ # Check if the value is already a string (indicating it's likely already JSON) and leave it as is
+ if isinstance(value, str):
+ try:
+ # Try to parse the string to confirm it's a JSON object/array
+ parsed_value = json.loads(value)
+ patch_doc.append({"op": "add", "path": f"/{key}", "value": parsed_value})
+ except json.JSONDecodeError:
+ # If it's not a JSON string, treat it as a simple string value
+ patch_doc.append({"op": "add", "path": f"/{key}", "value": value})
+ else:
+ # If it's not a string, add it as a JSON-compatible object
+ patch_doc.append({"op": "add", "path": f"/{key}", "value": value})
+
+ return json.dumps(patch_doc)
+
+def sql_list(input_data):
+ # If the input is already a string representation of a list, parse it
+ if isinstance(input_data, str):
+ try:
+ import json
+ # Parse the string as JSON array
+ python_list = json.loads(input_data)
+ except json.JSONDecodeError:
+ # If it's not valid JSON, treat it as a single item
+ python_list = [input_data]
+ else:
+ python_list = input_data
+
+ # Handle empty list case
+ if not python_list:
+ return '(NULL)'
+
+ # Convert each item to string, wrap in quotes, join with commas
+ quoted_items = [f"'{str(item)}'" for item in python_list]
+ return f"({','.join(quoted_items)})"
+
+def sql_escape(value):
+ """
+ Escapes a string for use as a SQL string literal by doubling any single quotes.
+ This is useful for nested SQL statements where single quotes need to be escaped.
+ Args:
+ value: The string to escape
+ Returns:
+ The escaped string with single quotes doubled
+ """
+ if value is None:
+ return None
+
+ if not isinstance(value, str):
+ value = str(value)
+
+ return value.replace("'", "''")
+
+#
+# exported functions
+#
+
+def setup_environment(stack_dir, logger):
+ logger.debug("(config.setup_environment) setting up environment...")
+ if not os.path.exists(stack_dir):
+ catch_error_and_exit("(config.setup_environment) stack directory does not exist.", logger)
+ env = Environment(
+ loader=FileSystemLoader(os.getcwd()),
+ autoescape=False
+ )
+ env.filters['from_json'] = from_json
+ env.filters['base64_encode'] = base64_encode
+ env.filters['merge_lists'] = merge_lists
+ env.filters['generate_patch_document'] = generate_patch_document
+ env.filters['sql_list'] = sql_list
+ env.filters['sql_escape'] = sql_escape
+ env.globals['uuid'] = lambda: str(uuid.uuid4())
+ logger.debug("custom Jinja filters registered: %s", env.filters.keys())
+ return env
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/templating.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/templating.py
new file mode 100644
index 0000000..92fd3ad
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/lib/templating.py
@@ -0,0 +1,171 @@
+# lib/templating.py
+import json
+import os
+from .utils import catch_error_and_exit
+from jinja2 import TemplateError
+from pprint import pformat
+
+def parse_anchor(anchor, logger):
+ """Parse anchor to extract key and options."""
+ parts = anchor.split(',')
+ key = parts[0].strip()
+ options = {}
+ for part in parts[1:]:
+ if '=' in part:
+ option_key, option_value = part.split('=')
+ options[option_key.strip()] = int(option_value.strip())
+ return key, options
+
+def is_json(myjson, logger):
+ try:
+ obj = json.loads(myjson)
+ return isinstance(obj, (dict, list)) # Only return True for JSON objects or arrays
+ except ValueError:
+ return False
+
+def render_queries(res_name, env, queries, context, logger):
+ rendered_queries = {}
+ for key, query in queries.items():
+ logger.debug(f"(templating.render_queries) [{res_name}] [{key}] query template:\n\n{query}\n")
+ try:
+ temp_context = context.copy()
+
+ for ctx_key, ctx_value in temp_context.items():
+ if isinstance(ctx_value, str) and is_json(ctx_value, logger):
+ properties = json.loads(ctx_value)
+ # Serialize JSON ensuring booleans are lower case and using correct JSON syntax
+ json_str = json.dumps(
+ properties, ensure_ascii=False, separators=(',', ':')
+ ).replace('True', 'true').replace('False', 'false')
+ # Correctly format JSON to use double quotes and pass directly since template handles quoting
+ # json_str = json_str.replace("'", "\\'") # escape single quotes if any within strings
+ temp_context[ctx_key] = json_str
+ # No need to alter non-JSON strings, assume the template handles them correctly
+
+ template = env.from_string(query)
+ rendered_query = template.render(temp_context)
+ logger.debug(f"(templating.render_queries) [{res_name}] [{key}] rendered query:\n\n{rendered_query}\n")
+ rendered_queries[key] = rendered_query
+
+ except TemplateError as e:
+ raise RuntimeError(f"(templating.render_queries) error rendering query for [{res_name}] [{key}]: {e}")
+ except json.JSONDecodeError:
+ continue # Skip non-JSON content
+
+ return rendered_queries
+
+def load_sql_queries(file_path, logger):
+ """Loads SQL queries from a file, splits them by anchors, and extracts options."""
+ queries = {}
+ options = {}
+ current_anchor = None
+ query_buffer = []
+
+ with open(file_path, 'r') as file:
+ for line in file:
+ if line.startswith('/*+') and '*/' in line:
+ # Store the current query under the last anchor
+ if current_anchor and query_buffer:
+ anchor_key, anchor_options = parse_anchor(current_anchor, logger)
+ queries[anchor_key] = ''.join(query_buffer).strip()
+ options[anchor_key] = anchor_options
+ query_buffer = []
+ # Set the new anchor
+ current_anchor = line[line.find('/*+') + 3:line.find('*/')].strip()
+ else:
+ query_buffer.append(line)
+
+ # Store the last query if any
+ if current_anchor and query_buffer:
+ anchor_key, anchor_options = parse_anchor(current_anchor, logger)
+ queries[anchor_key] = ''.join(query_buffer).strip()
+ options[anchor_key] = anchor_options
+
+ return queries, options
+
+#
+# exported functions
+#
+
+def get_queries(env, stack_dir, doc_key, resource, full_context, logger):
+ """Returns an object with query templates, rendered queries, and options for a resource."""
+ result = {}
+
+ if resource.get('file'):
+ template_path = os.path.join(stack_dir, doc_key, resource['file'])
+ else:
+ template_path = os.path.join(stack_dir, doc_key, f"{resource['name']}.iql")
+
+ if not os.path.exists(template_path):
+ catch_error_and_exit(f"(templating.get_queries) query file not found: {template_path}", logger)
+
+ try:
+ query_templates, query_options = load_sql_queries(template_path, logger)
+ rendered_queries = render_queries(resource['name'], env, query_templates, full_context, logger)
+
+ for anchor, template in query_templates.items():
+ # fix backward compatibility for preflight and postdeploy queries
+ if anchor == 'preflight':
+ anchor = 'exists'
+ elif anchor == 'postdeploy':
+ anchor = 'statecheck'
+ # end backward compatibility fix
+ result[anchor] = {
+ "template": template,
+ "rendered": rendered_queries.get(anchor, ""),
+ "options": {
+ "retries": query_options.get(anchor, {}).get('retries', 1),
+ "retry_delay": query_options.get(anchor, {}).get('retry_delay', 0)
+ }
+ }
+
+ formatted_result = pformat(result, width=120, indent=2)
+ logger.debug(f"(templating.get_queries) queries for [{resource['name']}]:\n{formatted_result}")
+ return result
+ except Exception as e:
+ catch_error_and_exit(
+ f"(templating.get_queries) failed to load or render queries for [{resource['name']}]: {str(e)}",
+ logger
+ )
+
+def render_inline_template(env, resource_name, template_string, full_context, logger):
+ """
+ Renders a single template string using the provided context.
+ Similar to get_queries but for inline templates rather than files.
+ """
+ logger.debug(f"(templating.render_inline_template) [{resource_name}] template:\n\n{template_string}\n")
+
+ try:
+ # Process the context the same way as in render_queries
+ temp_context = full_context.copy()
+
+ for ctx_key, ctx_value in temp_context.items():
+ if isinstance(ctx_value, str) and is_json(ctx_value, logger):
+ properties = json.loads(ctx_value)
+ # Serialize JSON ensuring booleans are lower case and using correct JSON syntax
+ json_str = json.dumps(
+ properties, ensure_ascii=False, separators=(',', ':')
+ ).replace('True', 'true').replace('False', 'false')
+ # Correctly format JSON to use double quotes and pass directly since template handles quoting
+ # json_str = json_str.replace("'", "\\'") # escape single quotes if any within strings
+ temp_context[ctx_key] = json_str
+
+ # Render the template
+ template = env.from_string(template_string)
+ rendered_template = template.render(temp_context)
+
+ logger.debug(
+ f"(templating.render_inline_template) [{resource_name}] rendered template:"
+ f"\n\n{rendered_template}\n"
+ )
+ return rendered_template
+
+ except TemplateError as e:
+ raise RuntimeError(f"(templating.render_inline_template) error rendering template for [{resource_name}]: {e}")
+ except json.JSONDecodeError as e:
+ # Handle JSON errors more gracefully
+ logger.warning(f"(templating.render_inline_template) JSON decode error in context for [{resource_name}]: {e}")
+ # Try rendering anyway, might work with non-JSON parts of the context
+ template = env.from_string(template_string)
+ rendered_template = template.render(temp_context)
+ return rendered_template
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/lib/utils.py b/ref-python-packages/stackql-deploy/stackql_deploy/lib/utils.py
new file mode 100644
index 0000000..1e00f8a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/lib/utils.py
@@ -0,0 +1,517 @@
+# lib/utils.py
+import click
+from enum import Enum
+import time
+import json
+import sys
+import subprocess
+import re
+
+class BorderColor(Enum):
+ YELLOW = '\033[93m' # Bright yellow
+ BLUE = '\033[94m' # Bright blue
+ RED = '\033[91m' # Bright red
+
+def print_unicode_box(message: str, color: BorderColor = BorderColor.YELLOW):
+ border_color = color.value
+ reset_color = '\033[0m'
+
+ lines = message.split('\n')
+ max_length = max(len(line) for line in lines)
+ top_border = border_color + '┌' + '─' * (max_length + 2) + '┐' + reset_color
+ bottom_border = border_color + '└' + '─' * (max_length + 2) + '┘' + reset_color
+
+ click.echo(top_border)
+ for line in lines:
+ click.echo(border_color + '│ ' + line.ljust(max_length) + ' │' + reset_color)
+ click.echo(bottom_border)
+
+def catch_error_and_exit(errmsg, logger):
+ logger.error(errmsg)
+ sys.exit("stackql-deploy operation failed 🚫")
+
+def get_type(resource, logger):
+ type = resource.get('type', 'resource')
+ if type not in ['resource', 'query', 'script', 'multi', 'command']:
+ catch_error_and_exit(f"resource type must be 'resource', 'query', 'script', 'multi' or 'command', got '{type}'", logger)
+ else:
+ return type
+
+def run_stackql_query(query, stackql, suppress_errors, logger, custom_auth=None, env_vars=None, retries=0, delay=5):
+ attempt = 0
+ last_error = None
+ while attempt <= retries:
+ try:
+ logger.debug(f"(utils.run_stackql_query) executing stackql query on attempt {attempt + 1}:\n\n{query}\n")
+ result = stackql.execute(query, suppress_errors=suppress_errors, custom_auth=custom_auth, env_vars=env_vars)
+ logger.debug(f"(utils.run_stackql_query) stackql query result (type:{type(result)}): {result}")
+
+ # Check if result is a list (expected outcome)
+ if isinstance(result, list):
+ if len(result) == 0:
+ logger.debug("(utils.run_stackql_query) stackql query executed successfully, retrieved 0 items.")
+ pass
+ elif result and 'error' in result[0]:
+ error_message = result[0]['error']
+ last_error = error_message # Store the error for potential return
+ if not suppress_errors:
+ if attempt == retries:
+ # If retries are exhausted, log the error and exit
+ catch_error_and_exit(
+ (
+ f"(utils.run_stackql_query) error occurred during stackql query execution:\n\n"
+ f"{error_message}\n"
+ ),
+ logger
+ )
+ else:
+ # Log the error and prepare for another attempt
+ logger.error(f"attempt {attempt + 1} failed:\n\n{error_message}\n")
+ elif 'count' in result[0]:
+ # If the result is a count query, return the count
+ logger.debug(
+ f"(utils.run_stackql_query) stackql query executed successfully, "
+ f"retrieved count: {result[0]['count']}."
+ )
+ if int(result[0]['count']) > 1:
+ catch_error_and_exit(
+ f"(utils.run_stackql_query) detected more than one resource matching the query criteria, "
+ f"expected 0 or 1, got {result[0]['count']}\n",
+ logger
+ )
+ return result
+ else:
+ # If no errors or errors are suppressed, return the result
+ logger.debug(
+ f"(utils.run_stackql_query) stackql query executed successfully, retrieved {len(result)} items."
+ )
+ return result
+ else:
+ # Handle unexpected result format
+ if attempt == retries:
+ catch_error_and_exit(
+ "(utils.run_stackql_query) unexpected result format received from stackql query execution.",
+ logger
+ )
+ else:
+ logger.error("(utils.run_stackql_query) unexpected result format, retrying...")
+
+ except Exception as e:
+ # Log the exception and check if retry attempts are exhausted
+ last_error = str(e) # Store the exception for potential return
+ if attempt == retries:
+ catch_error_and_exit(
+ f"(utils.run_stackql_query) an exception occurred during stackql query execution:\n\n{str(e)}\n",
+ logger
+ )
+ else:
+ logger.error(f"(utils.run_stackql_query) exception on attempt {attempt + 1}:\n\n{str(e)}\n")
+
+ # Delay before next attempt
+ time.sleep(delay)
+ attempt += 1
+
+ logger.debug(f"(utils.run_stackql_query) all attempts ({retries + 1}) to execute the query completed.")
+ # If suppress_errors is True and we have an error, return an empty list with error info as a special dict
+ if suppress_errors and last_error:
+ return [{'_stackql_deploy_error': last_error}]
+ # return None
+ return []
+
+def error_detected(result):
+ """parse stdout for known error conditions"""
+ if result['message'].startswith('http response status code: 4') or \
+ result['message'].startswith('http response status code: 5'):
+ return True
+ if result['message'].startswith('error:'):
+ return True
+ if result['message'].startswith('disparity in fields to insert and supplied data'):
+ return True
+ if result['message'].startswith('cannot find matching operation'):
+ return True
+ return False
+
+def run_stackql_command(command,
+ stackql,
+ logger,
+ custom_auth=None,
+ env_vars=None,
+ ignore_errors=False,
+ retries=0,
+ retry_delay=5
+ ):
+ attempt = 0
+ while attempt <= retries:
+ try:
+ logger.debug(
+ f"(utils.run_stackql_command) executing stackql command (attempt {attempt + 1}):\n\n{command}\n"
+ )
+ # If query is start with 'REGISTRY PULL', check version
+ if command.startswith("REGISTRY PULL"):
+ match = re.match(r'(REGISTRY PULL \w+)(::v[\d\.]+)?', command)
+ if match:
+ service_provider = match.group(1)
+ version = match.group(2)
+ if version:
+ command = f"{service_provider} {version[2:]}"
+ else:
+ raise ValueError(
+ (
+ "REGISTRY PULL command must be in the format 'REGISTRY PULL <provider>::v<version>' "
+ "or 'REGISTRY PULL <provider>'"
+ )
+ )
+
+ result = stackql.executeStmt(command, custom_auth, env_vars)
+ logger.debug(f"(utils.run_stackql_command) stackql command result:\n\n{result}, type: {type(result)}\n")
+
+ if isinstance(result, dict):
+ # If the result contains a message, it means the execution was successful
+ if 'message' in result:
+ if not ignore_errors and error_detected(result):
+ if attempt < retries:
+ logger.warning(
+ (
+ f"dependent resource(s) may not be ready, retrying in {retry_delay} seconds "
+ f"(attempt {attempt + 1} of {retries + 1})..."
+ )
+ )
+ time.sleep(retry_delay)
+ attempt += 1
+ continue # Retry the command
+ else:
+ catch_error_and_exit(
+ (
+ f"(utils.run_stackql_command) error occurred during stackql command execution:\n\n"
+ f"{result['message']}\n"
+ ),
+ logger
+ )
+ logger.debug(
+ f"(utils.run_stackql_command) stackql command executed successfully:\n\n{result['message']}\n"
+ )
+ return result['message'].rstrip()
+ elif 'error' in result:
+ # Check if the result contains an error message
+ error_message = result['error'].rstrip()
+ catch_error_and_exit(
+ (
+ f"(utils.run_stackql_command) error occurred during stackql command execution:\n\n"
+ f"{error_message}\n"
+ ),
+ logger
+ )
+
+ # If there's no 'error' or 'message', it's an unexpected result format
+ catch_error_and_exit(
+ "(utils.run_stackql_command) unexpected result format received from stackql execution.",
+ logger
+ )
+
+ except Exception as e:
+ # Log the exception and exit
+ catch_error_and_exit(
+ f"(utils.run_stackql_command) an exception occurred during stackql command execution:\n\n{str(e)}\n",
+ logger
+ )
+
+ # Increment attempt counter if not continuing the loop due to retry
+ attempt += 1
+
+def pull_providers(providers, stackql, logger):
+ logger.debug(f"(utils.pull_providers) stackql run time info:\n\n{json.dumps(stackql.properties(), indent=2)}\n")
+ installed_providers = run_stackql_query("SHOW PROVIDERS", stackql, False, logger) # not expecting an error here
+ # check if the provider is already installed
+ for provider in providers:
+ # check if the provider is a specific version
+ if "::" in provider:
+ name, version = provider.split("::")
+ check_provider_version_available(name, version, stackql, logger)
+ found = False
+ # provider is a version which will be installed
+ # installed is a version which is already installed
+ for installed in installed_providers:
+ # if name and version are the same, it's already installed
+ if installed["name"] == name and installed["version"] == version:
+ logger.info(f"provider '{provider}' is already installed.")
+ found = True
+ break
+ # if name is the same but the installed version is higher,
+ # it's already installed(latest version)
+ elif installed["name"] == name and is_installed_version_higher(installed["version"], version, logger):
+ logger.warning(
+ (
+ f"provider '{name}' version '{version}' is not available in the registry, "
+ f"but a higher version '{installed['version']}' is already installed."
+ )
+ )
+ logger.warning(
+ "If you want to install the lower version, you must delete the higher version "
+ "folder from the stackql providers directory."
+ )
+ logger.info(f"provider {name}::{version} is already installed.")
+ found = True
+ break
+ # if not found, pull the provider
+ if not found:
+ logger.info(f"pulling provider '{provider}'...")
+ msg = run_stackql_command(f"REGISTRY PULL {provider}", stackql, logger)
+ logger.info(msg)
+ else:
+ found = False
+ # provider is a name which will be installed
+ # installed is a list of providers which are already installed
+ for installed in installed_providers:
+ if installed["name"] == provider:
+ logger.info(f"provider '{provider}' is already installed.")
+ found = True
+ break
+ # if not found, pull the provider
+ if not found:
+ logger.info(f"pulling provider '{provider}'...")
+ msg = run_stackql_command(f"REGISTRY PULL {provider}", stackql, logger)
+ logger.info(msg)
+
+def check_provider_version_available(provider_name, version, stackql, logger):
+ """Check if the provider version is available in the registry.
+
+ Args:
+ provider_name (str): The name of the provider.
+ version (str): The version of the provider.
+ stackql (StackQL): The StackQL object.
+ logger (Logger): The logger object.
+ """
+ query = f"REGISTRY LIST {provider_name}"
+ try:
+ result = run_stackql_query(query, stackql, True, logger)
+ # result[0]['versions'] is a string, not a list
+ # so we need to split it into a list
+ versions = result[0]['versions'].split(", ")
+ if version not in versions:
+ catch_error_and_exit(
+ (
+ f"(utils.check_provider_version_available) version '{version}' not found "
+ f"for provider '{provider_name}', available versions: {versions}"
+ ),
+ logger
+ )
+ except Exception:
+ catch_error_and_exit(
+ f"(utils.check_provider_version_available) provider '{provider_name}' not found in registry",
+ logger
+ )
+
+def is_installed_version_higher(installed_version, requested_version, logger):
+ """Check if the installed version is higher than the requested version.
+
+ Args:
+ installed_version (str): v24.09.00251
+ requested_version (str): v23.01.00104
+
+ Returns:
+ bool: True if installed version is higher than requested version, False otherwise
+ """
+
+ try:
+ int_installed = int(installed_version.replace("v", "").replace(".", ""))
+ int_requested = int(requested_version.replace("v", "").replace(".", ""))
+ if int_installed > int_requested:
+ return True
+ else:
+ return False
+ except Exception:
+ catch_error_and_exit(
+ (
+ f"(utils.is_installed_version_higher) version comparison failed: "
+ f"installed version '{installed_version}', requested version '{requested_version}'"
+ ),
+ logger
+ )
+
+def run_test(resource, rendered_test_iql, stackql, logger, delete_test=False, custom_auth=None, env_vars=None):
+ try:
+ test_result = run_stackql_query(
+ rendered_test_iql,
+ stackql,
+ True,
+ logger,
+ custom_auth=custom_auth,
+ env_vars=env_vars)
+ logger.debug(f"(utils.run_test) test query result for [{resource['name']}]:\n\n{test_result}\n")
+
+ if test_result == []:
+ if delete_test:
+ logger.debug(f"(utils.run_test) delete test result true for [{resource['name']}]")
+ return True
+ else:
+ logger.debug(f"(utils.run_test) test result false for [{resource['name']}]")
+ return False
+
+ if not test_result or 'count' not in test_result[0]:
+ catch_error_and_exit(
+ f"(utils.run_test) data structure unexpected for [{resource['name']}] test:\n\n{test_result}\n", logger
+ )
+
+ count = int(test_result[0]['count'])
+ if delete_test:
+ if count == 0:
+ logger.debug(f"(utils.run_test) delete test result true for [{resource['name']}].")
+ return True
+ else:
+ logger.debug(
+ f"(utils.run_test) delete test result false for [{resource['name']}], expected 0 got {count}."
+ )
+ return False
+ else:
+ # not a delete test, 1 of the things should exist
+ if count == 1:
+ logger.debug(f"(utils.run_test) test result true for [{resource['name']}].")
+ return True
+ else:
+ logger.debug(f"(utils.run_test) test result false for [{resource['name']}], expected 1 got {count}.")
+ return False
+
+ except Exception as e:
+ catch_error_and_exit(
+ f"(utils.run_test) an exception occurred during testing for [{resource['name']}]:\n\n{str(e)}\n",
+ logger
+ )
+
+def show_query(show_queries, query, logger):
+ if show_queries:
+ logger.info(f"🔎 query:\n\n{query}\n")
+
+def perform_retries(resource,
+ query,
+ retries,
+ delay,
+ stackql,
+ logger,
+ delete_test=False,
+ custom_auth=None,
+ env_vars=None
+ ):
+ attempt = 0
+ start_time = time.time() # Capture the start time of the operation
+ while attempt < retries:
+ result = run_test(resource, query, stackql, logger, delete_test, custom_auth=custom_auth, env_vars=env_vars)
+ if result:
+ return True
+ elapsed = time.time() - start_time # Calculate elapsed time
+ logger.info(
+ f"🕒 attempt {attempt + 1}/{retries}: retrying in {delay} seconds ({int(elapsed)} seconds elapsed)."
+ )
+ time.sleep(delay)
+ attempt += 1
+ elapsed = time.time() - start_time # Calculate total elapsed time
+ return False
+
+def export_vars(self, resource, export, expected_exports, expected_exports_all_dicts, protected_exports):
+ for item in expected_exports:
+ # check if all items are dictionaries
+ if expected_exports_all_dicts:
+ if list(item.values())[0] not in export:
+ catch_error_and_exit(
+ (
+ f"(utils.export_vars) exported item '{list(item.values())[0]}' "
+ f"not found in exports for {resource['name']}."
+ ),
+ self.logger
+ )
+ else:
+ if item not in export:
+ catch_error_and_exit(
+ f"(utils.export_vars) exported item '{item}' not found in exports for {resource['name']}.",
+ self.logger
+ )
+ for key, value in export.items():
+ if key in protected_exports:
+ mask = '*' * len(str(value))
+ self.logger.info(f"🔒 set protected variable [{key}] to [{mask}] in exports")
+ else:
+ self.logger.info(f"📤 set [{key}] to [{value}] in exports")
+ # Update global context with exported values
+ self.global_context[key] = value
+
+def run_ext_script(cmd, logger, exports=None):
+ try:
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, text=True, shell=True)
+ logger.debug(f"(utils.run_ext_script) script output: {result.stdout}")
+ if not exports:
+ return True
+ except Exception as e:
+ catch_error_and_exit(f"(utils.run_ext_script) script failed: {e}", logger)
+ return None
+
+ # we must be expecting exports
+ try:
+ exported_vars = json.loads(result.stdout)
+ # json_output should be a dictionary
+ if not isinstance(exported_vars, dict):
+ catch_error_and_exit(
+ f"(utils.run_ext_script) external scripts must be convertible to a dictionary {exported_vars}",
+ logger
+ )
+ return None
+ # you should be able to find each name in exports in the output object
+ for export in exports:
+ if export not in exported_vars:
+ catch_error_and_exit(
+ f"(utils.run_ext_script) exported variable '{export}' not found in script output",
+ logger
+ )
+ return None
+ return exported_vars
+ except json.JSONDecodeError:
+ catch_error_and_exit(
+ f"(utils.run_ext_script) external scripts must return a valid JSON object {result.stdout}",
+ logger
+ )
+ return None
+
+def check_all_dicts(items, logger):
+ """ Check if all items(list) are of the same type (either all dicts or all non-dicts).
+ """
+ all_dicts = all(isinstance(item, dict) for item in items)
+ no_dicts = all(not isinstance(item, dict) for item in items)
+
+ if not all_dicts and not no_dicts:
+ catch_error_and_exit(f"type inconsistency: all items({items}) must be either dicts or non-dicts", logger)
+ if all_dicts:
+ return True
+ else:
+ return False
+
+def check_exports_as_statecheck_proxy(exports_result, logger):
+ """
+ Check if exports query result can be used as a statecheck proxy.
+ Returns True if exports indicate resource is in correct state (non-empty result),
+ False if exports indicate statecheck failed (empty result).
+ """
+ logger.debug(f"(utils.check_exports_as_statecheck_proxy) checking exports result: {exports_result}")
+
+ # If exports is None or empty list, consider statecheck failed
+ if exports_result is None or len(exports_result) == 0:
+ logger.debug("(utils.check_exports_as_statecheck_proxy) empty exports result, treating as statecheck failure")
+ return False
+
+ # Check for error conditions in exports result
+ if len(exports_result) >= 1 and isinstance(exports_result[0], dict):
+ # Check for our custom error wrapper
+ if '_stackql_deploy_error' in exports_result[0]:
+ logger.debug(
+ "(utils.check_exports_as_statecheck_proxy) error in exports result, "
+ "treating as statecheck failure"
+ )
+ return False
+ # Check for direct error in result
+ elif 'error' in exports_result[0]:
+ logger.debug(
+ "(utils.check_exports_as_statecheck_proxy) error in exports result, "
+ "treating as statecheck failure"
+ )
+ return False
+
+ # If we have a valid non-empty result, consider statecheck passed
+ logger.debug("(utils.check_exports_as_statecheck_proxy) valid exports result, treating as statecheck success")
+ return True
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/README.md.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/README.md.template
new file mode 100644
index 0000000..74e30f2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/README.md.template
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy {{ stack_name }} --provider=azure` or `stackql-deploy {{ stack_name }} --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named {{ stack_name }} to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test {{ stack_name }} sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown {{ stack_name }} sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/resources/example_vpc.iql.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/resources/example_vpc.iql.template
new file mode 100644
index 0000000..d4a727c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/resources/example_vpc.iql.template
@@ -0,0 +1,67 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{% raw %}{{ region }}{% endraw %}'
+AND cidr_block = '{% raw %}{{ vpc_cidr_block }}{% endraw %}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{% raw %}{{ stack_name }}{% endraw %}'
+AND json_extract(tags, '$.StackEnv') = '{% raw %}{{ stack_env }}{% endraw %}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{% raw %}{{ vpc_cidr_block }}{% endraw %}',
+ '{% raw %}{{ vpc_tags }}{% endraw %}',
+ true,
+ true,
+ '{% raw %}{{ region }}{% endraw %}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{% raw %}{{ region }}{% endraw %}'
+AND cidr_block = '{% raw %}{{ vpc_cidr_block }}{% endraw %}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{% raw %}{{ stack_name }}{% endraw %}'
+AND json_extract(tags, '$.StackEnv') = '{% raw %}{{ stack_env }}{% endraw %}'
+) t
+WHERE cidr_block = '{% raw %}{{ vpc_cidr_block }}{% endraw %}';
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{% raw %}{{ region }}{% endraw %}'
+AND cidr_block = '{% raw %}{{ vpc_cidr_block }}{% endraw %}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{% raw %}{{ stack_name }}{% endraw %}'
+AND json_extract(tags, '$.StackEnv') = '{% raw %}{{ stack_env }}{% endraw %}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{% raw %}{{ vpc_id }}{% endraw %}'
+AND region = '{% raw %}{{ region }}{% endraw %}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/stackql_manifest.yml.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/stackql_manifest.yml.template
new file mode 100644
index 0000000..cc40520
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/aws/stackql_manifest.yml.template
@@ -0,0 +1,40 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "{{ stack_name }}"
+description: description for "{{ stack_name }}"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{% raw %}{{ AWS_REGION }}{% endraw %}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{% raw %}{{ stack_name }}{% endraw %}"
+ - Key: StackEnv
+ Value: "{% raw %}{{ stack_env }}{% endraw %}"
+resources:
+ - name: example_vpc
+ description: example vpc resource
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{% raw %}{{ stack_name }}-{{ stack_env }}-vpc{% endraw %}"
+ merge: ['global_tags']
+ exports:
+ - vpc_id
+ - vpc_cidr_block
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/README.md.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/README.md.template
new file mode 100644
index 0000000..da749d3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/README.md.template
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `azure`
+
+> for starter projects using other providers, try `stackql-deploy {{ stack_name }} --provider=aws` or `stackql-deploy {{ stack_name }} --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `azure` provider:
+
+- [`azure` provider docs](https://stackql.io/registry/azure)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `azure` and `aws` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `azure` provider, `AZURE_TENANT_ID`, `AZURE_CLIENT_ID` and `AZURE_CLIENT_SECRET` must be set (or there must be an authenticated session on the host using `az login`), for more information on authentication to `azure` see the [`azure` provider documentation](https://azure.stackql.io/providers/azure).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named {{ stack_name }} to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test {{ stack_name }} sit \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown {{ stack_name }} sit \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/resources/example_res_grp.iql.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/resources/example_res_grp.iql.template
new file mode 100644
index 0000000..bc09859
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/resources/example_res_grp.iql.template
@@ -0,0 +1,33 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{% raw %}{{ subscription_id }}{% endraw %}'
+AND resourceGroupName = '{% raw %}{{ resource_group_name }}{% endraw %}'
+
+/*+ create */
+INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location
+)
+SELECT
+ '{% raw %}{{ resource_group_name }}{% endraw %}',
+ '{% raw %}{{ subscription_id }}{% endraw %}',
+ '{% raw %}{{ location }}{% endraw %}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{% raw %}{{ subscription_id }}{% endraw %}'
+AND resourceGroupName = '{% raw %}{{ resource_group_name }}{% endraw %}'
+AND location = '{% raw %}{{ location }}{% endraw %}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+/*+ exports */
+SELECT '{% raw %}{{ resource_group_name }}{% endraw %}' as resource_group_name
+
+/*+ delete */
+DELETE FROM azure.resources.resource_groups
+WHERE resourceGroupName = '{% raw %}{{ resource_group_name }}{% endraw %}' AND subscriptionId = '{% raw %}{{ subscription_id }}{% endraw %}'
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/stackql_manifest.yml.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/stackql_manifest.yml.template
new file mode 100644
index 0000000..58b28e0
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/azure/stackql_manifest.yml.template
@@ -0,0 +1,27 @@
+#
+# azure starter project manifest file, add and update values as needed
+#
+version: 1
+name: "{{ stack_name }}"
+description: description for "{{ stack_name }}"
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{% raw %}{{ AZURE_SUBSCRIPTION_ID }}{% endraw %}"
+ - name: location
+ description: default location for resources
+ value: eastus
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stackName: "{% raw %}{{ stack_name }}{% endraw %}"
+ stackEnv: "{% raw %}{{ stack_env }}{% endraw %}"
+resources:
+ - name: example_res_grp
+ props:
+ - name: resource_group_name
+ value: "{% raw %}{{ stack_name }}-{{ stack_env }}-rg{% endraw %}"
+ exports:
+ - resource_group_name
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/README.md.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/README.md.template
new file mode 100644
index 0000000..1b3c2cb
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/README.md.template
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `google`
+
+> for starter projects using other providers, try `stackql-deploy {{ stack_name }} --provider=aws` or `stackql-deploy {{ stack_name }} --provider=azure`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `google` provider:
+
+- [`google` provider docs](https://stackql.io/registry/google)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `azure` and `aws` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `google` provider, `GOOGLE_CREDENTIALS` needs to be set at runtime (from the local machine using `export GOOGLE_CREDENTIALS=$(cat creds/my-sa-key.json)` for example, or as a CI variable/secret).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named {{ stack_name }} to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e MY_PROJECT_NAME={{ stack_name }}
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build {{ stack_name }} sit \
+-e MY_PROJECT_NAME={{ stack_name }} \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test {{ stack_name }} sit \
+-e MY_PROJECT_NAME={{ stack_name }}
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown {{ stack_name }} sit \
+-e MY_PROJECT_NAME={{ stack_name }}
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/resources/example_vpc.iql.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/resources/example_vpc.iql.template
new file mode 100644
index 0000000..8ae8338
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/resources/example_vpc.iql.template
@@ -0,0 +1,47 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{% raw %}{{ vpc_name }}{% endraw %}'
+AND project = '{% raw %}{{ project }}{% endraw %}'
+
+/*+ create */
+INSERT INTO google.compute.networks
+(
+ project,
+ data__name,
+ data__autoCreateSubnetworks,
+ data__routingConfig
+)
+SELECT
+'{% raw %}{{ project }}{% endraw %}',
+'{% raw %}{{ vpc_name }}{% endraw %}',
+false,
+'{"routingMode": "REGIONAL"}'
+
+/*+ update */
+UPDATE google.compute.networks
+SET data__autoCreateSubnetworks = false
+AND data__routingConfig = '{"routingMode": "REGIONAL"}'
+WHERE network = '{% raw %}{{ vpc_name }}{% endraw %}' AND project = '{% raw %}{{ project }}{% endraw %}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{% raw %}{{ vpc_name }}{% endraw %}'
+AND project = '{% raw %}{{ project }}{% endraw %}'
+AND autoCreateSubnetworks = false
+AND JSON_EXTRACT(routingConfig, '$.routingMode') = 'REGIONAL'
+
+/*+ exports */
+SELECT
+'{% raw %}{{ vpc_name }}{% endraw %}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{% raw %}{{ vpc_name }}{% endraw %}'
+AND project = '{% raw %}{{ project }}{% endraw %}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.networks
+WHERE network = '{% raw %}{{ vpc_name }}{% endraw %}' AND project = '{% raw %}{{ project }}{% endraw %}'
diff --git a/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/stackql_manifest.yml.template b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/stackql_manifest.yml.template
new file mode 100644
index 0000000..19fbe41
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stackql_deploy/templates/google/stackql_manifest.yml.template
@@ -0,0 +1,26 @@
+#
+# google starter project manifest file, add and update values as needed
+#
+version: 1
+name: "{{ stack_name }}"
+description: description for "{{ stack_name }}"
+providers:
+ - google
+globals:
+- name: project
+ description: google project name
+ value: "{% raw %}{{ MY_PROJECT_NAME }}{% endraw %}"
+- name: region
+ value: australia-southeast1
+- name: default_zone
+ value: australia-southeast1-a
+resources:
+- name: example_vpc
+ description: example vpc network
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{% raw %}{{ stack_name }}-{{ stack_env }}-vpc{% endraw %}"
+ exports:
+ - vpc_name
+ - vpc_link
diff --git a/ref-python-packages/stackql-deploy/start-stackql-server.sh b/ref-python-packages/stackql-deploy/start-stackql-server.sh
new file mode 100644
index 0000000..1ac8bf5
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/start-stackql-server.sh
@@ -0,0 +1,9 @@
+# start server if not running
+echo "checking if server is running"
+if [ -z "$(ps | grep stackql)" ]; then
+ echo "starting server"
+ nohup ./stackql -v --pgsrv.port=5444 srv &
+ sleep 5
+else
+ echo "server is already running"
+fi
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/stop-stackql-server.sh b/ref-python-packages/stackql-deploy/stop-stackql-server.sh
new file mode 100644
index 0000000..762f6e8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/stop-stackql-server.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Find the process ID of the StackQL server
+PID=$(pgrep -f "stackql")
+
+if [ -z "$PID" ]; then
+ echo "stackql server is not running."
+else
+ echo "stopping stackql server (PID: $PID)..."
+ kill $PID
+ echo "stackql server stopped."
+fi
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/test-derek-aws/README.md b/ref-python-packages/stackql-deploy/test-derek-aws/README.md
new file mode 100644
index 0000000..3c89eb3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/test-derek-aws/README.md
@@ -0,0 +1,63 @@
+# `stackql-deploy` starter project for `aws`
+
+> for starter projects using other providers, try `stackql-deploy test-derek-aws --provider=azure` or `stackql-deploy test-derek-aws --provider=google`
+
+see the following links for more information on `stackql`, `stackql-deploy` and the `aws` provider:
+
+- [`aws` provider docs](https://stackql.io/registry/aws)
+- [`stackql`](https://github.com/stackql/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
+
+## Overview
+
+__`stackql-deploy`__ is a stateless, declarative, SQL driven Infrastructure-as-Code (IaC) framework. There is no state file required as the current state is assessed for each resource at runtime. __`stackql-deploy`__ is capable of provisioning, deprovisioning and testing a stack which can include resources across different providers, like a stack spanning `aws` and `azure` for example.
+
+## Prerequisites
+
+This example requires `stackql-deploy` to be installed using __`pip install stackql-deploy`__. The host used to run `stackql-deploy` needs the necessary environment variables set to authenticate to your specific provider, in the case of the `aws` provider, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and optionally `AWS_SESSION_TOKEN` must be set, for more information on authentication to `aws` see the [`aws` provider documentation](https://aws.stackql.io/providers/aws).
+
+## Usage
+
+Adjust the values in the [__`stackql_manifest.yml`__](stackql_manifest.yml) file if desired. The [__`stackql_manifest.yml`__](stackql_manifest.yml) file contains resource configuration variables to support multiple deployment environments, these will be used for `stackql` queries in the `resources` folder.
+
+The syntax for the `stackql-deploy` command is as follows:
+
+```bash
+stackql-deploy { build | test | teardown } { stack-directory } { deployment environment } [ optional flags ]
+```
+
+### Deploying a stack
+
+For example, to deploy the stack named test-derek-aws to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build test-derek-aws sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them, for example:
+
+```bash
+stackql-deploy build test-derek-aws sit \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+### Testing a stack
+
+To test a stack to ensure that all resources are present and in the desired state, run the following (in our `sit` deployment example):
+
+```bash
+stackql-deploy test test-derek-aws sit \
+-e AWS_REGION=ap-southeast-2
+```
+
+### Tearing down a stack
+
+To destroy or deprovision all resources in a stack for our `sit` deployment example, run the following:
+
+```bash
+stackql-deploy teardown test-derek-aws sit \
+-e AWS_REGION=ap-southeast-2
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/test-derek-aws/resources/example_vpc.iql b/ref-python-packages/stackql-deploy/test-derek-aws/resources/example_vpc.iql
new file mode 100644
index 0000000..463dbc1
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/test-derek-aws/resources/example_vpc.iql
@@ -0,0 +1,67 @@
+/* defines the provisioning and deprovisioning commands
+used to create, update or delete the resource
+replace queries with your queries */
+
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports, retries=5, retry_delay=5 */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/test-derek-aws/stackql_manifest.yml b/ref-python-packages/stackql-deploy/test-derek-aws/stackql_manifest.yml
new file mode 100644
index 0000000..c411627
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/test-derek-aws/stackql_manifest.yml
@@ -0,0 +1,56 @@
+#
+# aws starter project manifest file, add and update values as needed
+#
+version: 1
+name: "test-derek-aws"
+description: description for "test-derek-aws"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ description: example vpc resource
+ if: "'{{ stack_env }}' == 'sit'"
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge: ['global_tags']
+ exports:
+ - vpc_id
+ - vpc_cidr_block
+ - name: example_vpc_dev
+ description: example vpc resource for dev only
+ if: "'{{ stack_env }}' == 'dev'"
+ file: example_vpc.iql
+ props:
+ - name: vpc_cidr_block
+ value: "10.3.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge: ['global_tags']
+ exports:
+ - vpc_id
+ - vpc_cidr_block
diff --git a/ref-python-packages/stackql-deploy/website/.gitignore b/ref-python-packages/stackql-deploy/website/.gitignore
new file mode 100644
index 0000000..b2d6de3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/.gitignore
@@ -0,0 +1,20 @@
+# Dependencies
+/node_modules
+
+# Production
+/build
+
+# Generated files
+.docusaurus
+.cache-loader
+
+# Misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/ref-python-packages/stackql-deploy/website/README.md b/ref-python-packages/stackql-deploy/website/README.md
new file mode 100644
index 0000000..0c6c2c2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/README.md
@@ -0,0 +1,41 @@
+# Website
+
+This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.
+
+### Installation
+
+```
+$ yarn
+```
+
+### Local Development
+
+```
+$ yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
+
+### Build
+
+```
+$ yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static contents hosting service.
+
+### Deployment
+
+Using SSH:
+
+```
+$ USE_SSH=true yarn deploy
+```
+
+Not using SSH:
+
+```
+$ GIT_USER=<Your GitHub username> yarn deploy
+```
+
+If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
diff --git a/ref-python-packages/stackql-deploy/website/babel.config.js b/ref-python-packages/stackql-deploy/website/babel.config.js
new file mode 100644
index 0000000..e00595d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/babel.config.js
@@ -0,0 +1,3 @@
+module.exports = {
+ presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
+};
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/build.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/build.md
new file mode 100644
index 0000000..f568d66
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/build.md
@@ -0,0 +1,123 @@
+---
+title: build
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the build command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `build`
+
+Command used to create or update resources in a StackQL environment.
+
+* * *
+
+## Syntax
+
+stackql-deploy build STACK_DIR STACK_ENV [FLAGS]
+
+* * *
+
+## Arguments
+
+| Argument | Description | Example |
+|--|--|--|
+|`STACK_DIR`|The directory containing the stack configuration files | `my-stack` |
+|`STACK_ENV`|The target environment for the stack deployment | `dev` |
+
+:::info
+
+`STACK_DIR` can be an absolute or relative path.
+
+`STACK_ENV` is a user defined environment symbol (e.g. `dev`, `sit`, `prd`) which is used to deploy your stack to different environments.
+
+:::
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+|`--log-level` |Set the logging level. Default is `INFO` | `--log-level DEBUG` |
+|`--env-file` |Specify an environment variables file. Default is `.env` | `--env-file .env` |
+|`-e` `--env` |Set additional environment variables (can be used multiple times) | `--env DB_USER=admin` |
+|`--dry-run` |Perform a dry run of the operation. No changes will be made | |
+|`--show-queries` |Display the queries executed in the output logs | |
+|`--output-file` |Export deployment variables to a JSON file after successful deployment | `--output-file ./outputs/deploy.json` |
+|`--download-dir` |Custom download directory for StackQL | `/etc/stackql` |
+|`--custom-registry` |Custom StackQL provider registry URL | `https://myreg` |
+
+:::tip
+
+Exported variables specified as `protected` in the respective resource definition in the `stackql_manifest.yml` file are obfuscated in the logs by default.
+
+:::
+
+* * *
+
+## Examples
+
+### Deploy a stack to a target environment
+
+Deploy the stack defined in the `azure-stack` directory to the `sit` environment, setting additional environment variables to be used in the deployment:
+
+```bash
+stackql-deploy build azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-0000-0000-0000-688bfe4e1468
+```
+
+### Perform a dry run deployment
+
+Perform a dry run of a stack defined in the `aws-stack` directory to a `prd` environment, showing templated queries without actually running them:
+
+```bash
+stackql-deploy build aws-stack prd \
+--dry-run
+```
+
+### Specifying a custom environment file
+
+Use a custom environment file `.env.prod` to supply environment variables to a stack defined in the `gcp-stack` directory to a `prod` environment:
+
+```bash
+stackql-deploy build gcp-stack prod \
+--env-file .env.prod
+```
+
+### Export deployment variables to a file
+
+Deploy a stack and export key deployment variables to a JSON file for use in CI/CD workflows or downstream processes:
+
+```bash
+stackql-deploy build databricks-stack prod \
+--output-file ./outputs/deployment.json \
+-e DATABRICKS_ACCOUNT_ID=12345678-1234-1234-1234-123456789012
+```
+
+This will create a JSON file containing the exported variables defined in the `exports` section of your `stackql_manifest.yml`:
+
+```json
+{
+ "stack_name": "my-databricks-workspace",
+ "stack_env": "prod",
+ "workspace_name": "my-databricks-workspace-prod",
+ "workspace_id": "123456789012345",
+ "deployment_name": "dbc-ab123456-789a",
+ "workspace_status": "RUNNING"
+}
+```
+
+:::tip
+
+`stack_name` and `stack_env` are automatically included in all exports and do not need to be listed in the manifest.
+
+:::
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/info.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/info.md
new file mode 100644
index 0000000..0b2978b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/info.md
@@ -0,0 +1,84 @@
+---
+title: info
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the info command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `info`
+
+Command used to display version information and environment details for the StackQL Deploy program and its dependencies.
+
+* * *
+
+## Syntax
+
+stackql-deploy info [FLAGS]
+
+* * *
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+| `--download-dir` | Custom download directory for StackQL | `/etc/stackql` |
+| `--custom-registry` | Custom StackQL provider registry URL | `https://myreg` |
+
+* * *
+
+## Description
+
+The `info` command provides detailed information about the StackQL Deploy environment, including the versions of `stackql-deploy`, `pystackql`, and the `stackql` binary, as well as the paths and platform information. If a custom provider registry is used, that information will also be displayed. Additionally, the command lists all installed providers and their versions.
+
+## Examples
+
+### Display version information
+
+Display the version information of the `stackql-deploy` tool, its dependencies, and the installed providers:
+
+```bash
+stackql-deploy info
+```
+outputs...
+
+```plaintext
+stackql-deploy version: 1.6.1
+pystackql version : 3.6.4
+stackql version : v0.5.708
+stackql binary path : /home/javen/.local/stackql
+platform : Linux x86_64 (Linux-5.15.133.1-microsoft-standard-WSL2-x86_64-with-glibc2.35), Python 3.10.12
+
+installed providers: :
+aws : v24.07.00246
+azure : v24.06.00242
+google : v24.06.00236
+```
+
+### Specify a custom `stackql` binary location
+
+`stackql-deploy` will automatically download the `stackql` binary when a command is run, if the binary does not exist in the default directory. Alternatively, you can supply the `--download-dir` flag to specify the location of an existing `stackql` binary, or have `stackql-deploy` download the `stackql` binary to this location.
+
+```bash
+stackql-deploy info \
+--download-dir /usr/local/bin/stackql
+```
+
+### Specify a custom provider registry URL
+
+By default the public [StackQL Provider Registry](https://github.com/stackql/stackql-provider-registry) is used for provider definitions, to supply custom providers or use an alternate registry, specify the custom registry URL using the `--custom-registry` flag. The following example will use the public StackQL dev provider registry.
+
+```bash
+stackql-deploy info \
+--custom-registry="https://registry-dev.stackql.app/providers"
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/init.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/init.md
new file mode 100644
index 0000000..b3b58d7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/init.md
@@ -0,0 +1,82 @@
+---
+title: init
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the init command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `init`
+
+Command used to initialize a new `stackql-deploy` project structure.
+
+* * *
+
+## Syntax
+
+stackql-deploy init STACK_DIR [FLAGS]
+
+* * *
+
+## Arguments
+
+| Argument | Description | Example |
+|--|--|--|
+| `STACK_DIR` | The directory (and name) for the project to be created | `my-stack` |
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+| `--provider` | Specify a cloud provider to start your project with. Supported values: `aws`, `azure`, `google`. Default is `azure`. | `--provider aws` |
+
+* * *
+
+## Description
+
+The `init` command sets up a new project structure for a `stackql-deploy` stack. It creates the necessary directories and populates them with template files tailored to the specified cloud provider.
+
+- If no provider is specified, the default provider is `azure`.
+- The command ensures that the project name is converted to a lower-case, hyphen-separated format.
+- The command also generates provider-specific example templates within the `resources` directory.
+
+Supported providers include:
+
+- **AWS**: Creates a sample VPC resource.
+- **Azure**: Creates a sample Resource Group.
+- **Google Cloud**: Creates a sample VPC resource.
+
+If a provider is not supported, the command will default to `azure` and notify the user.
+
+## Examples
+
+### Initialize a new project with default provider
+
+This command initializes a new project with the name `my-stack` using the default provider (`azure`):
+
+```bash
+stackql-deploy init my-stack
+```
+:::tip
+
+`init` will create your project structure, including the stack directory with the `stackql_manifest.yml` and `README.md` files, and a `resources` directory with a sample StackQL resource query file (`.iql` file). You can modify a project to use whichever providers are available in the [StackQL Provider Registry](https://stackql.io/providers).
+
+:::
+
+### Initialize a new project with the `aws` provider
+
+Initialize a new project with the name `my-aws-stack` using `aws` as the provider:
+
+```bash
+stackql-deploy init my-aws-stack --provider aws
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/shell.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/shell.md
new file mode 100644
index 0000000..4e4984b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/shell.md
@@ -0,0 +1,70 @@
+---
+title: shell
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the shell command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `shell`
+
+Command used to launch the StackQL interactive shell.
+
+* * *
+
+## Syntax
+
+stackql-deploy shell [FLAGS]
+
+* * *
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+|`--download-dir` |Custom download directory for StackQL | `/etc/stackql` |
+|`--custom-registry` |Custom StackQL provider registry URL | `https://myreg` |
+
+:::info
+
+The `shell` command launches the interactive StackQL shell. If the `stackql` binary is not found in the provided paths, the command will fail with an error.
+
+:::
+
+* * *
+
+## Examples
+
+### Launch the StackQL shell using the default binary location
+
+This command attempts to launch the StackQL shell using the binary location managed by the `pystackql` package:
+
+```bash
+stackql-deploy shell
+```
+
+### Launch the StackQL shell from a custom download directory
+
+Specify a custom directory where the `stackql` binary is downloaded and run the StackQL shell:
+
+```bash
+stackql-deploy shell --download-dir /usr/local/bin/stackql
+```
+
+### Use a custom registry URL
+
+Launch the StackQL shell using a custom StackQL provider registry:
+
+```bash
+stackql-deploy shell --custom-registry https://mycustomregistry.com
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/teardown.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/teardown.md
new file mode 100644
index 0000000..4332f97
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/teardown.md
@@ -0,0 +1,75 @@
+---
+title: teardown
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the teardown command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `teardown`
+
+Command used to deprovision and remove resources in a specified stack in a given environment.
+
+* * *
+
+## Syntax
+
+stackql-deploy teardown STACK_DIR STACK_ENV [FLAGS]
+
+* * *
+
+## Arguments
+
+| Argument | Description | Example |
+|--|--|--|
+| `STACK_DIR` | The directory containing the stack configuration files | `my-stack` |
+| `STACK_ENV` | The target environment for tearing down the stack | `dev` |
+
+:::info
+
+`STACK_DIR` can be an absolute or relative path.
+
+`STACK_ENV` is a user-defined environment symbol (e.g., `dev`, `sit`, `prd`) used to tear down your stack in different environments.
+
+:::
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+| `--log-level` | Set the logging level. Default is `INFO` | `--log-level DEBUG` |
+| `--env-file` | Specify an environment variables file. Default is `.env` | `--env-file .env` |
+| `-e` `--env` | Set additional environment variables (can be used multiple times) | `--env DB_USER=admin` |
+| `--dry-run` | Perform a dry run of the operation. No changes will be made | |
+| `--show-queries` | Display the queries executed in the output logs | |
+| `--download-dir` |Custom download directory for StackQL | `/etc/stackql` |
+| `--custom-registry` |Custom StackQL provider registry URL | `https://myreg` |
+
+:::tip
+
+Exported variables specified as `protected` in the respective resource definition in the `stackql_manifest.yml` file are obfuscated in the logs by default.
+
+:::
+
+* * *
+
+## Examples
+
+### Teardown a stack in a target environment
+
+Teardown the stack defined in the `azure-stack` directory in the `sit` environment, setting additional environment variables:
+
+```bash
+stackql-deploy teardown azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-0000-0000-0000-688bfe4e1468
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/test.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/test.md
new file mode 100644
index 0000000..63db135
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/test.md
@@ -0,0 +1,76 @@
+---
+title: test
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the test command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `test`
+
+Command used to confirm the desired state of resources in a specified stack in a given environment.
+
+* * *
+
+## Syntax
+
+stackql-deploy test STACK_DIR STACK_ENV [FLAGS]
+
+* * *
+
+## Arguments
+
+| Argument | Description | Example |
+|--|--|--|
+| `STACK_DIR` | The directory containing the stack configuration files | `my-stack` |
+| `STACK_ENV` | The target environment for testing the stack | `dev` |
+
+:::info
+
+`STACK_DIR` can be an absolute or relative path.
+
+`STACK_ENV` is a user-defined environment symbol (e.g., `dev`, `sit`, `prd`) used to test your stack in different environments.
+
+:::
+
+## Optional Flags
+
+| Flag | Description | Example |
+|--|--|--|
+| `--log-level` | Set the logging level. Default is `INFO` | `--log-level DEBUG` |
+| `--env-file` | Specify an environment variables file. Default is `.env` | `--env-file .env` |
+| `-e` `--env` | Set additional environment variables (can be used multiple times) | `--env DB_USER=admin` |
+| `--dry-run` | Perform a dry run of the operation. No changes will be made | |
+| `--show-queries` | Display the queries executed in the output logs | |
+| `--download-dir` |Custom download directory for StackQL | `/etc/stackql` |
+| `--custom-registry` |Custom StackQL provider registry URL | `https://myreg` |
+
+:::tip
+
+Exported variables specified as `protected` in the respective resource definition in the `stackql_manifest.yml` file are obfuscated in the logs by default.
+
+:::
+
+* * *
+
+## Examples
+
+### Confirm desired state for a stack in a target environment
+
+Run tests for the stack defined in the `azure-stack` directory in the `sit` environment, setting additional environment variables:
+
+```bash
+stackql-deploy test azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=631d1c6d-0000-0000-0000-688bfe4e1468
+```
+
diff --git a/ref-python-packages/stackql-deploy/website/docs/cli-reference/upgrade.md b/ref-python-packages/stackql-deploy/website/docs/cli-reference/upgrade.md
new file mode 100644
index 0000000..5d95ad2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/cli-reference/upgrade.md
@@ -0,0 +1,57 @@
+---
+title: upgrade
+hide_title: true
+hide_table_of_contents: false
+keywords:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+tags:
+ - stackql
+ - stackql-deploy
+ - infrastructure-as-code
+ - configuration-as-data
+description: Documentation for the upgrade command in StackQL Deploy
+image: "/img/stackql-cover.png"
+---
+
+# `upgrade`
+
+Command used to upgrade the `pystackql` package and `stackql` binary to the latest versions.
+
+* * *
+
+## Syntax
+
+stackql-deploy upgrade
+
+* * *
+
+## Description
+
+The `upgrade` command automates the process of upgrading both the `pystackql` package and the `stackql` binary to their latest available versions. This ensures that your environment is up-to-date with the latest features, improvements, and security patches.
+
+When the `upgrade` command is run, it first attempts to upgrade the `pystackql` package using `pip`. After that, it upgrades the `stackql` binary to the latest version.
+
+## Examples
+
+### Upgrade `pystackql` and `stackql` to the latest versions
+
+This command will upgrade both the `pystackql` package and the `stackql` binary:
+
+```bash
+stackql-deploy upgrade
+```
+
+outputs...
+
+```plaintext
+upgrading pystackql package...
+pystackql package upgraded successfully.
+pystackql package upgraded from 3.6.4 to 3.7.0.
+upgrading stackql binary, current version v0.5.708...
+stackql binary upgraded to v0.6.002.
+```
+
+If the `pystackql` package or the `stackql` binary is already up-to-date, the command will notify you accordingly.
diff --git a/ref-python-packages/stackql-deploy/website/docs/getting-started.md b/ref-python-packages/stackql-deploy/website/docs/getting-started.md
new file mode 100644
index 0000000..6299e1a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/getting-started.md
@@ -0,0 +1,185 @@
+---
+id: getting-started
+title: Getting Started
+hide_title: false
+hide_table_of_contents: false
+description: A quick overview of how to get started with StackQL Deploy, including basic concepts and the essential components of a deployment.
+tags: []
+draft: false
+unlisted: false
+---
+
+import File from '/src/components/File';
+
+`stackql-deploy` is a model driven, declarative framework for provisioning, de-provisioning and testing cloud resources. Heard enough and ready to get started? Jump to a [__Quick Start__](#quick-start).
+
+## Installing `stackql-deploy`
+
+Installing `stackql-deploy` globally is as easy as...
+
+```bash
+pip install stackql-deploy
+# or
+pip3 install stackql-deploy
+```
+This will set up `stackql-deploy` and all its dependencies.
+
+> __Note for macOS users__
+> to install `stackql-deploy` in a virtual environment (which may be necessary on __macOS__), use the following:
+> ```bash
+> python3 -m venv myenv
+> source myenv/bin/activate
+> pip install stackql-deploy
+> ```
+
+## How `stackql-deploy` works
+
+The core components of `stackql-deploy` are the __stack directory__, the `stackql_manifest.yml` file and resource query (`.iql`) files. These files define your infrastructure and guide the deployment process.
+
+`stackql-deploy` uses the `stackql_manifest.yml` file in the `stack-dir`, to render query templates (`.iql` files) in the `resources` sub directory of the `stack-dir`, targeting an environment (`stack-env`). `stackql` is used to execute the queries to deploy, test, update or delete resources as directed. This is summarized in the diagram below:
+
+```mermaid
+flowchart LR
+ subgraph stack-dir
+ direction LR
+ B(Manifest File) --> C(Resource Files)
+ end
+
+ A(stackql-deploy) -->|uses...|stack-dir
+    stack-dir -->|deploys to...|D(☁️ Your Environment)
+```
+
+### `stackql_manifest.yml` File
+
+The `stackql_manifest.yml` file is the basis of your stack configuration. It contains the definitions of the resources you want to manage, the providers you're using (such as AWS, Google Cloud, or Azure), and the environment-specific settings that will guide the deployment.
+
+This manifest file acts as a blueprint for your infrastructure, describing the resources and how they should be configured. An example `stackql_manifest.yml` file is shown here:
+
+
+
+```yaml
+version: 1
+name: "my-azure-stack"
+description: description for "my-azure-stack"
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ description: default location for resources
+ value: eastus
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stackName: "{{ stack_name }}"
+ stackEnv: "{{ stack_env }}"
+resources:
+ - name: example_res_grp
+ props:
+ - name: resource_group_name
+ value: "{{ stack_name }}-{{ stack_env }}-rg"
+ exports:
+ - resource_group_name
+```
+
+
+
+The `stackql_manifest.yml` file is detailed [__here__](/manifest-file).
+
+### Resource Query Files
+
+Each resource or query defined in the `resources` section of the `stackql_manifest.yml` has an associated StackQL query file (using the `.iql` extension by convention). The query file defines queries to deploy and test a cloud resource. These queries are demarcated by query anchors (or hints). Available query anchors include:
+
+- `exists` : tests for the existence or non-existence of a resource
+- `create` : creates the resource in the desired state using a StackQL `INSERT` statement
+- `update` : updates the resource to the desired state using a StackQL `UPDATE` statement
+- `createorupdate`: for idempotent resources, uses a StackQL `INSERT` statement
+- `statecheck`: tests the state of a resource after a DML operation, typically to determine if the resource is in the desired state
+- `exports` : variables to export from the resource to be used in subsequent queries
+- `delete` : deletes a resource using a StackQL `DELETE` statement
+
+An example resource query file is shown here:
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+
+/*+ create */
+INSERT INTO azure.resources.resource_groups(
+ resourceGroupName,
+ subscriptionId,
+ data__location
+)
+SELECT
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND location = '{{ location }}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+
+/*+ exports */
+SELECT '{{ resource_group_name }}' as resource_group_name
+
+/*+ delete */
+DELETE FROM azure.resources.resource_groups
+WHERE resourceGroupName = '{{ resource_group_name }}' AND subscriptionId = '{{ subscription_id }}'
+```
+
+
+
+Resource queries are detailed [__here__](/resource-query-files).
+
+### `stackql-deploy` commands
+
+Basic `stackql-deploy` commands include:
+
+- `build` : provisions a stack to the desired state in a specified environment (including `create` and `update` operations if necessary)
+- `test` : tests a stack to confirm all resources exist and are in their desired state
+- `teardown` : de-provisions a stack
+
+here are some examples:
+
+```bash title="deploy my-azure-stack to the prd environment"
+stackql-deploy build my-azure-stack prd \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
+
+```bash title="test my-azure-stack in the sit environment"
+stackql-deploy test my-azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
+
+```bash title="teardown my-azure-stack in the dev environment"
+stackql-deploy teardown my-azure-stack dev \
+-e AZURE_SUBSCRIPTION_ID=00000000-0000-0000-0000-000000000000
+```
+
+For more detailed information see [`cli-reference/build`](/cli-reference/build), [`cli-reference/test`](/cli-reference/test), [`cli-reference/teardown`](/cli-reference/teardown), or other commands available.
+
+
+### `stackql-deploy` deployment flow
+
+`stackql-deploy` processes the resources defined in the `stackql_manifest.yml` in top down order (`teardown` operations are processed in reverse order).
+
+
+
+## Quick Start
+
+To get up and running quickly, `stackql-deploy` provides a set of quick start templates for common cloud providers. These templates include predefined configurations and resource queries tailored to AWS, Azure, and Google Cloud, among others.
+
+- [**AWS Quick Start Template**](/template-library/aws/vpc-and-ec2-instance): A basic setup for deploying a VPC, including subnets and routing configurations.
+- [**Azure Quick Start Template**](/template-library/azure/simple-vnet-and-vm): A setup for creating a Resource Group with associated resources.
+- [**Google Cloud Quick Start Template**](/template-library/google/k8s-the-hard-way): A configuration for deploying a VPC with network and firewall rules.
+
+These templates are designed to help you kickstart your infrastructure deployment with minimal effort, providing a solid foundation that you can customize to meet your specific needs.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/github-actions.md b/ref-python-packages/stackql-deploy/website/docs/github-actions.md
new file mode 100644
index 0000000..8586d39
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/github-actions.md
@@ -0,0 +1,163 @@
+---
+id: github-actions
+title: GitHub Actions - StackQL Deploy
+hide_title: false
+hide_table_of_contents: false
+description: Documentation for using the StackQL Deploy GitHub Action to automate infrastructure deployment and testing.
+tags: []
+draft: false
+unlisted: false
+---
+
+# `stackql-deploy` GitHub Action
+
+The [`stackql-deploy` GitHub Action](https://github.com/marketplace/actions/stackql-deploy) allows you to execute `stackql-deploy` commands to deploy or test a stack within your CI/CD pipelines in a GitHub Actions workflow.
+
+## Usage
+
+The `stackql-deploy` GitHub Action will pull the latest `stackql-deploy` package from the [PyPi repository](https://pypi.org/project/stackql-deploy/). The action invokes a `stackql-deploy` command with `inputs` (detailed below). Here is a basic example of using the `stackql-deploy` GitHub Action in a workflow.
+
+```yaml {13}
+jobs:
+ stackql-actions-test:
+ name: StackQL Actions Test
+ runs-on: ubuntu-latest
+ env:
+ GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }} # add additional cloud provider creds here as needed
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Deploy a Stack
+ uses: stackql/setup-deploy@v1.0.1
+ with:
+ command: 'build'
+ stack_dir: 'examples/k8s-the-hard-way'
+ stack_env: 'dev'
+ env_vars: 'GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo'
+```
+
+:::note[Provider Authentication]
+
+Authentication to StackQL providers is managed through environment variables sourced from GitHub Actions Secrets. Ensure you configure the necessary secrets in your repository settings to authenticate with your cloud provider(s).
+
+For more information on provider-specific authentication, refer to the setup instructions available in the [StackQL Provider Registry Docs](https://github.com/stackql/stackql-provider-registry).
+
+:::
+
+
+## Inputs
+
+The following inputs can be configured for the `stackql-deploy` GitHub Action:
+
+| Input | Description | Example |
+|------------------|-----------------------------------------------------------------------------|---------------------------------------------------|
+| `command` | The `stackql-deploy` command to run (`build` or `test`) | `build` |
+| `stack_dir` | The repository directory containing `stackql_manifest.yml` and resources | `examples/k8s-the-hard-way` |
+| `stack_env` | The environment to deploy or test (e.g., `dev`, `prod`) | `dev` |
+| `env_vars` | (Optional) Environment variables or secrets to import into a stack | `GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo` |
+| `env_file` | (Optional) Environment variables sourced from a file | `.env.prod` |
+| `output_file` | (Optional) File path to export deployment variables as JSON | `./outputs/deployment.json` |
+| `show_queries` | (Optional) Show the queries executed in the output logs | `true` |
+| `log_level` | (Optional) Set the logging level (`INFO` or `DEBUG`, defaults to `INFO`) | `DEBUG` |
+| `dry_run` | (Optional) Perform a dry run of the operation | `true` |
+| `custom_registry`| (Optional) Custom registry URL to be used for StackQL | `https://myreg` |
+| `on_failure` | (Optional) Action to take on failure (not implemented yet) | `rollback` |
+
+## Examples
+
+### Deploy a Stack
+
+This example shows how to build a stack (located in `examples/k8s-the-hard-way`) for a development (`dev`) environment:
+
+```yaml
+jobs:
+ stackql-actions-test:
+ name: StackQL Actions Test
+ runs-on: ubuntu-latest
+ env:
+ GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }} # add additional cloud provider creds here as needed
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Deploy a Stack
+ uses: stackql/setup-deploy@v1.0.1
+ with:
+ command: 'build'
+ stack_dir: 'examples/k8s-the-hard-way'
+ stack_env: 'dev'
+ env_vars: 'GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo'
+```
+
+### Test a Stack
+
+This example shows how to test a stack for a staging (`sit`) environment:
+
+```yaml
+jobs:
+ stackql-actions-test:
+ name: StackQL Actions Test
+ runs-on: ubuntu-latest
+ env:
+ GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }} # add additional cloud provider creds here as needed
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Test a Stack
+ uses: stackql/setup-deploy@v1.0.1
+ with:
+ command: 'test'
+ stack_dir: 'examples/k8s-the-hard-way'
+ stack_env: 'sit'
+ env_vars: 'GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo'
+```
+
+### Deploy and Export Variables
+
+This example shows how to deploy a stack and export deployment variables to a JSON file for use in subsequent workflow steps:
+
+```yaml
+jobs:
+ deploy-and-process:
+ name: Deploy Stack and Process Outputs
+ runs-on: ubuntu-latest
+ env:
+ DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
+ DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Deploy Databricks Stack
+ uses: stackql/setup-deploy@v1.0.1
+ with:
+ command: 'build'
+ stack_dir: 'examples/databricks/serverless'
+ stack_env: 'prod'
+ output_file: './deployment-outputs.json'
+ env_vars: |
+ DATABRICKS_ACCOUNT_ID=${{ secrets.DATABRICKS_ACCOUNT_ID }}
+ AWS_REGION=us-east-1
+ AWS_ACCOUNT_ID=${{ secrets.AWS_ACCOUNT_ID }}
+
+ - name: Parse Deployment Outputs
+ id: parse_outputs
+ run: |
+ echo "workspace_name=$(jq -r '.databricks_workspace_name' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+ echo "workspace_id=$(jq -r '.databricks_workspace_id' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+ echo "workspace_status=$(jq -r '.workspace_status' ./deployment-outputs.json)" >> $GITHUB_OUTPUT
+
+ - name: Use Exported Variables
+ run: |
+ echo "Deployed workspace: ${{ steps.parse_outputs.outputs.workspace_name }}"
+ echo "Workspace ID: ${{ steps.parse_outputs.outputs.workspace_id }}"
+ echo "Status: ${{ steps.parse_outputs.outputs.workspace_status }}"
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/index.md b/ref-python-packages/stackql-deploy/website/docs/index.md
new file mode 100644
index 0000000..27c40c7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/index.md
@@ -0,0 +1,31 @@
+---
+id: index
+title: Welcome
+hide_title: true
+hide_table_of_contents: true
+# keywords: []
+description: ''
+# image: ''
+# slug: ''
+custom_edit_url: null
+tags: []
+draft: false
+unlisted: false
+---
+
+
+
+## Model Driven, Declarative, State File-less, Multi-Cloud IaC
+
+
+
+__`stackql-deploy`__ is a multi-cloud resource provisioning framework using __`stackql`__. It is inspired by dbt (data build tool), which manages data transformation workflows in analytics engineering by treating SQL scripts as models that can be built, tested, and materialized incrementally. With StackQL, you can create a similar framework for cloud and SaaS provisioning. The goal is to treat cloud stacks as models that can be deployed, tested, updated, and de-provisioned, enabling developers to deploy complex, dependent infrastructure components in a reliable and repeatable manner.
+
+### Features
+
+- Dynamic state determination (eliminating the need for state files)
+- Pre-flight and post-deploy assurance tests for resources
+- Simple flow control with rollback capabilities
+- Single code base for multiple target environments
+- SQL-based definitions for resources and tests
+
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest-file.md b/ref-python-packages/stackql-deploy/website/docs/manifest-file.md
new file mode 100644
index 0000000..1dea06c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest-file.md
@@ -0,0 +1,423 @@
+---
+id: manifest-file
+title: Manifest File
+hide_title: false
+hide_table_of_contents: false
+description: A quick overview of how to get started with StackQL Deploy, including basic concepts and the essential components of a deployment.
+tags: []
+draft: false
+unlisted: false
+---
+
+import * as ManifestFields from './manifest_fields';
+import File from '/src/components/File';
+export const headingColor = '#FF6347';
+
+## Overview
+
+The __`stackql_manifest.yml`__ file is in the root of a project (or stack) directory. This `yaml` file defines all of the resources and their respective properties for all target deployment environments for your stack. Resources are processed in the order in which they are declared in this file; resources can include `exports`, which are variables used for subsequent resources in your stack (for example, a `vpc_id` needed to deploy a `subnet`). Global variables, which can be sourced from external environment variables or secrets, are configured here as well.
+
+:::note
+
+Secrets should not be saved in the __`stackql_manifest.yml`__ file, use `globals` and externally sourced variables (using the `-e` or `--env` options) at deploy time.
+
+:::
+
+## Fields
+
+The fields within the __`stackql_manifest.yml`__ file are described in further detail here.
+
+### `name`
+
+
+
+***
+
+### `description`
+
+
+
+***
+
+### `providers`
+
+
+
+***
+
+### `globals`
+
+
+
+***
+
+### `global.name`
+
+
+
+***
+
+### `global.value`
+
+
+
+***
+
+### `global.description`
+
+
+
+***
+
+### `resources`
+
+
+
+***
+
+### `resource.name`
+
+
+
+***
+
+### `resource.type`
+
+
+
+***
+
+### `resource.file`
+
+
+
+***
+
+### `resource.description`
+
+
+
+***
+
+### `resource.auth`
+
+
+
+***
+
+### `resource.exports`
+
+
+
+***
+
+### `resource.protected`
+
+
+
+***
+
+### `resource.if`
+
+
+
+***
+
+### `resource.sql`
+
+
+
+***
+
+### `resource.skip_validation`
+
+
+
+***
+
+### `resource.props`
+
+
+
+***
+
+### `resource.prop.name`
+
+
+
+***
+
+### `resource.prop.description`
+
+
+
+***
+
+### `resource.prop.value`
+
+
+
+***
+
+### `resource.prop.values`
+
+
+
+***
+
+### `resource.prop.merge`
+
+
+
+***
+
+### `exports`
+
+
+
+***
+
+### `version`
+
+
+
+***
+
+## Example manifest file
+
+Here is a complete example of a `stackql_manifest.yml` file for a Google stack, for other examples see the [Template Library](/template-library).
+
+
+
+```yaml
+version: 1
+name: kubernetes-the-hard-way
+description: stackql-deploy example for kubernetes-the-hard-way
+providers:
+ - google
+globals:
+- name: project
+ description: google project name
+ value: "{{ GOOGLE_PROJECT }}"
+- name: region
+ value: australia-southeast1
+- name: default_zone
+ value: australia-southeast1-a
+resources:
+- name: network
+ description: vpc network for k8s-the-hard-way sample app
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ exports:
+ - vpc_name
+ - vpc_link
+- name: subnetwork
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-subnet"
+ - name: ip_cidr_range
+ values:
+ prd:
+ value: 192.168.0.0/16
+ sit:
+ value: 10.10.0.0/16
+ dev:
+ value: 10.240.0.0/24
+ exports:
+ - subnet_name
+ - subnet_link
+- name: public_address
+ props:
+ - name: address_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-ip-addr"
+ exports:
+ - address
+- name: controller_instances
+ file: instances.iql
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-controller"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "controller"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: worker_instances
+ file: instances.iql
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-worker"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "worker"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+- name: health_checks
+ props:
+ - name: health_check_name
+ value: kubernetes
+ - name: health_check_interval_sec
+ value: 5
+ - name: health_check_description
+ value: Kubernetes Health Check
+ - name: health_check_timeout_sec
+ value: 5
+ - name: health_check_healthy_threshold
+ value: 2
+ - name: health_check_unhealthy_threshold
+ value: 2
+ - name: health_check_host
+ value: kubernetes.default.svc.cluster.local
+ - name: health_check_port
+ value: 80
+ - name: health_check_path
+ value: /healthz
+ exports:
+ - health_check_link
+- name: internal_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-internal-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["10.240.0.0/24", "10.200.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}, {IPProtocol: udp}, {IPProtocol: icmp}]
+- name: external_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-external-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["0.0.0.0/0"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp, ports: ["22"]}, {IPProtocol: tcp, ports: ["6443"]},{IPProtocol: icmp}]
+- name: health_check_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-health-check-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["209.85.152.0/22", "209.85.204.0/22", "35.191.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}]
+- name: get_controller_instances
+ type: query
+ exports:
+ - controller_instances
+- name: target_pool
+ props:
+ - name: target_pool_name
+ value: "{{ stack_name }}-{{ stack_env }}-target-pool"
+ - name: target_pool_session_affinity
+ value: NONE
+ - name: target_pool_health_checks
+ value: ["{{ health_check_link }}"]
+ - name: target_pool_instances
+ value: "{{ controller_instances }}"
+ exports:
+ - target_pool_link
+- name: forwarding_rule
+ props:
+ - name: forwarding_rule_name
+ value: "{{ stack_name }}-{{ stack_env }}-forwarding-rule"
+ - name: forwarding_rule_load_balancing_scheme
+ value: EXTERNAL
+ - name: forwarding_rule_port_range
+ value: 6443
+- name: routes
+ props:
+ - name: num_routes
+ value: 3
+ - name: route_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-route"
+ - name: route_priority
+ value: 1000
+ - name: route_data
+ values:
+ dev:
+ value:
+ - {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
+ - {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
+ - {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
+exports:
+ - vpc_name
+ - vpc_link
+ - subnet_name
+ - address
+ - target_pool_link
+```
+
+
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/description.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/description.mdx
new file mode 100644
index 0000000..8f7a0c6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/description.mdx
@@ -0,0 +1,14 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Stack description, for documentation purposes only.
+
+
+
+```yaml
+description: stackql-deploy example for kubernetes-the-hard-way
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/exports.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/exports.mdx
new file mode 100644
index 0000000..04d945b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/exports.mdx
@@ -0,0 +1,25 @@
+**Type**: `array of strings` (optional)
+
+**Description**: List of variable names to export to a JSON file after deployment completion. Variables must exist in the deployment context (from globals or resource exports). Use with the `--output-file` CLI argument to specify the destination file.
+
+**Usage**: Use this to extract key deployment outputs for use in CI/CD pipelines, downstream processes, or for record-keeping.
+
+**Example**:
+
+```yaml
+exports:
+ - databricks_workspace_name
+ - databricks_workspace_id
+ - aws_iam_role_arn
+ - deployment_timestamp
+```
+
+**Notes**:
+- `stack_name`, `stack_env`, and `elapsed_time` are automatically included in exports and do not need to be listed
+- Export order: automatic exports (`stack_name`, `stack_env`) first, then user-defined exports, then timing (`elapsed_time`) last
+- `elapsed_time` is formatted as a string showing the total deployment duration (e.g., "0:01:23.456789")
+- Variables are exported exactly as they exist in the deployment context
+- Complex objects and arrays are preserved as JSON structures
+- If a listed variable doesn't exist in the context, deployment will fail
+- Requires `--output-file` CLI argument to be specified, otherwise exports are skipped
+- Exported JSON file contains a flat object with variable names as keys
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals.mdx
new file mode 100644
index 0000000..cdbb0f7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals.mdx
@@ -0,0 +1,29 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Global variables used throughout the stack, can be an empty list if not required. Use the `{{ YOUR_ENV_VAR }}` notation in the `value` field of a `globals` list item to populate a global stack variable from an external environment variable or secret.
+
+
+
+```yaml
+globals:
+- name: project
+ description: google project name
+ value: "{{ GOOGLE_PROJECT }}"
+- name: region
+ value: australia-southeast1
+- name: default_zone
+ value: australia-southeast1-a
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/description.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/description.mdx
new file mode 100644
index 0000000..3f71525
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/description.mdx
@@ -0,0 +1,17 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Global variable description, for documentation purposes only.
+
+
+
+```yaml {3}
+globals:
+- name: region
+ description: default region
+ value: ap-southeast-2
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/name.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/name.mdx
new file mode 100644
index 0000000..62a21d7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/name.mdx
@@ -0,0 +1,16 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Global variable name, this can be referred to in the `resources` section of the `stackql_manifest.yml` file or within queries using the `{{ your_global_var_name }}` syntax.
+
+
+
+```yaml {2}
+globals:
+- name: region
+ value: ap-southeast-2
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/value.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/value.mdx
new file mode 100644
index 0000000..dd20382
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/globals/value.mdx
@@ -0,0 +1,28 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Global variable value, this can be a literal or sourced from an environment variable. The following example shows how to supply a string literal for the `value`:
+
+
+
+```yaml {3}
+globals:
+- name: region
+ value: ap-southeast-2
+```
+
+
+
+The following example shows how to source `value` from an environment variable:
+
+
+
+```yaml {3}
+globals:
+- name: region
+  value: "{{ AWS_REGION }}"
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/index.js b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/index.js
new file mode 100644
index 0000000..9592759
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/index.js
@@ -0,0 +1,26 @@
+export { default as Name } from "./name.mdx";
+export { default as Description } from "./description.mdx";
+export { default as Providers } from "./providers.mdx";
+export { default as Globals } from "./globals.mdx";
+export { default as GlobalName } from "./globals/name.mdx";
+export { default as GlobalDescription } from "./globals/description.mdx";
+export { default as GlobalValue } from "./globals/value.mdx";
+export { default as Resources } from "./resources.mdx";
+export { default as ResourceName } from "./resources/name.mdx";
+export { default as ResourceType } from "./resources/type.mdx";
+export { default as ResourceFile } from "./resources/file.mdx";
+export { default as ResourceDescription } from "./resources/description.mdx";
+export { default as ResourceExports } from "./resources/exports.mdx";
+export { default as ResourceProps } from "./resources/props.mdx";
+export { default as ResourceProtected } from "./resources/protected.mdx";
+export { default as ResourceAuth } from "./resources/auth.mdx";
+export { default as ResourceIf } from "./resources/if.mdx";
+export { default as ResourceSql } from "./resources/sql.mdx";
+export { default as ResourceSkipValidation } from "./resources/skipvalidation.mdx";
+export { default as ResourcePropName } from "./resources/props/name.mdx";
+export { default as ResourcePropDescription } from "./resources/props/description.mdx";
+export { default as ResourcePropValue } from "./resources/props/value.mdx";
+export { default as ResourcePropValues } from "./resources/props/values.mdx";
+export { default as ResourcePropMerge } from "./resources/props/merge.mdx";
+export { default as Exports } from "./exports.mdx";
+export { default as Version } from "./version.mdx";
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/name.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/name.mdx
new file mode 100644
index 0000000..bb2733b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/name.mdx
@@ -0,0 +1,20 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+The name of the stack, by default this is derived by the [__`init`__](/cli-reference/init) command from the stack directory name (replacing `_` with `-` for resource and property name compliance). This name can be overridden, the value for `name` is exposed as a global variable called `stack_name` which is often used with resource or property values so ensure that this string conforms to any naming restrictions.
+
+
+
+```yaml
+name: kubernetes-the-hard-way
+```
+
+
+
+:::tip
+
+Don't embed any environment symbols or designators in the `name` field, these are sourced at deploy time from the `STACK_ENV` argument to the `build`, `test` or `teardown` commands, and exposed for use in resource or property values as a global variable called `stack_env`.
+
+:::
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/providers.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/providers.mdx
new file mode 100644
index 0000000..63d0df3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/providers.mdx
@@ -0,0 +1,16 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+StackQL cloud or SaaS providers used in the stack. These are pulled from the stackql provider registry if they are not present at deploy time.
+
+
+
+```yaml
+providers:
+ - google
+ - aws
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources.mdx
new file mode 100644
index 0000000..24f5281
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources.mdx
@@ -0,0 +1,59 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Defines resources in your stack, including the properties and their desired state values.
+
+
+
+```yaml
+resources:
+- name: network
+ description: vpc network for k8s-the-hard-way sample app
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ exports:
+ - vpc_name
+ - vpc_link
+```
+
+
+
+:::note
+
+A file with the name of the resource with an `.iql` extension is expected to exist in the `resources` subdirectory of your stack directory. You can reference a different file using the `file` field as shown here:
+
+
+
+```yaml
+- name: controller_instances
+ file: instances.iql
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-controller"
+```
+
+
+
+:::
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/auth.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/auth.mdx
new file mode 100644
index 0000000..e06920f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/auth.mdx
@@ -0,0 +1,65 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+## Custom Authentication at Resource Level
+
+This feature allows for custom authentication settings to be specified at the resource level within the `stackql_manifest.yml` file. This enables context-specific authentication configurations, such as control plane or data plane context switching within the same stack. Authentication parameters can be overridden by setting specific variable references in the `auth` section.
+
+:::note
+
+This feature requires version 1.8.0 of `stackql-deploy` and version 3.7.0 of `pystackql`, use the following to upgrade components:
+
+```bash
+stackql-deploy upgrade
+```
+
+:::
+
+
+
+The `auth` object will depend upon the provider the resource belongs to, consult the provider documentation in the [StackQL Provider Registry Docs](https://stackql.io/providers).
+
+### Example Usage
+
+
+
+```yaml {4,12-18}
+resources:
+ - name: app_manager_api_key
+ props:
+ - name: display_name
+ value: "{{ stack_name }}-{{ stack_env }}-app-manager-api-key"
+ - name: description
+ value: "Kafka API Key owned by 'app-manager' service account"
+ - name: owner
+ value:
+ id: app_manager_id
+ api_version: app_manager_api_version
+ kind: app_manager_kind
+ exports:
+ - app_manager_api_key_id
+ - app_manager_api_key_secret
+
+ - name: users_topic
+ auth:
+ confluent:
+ type: basic
+ username_var: app_manager_api_key_id
+ password_var: app_manager_api_key_secret
+ props:
+ - name: topic_name
+ value: "users"
+ - name: kafka_cluster
+ value: {{ cluster_id }}
+ - name: rest_endpoint
+ value: {{ cluster_rest_endpoint }}
+```
+
+
+
+This configuration sets up a custom `basic` authentication for the `users_topic` resource, where:
+
+- `username_var` is set to `app_manager_api_key_id`
+- `password_var` is set to `app_manager_api_key_secret`
+
+These variables are defined in the exported section of the `app_manager_api_key` resource and dynamically referenced within the authentication configuration.
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/description.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/description.mdx
new file mode 100644
index 0000000..4995726
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/description.mdx
@@ -0,0 +1,17 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Resource description
+
+
+
+```yaml {3}
+resources:
+- name: instances
+ description: web server instances
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/exports.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/exports.mdx
new file mode 100644
index 0000000..e1ad6f5
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/exports.mdx
@@ -0,0 +1,35 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Variables exported from the `resource`
+
+
+
+```yaml {4}
+resources:
+- name: network
+...
+ exports:
+ - vpc_name
+ - vpc_link
+```
+
+
+
+:::note
+
+Variables listed as `exports` must be returned as columns in a `exports` query, for example:
+
+```sql {3,4}
+/*+ exports */
+SELECT
+'{{ vpc_name }}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+```
+
+:::
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/file.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/file.mdx
new file mode 100644
index 0000000..d9a3b64
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/file.mdx
@@ -0,0 +1,48 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Query file for the resource (`.iql` file in the `resources` directory). Defaults to `{resource.name}.iql`
+
+:::tip
+
+Use `file` to reuse the same query template file for multiple different resources, as shown in the following example
+
+:::
+
+
+
+```yaml {4,17}
+...
+resources:
+- name: internal_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-internal-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["10.240.0.0/24", "10.200.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}, {IPProtocol: udp}, {IPProtocol: icmp}]
+- name: external_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-external-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["0.0.0.0/0"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp, ports: ["22"]}, {IPProtocol: tcp, ports: ["6443"]},{IPProtocol: icmp}]
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/if.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/if.mdx
new file mode 100644
index 0000000..e0c7de4
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/if.mdx
@@ -0,0 +1,47 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+A conditional expression that determines whether a resource should be tested, provisioned, or deprovisioned.
+You can use Python expressions to conditionally determine if a resource should be processed.
+
+
+
+```yaml {3}
+resources:
+- name: get_transfer_kms_key_id
+ if: "environment == 'production'"
+...
+```
+
+
+
+:::info
+
+- Conditions are evaluated as Python expressions.
+- You can reference literals (string, boolean, integer, etc.) or runtime template variables.
+- If the condition evaluates to `True`, the resource is processed; if `False`, it is skipped.
+- Template variables can be referenced using Jinja2 template syntax (`{{ variable }}`).
+
+:::
+
+## Examples
+
+Conditionally process a resource based on environment:
+
+```yaml
+resources:
+ - name: get_transfer_kms_key_id
+ if: "environment == 'production'"
+ ...
+```
+
+Conditionally process based on other variable values:
+
+```yaml
+resources:
+ - name: get_transfer_kms_key_id
+ if: "some_var == '{{ some_other_var_value }}'"
+ ...
+```
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/name.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/name.mdx
new file mode 100644
index 0000000..6adab0d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/name.mdx
@@ -0,0 +1,16 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+The name of the resource
+
+
+
+```yaml {2}
+resources:
+- name: network
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props.mdx
new file mode 100644
index 0000000..5aa8467
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props.mdx
@@ -0,0 +1,48 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Defines properties for the resource and their desired state values
+
+
+
+```yaml {3}
+resources:
+ - name: logging_bucket
+ props:
+ - name: logging_bucket_name
+ value: "{{ stack_name }}-{{ stack_env }}-logging"
+...
+```
+
+
+
+:::tip
+
+You can also include environment (`stack_env`) selectors using `values` as shown here:
+
+```yaml {5}
+resources:
+ - name: example_vpc
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+```
+
+:::
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/description.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/description.mdx
new file mode 100644
index 0000000..8a6797b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/description.mdx
@@ -0,0 +1,18 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Property description
+
+
+
+```yaml {4}
+- name: public_address
+ props:
+ - name: address_name
+ description: web server public ip address
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/merge.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/merge.mdx
new file mode 100644
index 0000000..b6c4bf9
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/merge.mdx
@@ -0,0 +1,50 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+List(s) or object(s) from the context to merge with the current `resource.prop.value`. The `merge` values can be global variables defined in the `globals` section or exported variables from any preceding resources in the stack.
+
+:::note
+
+The `resource.prop.value` or `values` type and `resource.prop.merge` value(s) must be of the same type (both lists or both objects)
+
+:::
+
+
+
+```yaml {29}
+...
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge:
+ - global_tags
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/name.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/name.mdx
new file mode 100644
index 0000000..4310371
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/name.mdx
@@ -0,0 +1,17 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+The name of the property
+
+
+
+```yaml {3}
+- name: public_address
+ props:
+ - name: address_name
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/value.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/value.mdx
new file mode 100644
index 0000000..3d626f3
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/value.mdx
@@ -0,0 +1,20 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+> one of `value` or `values` must be supplied for a resource property
+
+The value for the property
+
+
+
+```yaml {4}
+- name: public_address
+ props:
+ - name: address_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-ip-addr"
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/values.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/values.mdx
new file mode 100644
index 0000000..e42d776
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/props/values.mdx
@@ -0,0 +1,26 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+> one of `value` or `values` must be supplied for a resource property
+
+Values for the property based upon the `stack_env` (stack environment selector)
+
+
+
+```yaml {4}
+- name: subnetwork
+ props:
+ - name: ip_cidr_range
+ values:
+ prd:
+ value: 192.168.0.0/16
+ sit:
+ value: 10.10.0.0/16
+ dev:
+ value: 10.240.0.0/24
+...
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/protected.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/protected.mdx
new file mode 100644
index 0000000..079c879
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/protected.mdx
@@ -0,0 +1,22 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Protected variables from the `resource`, these variables are masked in the output logs. Protected variables are a subset of `exports`
+
+
+
+```yaml {8}
+resources:
+ - name: container_registry
+...
+ exports:
+ - acr_url
+ - acr_username
+ - acr_password
+ protected:
+ - acr_password
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/skipvalidation.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/skipvalidation.mdx
new file mode 100644
index 0000000..51313c8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/skipvalidation.mdx
@@ -0,0 +1,45 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+When set to `true`, the `test` and `build` commands will bypass validation checks for this specific resource. This is particularly useful for resources that are initially created with placeholder values and later updated within the same stack.
+
+
+
+```yaml {19}
+resources:
+ - name: aws/iam/metastore_access_role
+ file: aws/iam/iam_role.iql
+ props:
+ - name: role_name
+ value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+ - name: assume_role_policy_document
+ value:
+ Version: "2012-10-17"
+ Statement:
+ - Effect: "Allow"
+ Principal:
+ AWS:
+ - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+ Action: "sts:AssumeRole"
+ Condition:
+ StringEquals:
+ sts:ExternalId: "0000" # Placeholder
+ skip_validation: true
+ exports:
+ - aws_iam_role_arn: metastore_access_role_arn
+```
+
+
+
+:::info
+
+- Use `skip_validation: true` when you need to create a resource with temporary configuration that will be updated later in the stack execution
+- Common use cases include:
+ - Creating IAM roles with placeholder external IDs that will be updated once another dependent resource is created
+ - Setting up initial placeholder credentials that will be modified in a subsequent step
+ - Creating resources with circular dependencies where initial validation would fail
+- This flag only affects the `test` and `build` commands
+
+:::
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/sql.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/sql.mdx
new file mode 100644
index 0000000..5e650f6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/sql.mdx
@@ -0,0 +1,86 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+For `command` and `query` type resources, you can include SQL statements directly in your resource manifest using the `sql` key. This allows you to write custom SQL commands without needing a separate IQL file.
+
+
+```yaml {5-11}
+resources:
+ - name: databricks_workspace/unitycatalog/grants
+ type: command
+ props: [...]
+ sql: |
+ /*+ update */
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET principal = '{{ principal }}',
+ privileges = {{ privileges }}
+ WHERE full_name = '{{ full_name }}' AND
+ securable_type = '{{ securable_type }}' AND
+ deployment_name = '{{ deployment_name }}';
+```
+
+
+:::info
+- The `sql` key is only supported for `command` and `query` type resources
+- For command resources, either `sql` or a corresponding IQL file with a `command` anchor must be provided, if `sql` is supplied in the manifest this will be used
+- The `sql` key accepts a string containing the SQL statement to execute
+- You can use multi-line strings with the YAML pipe (`|`) character for better readability
+- Template variables can be referenced using Jinja2 template syntax (`{{ variable }}`)
+:::
+
+## When to Use
+
+The `sql` key is particularly useful for:
+
+- Simple commands that don't warrant a separate IQL file
+- One-off operations specific to a particular deployment
+- Custom operations like granting permissions in Unity Catalog
+
+## Examples
+
+### Grant Permissions in Unity Catalog
+
+```yaml
+- name: databricks_workspace/unitycatalog/grants
+ type: command
+ props:
+ - name: full_name
+ value: "my-storage-credential"
+ - name: securable_type
+ value: "storage_credential"
+ - name: deployment_name
+ value: "{{ databricks_deployment_name }}"
+ - name: principal
+ value: "account users"
+ - name: privileges
+ value:
+ - "CREATE_EXTERNAL_LOCATION"
+ - "USE"
+ sql: |
+ /*+ update */
+ UPDATE databricks_workspace.unitycatalog.grants
+ SET principal = '{{ principal }}',
+ privileges = {{ privileges }}
+ WHERE full_name = '{{ full_name }}' AND
+ securable_type = '{{ securable_type }}' AND
+ deployment_name = '{{ deployment_name }}';
+```
+
+### Run a Custom Query with Conditional Logic
+
+You can combine the `sql` key with conditional processing:
+
+```yaml
+- name: custom_command
+ type: command
+ if: "environment == 'production'"
+ props:
+ - name: table_name
+ value: "{{ stack_name }}_audit_log"
+ sql: |
+ /*+ update */
+ INSERT INTO {{ table_name }}
+ VALUES ('{{ stack_name }}', '{{ stack_env }}', '{{ deployment_timestamp }}');
+```
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/type.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/type.mdx
new file mode 100644
index 0000000..009b5fc
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/resources/type.mdx
@@ -0,0 +1,27 @@
+import File from '@site/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Resource type, values include: `resource` (default), `query`, `script`, `multi`, `command`
+
+
+
+```yaml {3}
+resources:
+- name: get_subnets
+ type: query
+...
+```
+
+
+
+:::info
+
+- `resource` will typically include `create`, `update`, `delete`, `exists`, `statecheck` and `exports` methods and is intended for provisioning or configuring a resource
+- `query` is designed to return data using an `exports` method
+- `script` is used to incorporate an external script in your stack definition (non StackQL query)
+- `multi` is used when resources are created in a loop (such as creating 3 vms)
+- `command` is used to run a command (like an `UPDATE`, `INSERT`) during a `build` operation (which does not export any variables)
+
+:::
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/manifest_fields/version.mdx b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/version.mdx
new file mode 100644
index 0000000..122e292
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/manifest_fields/version.mdx
@@ -0,0 +1,14 @@
+import File from '/src/components/File';
+import LeftAlignedTable from '@site/src/components/LeftAlignedTable';
+
+
+
+Document version.
+
+
+
+```yaml
+version: 1
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/resource-query-files.md b/ref-python-packages/stackql-deploy/website/docs/resource-query-files.md
new file mode 100644
index 0000000..b798b30
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/resource-query-files.md
@@ -0,0 +1,284 @@
+---
+id: resource-query-files
+title: Resource Query Files
+hide_title: false
+hide_table_of_contents: false
+description: An overview of resource query files (.iql files) in StackQL Deploy, including the query types and query options used to provision, update, test, and de-provision resources in a stack.
+tags: []
+draft: false
+unlisted: false
+---
+
+import File from '/src/components/File';
+
+Resource query files include the StackQL query templates to provision, de-provision, update and test resources in your stack. Resource query files (`.iql` files) are located in the `resources` subdirectory of your project (stack) directory. The `resources` section of the [`stackql_manifest.yml`](manifest-file) file is used to supply these templates with the correct values for a given environment at deploy time.
+
+:::note
+
+`.iql` is used as a file extension for StackQL query files by convention. This convention originates from the original name for the StackQL project - InfraQL, plus `.sql` was taken...
+
+:::
+
+## Query types
+
+A resource query file (`.iql` file) typically contains multiple StackQL queries. Separate queries are demarcated by query anchors (or hints), such as `/*+ create */` or `/*+ update */`. These hints must be at the beginning of a line in the file, with the respective query following on the subsequent lines.
+
+:::tip
+
+StackQL follows the ANSI standard for SQL with some custom extensions. For more information on the StackQL grammar see the [StackQL docs](https://stackql.io/docs).
+
+:::
+
+The types of queries defined in resource files are detailed in the following sections.
+
+### `exists`
+
+`exists` queries are StackQL `SELECT` statements designed to test the existence of a resource by its designated identifier (does not test the desired state). This is used to determine whether a `create` (`INSERT`) or `update` (`UPDATE`) is required. An `exists` query needs to return a single row with a single field named `count`. A `count` value of `1` indicates that the resource exists, a value of `0` would indicate that the resource does not exist.
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+```
+
+`preflight` is an alias for `exists` for backwards compatibility; this will be deprecated in a future release.
+
+### `create`
+
+`create` queries are StackQL `INSERT` statements used to create resources that do not exist (in accordance with the `exists` query).
+
+```sql
+/*+ create */
+INSERT INTO google.compute.networks
+(
+ project,
+ data__name,
+ data__autoCreateSubnetworks,
+ data__routingConfig
+)
+SELECT
+'{{ project }}',
+'{{ vpc_name }}',
+false,
+'{"routingMode": "REGIONAL"}'
+```
+
+### `createorupdate`
+
+`createorupdate` queries can be StackQL `INSERT` or `UPDATE` statements, these queries are used for idempotent resources (as per the given provider if supported), for example:
+
+```sql
+/*+ createorupdate */
+INSERT INTO azure.network.virtual_networks(
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"addressSpace": {"addressPrefixes":["{{ vnet_cidr }}"]}}',
+ '{{ global_tags }}'
+```
+
+:::tip
+
+You can usually identify idempotent resources using the `SHOW METHODS` command for a given resource, in the below example you can see a `create_or_update` method mapped to StackQL `INSERT`:
+
+```plaintext {12}
+stackql >>show methods in azure.network.virtual_networks;
+|-------------------------------|--------------------------------|---------|
+| MethodName | RequiredParams | SQLVerb |
+|-------------------------------|--------------------------------|---------|
+| get | resourceGroupName, | SELECT |
+| | subscriptionId, | |
+| | virtualNetworkName | |
+|-------------------------------|--------------------------------|---------|
+| list | resourceGroupName, | SELECT |
+| | subscriptionId | |
+|-------------------------------|--------------------------------|---------|
+| create_or_update | resourceGroupName, | INSERT |
+| | subscriptionId, | |
+| | virtualNetworkName | |
+|-------------------------------|--------------------------------|---------|
+| delete | resourceGroupName, | DELETE |
+| | subscriptionId, | |
+| | virtualNetworkName | |
+|-------------------------------|--------------------------------|---------|
+| check_ip_address_availability | ipAddress, resourceGroupName, | EXEC |
+| | subscriptionId, | |
+| | virtualNetworkName | |
+|-------------------------------|--------------------------------|---------|
+```
+
+:::
+
+`createorupdate` queries can also be used if a resource is updating the state of a pre-existing resource, for example:
+
+```sql
+/*+ createorupdate */
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "NotificationConfiguration": transfer_notification_config
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ transfer_bucket_name }}';
+```
+
+### `delete`
+
+`delete` queries are StackQL `DELETE` statements used to de-provision resources in `teardown` operations.
+
+```sql
+/*+ delete */
+DELETE FROM google.compute.networks
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+```
+
+### `statecheck`
+
+`statecheck` queries are StackQL `SELECT` statements designed to test the desired state of a resource in an environment. Similar to `exists` queries, `statecheck` queries must return a single row with a single column named `count` with a value of `1` (the resource meets the desired state tests) or `0` (the resource is not in the desired state). As `statecheck` queries are usually run after `create` or `update` queries, it may be necessary to retry the query to account for the time it takes for the resource to be created or updated by the provider.
+
+```sql
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+AND autoCreateSubnetworks = false
+AND JSON_EXTRACT(routingConfig, '$.routingMode') = 'REGIONAL'
+```
+
+:::tip
+
+Useful functions for testing the desired state of a resource include [`JSON_EQUAL`](https://stackql.io/docs/language-spec/functions/json/json_equal), [`AWS_POLICY_EQUAL`](https://stackql.io/docs/language-spec/functions/json/aws_policy_equal), [`JSON_EXTRACT`](https://stackql.io/docs/language-spec/functions/json/json_extract) and [`JSON_EACH`](https://stackql.io/docs/language-spec/functions/json/json_each).
+
+:::
+
+`postdeploy` is an alias for `statecheck`, maintained for backwards compatibility; it will be deprecated in a future release.
+
+### `exports`
+
+`exports` queries are StackQL `SELECT` statements which export variables, typically used in subsequent (or dependent) resources. Columns exported in `exports` queries need to be specified in the [`exports` section of the `stackql_manifest.yml` file](manifest-file#resourceexports).
+
+```sql
+/*+ exports */
+SELECT
+'{{ vpc_name }}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+```
+
+## Query options
+
+Query options are used with query anchors to provide options for the execution of the query.
+
+### `retries` and `retry_delay`
+
+The `retries` and `retry_delay` query options are typically used for asynchronous or long running provider operations. This will allow the resource time to become available or reach the desired state without failing the stack.
+
+```sql
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.resources.resource_groups
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND location = '{{ location }}'
+AND JSON_EXTRACT(properties, '$.provisioningState') = 'Succeeded'
+```
+
+### `postdelete_retries` and `postdelete_retry_delay`
+
+The `postdelete_retries` and `postdelete_retry_delay` query options are used in `exists` queries and are implemented specifically for `teardown` operations, allowing time for the resource to be deleted by the provider.
+
+```sql
+/*+ exists, postdelete_retries=10, postdelete_retry_delay=5 */
+SELECT COUNT(*) as count FROM google.compute.instances
+WHERE name = '{{ instance_name }}'
+AND project = '{{ project }}'
+AND zone = '{{ zone }}'
+```
+
+## Template Filters
+
+StackQL Deploy leverages Jinja2 templating capabilities and extends them with custom filters for infrastructure provisioning. For a complete reference of all available filters, see the [__Template Filters__](template-filters) documentation.
+
+Here are a few commonly used filters:
+
+- `from_json` - Converts JSON strings to Python objects for iteration and manipulation
+- `tojson` - Converts Python objects back to JSON strings
+- `sql_escape` - Properly escapes SQL string literals for nested SQL statements
+- `generate_patch_document` - Creates RFC6902-compliant patch documents for AWS resources
+- `base64_encode` - Encodes strings as base64 for API fields requiring binary data
+
+## Examples
+
+### `resource` type example
+
+This example is a `resource` file for a public IP address in a Google stack.
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ create */
+INSERT INTO google.compute.addresses
+(
+ project,
+ region,
+ data__name
+)
+SELECT
+'{{ project }}',
+'{{ region }}',
+'{{ address_name }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ delete */
+DELETE FROM google.compute.addresses
+WHERE address = '{{ address_name }}' AND project = '{{ project }}'
+AND region = '{{ region }}'
+
+/*+ exports */
+SELECT address
+FROM google.compute.addresses
+WHERE name = '{{ address_name }}'
+AND project = '{{ project }}'
+AND region = '{{ region }}'
+```
+
+
+
+### `query` type example
+
+This `query` example demonstrates retrieving the KMS key id for a given key alias in AWS.
+
+
+
+```sql
+/*+ exports, retries=5, retry_delay=5 */
+SELECT
+target_key_id as logging_kms_key_id
+FROM aws.kms.aliases
+WHERE region = '{{ region }}'
+AND data__Identifier = 'alias/{{ stack_name }}/{{ stack_env }}/logging';
+```
+
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/template-filters.md b/ref-python-packages/stackql-deploy/website/docs/template-filters.md
new file mode 100644
index 0000000..2967586
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/template-filters.md
@@ -0,0 +1,183 @@
+---
+id: template-filters
+title: Template Filters
+hide_title: false
+hide_table_of_contents: false
+description: Custom and built-in Jinja2 filters available in StackQL Deploy for template processing
+tags: []
+draft: false
+unlisted: false
+---
+
+import File from '/src/components/File';
+
+# Template Filters
+
+StackQL Deploy leverages Jinja2 templating capabilities and extends them with custom filters specifically designed for infrastructure provisioning use cases. These filters help transform data between formats, encode values, generate specialized document formats, and perform other common operations required in IaC configurations.
+
+## Available Filters
+
+### `from_json`
+
+Converts a JSON string to a Python dictionary or list. This is commonly used to enable iteration over complex data structures in templates.
+
+**Example usage:**
+
+```sql
+{% for network_interface in network_interfaces | from_json %}
+INSERT INTO google.compute.instances
+ (
+ /* fields... */
+ )
+ SELECT
+'{{ instance_name_prefix }}-{{ loop.index }}',
+/* other values... */
+'[ {{ network_interface | tojson }} ]';
+{% endfor %}
+```
+
+### `tojson`
+
+A built-in Jinja2 filter that converts a Python dictionary or list into a JSON string. Often used in conjunction with `from_json` when working with complex data structures.
+
+**Example usage:**
+
+```sql
+'[ {{ network_interface | tojson }} ]'
+```
+
+### `generate_patch_document`
+
+Generates a patch document according to [RFC6902](https://datatracker.ietf.org/doc/html/rfc6902), primarily designed for the AWS Cloud Control API which requires patch documents for resource updates.
+
+**Example usage:**
+
+```sql
+update aws.s3.buckets
+set data__PatchDocument = string('{{ {
+ "NotificationConfiguration": transfer_notification_config
+ } | generate_patch_document }}')
+WHERE
+region = '{{ region }}'
+AND data__Identifier = '{{ bucket_name }}';
+```
+
+### `base64_encode`
+
+Encodes a string as base64, which is commonly required for certain API fields that accept binary data.
+
+**Example usage:**
+
+```sql
+INSERT INTO aws.ec2.instances (
+ /* fields... */
+ UserData,
+ region
+)
+SELECT
+ /* values... */
+ '{{ user_data | base64_encode }}',
+ '{{ region }}';
+```
+
+### `sql_list`
+
+Converts a Python list or a JSON array string into a SQL-compatible list format with proper quoting, suitable for use in SQL IN clauses.
+
+**Example usage:**
+
+```sql
+SELECT * FROM aws.ec2.instances
+WHERE region = '{{ region }}'
+AND InstanceId IN {{ instance_ids | sql_list }}
+```
+
+### `sql_escape`
+
+Escapes a string for use as a SQL string literal by doubling any single quotes. This is particularly useful for nested SQL statements where quotes need special handling.
+
+**Example usage:**
+
+```sql
+INSERT INTO snowflake.sqlapi.statements (
+data__statement,
+/* other fields... */
+)
+SELECT
+'{{ statement | sql_escape }}',
+/* other values... */
+;
+```
+
+### `merge_lists`
+
+Merges two lists (or JSON-encoded list strings) into a single list with unique items.
+
+**Example usage:**
+
+```sql
+{% set combined_policies = default_policies | merge_lists(custom_policies) %}
+INSERT INTO aws.iam.policies (
+ /* fields... */
+ PolicyDocument,
+ /* other fields... */
+)
+SELECT
+ /* values... */
+ '{{ combined_policies | tojson }}',
+ /* other values... */
+;
+```
+
+### `merge_objects`
+
+Merges two dictionaries (or JSON-encoded object strings) into a single dictionary. In case of duplicate keys, values from the second dictionary take precedence.
+
+**Example usage:**
+
+```sql
+{% set complete_config = base_config | merge_objects(environment_specific_config) %}
+INSERT INTO aws.lambda.functions (
+ /* fields... */
+ Environment,
+ /* other fields... */
+)
+SELECT
+ /* values... */
+ '{{ complete_config | tojson }}',
+ /* other values... */
+;
+```
+
+## Global Functions
+
+### `uuid`
+
+Generates a random UUID (version 4). Useful for creating unique identifiers.
+
+**Example usage:**
+
+```sql
+INSERT INTO aws.s3.buckets (
+ /* fields... */
+ data__BucketName,
+ /* other fields... */
+)
+SELECT
+ /* values... */
+ '{{ stack_name }}-{{ uuid() }}',
+ /* other values... */
+;
+```
+
+## Filter Chaining
+
+Filters can be chained together to perform multiple transformations in sequence:
+
+```sql
+'{{ user_config | from_json | merge_objects(default_config) | tojson | base64_encode }}'
+```
+
+## Custom Filter Development
+
+The StackQL Deploy filtering system is extensible. If you need additional filters for your specific use case, you can contribute to the project by adding new filters to the `lib/filters.py` file.
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/docs/template-library/aws/vpc-and-ec2-instance.md b/ref-python-packages/stackql-deploy/website/docs/template-library/aws/vpc-and-ec2-instance.md
new file mode 100644
index 0000000..9f93d6b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/template-library/aws/vpc-and-ec2-instance.md
@@ -0,0 +1,360 @@
+---
+id: vpc-and-ec2-instance
+title: AWS VPC and EC2 Instance
+hide_title: false
+hide_table_of_contents: false
+description: A quick overview of how to get started with StackQL Deploy, including basic concepts and the essential components of a deployment.
+tags: []
+draft: false
+unlisted: false
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+In this example, we'll demonstrate how to set up a simple VPC with an EC2 instance in AWS using `stackql-deploy`. This setup is ideal for getting started with basic networking and compute resources on AWS.
+
+
+
+
+The EC2 instance is bootstrapped with a web server that serves a simple page using the EC2 instance `UserData` property.
+
+## Deploying the Stack
+
+> install `stackql-deploy` using `pip install stackql-deploy` (see [__Installing stackql-deploy__](/getting-started#installing-stackql-deploy)), set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables, that's it!
+
+Once you have setup your project directory (your "stack"), you can use the `stackql-deploy` cli application to deploy, test or teardown the stack in any given environment. To deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build aws-stack sit \
+-e AWS_REGION=ap-southeast-2
+```
+Use the `--dry-run` flag to view the queries to be run without actually running them. Here's an example of a `dry-run` operation for a `prd` environment:
+
+```bash
+stackql-deploy build aws-stack prd \
+-e AWS_REGION=ap-southeast-2 \
+--dry-run
+```
+
+## stackql_manifest.yml
+
+The `stackql_manifest.yml` defines the resources in your stack and their property values (for one or more environments).
+
+
+ Click to expand the stackql_manifest.yml file
+
+```yaml
+version: 1
+name: "aws-stack"
+description: description for "aws-stack"
+providers:
+ - aws
+globals:
+ - name: region
+ description: aws region
+ value: "{{ AWS_REGION }}"
+ - name: global_tags
+ value:
+ - Key: Provisioner
+ Value: stackql
+ - Key: StackName
+ Value: "{{ stack_name }}"
+ - Key: StackEnv
+ Value: "{{ stack_env }}"
+resources:
+ - name: example_vpc
+ props:
+ - name: vpc_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ - name: vpc_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ merge:
+ - global_tags
+ exports:
+ - vpc_id
+ - vpc_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_cidr_block
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ - name: subnet_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-subnet"
+ merge: ['global_tags']
+ exports:
+ - subnet_id
+ - availability_zone
+ - name: example_inet_gateway
+ props:
+ - name: inet_gateway_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-inet-gateway"
+ merge: ['global_tags']
+ exports:
+ - internet_gateway_id
+ - name: example_inet_gw_attachment
+ props: []
+ - name: example_route_table
+ props:
+ - name: route_table_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-route-table"
+ merge: ['global_tags']
+ exports:
+ - route_table_id
+ - name: example_subnet_rt_assn
+ props: []
+ exports:
+ - route_table_assn_id
+ - name: example_inet_route
+ props: []
+ exports:
+ - inet_route_indentifer
+ - name: example_security_group
+ props:
+ - name: group_description
+ value: "web security group for {{ stack_name }} ({{ stack_env }} environment)"
+ - name: group_name
+ value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ - name: sg_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-web-sg"
+ merge: ['global_tags']
+ - name: security_group_ingress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow HTTP traffic
+ FromPort: 80
+ ToPort: 80
+ IpProtocol: "tcp"
+ - CidrIp: "{{ vpc_cidr_block }}"
+ Description: Allow SSH traffic from the internal network
+ FromPort: 22
+ ToPort: 22
+ IpProtocol: "tcp"
+ - name: security_group_egress
+ value:
+ - CidrIp: "0.0.0.0/0"
+ Description: Allow all outbound traffic
+ FromPort: 0
+ ToPort: 0
+ IpProtocol: "-1"
+ exports:
+ - security_group_id
+ - name: example_web_server
+ props:
+ - name: instance_name
+ value: "{{ stack_name }}-{{ stack_env }}-instance"
+ - name: ami_id
+ value: ami-030a5acd7c996ef60
+ - name: instance_type
+ value: t2.micro
+ - name: instance_subnet_id
+ value: "{{ subnet_id }}"
+ - name: sg_ids
+ value:
+ - "{{ security_group_id }}"
+ - name: user_data
+ value: |
+ #!/bin/bash
+ yum update -y
+ yum install -y httpd
+ systemctl start httpd
+ systemctl enable httpd
+ echo 'StackQL on AWS ' > /var/www/html/index.html
+ echo '' >> /var/www/html/index.html
+ - name: instance_tags
+ value:
+ - Key: Name
+ Value: "{{ stack_name }}-{{ stack_env }}-instance"
+ merge: ['global_tags']
+ exports:
+ - instance_id
+ - public_dns_name
+```
+
+
+
+## Resource Query Files
+
+Resource query files are templates which are used to create, update, test and delete resources in your stack. Here are some example resource query files in this example:
+
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.vpcs (
+ CidrBlock,
+ Tags,
+ EnableDnsSupport,
+ EnableDnsHostnames,
+ region
+)
+SELECT
+ '{{ vpc_cidr_block }}',
+ '{{ vpc_tags }}',
+ true,
+ true,
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT vpc_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ vpc_cidr_block }}';
+
+/*+ exports */
+SELECT vpc_id, vpc_cidr_block FROM
+(
+SELECT vpc_id, cidr_block as "vpc_cidr_block",
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.vpc_tags
+WHERE region = '{{ region }}'
+AND cidr_block = '{{ vpc_cidr_block }}'
+GROUP BY vpc_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ delete */
+DELETE FROM aws.ec2.vpcs
+WHERE data__Identifier = '{{ vpc_id }}'
+AND region = '{{ region }}';
+```
+
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t;
+
+/*+ create */
+INSERT INTO aws.ec2.subnets (
+ VpcId,
+ CidrBlock,
+ MapPublicIpOnLaunch,
+ Tags,
+ region
+)
+SELECT
+ '{{ vpc_id }}',
+ '{{ subnet_cidr_block }}',
+ true,
+ '{{ subnet_tags }}',
+ '{{ region }}';
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM
+(
+SELECT subnet_id,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ exports */
+SELECT subnet_id, availability_zone FROM
+(
+SELECT subnet_id,
+availability_zone,
+cidr_block,
+json_group_object(tag_key, tag_value) as tags
+FROM aws.ec2.subnet_tags
+WHERE region = '{{ region }}'
+AND vpc_id = '{{ vpc_id }}'
+GROUP BY subnet_id
+HAVING json_extract(tags, '$.Provisioner') = 'stackql'
+AND json_extract(tags, '$.StackName') = '{{ stack_name }}'
+AND json_extract(tags, '$.StackEnv') = '{{ stack_env }}'
+) t
+WHERE cidr_block = '{{ subnet_cidr_block }}';
+
+/*+ delete */
+DELETE FROM aws.ec2.subnets
+WHERE data__Identifier = '{{ subnet_id }}'
+AND region = '{{ region }}';
+```
+
+
+
+
+## More Information
+
+The complete code for this example stack is available [__here__](https://github.com/stackql/stackql-deploy/tree/main/examples/aws/aws-stack). For more information on how to use StackQL and StackQL Deploy, visit:
+
+- [`aws` provider docs](https://stackql.io/providers/aws)
+- [`stackql`](https://github.com/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
diff --git a/ref-python-packages/stackql-deploy/website/docs/template-library/azure/simple-vnet-and-vm.md b/ref-python-packages/stackql-deploy/website/docs/template-library/azure/simple-vnet-and-vm.md
new file mode 100644
index 0000000..c2a22ef
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/template-library/azure/simple-vnet-and-vm.md
@@ -0,0 +1,305 @@
+---
+id: simple-vnet-and-vm
+title: VNet and Virtual Machine
+hide_title: false
+hide_table_of_contents: false
+description: A quick overview of how to get started with StackQL Deploy, including basic concepts and the essential components of a deployment on Azure.
+tags: []
+draft: false
+unlisted: false
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+In this example, we'll demonstrate how to set up a simple Virtual Network (VNet) with a Virtual Machine (VM) in Azure using `stackql-deploy`. This setup is ideal for getting started with basic networking and compute resources on Azure.
+
+
+
+
+
+The Virtual Machine is bootstrapped with a web server that serves a simple page using the Azure Custom Script Extension.
+
+## Deploying the Stack
+
+> Install `stackql-deploy` using `pip install stackql-deploy` (see [__Installing stackql-deploy__](/getting-started#installing-stackql-deploy)), set the `AZURE_SUBSCRIPTION_ID`, `AZURE_CLIENT_ID`, `AZURE_CLIENT_SECRET`, and `AZURE_TENANT_ID` environment variables, and you're ready to go!
+
+Once you have set up your project directory (your "stack"), you can use the `stackql-deploy` CLI application to deploy, test, or teardown the stack in any given environment. To deploy the stack to an environment labeled `sit`, run the following:
+
+```bash
+stackql-deploy build azure-stack sit \
+-e AZURE_SUBSCRIPTION_ID=$AZURE_SUBSCRIPTION_ID
+```
+
+Use the `--dry-run` flag to view the queries to be run without actually running them. Hereās an example of a `dry-run` operation for a `prd` environment:
+
+```bash
+stackql-deploy build azure-stack prd \
+-e AZURE_SUBSCRIPTION_ID=$AZURE_SUBSCRIPTION_ID \
+--dry-run
+```
+
+## stackql_manifest.yml
+
+The `stackql_manifest.yml` defines the resources in your stack and their property values (for one or more environments).
+
+
+ Click to expand the stackql_manifest.yml file
+
+```yaml
+version: 1
+name: "azure-stack"
+description: description for "azure-stack"
+providers:
+ - azure
+globals:
+ - name: subscription_id
+ description: azure subscription id
+ value: "{{ AZURE_SUBSCRIPTION_ID }}"
+ - name: location
+ description: default location for resources
+ value: eastus
+ - name: global_tags
+ value:
+ provisioner: stackql
+ stackName: "{{ stack_name }}"
+ stackEnv: "{{ stack_env }}"
+resources:
+ - name: example_resource_group
+ props:
+ - name: resource_group_name
+ value: "{{ stack_name }}-{{ stack_env }}-rg"
+ exports:
+ - resource_group_name
+ - name: example_vnet
+ props:
+ - name: vnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-vnet"
+ - name: vnet_cidr_block
+ values:
+ prd:
+ value: "10.0.0.0/16"
+ sit:
+ value: "10.1.0.0/16"
+ dev:
+ value: "10.2.0.0/16"
+ exports:
+ - vnet_name
+ - vnet_cidr_block
+ - name: example_subnet
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-subnet-1"
+ - name: subnet_cidr
+ values:
+ prd:
+ value: "10.0.1.0/24"
+ sit:
+ value: "10.1.1.0/24"
+ dev:
+ value: "10.2.1.0/24"
+ exports:
+ - subnet_name
+ - subnet_id
+ - name: example_public_ip
+ props:
+ - name: public_ip_name
+ value: "{{ stack_name }}-{{ stack_env }}-public-ip"
+ exports:
+ - public_ip_name
+ - public_ip_id
+ - public_ip_address
+ - name: example_nsg
+ props:
+ - name: nsg_name
+ value: "{{ stack_name }}-{{ stack_env }}-nsg"
+ - name: security_rules
+ value:
+ - name: AllowHTTP
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 100
+ sourceAddressPrefix: "*"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "8080"
+ - name: AllowSSH
+ properties:
+ access: Allow
+ protocol: Tcp
+ direction: Inbound
+ priority: 200
+ sourceAddressPrefix: "{{ vnet_cidr_block }}"
+ sourcePortRange: "*"
+ destinationAddressPrefix: "*"
+ destinationPortRange: "22"
+ exports:
+ - network_security_group_id
+ - name: example_nic
+ props:
+ - name: nic_name
+ value: "{{ stack_name }}-{{ stack_env }}-nic"
+ - name: nic_ip_config
+ value:
+ name: ipconfig1
+ properties:
+ subnet:
+ id: "{{ subnet_id }}"
+ privateIPAllocationMethod: Dynamic
+ publicIPAddress:
+ id: "{{ public_ip_id }}"
+ exports:
+ - network_interface_id
+ - name: example_web_server
+ props:
+ - name: vm_name
+ value: "{{ stack_name }}-{{ stack_env }}-vm"
+ - name: hardwareProfile
+ value:
+ vmSize: Standard_DS1_v2
+ - name: storageProfile
+ value:
+ imageReference:
+ publisher: Canonical
+ offer: UbuntuServer
+ sku: 18.04-LTS
+ version: latest
+ osDisk:
+ name: "{{ stack_name }}-{{ stack_env }}-vm-disk1"
+ createOption: FromImage
+ managedDisk:
+ storageAccountType: Standard_LRS
+ diskSizeGB: 30
+ - name: osProfile
+ value:
+ computerName: myVM-{{ stack_name }}-{{ stack_env }}
+ adminUsername: azureuser
+ adminPassword: Password1234!
+ linuxConfiguration:
+ disablePasswordAuthentication: false
+ - name: networkProfile
+ value:
+ networkInterfaces:
+ - id: "{{ network_interface_id }}"
+ exports:
+ - vm_name
+ - vm_id
+ - name: example_vm_ext
+ props:
+ - name: vm_ext_name
+ value: "{{ stack_name }}-{{ stack_env }}-microsoft.custom-script-linux"
+ - name: command_to_execute
+ value: |
+ wget -O index.html https://raw.githubusercontent.com/stackql/stackql-deploy/main/examples/azure/azure-stack/resources/hello-stackql.html && nohup busybox httpd -f -p 8080 &
+ exports:
+ - web_url
+```
+
+
+
+## Resource Query Files
+
+Resource query files are templates used to create, update, test, and delete resources in your stack. Here are some example resource query files for this Azure example:
+
+
+
+
+```sql
+/*+ createorupdate */
+INSERT INTO azure.network.virtual_networks(
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__location,
+ data__properties,
+ data__tags
+)
+SELECT
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{{ location }}',
+ '{"addressSpace": {"addressPrefixes":["{{ vnet_cidr_block }}"]}}',
+ '{{ global_tags }}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressSpace.addressPrefixes[0]') = '{{ vnet_cidr_block }}'
+
+/*+ exports */
+SELECT '{{ vnet_name }}' as vnet_name,
+'{{ vnet_cidr_block }}' as vnet_cidr_block
+
+/*+ delete */
+DELETE FROM azure.network.virtual_networks
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+```
+
+
+
+
+```sql
+/*+ createorupdate */
+INSERT INTO azure.network.subnets(
+ subnetName,
+ virtualNetworkName,
+ resourceGroupName,
+ subscriptionId,
+ data__properties
+)
+SELECT
+ '{{ subnet_name }}',
+ '{{ vnet_name }}',
+ '{{ resource_group_name }}',
+ '{{ subscription_id }}',
+ '{"addressPrefix": "{{ subnet_cidr }}"}'
+
+/*+ statecheck, retries=5, retry_delay=5 */
+SELECT COUNT(*) as count FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+AND JSON_EXTRACT(properties, '$.addressPrefix') = '{{ subnet_cidr }}'
+
+/*+ exports */
+SELECT '{{ subnet_name }}' as subnet_name,
+id as subnet_id
+FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+
+/*+ delete */
+DELETE FROM azure.network.subnets
+WHERE subscriptionId = '{{ subscription_id }}'
+AND resourceGroupName = '{{ resource_group_name }}'
+AND virtualNetworkName = '{{ vnet_name }}'
+AND subnetName = '{{ subnet_name }}'
+```
+
+
+
+
+## More Information
+
+The complete code for this example stack is available [__here__](https://github.com/stackql/stackql-deploy/tree/main/examples/azure/azure-stack). For more information on how to use StackQL and StackQL Deploy, visit:
+
+- [`azure` provider docs](https://stackql.io/providers/azure)
+- [`stackql`](https://github.com/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
diff --git a/ref-python-packages/stackql-deploy/website/docs/template-library/google/k8s-the-hard-way.md b/ref-python-packages/stackql-deploy/website/docs/template-library/google/k8s-the-hard-way.md
new file mode 100644
index 0000000..1dcf2b4
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docs/template-library/google/k8s-the-hard-way.md
@@ -0,0 +1,430 @@
+---
+id: k8s-the-hard-way
+title: K8s the Hard Way
+hide_title: false
+hide_table_of_contents: false
+description: A step-by-step guide to setting up Kubernetes the Hard Way using StackQL Deploy, based on the popular project by Kelsey Hightower.
+tags: [kubernetes, stackql, google cloud, devops, infrastructure, IaC]
+draft: false
+unlisted: false
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+This guide is based on the [Kubernetes the Hard Way](https://github.com/kelseyhightower/kubernetes-the-hard-way) project by Kelsey Hightower, adapted to be deployed using `stackql-deploy`.
+
+## About `stackql-deploy`
+
+[`stackql-deploy`](https://pypi.org/project/stackql-deploy/) is a multi-cloud deployment automation and testing framework that serves as an alternative to Terraform and other IaC tools. Inspired by [`dbt`](https://www.getdbt.com/), `stackql-deploy` offers several advantages:
+
+- Declarative framework
+- No state file (state is determined from the target environment)
+- Multi-cloud/omni-cloud ready
+- Includes resource tests, which can include secure configuration tests
+
+## Installing `stackql-deploy`
+
+To install `stackql-deploy`, use the following command:
+
+```bash
+pip install stackql-deploy
+```
+For more information on installing `stackql-deploy` see [__Installing stackql-deploy__](/getting-started#installing-stackql-deploy).
+
+## Deploying Using `stackql-deploy`
+
+Here's an example of deploying, testing, and tearing down this example stack:
+
+```bash
+export GOOGLE_CREDENTIALS=$(cat ./creds.json)
+
+# Deploy a stack in the prd environment
+stackql-deploy build \
+k8s-the-hard-way \
+prd \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run \
+--log-level DEBUG
+
+# Test a stack in the sit environment
+stackql-deploy test \
+k8s-the-hard-way \
+sit \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+
+# Teardown a stack in the dev environment
+stackql-deploy teardown \
+k8s-the-hard-way \
+dev \
+-e GOOGLE_PROJECT=stackql-k8s-the-hard-way-demo \
+--dry-run
+```
+
+## stackql_manifest.yml
+
+The `stackql_manifest.yml` file defines the resources in your stack and their property values (for one or more environments).
+
+
+ Click to expand the stackql_manifest.yml file
+
+```yaml
+version: 1
+name: kubernetes-the-hard-way
+description: stackql-deploy example for kubernetes-the-hard-way
+providers:
+ - google
+globals:
+ - name: project
+ description: google project name
+ value: "{{ GOOGLE_PROJECT }}"
+ - name: region
+ value: australia-southeast1
+ - name: default_zone
+ value: australia-southeast1-a
+resources:
+ - name: network
+ description: vpc network for k8s-the-hard-way sample app
+ props:
+ - name: vpc_name
+ description: name for the vpc
+ value: "{{ stack_name }}-{{ stack_env }}-vpc"
+ exports:
+ - vpc_name
+ - vpc_link
+ - name: subnetwork
+ props:
+ - name: subnet_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-subnet"
+ - name: ip_cidr_range
+ values:
+ prd:
+ value: 192.168.0.0/16
+ sit:
+ value: 10.10.0.0/16
+ dev:
+ value: 10.240.0.0/24
+ exports:
+ - subnet_name
+ - subnet_link
+ - name: public_address
+ props:
+ - name: address_name
+ value: "{{ stack_name }}-{{ stack_env }}-{{ region }}-ip-addr"
+ exports:
+ - address
+ - name: controller_instances
+ file: instances.iql
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-controller"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "controller"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.10", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.11", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.12", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - name: worker_instances
+ file: instances.iql
+ props:
+ - name: num_instances
+ value: 3
+ - name: instance_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-worker"
+ - name: disks
+ value:
+ - autoDelete: true
+ boot: true
+ initializeParams:
+ diskSizeGb: 10
+ sourceImage: https://compute.googleapis.com/compute/v1/projects/ubuntu-os-cloud/global/images/family/ubuntu-2004-lts
+ mode: READ_WRITE
+ type: PERSISTENT
+ - name: machine_type
+ value: "https://compute.googleapis.com/compute/v1/projects/{{ project }}/zones/{{ default_zone }}/machineTypes/f1-micro"
+ - name: scheduling
+ value: {automaticRestart: true}
+ - name: tags
+ value: {items: ["{{ stack_name }}", "worker"]}
+ - name: service_accounts
+ value:
+ - email: default
+ scopes:
+ - https://www.googleapis.com/auth/compute
+ - https://www.googleapis.com/auth/devstorage.read_only
+ - https://www.googleapis.com/auth/logging.write
+ - https://www.googleapis.com/auth/monitoring
+ - https://www.googleapis.com/auth/service.management.readonly
+ - https://www.googleapis.com/auth/servicecontrol
+ - name: network_interfaces
+ values:
+ dev:
+ value:
+ - {networkIP: "10.240.0.20", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.21", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - {networkIP: "10.240.0.22", subnetwork: "{{ subnet_link }}", accessConfigs: [{name: external-nat, type: ONE_TO_ONE_NAT}]}
+ - name: health_checks
+ props:
+ - name: health_check_name
+ value: kubernetes
+ - name: health_check_interval_sec
+ value: 5
+ - name: health_check_description
+ value: Kubernetes Health Check
+ - name: health_check_timeout_sec
+ value: 5
+ - name: health_check_healthy_threshold
+ value: 2
+ - name: health_check_unhealthy_threshold
+ value: 2
+ - name: health_check_host
+ value: kubernetes.default.svc.cluster.local
+ - name: health_check_port
+ value: 80
+ - name: health_check_path
+ value: /healthz
+ exports:
+ - health_check_link
+ - name: internal_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-internal-fw"
+    - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["10.240.0.0/24", "10.200.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}, {IPProtocol: udp}, {IPProtocol: icmp}]
+ - name: external_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-external-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["0.0.0.0/0"]
+ - name: fw_allowed
+      value: [{IPProtocol: tcp, ports: ["22"]}, {IPProtocol: tcp, ports: ["6443"]}, {IPProtocol: icmp}]
+ - name: health_check_firewall
+ file: firewalls.iql
+ props:
+ - name: fw_name
+ value: "{{ stack_name }}-{{ stack_env }}-allow-health-check-fw"
+ - name: fw_direction
+ value: INGRESS
+ - name: fw_source_ranges
+ values:
+ dev:
+ value: ["209.85.152.0/22", "209.85.204.0/22", "35.191.0.0/16"]
+ - name: fw_allowed
+ value: [{IPProtocol: tcp}]
+ - name: get_controller_instances
+ type: query
+ exports:
+ - controller_instances
+ - name: target_pool
+ props:
+ - name: target_pool_name
+ value: "{{ stack_name }}-{{ stack_env }}-target-pool"
+ - name: target_pool_session_affinity
+ value: NONE
+ - name: target_pool_health_checks
+ value: ["{{ health_check_link }}"]
+ - name: target_pool_instances
+ value: "{{ controller_instances }}"
+ exports:
+ - target_pool_link
+ - name: forwarding_rule
+ props:
+ - name: forwarding_rule_name
+ value: "{{ stack_name }}-{{ stack_env }}-forwarding-rule"
+ - name: forwarding_rule_load_balancing_scheme
+ value: EXTERNAL
+ - name: forwarding_rule_port_range
+ value: 6443
+ - name: routes
+ props:
+ - name: num_routes
+ value: 3
+ - name: route_name_prefix
+ value: "{{ stack_name }}-{{ stack_env }}-route"
+ - name: route_priority
+ value: 1000
+ - name: route_data
+ values:
+ dev:
+ value:
+ - {dest_range: "10.200.0.0/24", next_hop_ip: "10.240.0.20"}
+ - {dest_range: "10.200.1.0/24", next_hop_ip: "10.240.0.21"}
+ - {dest_range: "10.200.2.0/24", next_hop_ip: "10.240.0.22"}
+```
+
+
+
+## Resource Query Files
+
+Here are some example resource query files used to create, update, test, and delete resources in this stack:
+
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+
+/*+ create */
+INSERT INTO google.compute.networks
+(
+ project,
+ data__name,
+ data__autoCreateSubnetworks,
+ data__routingConfig
+)
+SELECT
+'{{ project }}',
+'{{ vpc_name }}',
+false,
+'{"routingMode": "REGIONAL"}'
+
+/*+ update */
+UPDATE google.compute.networks
+SET data__autoCreateSubnetworks = false,
+data__routingConfig = '{"routingMode": "REGIONAL"}'
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+AND autoCreateSubnetworks = false
+AND JSON_EXTRACT(routingConfig, '$.routingMode') = 'REGIONAL'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.networks
+WHERE network = '{{ vpc_name }}' AND project = '{{ project }}'
+
+/*+ exports */
+SELECT
+'{{ vpc_name }}' as vpc_name,
+selfLink as vpc_link
+FROM google.compute.networks
+WHERE name = '{{ vpc_name }}'
+AND project = '{{ project }}'
+```
+
+
+
+
+```sql
+/*+ exists */
+SELECT COUNT(*) as count FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+
+/*+ create */
+INSERT INTO google.compute.firewalls
+(
+ project,
+ data__name,
+ data__network,
+ data__direction,
+ data__sourceRanges,
+ data__allowed
+)
+SELECT
+ '{{ project }}',
+  '{{ fw_name }}',
+ '{{ vpc_link }}',
+ '{{ fw_direction }}',
+ '{{ fw_source_ranges }}',
+ '{{ fw_allowed }}'
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT COUNT(*) as count FROM
+(
+SELECT
+network = '{{ vpc_link }}' as test_network,
+direction = '{{ fw_direction }}' as test_direction,
+JSON_EQUAL(allowed, '{{ fw_allowed }}') as test_allowed,
+JSON_EQUAL(sourceRanges, '{{ fw_source_ranges }}') as test_source_ranges
+FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND name = '{{ fw_name }}'
+) t
+WHERE test_network = 1
+AND test_direction = 1
+AND test_allowed = 1
+AND test_source_ranges = 1;
+
+/*+ update */
+UPDATE google.compute.firewalls
+SET
+ data__network = '{{ vpc_link }}',
+ data__direction = '{{ fw_direction }}',
+ data__sourceRanges = '{{ fw_source_ranges }}',
+ data__allowed = '{{ fw_allowed }}'
+WHERE firewall = '{{ fw_name }}'
+AND project = '{{ project }}'
+
+/*+ delete, retries=20, retry_delay=10 */
+DELETE FROM google.compute.firewalls
+WHERE project = '{{ project }}'
+AND firewall = '{{ fw_name }}'
+```
+
+
+
+
+## More Information
+
+The complete code for this example stack is available [__here__](https://github.com/stackql/stackql-deploy/tree/main/examples/k8s-the-hard-way). For more information on how to use StackQL and StackQL Deploy, visit:
+
+- [`google` provider docs](https://stackql.io/providers/google)
+- [`stackql`](https://github.com/stackql)
+- [`stackql-deploy` PyPI home page](https://pypi.org/project/stackql-deploy/)
+- [`stackql-deploy` GitHub repo](https://github.com/stackql/stackql-deploy)
diff --git a/ref-python-packages/stackql-deploy/website/docusaurus.config.js b/ref-python-packages/stackql-deploy/website/docusaurus.config.js
new file mode 100644
index 0000000..dc10c91
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/docusaurus.config.js
@@ -0,0 +1,229 @@
+// @ts-check
+// `@type` JSDoc annotations allow editor autocompletion and type checking
+// (when paired with `@ts-check`).
+// There are various equivalent ways to declare your Docusaurus config.
+// See: https://docusaurus.io/docs/api/docusaurus-config
+
+import {themes as prismThemes} from 'prism-react-renderer';
+
+const providerDropDownListItems = [
+ {
+ label: 'AWS',
+ to: '/providers/aws',
+ },
+ {
+ label: 'Azure',
+ to: '/providers/azure',
+ },
+ {
+ label: 'Google',
+ to: '/providers/google',
+ },
+ {
+ label: 'Databricks',
+ to: '/providers/databricks',
+ },
+ {
+ label: 'Snowflake',
+ to: '/providers/snowflake',
+ },
+ {
+ label: 'Confluent',
+ to: '/providers/confluent',
+ },
+ {
+ label: 'Okta',
+ to: '/providers/okta',
+ },
+ {
+ label: 'GitHub',
+ to: '/providers/github',
+ },
+ {
+ label: 'OpenAI',
+ to: '/providers/openai',
+ },
+ {
+ label: '... More',
+ to: '/providers',
+ },
+];
+
+const footerStackQLItems = [
+ {
+ label: 'Documentation',
+ to: '/stackqldocs',
+ },
+ {
+ label: 'Install',
+ to: '/install',
+ },
+ {
+ label: 'Contact us',
+ to: '/contact-us',
+ },
+];
+
+const footerMoreItems = [
+ {
+ label: 'Providers',
+ to: '/providers',
+ },
+ {
+ label: 'stackql-deploy',
+ to: '/stackql-deploy',
+ },
+ {
+ label: 'Blog',
+ to: '/blog',
+ },
+ {
+ label: 'Tutorials',
+ to: '/tutorials',
+ },
+];
+
+
+/** @type {import('@docusaurus/types').Config} */
+const config = {
+ title: 'StackQL Deploy',
+ // baseUrl: '/stackql-deploy/',
+ baseUrl: '/',
+ tagline: 'Deploy and Test Cloud and SaaS Environments using StackQL',
+ favicon: 'img/favicon.ico',
+ staticDirectories: ['static'],
+ url: 'https://stackql-deploy.io',
+ organizationName: 'stackql',
+ projectName: 'stackql-deploy',
+
+ onBrokenLinks: 'throw',
+ onBrokenMarkdownLinks: 'warn',
+
+ i18n: {
+ defaultLocale: 'en',
+ locales: ['en'],
+ },
+
+ presets: [
+ [
+ 'classic',
+ /** @type {import('@docusaurus/preset-classic').Options} */
+ ({
+ docs: {
+ sidebarPath: './sidebars.js',
+ // Remove this to remove the "edit this page" links.
+ editUrl: 'https://github.com/stackql/stackql-deploy/tree/main/website/',
+ routeBasePath: '/', // Set the docs to be the root of the site
+ },
+ // blog: {
+ // showReadingTime: true,
+ // feedOptions: {
+ // type: ['rss', 'atom'],
+ // xslt: true,
+ // },
+ // // Remove this to remove the "edit this page" links.
+ // editUrl:
+ // 'https://github.com/stackql/stackql-deploy/tree/main/website/',
+ // },
+ theme: {
+ customCss: './src/css/custom.css',
+ },
+ }),
+ ],
+ ],
+
+ markdown: {
+ mermaid: true,
+ },
+ themes: ['@docusaurus/theme-mermaid'],
+
+ themeConfig:
+ /** @type {import('@docusaurus/preset-classic').ThemeConfig} */
+ ({
+ image: 'img/stackql-cover.png',
+ navbar: {
+ logo: {
+ alt: 'StackQL Deploy',
+ href: '/',
+ src: 'img/stackql-deploy-logo.svg',
+ srcDark: 'img/stackql-deploy-logo-white.svg',
+ },
+ items: [
+ // {
+ // type: 'docSidebar',
+ // sidebarId: 'docsSidebar',
+ // position: 'left',
+ // label: 'Deploy Docs',
+ // },
+ {
+ to: '/install',
+ position: 'left',
+ label: 'Install',
+ },
+ {
+ to: '/stackqldocs',
+ position: 'left',
+ label: 'StackQL Docs',
+ },
+ {
+ to: '/providers',
+ type: 'dropdown',
+ label: 'Providers',
+ position: 'left',
+ items: providerDropDownListItems,
+ },
+ {
+ type: 'dropdown',
+ label: 'More',
+ position: 'left',
+ items: [
+ {
+ to: '/blog',
+ label: 'Blog',
+ },
+ {
+ to: '/tutorials',
+ label: 'Tutorials',
+ },
+ ],
+ },
+ {
+ href: 'https://github.com/stackql/stackql',
+ position: 'right',
+ className: 'header-github-link',
+ 'aria-label': 'GitHub repository',
+ },
+ ],
+ },
+ footer: {
+ style: 'dark',
+ logo: {
+ alt: 'StackQL',
+ href: '/',
+ src: 'img/stackql-deploy-logo.svg',
+ srcDark: 'img/stackql-deploy-logo-white.svg',
+ },
+ links: [
+ {
+ title: 'StackQL',
+ items: footerStackQLItems,
+ },
+ {
+ title: 'More',
+ items: footerMoreItems,
+ },
+ ],
+        copyright: `© ${new Date().getFullYear()} StackQL Studios`,
+ },
+ colorMode: {
+ // using user system preferences, instead of the hardcoded defaultMode
+ respectPrefersColorScheme: true,
+ },
+ prism: {
+ theme: prismThemes.nightOwl,
+ darkTheme: prismThemes.dracula,
+ },
+ }),
+};
+
+export default config;
diff --git a/ref-python-packages/stackql-deploy/website/package.json b/ref-python-packages/stackql-deploy/website/package.json
new file mode 100644
index 0000000..fc7494b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/package.json
@@ -0,0 +1,50 @@
+{
+ "name": "website",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "docusaurus": "docusaurus",
+ "start": "docusaurus start",
+ "build": "docusaurus build",
+ "swizzle": "docusaurus swizzle",
+ "deploy": "docusaurus deploy",
+ "clear": "docusaurus clear",
+ "serve": "docusaurus serve",
+ "write-translations": "docusaurus write-translations",
+ "write-heading-ids": "docusaurus write-heading-ids"
+ },
+ "dependencies": {
+ "@docusaurus/core": "^3.8.1",
+ "@docusaurus/preset-classic": "^3.8.1",
+ "@docusaurus/theme-mermaid": "^3.8.1",
+ "@emotion/react": "^11.13.0",
+ "@emotion/styled": "^11.13.0",
+ "@iconify/react": "^5.0.2",
+ "@mdx-js/react": "^3.0.0",
+ "@mui/material": "^5.16.7",
+ "@mui/styles": "^5.16.7",
+ "clsx": "^2.0.0",
+ "prism-react-renderer": "^2.3.1",
+ "react": "^18.0.0",
+ "react-dom": "^18.0.0"
+ },
+ "devDependencies": {
+ "@docusaurus/module-type-aliases": "^3.8.1",
+ "@docusaurus/types": "^3.8.1"
+ },
+ "browserslist": {
+ "production": [
+ ">0.5%",
+ "not dead",
+ "not op_mini all"
+ ],
+ "development": [
+ "last 3 chrome version",
+ "last 3 firefox version",
+ "last 5 safari version"
+ ]
+ },
+ "engines": {
+ "node": ">=20.0"
+ }
+}
diff --git a/ref-python-packages/stackql-deploy/website/sidebars.js b/ref-python-packages/stackql-deploy/website/sidebars.js
new file mode 100644
index 0000000..819a0ea
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/sidebars.js
@@ -0,0 +1,109 @@
+// @ts-check
+
+import config from './docusaurus.config.js';
+
+const baseUrl = config.baseUrl;
+
+/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
+const sidebars = {
+ docsSidebar: [
+ {
+ type: 'doc',
+ id: 'index',
+ label: 'Welcome',
+ },
+ {
+ type: 'doc',
+ id: 'getting-started',
+ label: 'Getting Started',
+ },
+ {
+ type: 'category',
+ label: 'CLI Command Reference',
+ items: [
+ 'cli-reference/build',
+ 'cli-reference/init',
+ 'cli-reference/teardown',
+ 'cli-reference/test',
+ 'cli-reference/info',
+ 'cli-reference/shell',
+ 'cli-reference/upgrade',
+ ],
+ },
+ {
+ type: 'doc',
+ id: 'manifest-file',
+ label: 'stackql_manifest.yml',
+ },
+ {
+ type: 'doc',
+ id: 'resource-query-files',
+ label: 'Resource Query Files',
+ },
+ {
+ type: 'doc',
+ id: 'github-actions',
+ label: 'Deploying with GitHub Actions',
+ },
+ {
+ type: 'category',
+ label: 'Template Library',
+ link: {
+ type: 'generated-index',
+ title: 'Template Library',
+ description: 'stackql-deploy quick starts, how-tos, practical examples and use cases',
+ slug: '/template-library',
+ keywords: ['quickstarts', 'guides', 'how-tos', 'examples', 'use cases'],
+ },
+ items: [
+ {
+ type: 'category',
+ label: 'AWS',
+ description: 'Practical examples and use cases specific to AWS',
+ customProps: {
+ icon: `${baseUrl}img/providers/aws/aws.png`,
+ },
+ link: {
+ type: 'generated-index',
+ title: 'stackql-deploy AWS Templates',
+ description: 'Practical examples and use cases specific to AWS',
+ slug: '/template-library/aws',
+ },
+ items: [{ type: 'autogenerated', dirName: 'template-library/aws' }],
+ },
+ {
+ type: 'category',
+ label: 'Microsoft Azure',
+ description: 'Practical examples and use cases specific to Azure',
+ customProps: {
+ icon: `${baseUrl}img/providers/azure/azure.png`,
+ },
+ link: {
+ type: 'generated-index',
+ title: 'stackql-deploy Azure Templates',
+ description: 'Practical examples and use cases specific to Azure',
+ slug: '/template-library/azure',
+ },
+ items: [{ type: 'autogenerated', dirName: 'template-library/azure' }],
+ },
+ {
+ type: 'category',
+ label: 'Google Cloud Platform',
+ description: 'Practical examples and use cases specific to Google Cloud',
+ customProps: {
+ icon: `${baseUrl}img/providers/google/google.png`,
+ },
+ link: {
+ type: 'generated-index',
+ title: 'stackql-deploy Google Templates',
+ description: 'Practical examples and use cases specific to Google Cloud',
+ slug: '/template-library/google',
+ },
+ items: [{ type: 'autogenerated', dirName: 'template-library/google' }],
+ },
+ ],
+ },
+ ],
+};
+
+export default sidebars;
diff --git a/ref-python-packages/stackql-deploy/website/src/components/CustomSidebarCategory/index.js b/ref-python-packages/stackql-deploy/website/src/components/CustomSidebarCategory/index.js
new file mode 100644
index 0000000..c3af566
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/CustomSidebarCategory/index.js
@@ -0,0 +1,23 @@
+import React from 'react';
+import clsx from 'clsx';
+import Link from '@docusaurus/Link';
+
+function CustomSidebarCategory({icon, label, description, to}) {
+ return React.createElement(
+ 'div',
+ { className: 'sidebar-item' },
+ React.createElement(
+ Link,
+ { className: clsx('menu__link'), to: to },
+ React.createElement('img', { src: icon, alt: label, style: { width: '24px', marginRight: '10px' } }),
+ React.createElement(
+ 'div',
+ { className: 'sidebar-item-content' },
+ React.createElement('div', { className: 'sidebar-item-label' }, label),
+ React.createElement('div', { className: 'sidebar-item-description' }, description)
+ )
+ )
+ );
+}
+
+export default CustomSidebarCategory;
diff --git a/ref-python-packages/stackql-deploy/website/src/components/File/index.js b/ref-python-packages/stackql-deploy/website/src/components/File/index.js
new file mode 100644
index 0000000..8f88818
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/File/index.js
@@ -0,0 +1,22 @@
+import React from 'react';
+import styles from './styles.module.css';
+
+
+function File({children, name}) {
+  return (
+    <div className={styles.wrapper}>
+      <div className={styles.title}>
+        <span className={styles.titleInner}>{name}</span>
+      </div>
+      {children}
+    </div>
+  );
+}
+
+export default File;
diff --git a/ref-python-packages/stackql-deploy/website/src/components/File/styles.module.css b/ref-python-packages/stackql-deploy/website/src/components/File/styles.module.css
new file mode 100644
index 0000000..1421a95
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/File/styles.module.css
@@ -0,0 +1,35 @@
+
+:local(.wrapper pre) {
+ border-top-left-radius: 0px;
+ border-top-right-radius: 0px;
+}
+
+:local(.title) {
+ background-color: #f9fbfc;
+ color: rgb(1, 22, 39);
+
+ border: 1px solid #dddddd;
+ border-bottom: 0px;
+
+ border-top-left-radius: 5px;
+ border-top-right-radius: 5px;
+
+ padding: 7px;
+ font-size: 12px;
+}
+
+:local(html[data-theme='dark'] .title) {
+ background-color: rgb(241, 231, 231); /* var(--ifm-menu-color-background-active); */
+ border: 1px solid grey; /*var(--ifm-menu-color-background-active); */
+ color: black /* white */;
+}
+
+:local(.titleInner) {
+ margin: 4px 8px;
+}
+
+:local(.icon) {
+ vertical-align: middle;
+ margin-right: 5px;
+ fill: #8a949d;
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/index.js b/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/index.js
new file mode 100644
index 0000000..acc7621
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/index.js
@@ -0,0 +1,64 @@
+import clsx from 'clsx';
+import Heading from '@theme/Heading';
+import styles from './styles.module.css';
+
+const FeatureList = [
+ {
+ title: 'Easy to Use',
+ Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
+ description: (
+ <>
+ Docusaurus was designed from the ground up to be easily installed and
+ used to get your website up and running quickly.
+      </>
+ ),
+ },
+ {
+ title: 'Focus on What Matters',
+ Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
+ description: (
+ <>
+ Docusaurus lets you focus on your docs, and we'll do the chores. Go
+ ahead and move your docs into the docs directory.
+      </>
+ ),
+ },
+ {
+ title: 'Powered by React',
+ Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
+ description: (
+ <>
+ Extend or customize your website layout by reusing React. Docusaurus can
+ be extended while reusing the same header and footer.
+      </>
+ ),
+ },
+];
+
+function Feature({Svg, title, description}) {
+ return (
+    <div className={clsx('col col--4')}>
+      <div className="text--center">
+        <Svg className={styles.featureSvg} role="img" />
+      </div>
+      <div className="text--center padding-horiz--md">
+        <Heading as="h3">{title}</Heading>
+        <p>{description}</p>
+      </div>
+    </div>
+ );
+}
+
+export default function HomepageFeatures() {
+ return (
+    <section className={styles.features}>
+      <div className="container">
+        <div className="row">
+          {FeatureList.map((props, idx) => (
+            <Feature key={idx} {...props} />
+          ))}
+        </div>
+      </div>
+    </section>
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/styles.module.css b/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/styles.module.css
new file mode 100644
index 0000000..b248eb2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/HomepageFeatures/styles.module.css
@@ -0,0 +1,11 @@
+.features {
+ display: flex;
+ align-items: center;
+ padding: 2rem 0;
+ width: 100%;
+}
+
+.featureSvg {
+ height: 200px;
+ width: 200px;
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/components/LeftAlignedTable/index.js b/ref-python-packages/stackql-deploy/website/src/components/LeftAlignedTable/index.js
new file mode 100644
index 0000000..e8c9bb8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/components/LeftAlignedTable/index.js
@@ -0,0 +1,38 @@
+import React from 'react';
+
+const LeftAlignedTable = ({ type, required, fields }) => {
+ return (
+    <table style={{ textAlign: 'left' }}>
+      <tbody>
+        <tr>
+          <th>Type</th>
+          <td>{type}</td>
+        </tr>
+        <tr>
+          <th>Required</th>
+          <td>{required ? 'Yes' : 'No'}</td>
+        </tr>
+        {fields && fields.length > 0 && (
+          <tr>
+            <th>Fields</th>
+            <td>
+              {fields.map((field, index) => (
+                <span key={index}>
+                  <code>{field.name}</code>
+                  {index < fields.length - 1 && ', '}
+                </span>
+              ))}
+            </td>
+          </tr>
+        )}
+      </tbody>
+    </table>
+ );
+};
+
+export default LeftAlignedTable;
diff --git a/ref-python-packages/stackql-deploy/website/src/css/custom.css b/ref-python-packages/stackql-deploy/website/src/css/custom.css
new file mode 100644
index 0000000..edc419c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/css/custom.css
@@ -0,0 +1,248 @@
+@import url('https://fonts.googleapis.com/css2?family=Montserrat:wght@400;500;700&display=swap');
+
+/*
+* Brand Colour
+*/
+:root {
+ --gamma-blue: #0f4c81;
+ --gamma-medium-blue: #6c83aa;
+ /* Blues */
+ --gamma-dark-blue: #004165;
+ --gamma-light-blue: #b5bfd4;
+ --code-blue: #00f;
+ /* Grey scale */
+ --default-text: #2e3940;
+ --secondary-text: #718096;
+ --black: #000;
+ --white: #fff;
+ --grey-1: #f5f6f7;
+ --grey-2: #ebedef;
+ /* Dark Mode Blacks */
+ --dark-1: #606264;
+ --dark-2: #404244;
+ --black-2: #090909;
+ --light-black: #111;
+ /* Colours */
+ --default-green: #00af91;
+ --secondary-green: #43af43;
+ --default-red: #e94560;
+ --default-red-2: #fc91a2;
+ --default-blue: #2e3940;
+ --default-light-blue: #bfc2ff;
+ --default-blue-2: #1a1a2e;
+ --secondary-blue: #16213e;
+ --gamma-dark: #030760;
+}
+
+:root {
+ /* infima styling */
+ --ifm-font-family-base: 'Montserrat', sans-serif;
+ --ifm-font-size-base: 16px;
+ --ifm-code-font-size: 95%;
+ --ifm-background-color: var(--white);
+ --ifm-color-primary: var(--gamma-dark-blue);
+ --ifm-code-color: var(--code-blue);
+ --ifm-color-content: #2d3748;
+ --ifm-dropdown-link-color: var(--ifm-menu-color);
+ --ifm-navbar-link-color: var(--ifm-menu-color);
+ --ifm-menu-color-background-active: var(--ifm-color-emphasis-200);
+}
+
+[data-theme='dark'] {
+ --ifm-font-base-color: #dee0f2;
+ --ifm-color-content: var(--ifm-font-base-color);
+ --ifm-navbar-link-hover-color: var(--gamma-light-blue);
+ --ifm-link-color: var(--gamma-light-blue);
+ --ifm-menu-color-active: var(--gamma-light-blue);
+ --ifm-color-primary: var(--white);
+ --ifm-background-color: var(--black);
+ --ifm-footer-background-color: var(--black-2);
+ --ifm-navbar-background-color: var(--black);
+ --ifm-menu-color-background-active: #21243d;
+ --ifm-code-color: var(--white);
+}
+
+/*
+* copyable code
+*/
+.copyable-code-container code {
+ cursor: pointer;
+ position: relative;
+}
+
+/*
+* github
+*/
+ .header-github-link:before {
+ content: '';
+ width: 140px;
+ height: 28px;
+ display: flex;
+ background-image: url("https://img.shields.io/github/stars/stackql/stackql?logo=github&style=social");
+ background-repeat: no-repeat;
+ background-position: center;
+ background-size: contain;
+}
+
+.header-github-link:hover {
+ opacity: 0.6;
+}
+
+/*
+* footer
+*/
+:root .footer--dark {
+ background-color: transparent;
+ --ifm-footer-color: var(--secondary-text);
+ --ifm-footer-link-color: var(--secondary-text);
+ --ifm-footer-title-color: var(--black-2);
+}
+:root .footer .footerLogoLink_src-theme-Footer- {
+ opacity: 1;
+}
+:root .footer .divider {
+ background-color: rgba(0, 0, 0, 0.12);
+}
+:root .footer .container {
+ background-color: var(--grey-1);
+}
+:root a code {
+ color: var(--ifm-code-color);
+}
+
+html[data-theme='dark'] .footer--dark {
+ --ifm-footer-background-color: transparent;
+ --ifm-footer-color: #bdbdbd;
+ --ifm-footer-link-color: #bdbdbd;
+ --ifm-footer-title-color: var(--white);
+}
+html[data-theme='dark'] .footer .divider {
+ background-color: var(--secondary-blue);
+}
+html[data-theme='dark'] .footer .container {
+ background-color: var(--black-2);
+}
+
+/* footer */
+
+.footer__items {
+ font-weight: 400;
+ line-height: 1.43;
+ font-size: 0.875rem;
+}
+.footer__items .footer__link-item:hover {
+ text-decoration: none;
+}
+.footer__title {
+ font-weight: 700;
+ margin-bottom: 0;
+ line-height: 1.43;
+ font-size: 0.875rem;
+}
+.footer {
+ padding-bottom: 0;
+}
+.footer .container {
+ margin: 0 auto;
+ padding: 56px 80px;
+ max-width: 850px;
+ border-radius: 17px;
+}
+.divider {
+ width: 100%;
+ margin: 40px 0px;
+ border: none;
+ height: 1px;
+ flex-shrink: 0;
+}
+.footer__subtitle {
+ font-weight: 500;
+ line-height: 1.57;
+ font-size: 0.875rem;
+}
+.footer__logo {
+ margin-top: 0;
+}
+
+.footerSocialIcon {
+ font-size: 24px;
+ margin: 0 12px;
+}
+
+.footerSocialIconButton {
+ padding: 0;
+  color: rgba(255,255,255,.6);
+ }
+
+ .footerSocialIconButton:hover {
+   background: transparent;
+   color: rgba(255,255,255,.6);
+ }
+
+ .footer__items {
+ list-style-type: none;
+ margin: 0;
+ padding: 0;
+ }
+
+/*
+* custom styles
+*/
+code {
+ font-weight: bold;
+}
+table {
+ display: block;
+ max-width: -moz-fit-content;
+ max-width: fit-content;
+ margin: 0 auto;
+ overflow-x: auto;
+ white-space: nowrap;
+}
+
+/*
+* nav bar
+*/
+
+.navbar__item {
+ font-weight: 700;
+}
+.navbar__link {
+ font-weight: 700;
+}
+
+/*
+* doc field headings
+*/
+
+:root .docFieldHeading {
+ color: #0000FF;
+}
+
+html[data-theme='dark'] .docFieldHeading {
+ color: #FFFFFF;
+}
+
+/*
+* vhs image
+*/
+
+.vhsImage {
+  max-width: 60%; /* Reduce width to 60% of container */
+ display: block; /* Ensure it's a block element for margin auto to work */
+ margin: 40px auto; /* Add 40px space above/below and center horizontally */
+ transform: scale(0.9); /* Make it 90% of original size */
+ transform-origin: center; /* Scale from center */
+}
+
+/* Add more space before and after the image container */
+img[class="vhsImage"] {
+ margin-top: 40px;
+ margin-bottom: 40px;
+}
+
+/* If the image is inside a container, you might need this */
+.vhsImage-container,
+div:has(> .vhsImage) {
+ margin: 40px 0;
+}
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/blog.js b/ref-python-packages/stackql-deploy/website/src/pages/blog.js
new file mode 100644
index 0000000..e435012
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/blog.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Blog() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/contact-us.js b/ref-python-packages/stackql-deploy/website/src/pages/contact-us.js
new file mode 100644
index 0000000..b6850d8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/contact-us.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function ContactUs() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/docs.js b/ref-python-packages/stackql-deploy/website/src/pages/docs.js
new file mode 100644
index 0000000..f99f2a8
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/docs.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function DocsRedirect() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/install.js b/ref-python-packages/stackql-deploy/website/src/pages/install.js
new file mode 100644
index 0000000..a86db97
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/install.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Install() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/aws.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/aws.js
new file mode 100644
index 0000000..780099a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/aws.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/azure.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/azure.js
new file mode 100644
index 0000000..467f77a
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/azure.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/confluent.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/confluent.js
new file mode 100644
index 0000000..e886aaf
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/confluent.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/databricks.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/databricks.js
new file mode 100644
index 0000000..a04b603
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/databricks.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/github.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/github.js
new file mode 100644
index 0000000..b425c6c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/github.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/google.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/google.js
new file mode 100644
index 0000000..01fe8b7
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/google.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/index.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/index.js
new file mode 100644
index 0000000..20c17c6
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/index.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/okta.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/okta.js
new file mode 100644
index 0000000..cdddc72
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/okta.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/openai.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/openai.js
new file mode 100644
index 0000000..9884c84
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/openai.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/providers/snowflake.js b/ref-python-packages/stackql-deploy/website/src/pages/providers/snowflake.js
new file mode 100644
index 0000000..7b3ec43
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/providers/snowflake.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Registry() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/stackql-deploy.js b/ref-python-packages/stackql-deploy/website/src/pages/stackql-deploy.js
new file mode 100644
index 0000000..fc1b070
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/stackql-deploy.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Home() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/stackqldocs.js b/ref-python-packages/stackql-deploy/website/src/pages/stackqldocs.js
new file mode 100644
index 0000000..7182d93
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/stackqldocs.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function StackQLDocs() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/pages/tutorials.js b/ref-python-packages/stackql-deploy/website/src/pages/tutorials.js
new file mode 100644
index 0000000..7e0e838
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/pages/tutorials.js
@@ -0,0 +1,10 @@
+import React from 'react';
+import Head from '@docusaurus/Head';
+
+export default function Tutorials() {
+ return (
+
+
+
+ );
+};
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/DocCard/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/DocCard/index.tsx
new file mode 100644
index 0000000..eca9592
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/DocCard/index.tsx
@@ -0,0 +1,143 @@
+import React, {type ReactNode} from 'react';
+import clsx from 'clsx';
+import Link from '@docusaurus/Link';
+import {
+ useDocById,
+ findFirstSidebarItemLink,
+} from '@docusaurus/plugin-content-docs/client';
+import {usePluralForm} from '@docusaurus/theme-common';
+import isInternalUrl from '@docusaurus/isInternalUrl';
+import {translate} from '@docusaurus/Translate';
+
+import type {Props} from '@theme/DocCard';
+import Heading from '@theme/Heading';
+import type {
+ PropSidebarItemCategory,
+ PropSidebarItemLink,
+} from '@docusaurus/plugin-content-docs';
+
+import styles from './styles.module.css';
+
+const descriptionMap = {
+ '/template-library/aws/simple-vpc': 'Simple VPC configuration in AWS',
+ '/template-library/azure/simple-vnet': 'Basic Virtual Network setup in Azure',
+};
+
+
+function useCategoryItemsPlural() {
+ const {selectMessage} = usePluralForm();
+ return (count: number) =>
+ selectMessage(
+ count,
+ translate(
+ {
+ message: '1 item|{count} items',
+ id: 'theme.docs.DocCard.categoryDescription.plurals',
+ description:
+ 'The default description for a category card in the generated index about how many items this category includes',
+ },
+ {count},
+ ),
+ );
+}
+
+function CardContainer({
+ href,
+ children,
+}: {
+ href: string;
+ children: ReactNode;
+}): JSX.Element {
+ return (
+
+ {children}
+
+ );
+}
+
+function CardLayout({
+ href,
+ icon,
+ title,
+ description,
+}: {
+ href: string;
+ icon: ReactNode;
+ title: string;
+ description?: string;
+}): JSX.Element {
+ const finalDescription = description || descriptionMap[href] || '';
+ return (
+
+
+ {icon && {icon} }
+
+ {title}
+
+
+ {finalDescription && (
+
+ {finalDescription}
+
+ )}
+
+ );
+}
+
+function CardCategory({
+ item,
+}: {
+ item: PropSidebarItemCategory;
+}): JSX.Element | null {
+ const href = findFirstSidebarItemLink(item);
+ const categoryItemsPlural = useCategoryItemsPlural();
+
+ if (!href) {
+ return null;
+ }
+
+ const icon = item.customProps?.icon
+ ?
+ : 'āļø'; // Default to a cloud icon if no custom icon is provided
+
+ return (
+
+ );
+}
+
+function CardLink({item}: {item: PropSidebarItemLink}): JSX.Element {
+ const icon = 'šļø'; // Default file icon for non-category links
+ const doc = useDocById(item.docId ?? undefined);
+
+ return (
+
+ );
+}
+
+export default function DocCard({item}: Props): JSX.Element {
+ switch (item.type) {
+ case 'link':
+ return ;
+ case 'category':
+ return ;
+ default:
+ throw new Error(`unknown item type ${JSON.stringify(item)}`);
+ }
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/DocCard/styles.module.css b/ref-python-packages/stackql-deploy/website/src/theme/DocCard/styles.module.css
new file mode 100644
index 0000000..6b2de1c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/DocCard/styles.module.css
@@ -0,0 +1,63 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+.cardContainer {
+ --ifm-link-color: var(--ifm-color-emphasis-800);
+ --ifm-link-hover-color: var(--ifm-color-emphasis-700);
+ --ifm-link-hover-decoration: none;
+
+ box-shadow: 0 1.5px 3px 0 rgb(0 0 0 / 15%);
+ border: 1px solid var(--ifm-color-emphasis-200);
+ transition: all var(--ifm-transition-fast) ease;
+ transition-property: border, box-shadow;
+}
+
+.cardContainer:hover {
+ border-color: var(--ifm-color-primary);
+ box-shadow: 0 3px 6px 0 rgb(0 0 0 / 20%);
+}
+
+.cardContainer *:last-child {
+ margin-bottom: 0;
+}
+
+.cardTitle {
+ font-size: 1.2rem;
+}
+
+.cardDescription {
+ margin-top: 8px;
+ font-size: 0.8rem;
+}
+
+.textContent {
+ display: flex;
+ flex-direction: column;
+}
+
+
+/* .customIcon {
+ width: 32px;
+ height: 32px;
+ margin-right: 16px;
+} */
+
+.inlineContent {
+ display: flex;
+ align-items: center;
+}
+
+.inlineIcon {
+ margin-right: 0.5rem;
+ display: inline-flex;
+ align-items: center;
+}
+
+.customIcon {
+ width: 24px; /* Adjust the size as needed */
+ height: 24px; /* Adjust the size as needed */
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Copyright/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Copyright/index.tsx
new file mode 100644
index 0000000..ab1657d
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Copyright/index.tsx
@@ -0,0 +1,20 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import type {Props} from '@theme/Footer/Copyright';
+
+export default function FooterCopyright({copyright}: Props): JSX.Element {
+ return (
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Layout/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Layout/index.tsx
new file mode 100644
index 0000000..bb22f31
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Layout/index.tsx
@@ -0,0 +1,34 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import clsx from 'clsx';
+import type {Props} from '@theme/Footer/Layout';
+
+export default function FooterLayout({
+ style,
+ links,
+ logo,
+ copyright,
+}: Props): JSX.Element {
+ return (
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/LinkItem/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/LinkItem/index.tsx
new file mode 100644
index 0000000..44e1517
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/LinkItem/index.tsx
@@ -0,0 +1,36 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+
+import Link from '@docusaurus/Link';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import isInternalUrl from '@docusaurus/isInternalUrl';
+import IconExternalLink from '@theme/Icon/ExternalLink';
+import type {Props} from '@theme/Footer/LinkItem';
+
+export default function FooterLinkItem({item}: Props): JSX.Element {
+ const {to, href, label, prependBaseUrlToHref, ...props} = item;
+ const toUrl = useBaseUrl(to);
+ const normalizedHref = useBaseUrl(href, {forcePrependBaseUrl: true});
+
+ return (
+
+ {label}
+ {href && !isInternalUrl(href) && }
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/MultiColumn/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/MultiColumn/index.tsx
new file mode 100644
index 0000000..2239e8c
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/MultiColumn/index.tsx
@@ -0,0 +1,51 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import LinkItem from '@theme/Footer/LinkItem';
+import type {Props} from '@theme/Footer/Links/MultiColumn';
+
+type ColumnType = Props['columns'][number];
+type ColumnItemType = ColumnType['items'][number];
+
+function ColumnLinkItem({item}: {item: ColumnItemType}) {
+ return item.html ? (
+
+ ) : (
+
+
+
+ );
+}
+
+function Column({column}: {column: ColumnType}) {
+ return (
+
+
{column.title}
+
+ {column.items.map((item, i) => (
+
+ ))}
+
+
+ );
+}
+
+export default function FooterLinksMultiColumn({columns}: Props): JSX.Element {
+ return (
+
+ {columns.map((column, i) => (
+
+ ))}
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/Simple/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/Simple/index.tsx
new file mode 100644
index 0000000..e14b77f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/Simple/index.tsx
@@ -0,0 +1,42 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import LinkItem from '@theme/Footer/LinkItem';
+import type {Props} from '@theme/Footer/Links/Simple';
+
+function Separator() {
+ return Ā· ;
+}
+
+function SimpleLinkItem({item}: {item: Props['links'][number]}) {
+ return item.html ? (
+
+ ) : (
+
+ );
+}
+
+export default function FooterLinksSimple({links}: Props): JSX.Element {
+ return (
+
+
+ {links.map((item, i) => (
+
+
+ {links.length !== i + 1 && }
+
+ ))}
+
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/index.tsx
new file mode 100644
index 0000000..a4b0f33
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Links/index.tsx
@@ -0,0 +1,21 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+
+import {isMultiColumnFooterLinks} from '@docusaurus/theme-common';
+import FooterLinksMultiColumn from '@theme/Footer/Links/MultiColumn';
+import FooterLinksSimple from '@theme/Footer/Links/Simple';
+import type {Props} from '@theme/Footer/Links';
+
+export default function FooterLinks({links}: Props): JSX.Element {
+ return isMultiColumnFooterLinks(links) ? (
+
+ ) : (
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/index.tsx
new file mode 100644
index 0000000..ebd8e9f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/index.tsx
@@ -0,0 +1,46 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import clsx from 'clsx';
+import Link from '@docusaurus/Link';
+import {useBaseUrlUtils} from '@docusaurus/useBaseUrl';
+import ThemedImage from '@theme/ThemedImage';
+import type {Props} from '@theme/Footer/Logo';
+
+import styles from './styles.module.css';
+
+function LogoImage({logo}: Props) {
+ const {withBaseUrl} = useBaseUrlUtils();
+ const sources = {
+ light: withBaseUrl(logo.src),
+ dark: withBaseUrl(logo.srcDark ?? logo.src),
+ };
+ return (
+
+ );
+}
+
+export default function FooterLogo({logo}: Props): JSX.Element {
+ return logo.href ? (
+
+
+
+ ) : (
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/styles.module.css b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/styles.module.css
new file mode 100644
index 0000000..16b1a2e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/Logo/styles.module.css
@@ -0,0 +1,16 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+.footerLogoLink {
+ opacity: 0.5;
+ transition: opacity var(--ifm-transition-fast)
+ var(--ifm-transition-timing-default);
+}
+
+.footerLogoLink:hover {
+ opacity: 1;
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Footer/index.tsx
new file mode 100644
index 0000000..99ba9d5
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/index.tsx
@@ -0,0 +1,262 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import clsx from 'clsx';
+
+import Link from '@docusaurus/Link';
+import {FooterLinkItem, useThemeConfig} from '@docusaurus/theme-common';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import isInternalUrl from '@docusaurus/isInternalUrl';
+import styles from './styles.module.css';
+import ThemedImage, {Props as ThemedImageProps} from '@theme/ThemedImage';
+import IconExternalLink from '@theme/Icon/ExternalLink';
+import { IconButton } from '@mui/material';
+import { useColorMode } from '@docusaurus/theme-common';
+
+import { Icon } from '@iconify/react';
+
+// add for responsive logo image
+import { useWindowSize } from '@docusaurus/theme-common';
+
+// Custom styles to fix the spacing issue
+const socialIconsContainerStyle: React.CSSProperties = {
+ display: 'flex',
+ justifyContent: 'center',
+ alignItems: 'center',
+ flexWrap: 'wrap', // Allow wrapping on small screens
+ margin: '16px 0',
+};
+
+const iconButtonStyle = {
+ padding: '12px', // Ensure buttons have enough clickable area
+};
+
+function FooterLink({
+ to,
+ href,
+ label,
+ prependBaseUrlToHref,
+ ...props
+}: FooterLinkItem) {
+ const toUrl = useBaseUrl(to);
+ const normalizedHref = useBaseUrl(href, {forcePrependBaseUrl: true});
+
+ return (
+
+ {href && !isInternalUrl(href) ? (
+
+ {label}
+
+
+ ) : (
+ label
+ )}
+
+ );
+}
+
+const FooterLogo = ({
+ sources,
+ alt,
+ width,
+ height,
+ logo,
+}: Pick & { logo: any }) => {
+ // Get window width for responsiveness
+ const windowSize = useWindowSize();
+
+ // Set threshold for mobile view (e.g., 768px)
+ const isMobile = windowSize === 'mobile' ? true : false;
+
+ const getMobileLogoPath = (path: string) => path?.replace('.svg', '-mobile.svg');
+
+ // Choose appropriate image sources based on screen size
+ // const responsiveSources = {
+ // light: useBaseUrl(isMobile ? getMobileLogoPath(logo.src) : logo.src),
+ // dark: useBaseUrl(isMobile ? getMobileLogoPath(logo.srcDark || logo.src) : (logo.srcDark || logo.src)),
+ // };
+ const responsiveSources = {
+ light: useBaseUrl(isMobile ? getMobileLogoPath(logo?.src) : logo?.src),
+ dark: useBaseUrl(isMobile ? getMobileLogoPath(logo?.srcDark || logo?.src) : (logo?.srcDark || logo?.src)),
+ };
+
+ return (
+
+ );
+}
+
+function Footer(): JSX.Element | null {
+ const socialLinks = {
+ linkedin: "https://www.linkedin.com/company/stackql",
+ twitter: "https://twitter.com/stackql",
+ github: "https://github.com/stackql",
+ discord: "https://discord.com/invite/xVXZ9d5NxN",
+ slack: "https://join.slack.com/t/stackqlcommunity/shared_invite/zt-1cbdq9s5v-CkY65IMAesCgFqjN6FU6hg",
+ };
+
+ const {colorMode} = useColorMode();
+
+ const {footer} = useThemeConfig();
+
+ const {copyright, links = [], logo = { src: '' }} = footer || {};
+ const sources = {
+ light: useBaseUrl(logo.src),
+ dark: useBaseUrl(logo.srcDark || logo.src),
+ };
+
+ if (!footer) {
+ return null;
+ }
+
+ return (
+
+ );
+}
+
+export default Footer;
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Footer/styles.module.css b/ref-python-packages/stackql-deploy/website/src/theme/Footer/styles.module.css
new file mode 100644
index 0000000..92e3c3b
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Footer/styles.module.css
@@ -0,0 +1,16 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+.footerLogoLink {
+ opacity: 1;
+ transition: opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default);
+}
+
+.footerLogoLink:hover {
+ opacity: 0.5;
+}
+
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Icon/Close/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Icon/Close/index.tsx
new file mode 100644
index 0000000..45dee06
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Icon/Close/index.tsx
@@ -0,0 +1,26 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import type {Props} from '@theme/Icon/Close';
+
+export default function IconClose({
+ width = 21,
+ height = 21,
+ color = 'currentColor',
+ strokeWidth = 1.2,
+ className,
+ ...restProps
+}: Props): JSX.Element {
+ return (
+
+
+
+
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/index.tsx
new file mode 100644
index 0000000..ea618ee
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/index.tsx
@@ -0,0 +1,30 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+import React from 'react';
+import type {Props} from '@theme/Icon/ExternalLink';
+
+import styles from './styles.module.css';
+
+export default function IconExternalLink({
+ width = 13.5,
+ height = 13.5,
+}: Props): JSX.Element {
+ return (
+
+
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/styles.module.css b/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/styles.module.css
new file mode 100644
index 0000000..7b0a5ad
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Icon/ExternalLink/styles.module.css
@@ -0,0 +1,10 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+.iconExternalLink {
+ margin-left: 0.3rem;
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.d.ts b/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.d.ts
new file mode 100644
index 0000000..786099f
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.d.ts
@@ -0,0 +1,9 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+import { type ReactNode } from 'react';
+import type { Props } from '@theme/Logo';
+export default function Logo(props: Props): ReactNode;
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.js b/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.js
new file mode 100644
index 0000000..d0b73e2
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/Logo/index.js
@@ -0,0 +1,76 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+import React from 'react';
+import Link from '@docusaurus/Link';
+import useBaseUrl from '@docusaurus/useBaseUrl';
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+import {useThemeConfig, useWindowSize} from '@docusaurus/theme-common';
+import ThemedImage from '@theme/ThemedImage';
+function LogoThemedImage({logo, alt, imageClassName}) {
+// Add window size detection
+ const windowSize = useWindowSize();
+
+ // Determine if on mobile
+ const isMobile = windowSize === 'mobile';
+
+ // Function to generate mobile logo path
+ const getMobileLogoPath = (path) => path?.replace('.svg', '-mobile.svg');
+
+ // Get appropriate logo sources based on device
+ const sources = {
+ light: useBaseUrl(isMobile ? getMobileLogoPath(logo.src) : logo.src),
+ dark: useBaseUrl(isMobile ? getMobileLogoPath(logo.srcDark || logo.src) : (logo.srcDark || logo.src)),
+ };
+ const themedImage = (
+
+ );
+ // Is this extra div really necessary?
+ // introduced in https://github.com/facebook/docusaurus/pull/5666
+ return imageClassName ? (
+ {themedImage}
+ ) : (
+ themedImage
+ );
+}
+export default function Logo(props) {
+ const {
+ siteConfig: {title},
+ } = useDocusaurusContext();
+ const {
+ navbar: {title: navbarTitle, logo},
+ } = useThemeConfig();
+ const {imageClassName, titleClassName, ...propsRest} = props;
+ const logoLink = useBaseUrl(logo?.href || '/');
+ // If visible title is shown, fallback alt text should be
+ // an empty string to mark the logo as decorative.
+ const fallbackAlt = navbarTitle ? '' : title;
+ // Use logo alt text if provided (including empty string),
+ // and provide a sensible fallback otherwise.
+ const alt = logo?.alt ?? fallbackAlt;
+ return (
+
+ {logo && (
+
+ )}
+ {navbarTitle != null && {navbarTitle} }
+
+ );
+}
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/index.tsx b/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/index.tsx
new file mode 100644
index 0000000..7102e44
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/index.tsx
@@ -0,0 +1,49 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+ import React from 'react';
+ import clsx from 'clsx';
+ import useIsBrowser from '@docusaurus/useIsBrowser';
+ import {useColorMode} from '@docusaurus/theme-common';
+ import type {Props} from '@theme/ThemedImage';
+
+ import styles from './styles.module.css';
+
+ export default function ThemedImage(props: Props): JSX.Element {
+ const isBrowser = useIsBrowser();
+ const {colorMode} = useColorMode();
+ const {sources, className, alt, ...propsRest} = props;
+
+ type SourceName = keyof Props['sources'];
+
+ const clientThemes: SourceName[] =
+ colorMode === 'dark' ? ['dark'] : ['light'];
+
+ const renderedSourceNames: SourceName[] = isBrowser
+ ? clientThemes
+ : // We need to render both images on the server to avoid flash
+ // See https://github.com/facebook/docusaurus/pull/3730
+ ['light', 'dark'];
+
+ return (
+ <>
+ {renderedSourceNames.map((sourceName) => (
+
+ ))}
+ >
+ );
+ }
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/styles.module.css b/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/styles.module.css
new file mode 100644
index 0000000..cf93d46
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/src/theme/ThemedImage/styles.module.css
@@ -0,0 +1,18 @@
+/**
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+ .themedImage {
+ display: none;
+ }
+
+ [data-theme='light'] .themedImage--light {
+ display: initial;
+ }
+
+ [data-theme='dark'] .themedImage--dark {
+ display: initial;
+ }
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/static/.nojekyll b/ref-python-packages/stackql-deploy/website/static/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/ref-python-packages/stackql-deploy/website/static/img/favicon-16x16.png b/ref-python-packages/stackql-deploy/website/static/img/favicon-16x16.png
new file mode 100644
index 0000000..178c107
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/favicon-16x16.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/favicon-32x32.png b/ref-python-packages/stackql-deploy/website/static/img/favicon-32x32.png
new file mode 100644
index 0000000..f1efee0
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/favicon-32x32.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/favicon.ico b/ref-python-packages/stackql-deploy/website/static/img/favicon.ico
new file mode 100644
index 0000000..0145fbf
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/favicon.ico differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/library/aws/simple-aws-vpc-ec2-stack.png b/ref-python-packages/stackql-deploy/website/static/img/library/aws/simple-aws-vpc-ec2-stack.png
new file mode 100644
index 0000000..abb1cb2
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/library/aws/simple-aws-vpc-ec2-stack.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/library/azure/azure_vnet_and_vm.png b/ref-python-packages/stackql-deploy/website/static/img/library/azure/azure_vnet_and_vm.png
new file mode 100644
index 0000000..5b61c56
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/library/azure/azure_vnet_and_vm.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/aws/aws.png b/ref-python-packages/stackql-deploy/website/static/img/providers/aws/aws.png
new file mode 100644
index 0000000..6e66f02
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/aws/aws.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/aws/stackql-aws-provider-featured-image.png b/ref-python-packages/stackql-deploy/website/static/img/providers/aws/stackql-aws-provider-featured-image.png
new file mode 100644
index 0000000..efbdac7
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/aws/stackql-aws-provider-featured-image.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/azure/azure.png b/ref-python-packages/stackql-deploy/website/static/img/providers/azure/azure.png
new file mode 100644
index 0000000..d2d4af5
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/azure/azure.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/azure/stackql-azure-provider-featured-image.png b/ref-python-packages/stackql-deploy/website/static/img/providers/azure/stackql-azure-provider-featured-image.png
new file mode 100644
index 0000000..7f3f4df
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/azure/stackql-azure-provider-featured-image.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/google/google.png b/ref-python-packages/stackql-deploy/website/static/img/providers/google/google.png
new file mode 100644
index 0000000..29f7087
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/google/google.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/providers/google/stackql-google-provider-featured-image.png b/ref-python-packages/stackql-deploy/website/static/img/providers/google/stackql-google-provider-featured-image.png
new file mode 100644
index 0000000..7596891
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/providers/google/stackql-google-provider-featured-image.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-cover.png b/ref-python-packages/stackql-deploy/website/static/img/stackql-cover.png
new file mode 100644
index 0000000..872999a
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/stackql-cover.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-featured-image.png b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-featured-image.png
new file mode 100644
index 0000000..086322f
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-featured-image.png differ
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-mobile.svg b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-mobile.svg
new file mode 100644
index 0000000..1de09ec
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-mobile.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white-mobile.svg b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white-mobile.svg
new file mode 100644
index 0000000..88c628e
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white-mobile.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white.svg b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white.svg
new file mode 100644
index 0000000..a09fefc
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo-white.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo.svg b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo.svg
new file mode 100644
index 0000000..1709a64
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy.gif b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy.gif
new file mode 100644
index 0000000..2922273
Binary files /dev/null and b/ref-python-packages/stackql-deploy/website/static/img/stackql-deploy.gif differ
diff --git a/ref-python-packages/stackql-deploy/website/yarn.lock b/ref-python-packages/stackql-deploy/website/yarn.lock
new file mode 100644
index 0000000..2090806
--- /dev/null
+++ b/ref-python-packages/stackql-deploy/website/yarn.lock
@@ -0,0 +1,11339 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@algolia/autocomplete-core@1.17.9":
+ version "1.17.9"
+ resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.17.9.tgz#83374c47dc72482aa45d6b953e89377047f0dcdc"
+ integrity sha512-O7BxrpLDPJWWHv/DLA9DRFWs+iY1uOJZkqUwjS5HSZAGcl0hIVCQ97LTLewiZmZ402JYUrun+8NqFP+hCknlbQ==
+ dependencies:
+ "@algolia/autocomplete-plugin-algolia-insights" "1.17.9"
+ "@algolia/autocomplete-shared" "1.17.9"
+
+"@algolia/autocomplete-plugin-algolia-insights@1.17.9":
+ version "1.17.9"
+ resolved "https://registry.yarnpkg.com/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.17.9.tgz#74c86024d09d09e8bfa3dd90b844b77d9f9947b6"
+ integrity sha512-u1fEHkCbWF92DBeB/KHeMacsjsoI0wFhjZtlCq2ddZbAehshbZST6Hs0Avkc0s+4UyBGbMDnSuXHLuvRWK5iDQ==
+ dependencies:
+ "@algolia/autocomplete-shared" "1.17.9"
+
+"@algolia/autocomplete-preset-algolia@1.17.9":
+ version "1.17.9"
+ resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.17.9.tgz#911f3250544eb8ea4096fcfb268f156b085321b5"
+ integrity sha512-Na1OuceSJeg8j7ZWn5ssMu/Ax3amtOwk76u4h5J4eK2Nx2KB5qt0Z4cOapCsxot9VcEN11ADV5aUSlQF4RhGjQ==
+ dependencies:
+ "@algolia/autocomplete-shared" "1.17.9"
+
+"@algolia/autocomplete-shared@1.17.9":
+ version "1.17.9"
+ resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.17.9.tgz#5f38868f7cb1d54b014b17a10fc4f7e79d427fa8"
+ integrity sha512-iDf05JDQ7I0b7JEA/9IektxN/80a2MZ1ToohfmNS3rfeuQnIKI3IJlIafD0xu4StbtQTghx9T3Maa97ytkXenQ==
+
+"@algolia/client-abtesting@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-abtesting/-/client-abtesting-5.34.0.tgz#1b7162c76cb77179a114d34efd76b0078ba2cab3"
+ integrity sha512-d6ardhDtQsnMpyr/rPrS3YuIE9NYpY4rftkC7Ap9tyuhZ/+V3E/LH+9uEewPguKzVqduApdwJzYq2k+vAXVEbQ==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/client-analytics@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-5.34.0.tgz#1eb5c5cfdf4be62f5a71600c9e04e71d7a742dc5"
+ integrity sha512-WXIByjHNA106JO1Dj6b4viSX/yMN3oIB4qXr2MmyEmNq0MgfuPfPw8ayLRIZPa9Dp27hvM3G8MWJ4RG978HYFw==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/client-common@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-5.34.0.tgz#c617f62d73325f2c61bf5ee1c122f385f66fc117"
+ integrity sha512-JeN1XJLZIkkv6yK0KT93CIXXk+cDPUGNg5xeH4fN9ZykYFDWYRyqgaDo+qvg4RXC3WWkdQ+hogQuuCk4Y3Eotw==
+
+"@algolia/client-insights@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-insights/-/client-insights-5.34.0.tgz#5623db7c926e1e600dd5b8e6f76ab703b8920eb7"
+ integrity sha512-gdFlcQa+TWXJUsihHDlreFWniKPFIQ15i5oynCY4m9K3DCex5g5cVj9VG4Hsquxf2t6Y0yv8w6MvVTGDO8oRLw==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/client-personalization@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-5.34.0.tgz#e9e8d8af7ff74ab1c72fed259da0c916ada88719"
+ integrity sha512-g91NHhIZDkh1IUeNtsUd8V/ZxuBc2ByOfDqhCkoQY3Z/mZszhpn3Czn6AR5pE81fx793vMaiOZvQVB5QttArkQ==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/client-query-suggestions@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-query-suggestions/-/client-query-suggestions-5.34.0.tgz#50fdd06c47d949c315590806dba33b549e300365"
+ integrity sha512-cvRApDfFrlJ3Vcn37U4Nd/7S6T8cx7FW3mVLJPqkkzixv8DQ/yV+x4VLirxOtGDdq3KohcIbIGWbg1QuyOZRvQ==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/client-search@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-5.34.0.tgz#44867a0b0be7103d4d72601b7dd534bad6fe6d6d"
+ integrity sha512-m9tK4IqJmn+flEPRtuxuHgiHmrKV0su5fuVwVpq8/es4DMjWMgX1a7Lg1PktvO8AbKaTp9kTtBAPnwXpuCwmEg==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/events@^4.0.1":
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/@algolia/events/-/events-4.0.1.tgz#fd39e7477e7bc703d7f893b556f676c032af3950"
+ integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==
+
+"@algolia/ingestion@1.34.0":
+ version "1.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/ingestion/-/ingestion-1.34.0.tgz#393e20c4948f304349230de7cdfd4be5ce6efe93"
+ integrity sha512-2rxy4XoeRtIpzxEh5u5UgDC5HY4XbNdjzNgFx1eDrfFkSHpEVjirtLhISMy2N5uSFqYu1uUby5/NC1Soq8J7iw==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/monitoring@1.34.0":
+ version "1.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/monitoring/-/monitoring-1.34.0.tgz#f163560f660195e028b02a81e4bc3475cf001225"
+ integrity sha512-OJiDhlJX8ZdWAndc50Z6aUEW/YmnhFK2ul3rahMw5/c9Damh7+oY9SufoK2LimJejy+65Qka06YPG29v2G/vww==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/recommend@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/recommend/-/recommend-5.34.0.tgz#5adec4b49c32612309204e9f17f9180c6e4e1da9"
+ integrity sha512-fzNQZAdVxu/Gnbavy8KW5gurApwdYcPW6+pjO7Pw8V5drCR3eSqnOxSvp79rhscDX8ezwqMqqK4F3Hsq+KpRzg==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+"@algolia/requester-browser-xhr@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.34.0.tgz#3882e55907d6a3cf7160aef5b6743be398b88c57"
+ integrity sha512-gEI0xjzA/xvMpEdYmgQnf6AQKllhgKRtnEWmwDrnct+YPIruEHlx1dd7nRJTy/33MiYcCxkB4khXpNrHuqgp3Q==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+
+"@algolia/requester-fetch@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/requester-fetch/-/requester-fetch-5.34.0.tgz#bda378f9e5d70d7516d15478a5a7123eaf62afb0"
+ integrity sha512-5SwGOttpbACT4jXzfSJ3mnTcF46SVNSnZ1JjxC3qBa3qKi4U0CJGzuVVy3L798u8dG5H0SZ2MAB5v7180Gnqew==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+
+"@algolia/requester-node-http@5.34.0":
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-5.34.0.tgz#047bf7f7027c2d94752da55d60d2a3f6d38d12c3"
+ integrity sha512-409XlyIyEXrxyGjWxd0q5RASizHSRVUU0AXPCEdqnbcGEzbCgL1n7oYI8YxzE/RqZLha+PNwWCcTVn7EE5tyyQ==
+ dependencies:
+ "@algolia/client-common" "5.34.0"
+
+"@ampproject/remapping@^2.2.0":
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4"
+ integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==
+ dependencies:
+ "@jridgewell/gen-mapping" "^0.3.5"
+ "@jridgewell/trace-mapping" "^0.3.24"
+
+"@antfu/install-pkg@^1.0.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@antfu/install-pkg/-/install-pkg-1.1.0.tgz#78fa036be1a6081b5a77a5cf59f50c7752b6ba26"
+ integrity sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==
+ dependencies:
+ package-manager-detector "^1.3.0"
+ tinyexec "^1.0.1"
+
+"@antfu/utils@^8.1.0":
+ version "8.1.1"
+ resolved "https://registry.yarnpkg.com/@antfu/utils/-/utils-8.1.1.tgz#95b1947d292a9a2efffba2081796dcaa05ecedfb"
+ integrity sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==
+
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.7.tgz#882fd9e09e8ee324e496bd040401c6f046ef4465"
+ integrity sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==
+ dependencies:
+ "@babel/highlight" "^7.24.7"
+ picocolors "^1.0.0"
+
+"@babel/code-frame@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.27.1.tgz#200f715e66d52a23b221a9435534a91cc13ad5be"
+ integrity sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.27.1"
+ js-tokens "^4.0.0"
+ picocolors "^1.1.1"
+
+"@babel/compat-data@^7.22.6", "@babel/compat-data@^7.25.2":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.25.2.tgz#e41928bd33475305c586f6acbbb7e3ade7a6f7f5"
+ integrity sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==
+
+"@babel/compat-data@^7.27.2", "@babel/compat-data@^7.27.7", "@babel/compat-data@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.28.0.tgz#9fc6fd58c2a6a15243cd13983224968392070790"
+ integrity sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==
+
+"@babel/core@^7.21.3":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.25.2.tgz#ed8eec275118d7613e77a352894cd12ded8eba77"
+ integrity sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==
+ dependencies:
+ "@ampproject/remapping" "^2.2.0"
+ "@babel/code-frame" "^7.24.7"
+ "@babel/generator" "^7.25.0"
+ "@babel/helper-compilation-targets" "^7.25.2"
+ "@babel/helper-module-transforms" "^7.25.2"
+ "@babel/helpers" "^7.25.0"
+ "@babel/parser" "^7.25.0"
+ "@babel/template" "^7.25.0"
+ "@babel/traverse" "^7.25.2"
+ "@babel/types" "^7.25.2"
+ convert-source-map "^2.0.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.2"
+ json5 "^2.2.3"
+ semver "^6.3.1"
+
+"@babel/core@^7.25.9":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.28.0.tgz#55dad808d5bf3445a108eefc88ea3fdf034749a4"
+ integrity sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==
+ dependencies:
+ "@ampproject/remapping" "^2.2.0"
+ "@babel/code-frame" "^7.27.1"
+ "@babel/generator" "^7.28.0"
+ "@babel/helper-compilation-targets" "^7.27.2"
+ "@babel/helper-module-transforms" "^7.27.3"
+ "@babel/helpers" "^7.27.6"
+ "@babel/parser" "^7.28.0"
+ "@babel/template" "^7.27.2"
+ "@babel/traverse" "^7.28.0"
+ "@babel/types" "^7.28.0"
+ convert-source-map "^2.0.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.2"
+ json5 "^2.2.3"
+ semver "^6.3.1"
+
+"@babel/generator@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.25.0.tgz#f858ddfa984350bc3d3b7f125073c9af6988f18e"
+ integrity sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==
+ dependencies:
+ "@babel/types" "^7.25.0"
+ "@jridgewell/gen-mapping" "^0.3.5"
+ "@jridgewell/trace-mapping" "^0.3.25"
+ jsesc "^2.5.1"
+
+"@babel/generator@^7.25.9", "@babel/generator@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.28.0.tgz#9cc2f7bd6eb054d77dc66c2664148a0c5118acd2"
+ integrity sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==
+ dependencies:
+ "@babel/parser" "^7.28.0"
+ "@babel/types" "^7.28.0"
+ "@jridgewell/gen-mapping" "^0.3.12"
+ "@jridgewell/trace-mapping" "^0.3.28"
+ jsesc "^3.0.2"
+
+"@babel/helper-annotate-as-pure@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz#5373c7bc8366b12a033b4be1ac13a206c6656aab"
+ integrity sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==
+ dependencies:
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-annotate-as-pure@^7.27.1", "@babel/helper-annotate-as-pure@^7.27.3":
+ version "7.27.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz#f31fd86b915fc4daf1f3ac6976c59be7084ed9c5"
+ integrity sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==
+ dependencies:
+ "@babel/types" "^7.27.3"
+
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz#37d66feb012024f2422b762b9b2a7cfe27c7fba3"
+ integrity sha512-xZeCVVdwb4MsDBkkyZ64tReWYrLRHlMN72vP7Bdm3OUOuyFZExhsHUUnuWnm2/XOlAJzR0LfPpB56WXZn0X/lA==
+ dependencies:
+ "@babel/traverse" "^7.24.7"
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-compilation-targets@^7.22.6", "@babel/helper-compilation-targets@^7.24.7", "@babel/helper-compilation-targets@^7.24.8", "@babel/helper-compilation-targets@^7.25.2":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz#e1d9410a90974a3a5a66e84ff55ef62e3c02d06c"
+ integrity sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==
+ dependencies:
+ "@babel/compat-data" "^7.25.2"
+ "@babel/helper-validator-option" "^7.24.8"
+ browserslist "^4.23.1"
+ lru-cache "^5.1.1"
+ semver "^6.3.1"
+
+"@babel/helper-compilation-targets@^7.27.1", "@babel/helper-compilation-targets@^7.27.2":
+ version "7.27.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz#46a0f6efab808d51d29ce96858dd10ce8732733d"
+ integrity sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==
+ dependencies:
+ "@babel/compat-data" "^7.27.2"
+ "@babel/helper-validator-option" "^7.27.1"
+ browserslist "^4.24.0"
+ lru-cache "^5.1.1"
+ semver "^6.3.1"
+
+"@babel/helper-create-class-features-plugin@^7.24.7", "@babel/helper-create-class-features-plugin@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.0.tgz#a109bf9c3d58dfed83aaf42e85633c89f43a6253"
+ integrity sha512-GYM6BxeQsETc9mnct+nIIpf63SAyzvyYN7UB/IlTyd+MBg06afFGp0mIeUqGyWgS2mxad6vqbMrHVlaL3m70sQ==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-member-expression-to-functions" "^7.24.8"
+ "@babel/helper-optimise-call-expression" "^7.24.7"
+ "@babel/helper-replace-supers" "^7.25.0"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+ "@babel/traverse" "^7.25.0"
+ semver "^6.3.1"
+
+"@babel/helper-create-class-features-plugin@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz#5bee4262a6ea5ddc852d0806199eb17ca3de9281"
+ integrity sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ "@babel/helper-member-expression-to-functions" "^7.27.1"
+ "@babel/helper-optimise-call-expression" "^7.27.1"
+ "@babel/helper-replace-supers" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+ semver "^6.3.1"
+
+"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.24.7", "@babel/helper-create-regexp-features-plugin@^7.25.0":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.2.tgz#24c75974ed74183797ffd5f134169316cd1808d9"
+ integrity sha512-+wqVGP+DFmqwFD3EH6TMTfUNeqDehV3E/dl+Sd54eaXqm17tEUNbEIn4sVivVowbvUpOtIGxdo3GoXyDH9N/9g==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ regexpu-core "^5.3.1"
+ semver "^6.3.1"
+
+"@babel/helper-create-regexp-features-plugin@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.27.1.tgz#05b0882d97ba1d4d03519e4bce615d70afa18c53"
+ integrity sha512-uVDC72XVf8UbrH5qQTc18Agb8emwjTiZrQE11Nv3CuBEZmVvTwwE9CBUEvHku06gQCAyYf8Nv6ja1IN+6LMbxQ==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ regexpu-core "^6.2.0"
+ semver "^6.3.1"
+
+"@babel/helper-define-polyfill-provider@^0.6.2":
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz#18594f789c3594acb24cfdb4a7f7b7d2e8bd912d"
+ integrity sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.22.6"
+ "@babel/helper-plugin-utils" "^7.22.5"
+ debug "^4.1.1"
+ lodash.debounce "^4.0.8"
+ resolve "^1.14.2"
+
+"@babel/helper-define-polyfill-provider@^0.6.5":
+ version "0.6.5"
+ resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz#742ccf1cb003c07b48859fc9fa2c1bbe40e5f753"
+ integrity sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.27.2"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ debug "^4.4.1"
+ lodash.debounce "^4.0.8"
+ resolve "^1.22.10"
+
+"@babel/helper-globals@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-globals/-/helper-globals-7.28.0.tgz#b9430df2aa4e17bc28665eadeae8aa1d985e6674"
+ integrity sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==
+
+"@babel/helper-member-expression-to-functions@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.8.tgz#6155e079c913357d24a4c20480db7c712a5c3fb6"
+ integrity sha512-LABppdt+Lp/RlBxqrh4qgf1oEH/WxdzQNDJIu5gC/W1GyvPVrOBiItmmM8wan2fm4oYqFuFfkXmlGpLQhPY8CA==
+ dependencies:
+ "@babel/traverse" "^7.24.8"
+ "@babel/types" "^7.24.8"
+
+"@babel/helper-member-expression-to-functions@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz#ea1211276be93e798ce19037da6f06fbb994fa44"
+ integrity sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==
+ dependencies:
+ "@babel/traverse" "^7.27.1"
+ "@babel/types" "^7.27.1"
+
+"@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz#f2f980392de5b84c3328fc71d38bd81bbb83042b"
+ integrity sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==
+ dependencies:
+ "@babel/traverse" "^7.24.7"
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-module-imports@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz#7ef769a323e2655e126673bb6d2d6913bbead204"
+ integrity sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==
+ dependencies:
+ "@babel/traverse" "^7.27.1"
+ "@babel/types" "^7.27.1"
+
+"@babel/helper-module-transforms@^7.24.7", "@babel/helper-module-transforms@^7.24.8", "@babel/helper-module-transforms@^7.25.0", "@babel/helper-module-transforms@^7.25.2":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz#ee713c29768100f2776edf04d4eb23b8d27a66e6"
+ integrity sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==
+ dependencies:
+ "@babel/helper-module-imports" "^7.24.7"
+ "@babel/helper-simple-access" "^7.24.7"
+ "@babel/helper-validator-identifier" "^7.24.7"
+ "@babel/traverse" "^7.25.2"
+
+"@babel/helper-module-transforms@^7.27.1", "@babel/helper-module-transforms@^7.27.3":
+ version "7.27.3"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz#db0bbcfba5802f9ef7870705a7ef8788508ede02"
+ integrity sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==
+ dependencies:
+ "@babel/helper-module-imports" "^7.27.1"
+ "@babel/helper-validator-identifier" "^7.27.1"
+ "@babel/traverse" "^7.27.3"
+
+"@babel/helper-optimise-call-expression@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz#8b0a0456c92f6b323d27cfd00d1d664e76692a0f"
+ integrity sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==
+ dependencies:
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-optimise-call-expression@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz#c65221b61a643f3e62705e5dd2b5f115e35f9200"
+ integrity sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==
+ dependencies:
+ "@babel/types" "^7.27.1"
+
+"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.24.7", "@babel/helper-plugin-utils@^7.24.8", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz#94ee67e8ec0e5d44ea7baeb51e571bd26af07878"
+ integrity sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==
+
+"@babel/helper-plugin-utils@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz#ddb2f876534ff8013e6c2b299bf4d39b3c51d44c"
+ integrity sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==
+
+"@babel/helper-remap-async-to-generator@^7.24.7", "@babel/helper-remap-async-to-generator@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.0.tgz#d2f0fbba059a42d68e5e378feaf181ef6055365e"
+ integrity sha512-NhavI2eWEIz/H9dbrG0TuOicDhNexze43i5z7lEqwYm0WEZVTwnPpA0EafUTP7+6/W79HWIP2cTe3Z5NiSTVpw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-wrap-function" "^7.25.0"
+ "@babel/traverse" "^7.25.0"
+
+"@babel/helper-remap-async-to-generator@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.27.1.tgz#4601d5c7ce2eb2aea58328d43725523fcd362ce6"
+ integrity sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ "@babel/helper-wrap-function" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/helper-replace-supers@^7.24.7", "@babel/helper-replace-supers@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.25.0.tgz#ff44deac1c9f619523fe2ca1fd650773792000a9"
+ integrity sha512-q688zIvQVYtZu+i2PsdIu/uWGRpfxzr5WESsfpShfZECkO+d2o+WROWezCi/Q6kJ0tfPa5+pUGUlfx2HhrA3Bg==
+ dependencies:
+ "@babel/helper-member-expression-to-functions" "^7.24.8"
+ "@babel/helper-optimise-call-expression" "^7.24.7"
+ "@babel/traverse" "^7.25.0"
+
+"@babel/helper-replace-supers@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz#b1ed2d634ce3bdb730e4b52de30f8cccfd692bc0"
+ integrity sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==
+ dependencies:
+ "@babel/helper-member-expression-to-functions" "^7.27.1"
+ "@babel/helper-optimise-call-expression" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/helper-simple-access@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz#bcade8da3aec8ed16b9c4953b74e506b51b5edb3"
+ integrity sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==
+ dependencies:
+ "@babel/traverse" "^7.24.7"
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-skip-transparent-expression-wrappers@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz#5f8fa83b69ed5c27adc56044f8be2b3ea96669d9"
+ integrity sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ==
+ dependencies:
+ "@babel/traverse" "^7.24.7"
+ "@babel/types" "^7.24.7"
+
+"@babel/helper-skip-transparent-expression-wrappers@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz#62bb91b3abba8c7f1fec0252d9dbea11b3ee7a56"
+ integrity sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==
+ dependencies:
+ "@babel/traverse" "^7.27.1"
+ "@babel/types" "^7.27.1"
+
+"@babel/helper-string-parser@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz#5b3329c9a58803d5df425e5785865881a81ca48d"
+ integrity sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==
+
+"@babel/helper-string-parser@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz#54da796097ab19ce67ed9f88b47bb2ec49367687"
+ integrity sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==
+
+"@babel/helper-validator-identifier@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz#75b889cfaf9e35c2aaf42cf0d72c8e91719251db"
+ integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==
+
+"@babel/helper-validator-identifier@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz#a7054dcc145a967dd4dc8fee845a57c1316c9df8"
+ integrity sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==
+
+"@babel/helper-validator-option@^7.24.7", "@babel/helper-validator-option@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz#3725cdeea8b480e86d34df15304806a06975e33d"
+ integrity sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==
+
+"@babel/helper-validator-option@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz#fa52f5b1e7db1ab049445b421c4471303897702f"
+ integrity sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==
+
+"@babel/helper-wrap-function@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.25.0.tgz#dab12f0f593d6ca48c0062c28bcfb14ebe812f81"
+ integrity sha512-s6Q1ebqutSiZnEjaofc/UKDyC4SbzV5n5SrA2Gq8UawLycr3i04f1dX4OzoQVnexm6aOCh37SQNYlJ/8Ku+PMQ==
+ dependencies:
+ "@babel/template" "^7.25.0"
+ "@babel/traverse" "^7.25.0"
+ "@babel/types" "^7.25.0"
+
+"@babel/helper-wrap-function@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.27.1.tgz#b88285009c31427af318d4fe37651cd62a142409"
+ integrity sha512-NFJK2sHUvrjo8wAU/nQTWU890/zB2jj0qBcCbZbbf+005cAsv6tMjXz31fBign6M5ov1o0Bllu+9nbqkfsjjJQ==
+ dependencies:
+ "@babel/template" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+ "@babel/types" "^7.27.1"
+
+"@babel/helpers@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.25.0.tgz#e69beb7841cb93a6505531ede34f34e6a073650a"
+ integrity sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==
+ dependencies:
+ "@babel/template" "^7.25.0"
+ "@babel/types" "^7.25.0"
+
+"@babel/helpers@^7.27.6":
+ version "7.27.6"
+ resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.27.6.tgz#6456fed15b2cb669d2d1fabe84b66b34991d812c"
+ integrity sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==
+ dependencies:
+ "@babel/template" "^7.27.2"
+ "@babel/types" "^7.27.6"
+
+"@babel/highlight@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.7.tgz#a05ab1df134b286558aae0ed41e6c5f731bf409d"
+ integrity sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.24.7"
+ chalk "^2.4.2"
+ js-tokens "^4.0.0"
+ picocolors "^1.0.0"
+
+"@babel/parser@^7.25.0", "@babel/parser@^7.25.3":
+ version "7.25.3"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.25.3.tgz#91fb126768d944966263f0657ab222a642b82065"
+ integrity sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==
+ dependencies:
+ "@babel/types" "^7.25.2"
+
+"@babel/parser@^7.27.2", "@babel/parser@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.28.0.tgz#979829fbab51a29e13901e5a80713dbcb840825e"
+ integrity sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==
+ dependencies:
+ "@babel/types" "^7.28.0"
+
+"@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.25.3":
+ version "7.25.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.3.tgz#dca427b45a6c0f5c095a1c639dfe2476a3daba7f"
+ integrity sha512-wUrcsxZg6rqBXG05HG1FPYgsP6EvwF4WpBbxIpWIIYnH8wG0gzx3yZY3dtEHas4sTAOGkbTsc9EGPxwff8lRoA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/traverse" "^7.25.3"
+
+"@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.27.1.tgz#61dd8a8e61f7eb568268d1b5f129da3eee364bf9"
+ integrity sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/plugin-bugfix-safari-class-field-initializer-scope@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.0.tgz#cd0c583e01369ef51676bdb3d7b603e17d2b3f73"
+ integrity sha512-Bm4bH2qsX880b/3ziJ8KD711LT7z4u8CFudmjqle65AZj/HNUFhEf90dqYv6O86buWvSBmeQDjv0Tn2aF/bIBA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-bugfix-safari-class-field-initializer-scope@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.27.1.tgz#43f70a6d7efd52370eefbdf55ae03d91b293856d"
+ integrity sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.0.tgz#749bde80356b295390954643de7635e0dffabe73"
+ integrity sha512-lXwdNZtTmeVOOFtwM/WDe7yg1PL8sYhRk/XH0FzbR2HDQ0xC+EnQ/JHeoMYSavtU115tnUk0q9CDyq8si+LMAA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.27.1.tgz#beb623bd573b8b6f3047bd04c32506adc3e58a72"
+ integrity sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz#e4eabdd5109acc399b38d7999b2ef66fc2022f89"
+ integrity sha512-+izXIbke1T33mY4MSNnrqhPXDz01WYhEf3yF5NbnUtkiNnm+XBZJl3kNfoK6NKmYlz/D07+l2GWVK/QfDkNCuQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+ "@babel/plugin-transform-optional-chaining" "^7.24.7"
+
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.27.1.tgz#e134a5479eb2ba9c02714e8c1ebf1ec9076124fd"
+ integrity sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+ "@babel/plugin-transform-optional-chaining" "^7.27.1"
+
+"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.0.tgz#3a82a70e7cb7294ad2559465ebcb871dfbf078fb"
+ integrity sha512-tggFrk1AIShG/RUQbEwt2Tr/E+ObkfwrPjR6BjbRvsx24+PSjK8zrq0GWPNCjo8qpRx4DuJzlcvWJqlm+0h3kw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/traverse" "^7.25.0"
+
+"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.27.1.tgz#bb1c25af34d75115ce229a1de7fa44bf8f955670"
+ integrity sha512-6BpaYGDavZqkI6yT+KSPdpZFfpnd68UKXbcjI9pJ13pvHhPrCKWOOLp+ysvMeA+DxnhuPpgIaRpxRxo5A9t5jw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2":
+ version "7.21.0-placeholder-for-preset-env.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703"
+ integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==
+
+"@babel/plugin-syntax-async-generators@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d"
+ integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-class-properties@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10"
+ integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-class-static-block@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406"
+ integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-dynamic-import@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3"
+ integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-export-namespace-from@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a"
+ integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/plugin-syntax-import-assertions@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.7.tgz#2a0b406b5871a20a841240586b1300ce2088a778"
+ integrity sha512-Ec3NRUMoi8gskrkBe3fNmEQfxDvY8bgfQpz6jlk/41kX9eUjvpyqWU7PBP/pLAvMaSQjbMNKJmvX57jP+M6bPg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-syntax-import-assertions@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz#88894aefd2b03b5ee6ad1562a7c8e1587496aecd"
+ integrity sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-syntax-import-attributes@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz#b4f9ea95a79e6912480c4b626739f86a076624ca"
+ integrity sha512-hbX+lKKeUMGihnK8nvKqmXBInriT3GVjzXKFriV3YC6APGxMbP8RZNFwy91+hocLXq90Mta+HshoB31802bb8A==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-syntax-import-attributes@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz#34c017d54496f9b11b61474e7ea3dfd5563ffe07"
+ integrity sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-syntax-import-meta@^7.10.4":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51"
+ integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-json-strings@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a"
+ integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-jsx@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz#39a1fa4a7e3d3d7f34e2acc6be585b718d30e02d"
+ integrity sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-syntax-jsx@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz#2f9beb5eff30fa507c5532d107daac7b888fa34c"
+ integrity sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-syntax-logical-assignment-operators@^7.10.4":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699"
+ integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9"
+ integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-numeric-separator@^7.10.4":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97"
+ integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-object-rest-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871"
+ integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-catch-binding@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1"
+ integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-chaining@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a"
+ integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-private-property-in-object@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad"
+ integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-top-level-await@^7.14.5":
+ version "7.14.5"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c"
+ integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.14.5"
+
+"@babel/plugin-syntax-typescript@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz#58d458271b4d3b6bb27ee6ac9525acbb259bad1c"
+ integrity sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-syntax-typescript@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz#5147d29066a793450f220c63fa3a9431b7e6dd18"
+ integrity sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-syntax-unicode-sets-regex@^7.18.6":
+ version "7.18.6"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357"
+ integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.18.6"
+ "@babel/helper-plugin-utils" "^7.18.6"
+
+"@babel/plugin-transform-arrow-functions@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.7.tgz#4f6886c11e423bd69f3ce51dbf42424a5f275514"
+ integrity sha512-Dt9LQs6iEY++gXUwY03DNFat5C2NbO48jj+j/bSAz6b3HgPs39qcPiYt77fDObIcFwj3/C2ICX9YMwGflUoSHQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-arrow-functions@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz#6e2061067ba3ab0266d834a9f94811196f2aba9a"
+ integrity sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-async-generator-functions@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.0.tgz#b785cf35d73437f6276b1e30439a57a50747bddf"
+ integrity sha512-uaIi2FdqzjpAMvVqvB51S42oC2JEVgh0LDsGfZVDysWE8LrJtQC2jvKmOqEYThKyB7bDEb7BP1GYWDm7tABA0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-remap-async-to-generator" "^7.25.0"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/traverse" "^7.25.0"
+
+"@babel/plugin-transform-async-generator-functions@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.28.0.tgz#1276e6c7285ab2cd1eccb0bc7356b7a69ff842c2"
+ integrity sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-remap-async-to-generator" "^7.27.1"
+ "@babel/traverse" "^7.28.0"
+
+"@babel/plugin-transform-async-to-generator@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz#72a3af6c451d575842a7e9b5a02863414355bdcc"
+ integrity sha512-SQY01PcJfmQ+4Ash7NE+rpbLFbmqA2GPIgqzxfFTL4t1FKRq4zTms/7htKpoCUI9OcFYgzqfmCdH53s6/jn5fA==
+ dependencies:
+ "@babel/helper-module-imports" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-remap-async-to-generator" "^7.24.7"
+
+"@babel/plugin-transform-async-to-generator@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.27.1.tgz#9a93893b9379b39466c74474f55af03de78c66e7"
+ integrity sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==
+ dependencies:
+ "@babel/helper-module-imports" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-remap-async-to-generator" "^7.27.1"
+
+"@babel/plugin-transform-block-scoped-functions@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz#a4251d98ea0c0f399dafe1a35801eaba455bbf1f"
+ integrity sha512-yO7RAz6EsVQDaBH18IDJcMB1HnrUn2FJ/Jslc/WtPPWcjhpUJXU/rjbwmluzp7v/ZzWcEhTMXELnnsz8djWDwQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-block-scoped-functions@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz#558a9d6e24cf72802dd3b62a4b51e0d62c0f57f9"
+ integrity sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-block-scoping@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.0.tgz#23a6ed92e6b006d26b1869b1c91d1b917c2ea2ac"
+ integrity sha512-yBQjYoOjXlFv9nlXb3f1casSHOZkWr29NX+zChVanLg5Nc157CrbEX9D7hxxtTpuFy7Q0YzmmWfJxzvps4kXrQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-block-scoping@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.0.tgz#e7c50cbacc18034f210b93defa89638666099451"
+ integrity sha512-gKKnwjpdx5sER/wl0WN0efUBFzF/56YZO0RJrSYP4CljXnP31ByY7fol89AzomdlLNzI36AvOTmYHsnZTCkq8Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-class-properties@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.7.tgz#256879467b57b0b68c7ddfc5b76584f398cd6834"
+ integrity sha512-vKbfawVYayKcSeSR5YYzzyXvsDFWU2mD8U5TFeXtbCPLFUqe7GyCgvO6XDHzje862ODrOwy6WCPmKeWHbCFJ4w==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-class-properties@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.27.1.tgz#dd40a6a370dfd49d32362ae206ddaf2bb082a925"
+ integrity sha512-D0VcalChDMtuRvJIu3U/fwWjf8ZMykz5iZsg77Nuj821vCKI3zCyRLwRdWbsuJ/uRwZhZ002QtCqIkwC/ZkvbA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-class-static-block@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz#c82027ebb7010bc33c116d4b5044fbbf8c05484d"
+ integrity sha512-HMXK3WbBPpZQufbMG4B46A90PkuuhN9vBCb5T8+VAHqvAqvcLi+2cKoukcpmUYkszLhScU3l1iudhrks3DggRQ==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+
+"@babel/plugin-transform-class-static-block@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.27.1.tgz#7e920d5625b25bbccd3061aefbcc05805ed56ce4"
+ integrity sha512-s734HmYU78MVzZ++joYM+NkJusItbdRcbm+AGRgJCt3iA+yux0QpD9cBVdz3tKyrjVYWRl7j0mHSmv4lhV0aoA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-classes@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.0.tgz#63122366527d88e0ef61b612554fe3f8c793991e"
+ integrity sha512-xyi6qjr/fYU304fiRwFbekzkqVJZ6A7hOjWZd+89FVcBqPV3S9Wuozz82xdpLspckeaafntbzglaW4pqpzvtSw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-compilation-targets" "^7.24.8"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-replace-supers" "^7.25.0"
+ "@babel/traverse" "^7.25.0"
+ globals "^11.1.0"
+
+"@babel/plugin-transform-classes@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.0.tgz#12fa46cffc32a6e084011b650539e880add8a0f8"
+ integrity sha512-IjM1IoJNw72AZFlj33Cu8X0q2XK/6AaVC3jQu+cgQ5lThWD5ajnuUAml80dqRmOhmPkTH8uAwnpMu9Rvj0LTRA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.3"
+ "@babel/helper-compilation-targets" "^7.27.2"
+ "@babel/helper-globals" "^7.28.0"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-replace-supers" "^7.27.1"
+ "@babel/traverse" "^7.28.0"
+
+"@babel/plugin-transform-computed-properties@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz#4cab3214e80bc71fae3853238d13d097b004c707"
+ integrity sha512-25cS7v+707Gu6Ds2oY6tCkUwsJ9YIDbggd9+cu9jzzDgiNq7hR/8dkzxWfKWnTic26vsI3EsCXNd4iEB6e8esQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/template" "^7.24.7"
+
+"@babel/plugin-transform-computed-properties@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz#81662e78bf5e734a97982c2b7f0a793288ef3caa"
+ integrity sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/template" "^7.27.1"
+
+"@babel/plugin-transform-destructuring@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.8.tgz#c828e814dbe42a2718a838c2a2e16a408e055550"
+ integrity sha512-36e87mfY8TnRxc7yc6M9g9gOB7rKgSahqkIKwLpz4Ppk2+zC2Cy1is0uwtuSG6AE4zlTOUa+7JGz9jCJGLqQFQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-destructuring@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.0.tgz#0f156588f69c596089b7d5b06f5af83d9aa7f97a"
+ integrity sha512-v1nrSMBiKcodhsyJ4Gf+Z0U/yawmJDBOTpEB3mcQY52r9RIyPneGyAS/yM6seP/8I+mWI3elOMtT5dB8GJVs+A==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/traverse" "^7.28.0"
+
+"@babel/plugin-transform-dotall-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz#5f8bf8a680f2116a7207e16288a5f974ad47a7a0"
+ integrity sha512-ZOA3W+1RRTSWvyqcMJDLqbchh7U4NRGqwRfFSVbOLS/ePIP4vHB5e8T8eXcuqyN1QkgKyj5wuW0lcS85v4CrSw==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-dotall-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.27.1.tgz#aa6821de864c528b1fecf286f0a174e38e826f4d"
+ integrity sha512-gEbkDVGRvjj7+T1ivxrfgygpT7GUd4vmODtYpbs0gZATdkX8/iSnOtZSxiZnsgm1YjTgjI6VKBGSJJevkrclzw==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-duplicate-keys@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz#dd20102897c9a2324e5adfffb67ff3610359a8ee"
+ integrity sha512-JdYfXyCRihAe46jUIliuL2/s0x0wObgwwiGxw/UbgJBr20gQBThrokO4nYKgWkD7uBaqM7+9x5TU7NkExZJyzw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-duplicate-keys@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.27.1.tgz#f1fbf628ece18e12e7b32b175940e68358f546d1"
+ integrity sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-duplicate-named-capturing-groups-regex@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.0.tgz#809af7e3339466b49c034c683964ee8afb3e2604"
+ integrity sha512-YLpb4LlYSc3sCUa35un84poXoraOiQucUTTu8X1j18JV+gNa8E0nyUf/CjZ171IRGr4jEguF+vzJU66QZhn29g==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.25.0"
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-duplicate-named-capturing-groups-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.27.1.tgz#5043854ca620a94149372e69030ff8cb6a9eb0ec"
+ integrity sha512-hkGcueTEzuhB30B3eJCbCYeCaaEQOmQR0AdvzpD4LoN0GXMWzzGSuRrxR2xTnCrvNbVwK9N6/jQ92GSLfiZWoQ==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-dynamic-import@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz#4d8b95e3bae2b037673091aa09cd33fecd6419f4"
+ integrity sha512-sc3X26PhZQDb3JhORmakcbvkeInvxz+A8oda99lj7J60QRuPZvNAk9wQlTBS1ZynelDrDmTU4pw1tyc5d5ZMUg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+
+"@babel/plugin-transform-dynamic-import@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.27.1.tgz#4c78f35552ac0e06aa1f6e3c573d67695e8af5a4"
+ integrity sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-explicit-resource-management@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.0.tgz#45be6211b778dbf4b9d54c4e8a2b42fa72e09a1a"
+ integrity sha512-K8nhUcn3f6iB+P3gwCv/no7OdzOZQcKchW6N389V6PD8NUWKZHzndOd9sPDVbMoBsbmjMqlB4L9fm+fEFNVlwQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/plugin-transform-destructuring" "^7.28.0"
+
+"@babel/plugin-transform-exponentiation-operator@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz#b629ee22645f412024297d5245bce425c31f9b0d"
+ integrity sha512-Rqe/vSc9OYgDajNIK35u7ot+KeCoetqQYFXM4Epf7M7ez3lWlOjrDjrwMei6caCVhfdw+mIKD4cgdGNy5JQotQ==
+ dependencies:
+ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-exponentiation-operator@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.27.1.tgz#fc497b12d8277e559747f5a3ed868dd8064f83e1"
+ integrity sha512-uspvXnhHvGKf2r4VVtBpeFnuDWsJLQ6MF6lGJLC89jBR1uoVeqM416AZtTuhTezOfgHicpJQmoD5YUakO/YmXQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-export-namespace-from@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz#176d52d8d8ed516aeae7013ee9556d540c53f197"
+ integrity sha512-v0K9uNYsPL3oXZ/7F9NNIbAj2jv1whUEtyA6aujhekLs56R++JDQuzRcP2/z4WX5Vg/c5lE9uWZA0/iUoFhLTA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+
+"@babel/plugin-transform-export-namespace-from@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.27.1.tgz#71ca69d3471edd6daa711cf4dfc3400415df9c23"
+ integrity sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-for-of@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz#f25b33f72df1d8be76399e1b8f3f9d366eb5bc70"
+ integrity sha512-wo9ogrDG1ITTTBsy46oGiN1dS9A7MROBTcYsfS8DtsImMkHk9JXJ3EWQM6X2SUw4x80uGPlwj0o00Uoc6nEE3g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+
+"@babel/plugin-transform-for-of@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz#bc24f7080e9ff721b63a70ac7b2564ca15b6c40a"
+ integrity sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+
+"@babel/plugin-transform-function-name@^7.25.1":
+ version "7.25.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.1.tgz#b85e773097526c1a4fc4ba27322748643f26fc37"
+ integrity sha512-TVVJVdW9RKMNgJJlLtHsKDTydjZAbwIsn6ySBPQaEAUU5+gVvlJt/9nRmqVbsV/IBanRjzWoaAQKLoamWVOUuA==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.24.8"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/traverse" "^7.25.1"
+
+"@babel/plugin-transform-function-name@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz#4d0bf307720e4dce6d7c30fcb1fd6ca77bdeb3a7"
+ integrity sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/plugin-transform-json-strings@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz#f3e9c37c0a373fee86e36880d45b3664cedaf73a"
+ integrity sha512-2yFnBGDvRuxAaE/f0vfBKvtnvvqU8tGpMHqMNpTN2oWMKIR3NqFkjaAgGwawhqK/pIN2T3XdjGPdaG0vDhOBGw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+
+"@babel/plugin-transform-json-strings@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.27.1.tgz#a2e0ce6ef256376bd527f290da023983527a4f4c"
+ integrity sha512-6WVLVJiTjqcQauBhn1LkICsR2H+zm62I3h9faTDKt1qP4jn2o72tSvqMwtGFKGTpojce0gJs+76eZ2uCHRZh0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-literals@^7.25.2":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.2.tgz#deb1ad14fc5490b9a65ed830e025bca849d8b5f3"
+ integrity sha512-HQI+HcTbm9ur3Z2DkO+jgESMAMcYLuN/A7NRw9juzxAezN9AvqvUTnpKP/9kkYANz6u7dFlAyOu44ejuGySlfw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-literals@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz#baaefa4d10a1d4206f9dcdda50d7d5827bb70b24"
+ integrity sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-logical-assignment-operators@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz#a58fb6eda16c9dc8f9ff1c7b1ba6deb7f4694cb0"
+ integrity sha512-4D2tpwlQ1odXmTEIFWy9ELJcZHqrStlzK/dAOWYyxX3zT0iXQB6banjgeOJQXzEc4S0E0a5A+hahxPaEFYftsw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+
+"@babel/plugin-transform-logical-assignment-operators@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.27.1.tgz#890cb20e0270e0e5bebe3f025b434841c32d5baa"
+ integrity sha512-SJvDs5dXxiae4FbSL1aBJlG4wvl594N6YEVVn9e3JGulwioy6z3oPjx/sQBO3Y4NwUu5HNix6KJ3wBZoewcdbw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-member-expression-literals@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz#3b4454fb0e302e18ba4945ba3246acb1248315df"
+ integrity sha512-T/hRC1uqrzXMKLQ6UCwMT85S3EvqaBXDGf0FaMf4446Qx9vKwlghvee0+uuZcDUCZU5RuNi4781UQ7R308zzBw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-member-expression-literals@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz#37b88ba594d852418e99536f5612f795f23aeaf9"
+ integrity sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-modules-amd@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz#65090ed493c4a834976a3ca1cde776e6ccff32d7"
+ integrity sha512-9+pB1qxV3vs/8Hdmz/CulFB8w2tuu6EB94JZFsjdqxQokwGa9Unap7Bo2gGBGIvPmDIVvQrom7r5m/TCDMURhg==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-modules-amd@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.27.1.tgz#a4145f9d87c2291fe2d05f994b65dba4e3e7196f"
+ integrity sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-modules-commonjs@^7.24.7", "@babel/plugin-transform-modules-commonjs@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.8.tgz#ab6421e564b717cb475d6fff70ae7f103536ea3c"
+ integrity sha512-WHsk9H8XxRs3JXKWFiqtQebdh9b/pTk4EgueygFzYlTKAg0Ud985mSevdNjdXdFBATSKVJGQXP1tv6aGbssLKA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.24.8"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-simple-access" "^7.24.7"
+
+"@babel/plugin-transform-modules-commonjs@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz#8e44ed37c2787ecc23bdc367f49977476614e832"
+ integrity sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-modules-systemjs@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.0.tgz#8f46cdc5f9e5af74f3bd019485a6cbe59685ea33"
+ integrity sha512-YPJfjQPDXxyQWg/0+jHKj1llnY5f/R6a0p/vP4lPymxLu7Lvl4k2WMitqi08yxwQcCVUUdG9LCUj4TNEgAp3Jw==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.25.0"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-validator-identifier" "^7.24.7"
+ "@babel/traverse" "^7.25.0"
+
+"@babel/plugin-transform-modules-systemjs@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.27.1.tgz#00e05b61863070d0f3292a00126c16c0e024c4ed"
+ integrity sha512-w5N1XzsRbc0PQStASMksmUeqECuzKuTJer7kFagK8AXgpCMkeDMO5S+aaFb7A51ZYDF7XI34qsTX+fkHiIm5yA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-validator-identifier" "^7.27.1"
+ "@babel/traverse" "^7.27.1"
+
+"@babel/plugin-transform-modules-umd@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz#edd9f43ec549099620df7df24e7ba13b5c76efc8"
+ integrity sha512-3aytQvqJ/h9z4g8AsKPLvD4Zqi2qT+L3j7XoFFu1XBlZWEl2/1kWnhmAbxpLgPrHSY0M6UA02jyTiwUVtiKR6A==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-modules-umd@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.27.1.tgz#63f2cf4f6dc15debc12f694e44714863d34cd334"
+ integrity sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-named-capturing-groups-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz#9042e9b856bc6b3688c0c2e4060e9e10b1460923"
+ integrity sha512-/jr7h/EWeJtk1U/uz2jlsCioHkZk1JJZVcc8oQsJ1dUlaJD83f4/6Zeh2aHt9BIFokHIsSeDfhUmju0+1GPd6g==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-named-capturing-groups-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.27.1.tgz#f32b8f7818d8fc0cc46ee20a8ef75f071af976e1"
+ integrity sha512-SstR5JYy8ddZvD6MhV0tM/j16Qds4mIpJTOd1Yu9J9pJjH93bxHECF7pgtc28XvkzTD6Pxcm/0Z73Hvk7kb3Ng==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-new-target@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz#31ff54c4e0555cc549d5816e4ab39241dfb6ab00"
+ integrity sha512-RNKwfRIXg4Ls/8mMTza5oPF5RkOW8Wy/WgMAp1/F1yZ8mMbtwXW+HDoJiOsagWrAhI5f57Vncrmr9XeT4CVapA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-new-target@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.27.1.tgz#259c43939728cad1706ac17351b7e6a7bea1abeb"
+ integrity sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-nullish-coalescing-operator@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz#1de4534c590af9596f53d67f52a92f12db984120"
+ integrity sha512-Ts7xQVk1OEocqzm8rHMXHlxvsfZ0cEF2yomUqpKENHWMF4zKk175Y4q8H5knJes6PgYad50uuRmt3UJuhBw8pQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+
+"@babel/plugin-transform-nullish-coalescing-operator@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.27.1.tgz#4f9d3153bf6782d73dd42785a9d22d03197bc91d"
+ integrity sha512-aGZh6xMo6q9vq1JGcw58lZ1Z0+i0xB2x0XaauNIUXd6O1xXc3RwoWEBlsTQrY4KQ9Jf0s5rgD6SiNkaUdJegTA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-numeric-separator@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz#bea62b538c80605d8a0fac9b40f48e97efa7de63"
+ integrity sha512-e6q1TiVUzvH9KRvicuxdBTUj4AdKSRwzIyFFnfnezpCfP2/7Qmbb8qbU2j7GODbl4JMkblitCQjKYUaX/qkkwA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+
+"@babel/plugin-transform-numeric-separator@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.27.1.tgz#614e0b15cc800e5997dadd9bd6ea524ed6c819c6"
+ integrity sha512-fdPKAcujuvEChxDBJ5c+0BTaS6revLV7CJL08e4m3de8qJfNIuCc2nc7XJYOjBoTMJeqSmwXJ0ypE14RCjLwaw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-object-rest-spread@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz#d13a2b93435aeb8a197e115221cab266ba6e55d6"
+ integrity sha512-4QrHAr0aXQCEFni2q4DqKLD31n2DL+RxcwnNjDFkSG0eNQ/xCavnRkfCUjsyqGC2OviNJvZOF/mQqZBw7i2C5Q==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-transform-parameters" "^7.24.7"
+
+"@babel/plugin-transform-object-rest-spread@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.0.tgz#d23021857ffd7cd809f54d624299b8086402ed8d"
+ integrity sha512-9VNGikXxzu5eCiQjdE4IZn8sb9q7Xsk5EXLDBKUYg1e/Tve8/05+KJEtcxGxAgCY5t/BpKQM+JEL/yT4tvgiUA==
+ dependencies:
+ "@babel/helper-compilation-targets" "^7.27.2"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/plugin-transform-destructuring" "^7.28.0"
+ "@babel/plugin-transform-parameters" "^7.27.7"
+ "@babel/traverse" "^7.28.0"
+
+"@babel/plugin-transform-object-super@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz#66eeaff7830bba945dd8989b632a40c04ed625be"
+ integrity sha512-A/vVLwN6lBrMFmMDmPPz0jnE6ZGx7Jq7d6sT/Ev4H65RER6pZ+kczlf1DthF5N0qaPHBsI7UXiE8Zy66nmAovg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-replace-supers" "^7.24.7"
+
+"@babel/plugin-transform-object-super@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz#1c932cd27bf3874c43a5cac4f43ebf970c9871b5"
+ integrity sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-replace-supers" "^7.27.1"
+
+"@babel/plugin-transform-optional-catch-binding@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz#00eabd883d0dd6a60c1c557548785919b6e717b4"
+ integrity sha512-uLEndKqP5BfBbC/5jTwPxLh9kqPWWgzN/f8w6UwAIirAEqiIVJWWY312X72Eub09g5KF9+Zn7+hT7sDxmhRuKA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+
+"@babel/plugin-transform-optional-catch-binding@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.27.1.tgz#84c7341ebde35ccd36b137e9e45866825072a30c"
+ integrity sha512-txEAEKzYrHEX4xSZN4kJ+OfKXFVSWKB2ZxM9dpcE3wT7smwkNmXo5ORRlVzMVdJbD+Q8ILTgSD7959uj+3Dm3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-optional-chaining@^7.24.7", "@babel/plugin-transform-optional-chaining@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.8.tgz#bb02a67b60ff0406085c13d104c99a835cdf365d"
+ integrity sha512-5cTOLSMs9eypEy8JUVvIKOu6NgvbJMnpG62VpIHrTmROdQ+L5mDAaI40g25k5vXti55JWNX5jCkq3HZxXBQANw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+
+"@babel/plugin-transform-optional-chaining@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.27.1.tgz#874ce3c4f06b7780592e946026eb76a32830454f"
+ integrity sha512-BQmKPPIuc8EkZgNKsv0X4bPmOoayeu4F1YCwx2/CfmDSXDbp7GnzlUH+/ul5VGfRg1AoFPsrIThlEBj2xb4CAg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+
+"@babel/plugin-transform-parameters@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz#5881f0ae21018400e320fc7eb817e529d1254b68"
+ integrity sha512-yGWW5Rr+sQOhK0Ot8hjDJuxU3XLRQGflvT4lhlSY0DFvdb3TwKaY26CJzHtYllU0vT9j58hc37ndFPsqT1SrzA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-parameters@^7.27.7":
+ version "7.27.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.7.tgz#1fd2febb7c74e7d21cf3b05f7aebc907940af53a"
+ integrity sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-private-methods@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.7.tgz#e6318746b2ae70a59d023d5cc1344a2ba7a75f5e"
+ integrity sha512-COTCOkG2hn4JKGEKBADkA8WNb35TGkkRbI5iT845dB+NyqgO8Hn+ajPbSnIQznneJTa3d30scb6iz/DhH8GsJQ==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-private-methods@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.27.1.tgz#fdacbab1c5ed81ec70dfdbb8b213d65da148b6af"
+ integrity sha512-10FVt+X55AjRAYI9BrdISN9/AQWHqldOeZDUoLyif1Kn05a56xVBXb8ZouL8pZ9jem8QpXaOt8TS7RHUIS+GPA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-private-property-in-object@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz#4eec6bc701288c1fab5f72e6a4bbc9d67faca061"
+ integrity sha512-9z76mxwnwFxMyxZWEgdgECQglF2Q7cFLm0kMf8pGwt+GSJsY0cONKj/UuO4bOH0w/uAel3ekS4ra5CEAyJRmDA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-create-class-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+
+"@babel/plugin-transform-private-property-in-object@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.27.1.tgz#4dbbef283b5b2f01a21e81e299f76e35f900fb11"
+ integrity sha512-5J+IhqTi1XPa0DXF83jYOaARrX+41gOewWbkPyjMNRDqgOCqdffGh8L3f/Ek5utaEBZExjSAzcyjmV9SSAWObQ==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ "@babel/helper-create-class-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-property-literals@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz#f0d2ed8380dfbed949c42d4d790266525d63bbdc"
+ integrity sha512-EMi4MLQSHfd2nrCqQEWxFdha2gBCqU4ZcCng4WBGZ5CJL4bBRW0ptdqqDdeirGZcpALazVVNJqRmsO8/+oNCBA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-property-literals@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz#07eafd618800591e88073a0af1b940d9a42c6424"
+ integrity sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-react-constant-elements@^7.21.3":
+ version "7.25.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.25.1.tgz#71a665ed16ce618067d05f4a98130207349d82ae"
+ integrity sha512-SLV/giH/V4SmloZ6Dt40HjTGTAIkxn33TVIHxNGNvo8ezMhrxBkzisj4op1KZYPIOHFLqhv60OHvX+YRu4xbmQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-react-display-name@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.7.tgz#9caff79836803bc666bcfe210aeb6626230c293b"
+ integrity sha512-H/Snz9PFxKsS1JLI4dJLtnJgCJRoo0AUm3chP6NYr+9En1JMKloheEiLIhlp5MDVznWo+H3AAC1Mc8lmUEpsgg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-react-display-name@^7.27.1":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.28.0.tgz#6f20a7295fea7df42eb42fed8f896813f5b934de"
+ integrity sha512-D6Eujc2zMxKjfa4Zxl4GHMsmhKKZ9VpcqIchJLvwTxad9zWIYulwYItBovpDOoNLISpcZSXoDJ5gaGbQUDqViA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-react-jsx-development@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.24.7.tgz#eaee12f15a93f6496d852509a850085e6361470b"
+ integrity sha512-QG9EnzoGn+Qar7rxuW+ZOsbWOt56FvvI93xInqsZDC5fsekx1AlIO4KIJ5M+D0p0SqSH156EpmZyXq630B8OlQ==
+ dependencies:
+ "@babel/plugin-transform-react-jsx" "^7.24.7"
+
+"@babel/plugin-transform-react-jsx-development@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.27.1.tgz#47ff95940e20a3a70e68ad3d4fcb657b647f6c98"
+ integrity sha512-ykDdF5yI4f1WrAolLqeF3hmYU12j9ntLQl/AOG1HAS21jxyg1Q0/J/tpREuYLfatGdGmXp/3yS0ZA76kOlVq9Q==
+ dependencies:
+ "@babel/plugin-transform-react-jsx" "^7.27.1"
+
+"@babel/plugin-transform-react-jsx@^7.24.7":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.25.2.tgz#e37e8ebfa77e9f0b16ba07fadcb6adb47412227a"
+ integrity sha512-KQsqEAVBpU82NM/B/N9j9WOdphom1SZH3R+2V7INrQUH+V9EBFwZsEJl8eBIVeQE62FxJCc70jzEZwqU7RcVqA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-module-imports" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/plugin-syntax-jsx" "^7.24.7"
+ "@babel/types" "^7.25.2"
+
+"@babel/plugin-transform-react-jsx@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.27.1.tgz#1023bc94b78b0a2d68c82b5e96aed573bcfb9db0"
+ integrity sha512-2KH4LWGSrJIkVf5tSiBFYuXDAoWRq2MMwgivCf+93dd0GQi8RXLjKA/0EvRnVV5G0hrHczsquXuD01L8s6dmBw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ "@babel/helper-module-imports" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/plugin-syntax-jsx" "^7.27.1"
+ "@babel/types" "^7.27.1"
+
+"@babel/plugin-transform-react-pure-annotations@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.7.tgz#bdd9d140d1c318b4f28b29a00fb94f97ecab1595"
+ integrity sha512-PLgBVk3fzbmEjBJ/u8kFzOqS9tUeDjiaWud/rRym/yjCo/M9cASPlnrd2ZmmZpQT40fOOrvR8jh+n8jikrOhNA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-react-pure-annotations@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.27.1.tgz#339f1ce355eae242e0649f232b1c68907c02e879"
+ integrity sha512-JfuinvDOsD9FVMTHpzA/pBLisxpv1aSf+OIV8lgH3MuWrks19R27e6a6DipIg4aX1Zm9Wpb04p8wljfKrVSnPA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-regenerator@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz#021562de4534d8b4b1851759fd7af4e05d2c47f8"
+ integrity sha512-lq3fvXPdimDrlg6LWBoqj+r/DEWgONuwjuOuQCSYgRroXDH/IdM1C0IZf59fL5cHLpjEH/O6opIRBbqv7ELnuA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ regenerator-transform "^0.15.2"
+
+"@babel/plugin-transform-regenerator@^7.28.0":
+ version "7.28.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.1.tgz#bde80603442ff4bb4e910bc8b35485295d556ab1"
+ integrity sha512-P0QiV/taaa3kXpLY+sXla5zec4E+4t4Aqc9ggHlfZ7a2cp8/x/Gv08jfwEtn9gnnYIMvHx6aoOZ8XJL8eU71Dg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-regexp-modifiers@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.27.1.tgz#df9ba5577c974e3f1449888b70b76169998a6d09"
+ integrity sha512-TtEciroaiODtXvLZv4rmfMhkCv8jx3wgKpL68PuiPh2M4fvz5jhsA7697N1gMvkvr/JTF13DrFYyEbY9U7cVPA==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-reserved-words@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz#80037fe4fbf031fc1125022178ff3938bb3743a4"
+ integrity sha512-0DUq0pHcPKbjFZCfTss/pGkYMfy3vFWydkUBd9r0GHpIyfs2eCDENvqadMycRS9wZCXR41wucAfJHJmwA0UmoQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-reserved-words@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.27.1.tgz#40fba4878ccbd1c56605a4479a3a891ac0274bb4"
+ integrity sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-runtime@^7.25.9":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.28.0.tgz#462e79008cc7bdac03e4c5e1765b9de2bcd31c21"
+ integrity sha512-dGopk9nZrtCs2+nfIem25UuHyt5moSJamArzIoh9/vezUQPmYDOzjaHDCkAzuGJibCIkPup8rMT2+wYB6S73cA==
+ dependencies:
+ "@babel/helper-module-imports" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ babel-plugin-polyfill-corejs2 "^0.4.14"
+ babel-plugin-polyfill-corejs3 "^0.13.0"
+ babel-plugin-polyfill-regenerator "^0.6.5"
+ semver "^6.3.1"
+
+"@babel/plugin-transform-shorthand-properties@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz#85448c6b996e122fa9e289746140aaa99da64e73"
+ integrity sha512-KsDsevZMDsigzbA09+vacnLpmPH4aWjcZjXdyFKGzpplxhbeB4wYtury3vglQkg6KM/xEPKt73eCjPPf1PgXBA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-shorthand-properties@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz#532abdacdec87bfee1e0ef8e2fcdee543fe32b90"
+ integrity sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-spread@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz#e8a38c0fde7882e0fb8f160378f74bd885cc7bb3"
+ integrity sha512-x96oO0I09dgMDxJaANcRyD4ellXFLLiWhuwDxKZX5g2rWP1bTPkBSwCYv96VDXVT1bD9aPj8tppr5ITIh8hBng==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+
+"@babel/plugin-transform-spread@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz#1a264d5fc12750918f50e3fe3e24e437178abb08"
+ integrity sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+
+"@babel/plugin-transform-sticky-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz#96ae80d7a7e5251f657b5cf18f1ea6bf926f5feb"
+ integrity sha512-kHPSIJc9v24zEml5geKg9Mjx5ULpfncj0wRpYtxbvKyTtHCYDkVE3aHQ03FrpEo4gEe2vrJJS1Y9CJTaThA52g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-sticky-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.27.1.tgz#18984935d9d2296843a491d78a014939f7dcd280"
+ integrity sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-template-literals@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz#a05debb4a9072ae8f985bcf77f3f215434c8f8c8"
+ integrity sha512-AfDTQmClklHCOLxtGoP7HkeMw56k1/bTQjwsfhL6pppo/M4TOBSq+jjBUBLmV/4oeFg4GWMavIl44ZeCtmmZTw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-template-literals@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz#1a0eb35d8bb3e6efc06c9fd40eb0bcef548328b8"
+ integrity sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-typeof-symbol@^7.24.8":
+ version "7.24.8"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.8.tgz#383dab37fb073f5bfe6e60c654caac309f92ba1c"
+ integrity sha512-adNTUpDCVnmAE58VEqKlAA6ZBlNkMnWD0ZcW76lyNFN3MJniyGFZfNwERVk8Ap56MCnXztmDr19T4mPTztcuaw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.8"
+
+"@babel/plugin-transform-typeof-symbol@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.27.1.tgz#70e966bb492e03509cf37eafa6dcc3051f844369"
+ integrity sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-typescript@^7.24.7":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.25.2.tgz#237c5d10de6d493be31637c6b9fa30b6c5461add"
+ integrity sha512-lBwRvjSmqiMYe/pS0+1gggjJleUJi7NzjvQ1Fkqtt69hBa/0t1YuW/MLQMAPixfwaQOHUXsd6jeU3Z+vdGv3+A==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.24.7"
+ "@babel/helper-create-class-features-plugin" "^7.25.0"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7"
+ "@babel/plugin-syntax-typescript" "^7.24.7"
+
+"@babel/plugin-transform-typescript@^7.27.1":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz#796cbd249ab56c18168b49e3e1d341b72af04a6b"
+ integrity sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.27.3"
+ "@babel/helper-create-class-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1"
+ "@babel/plugin-syntax-typescript" "^7.27.1"
+
+"@babel/plugin-transform-unicode-escapes@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz#2023a82ced1fb4971630a2e079764502c4148e0e"
+ integrity sha512-U3ap1gm5+4edc2Q/P+9VrBNhGkfnf+8ZqppY71Bo/pzZmXhhLdqgaUl6cuB07O1+AQJtCLfaOmswiNbSQ9ivhw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-unicode-escapes@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.27.1.tgz#3e3143f8438aef842de28816ece58780190cf806"
+ integrity sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-unicode-property-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz#9073a4cd13b86ea71c3264659590ac086605bbcd"
+ integrity sha512-uH2O4OV5M9FZYQrwc7NdVmMxQJOCCzFeYudlZSzUAHRFeOujQefa92E74TQDVskNHCzOXoigEuoyzHDhaEaK5w==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-unicode-property-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.27.1.tgz#bdfe2d3170c78c5691a3c3be934c8c0087525956"
+ integrity sha512-uW20S39PnaTImxp39O5qFlHLS9LJEmANjMG7SxIhap8rCHqu0Ik+tLEPX5DKmHn6CsWQ7j3lix2tFOa5YtL12Q==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-unicode-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz#dfc3d4a51127108099b19817c0963be6a2adf19f"
+ integrity sha512-hlQ96MBZSAXUq7ltkjtu3FJCCSMx/j629ns3hA3pXnBXjanNP0LHi+JpPeA81zaWgVK1VGH95Xuy7u0RyQ8kMg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-unicode-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.27.1.tgz#25948f5c395db15f609028e370667ed8bae9af97"
+ integrity sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/plugin-transform-unicode-sets-regex@^7.24.7":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.7.tgz#d40705d67523803a576e29c63cef6e516b858ed9"
+ integrity sha512-2G8aAvF4wy1w/AGZkemprdGMRg5o6zPNhbHVImRz3lss55TYCBd6xStN19rt8XJHq20sqV0JbyWjOWwQRwV/wg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.24.7"
+ "@babel/helper-plugin-utils" "^7.24.7"
+
+"@babel/plugin-transform-unicode-sets-regex@^7.27.1":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.27.1.tgz#6ab706d10f801b5c72da8bb2548561fa04193cd1"
+ integrity sha512-EtkOujbc4cgvb0mlpQefi4NTPBzhSIevblFevACNLUspmrALgmEBdL/XfnyyITfd8fKBZrZys92zOWcik7j9Tw==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.27.1"
+ "@babel/helper-plugin-utils" "^7.27.1"
+
+"@babel/preset-env@^7.20.2":
+ version "7.25.3"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.25.3.tgz#0bf4769d84ac51d1073ab4a86f00f30a3a83c67c"
+ integrity sha512-QsYW7UeAaXvLPX9tdVliMJE7MD7M6MLYVTovRTIwhoYQVFHR1rM4wO8wqAezYi3/BpSD+NzVCZ69R6smWiIi8g==
+ dependencies:
+ "@babel/compat-data" "^7.25.2"
+ "@babel/helper-compilation-targets" "^7.25.2"
+ "@babel/helper-plugin-utils" "^7.24.8"
+ "@babel/helper-validator-option" "^7.24.8"
+ "@babel/plugin-bugfix-firefox-class-in-computed-class-key" "^7.25.3"
+ "@babel/plugin-bugfix-safari-class-field-initializer-scope" "^7.25.0"
+ "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.25.0"
+ "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.24.7"
+ "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.25.0"
+ "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2"
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/plugin-syntax-class-properties" "^7.12.13"
+ "@babel/plugin-syntax-class-static-block" "^7.14.5"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+ "@babel/plugin-syntax-import-assertions" "^7.24.7"
+ "@babel/plugin-syntax-import-attributes" "^7.24.7"
+ "@babel/plugin-syntax-import-meta" "^7.10.4"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+ "@babel/plugin-syntax-private-property-in-object" "^7.14.5"
+ "@babel/plugin-syntax-top-level-await" "^7.14.5"
+ "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6"
+ "@babel/plugin-transform-arrow-functions" "^7.24.7"
+ "@babel/plugin-transform-async-generator-functions" "^7.25.0"
+ "@babel/plugin-transform-async-to-generator" "^7.24.7"
+ "@babel/plugin-transform-block-scoped-functions" "^7.24.7"
+ "@babel/plugin-transform-block-scoping" "^7.25.0"
+ "@babel/plugin-transform-class-properties" "^7.24.7"
+ "@babel/plugin-transform-class-static-block" "^7.24.7"
+ "@babel/plugin-transform-classes" "^7.25.0"
+ "@babel/plugin-transform-computed-properties" "^7.24.7"
+ "@babel/plugin-transform-destructuring" "^7.24.8"
+ "@babel/plugin-transform-dotall-regex" "^7.24.7"
+ "@babel/plugin-transform-duplicate-keys" "^7.24.7"
+ "@babel/plugin-transform-duplicate-named-capturing-groups-regex" "^7.25.0"
+ "@babel/plugin-transform-dynamic-import" "^7.24.7"
+ "@babel/plugin-transform-exponentiation-operator" "^7.24.7"
+ "@babel/plugin-transform-export-namespace-from" "^7.24.7"
+ "@babel/plugin-transform-for-of" "^7.24.7"
+ "@babel/plugin-transform-function-name" "^7.25.1"
+ "@babel/plugin-transform-json-strings" "^7.24.7"
+ "@babel/plugin-transform-literals" "^7.25.2"
+ "@babel/plugin-transform-logical-assignment-operators" "^7.24.7"
+ "@babel/plugin-transform-member-expression-literals" "^7.24.7"
+ "@babel/plugin-transform-modules-amd" "^7.24.7"
+ "@babel/plugin-transform-modules-commonjs" "^7.24.8"
+ "@babel/plugin-transform-modules-systemjs" "^7.25.0"
+ "@babel/plugin-transform-modules-umd" "^7.24.7"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.24.7"
+ "@babel/plugin-transform-new-target" "^7.24.7"
+ "@babel/plugin-transform-nullish-coalescing-operator" "^7.24.7"
+ "@babel/plugin-transform-numeric-separator" "^7.24.7"
+ "@babel/plugin-transform-object-rest-spread" "^7.24.7"
+ "@babel/plugin-transform-object-super" "^7.24.7"
+ "@babel/plugin-transform-optional-catch-binding" "^7.24.7"
+ "@babel/plugin-transform-optional-chaining" "^7.24.8"
+ "@babel/plugin-transform-parameters" "^7.24.7"
+ "@babel/plugin-transform-private-methods" "^7.24.7"
+ "@babel/plugin-transform-private-property-in-object" "^7.24.7"
+ "@babel/plugin-transform-property-literals" "^7.24.7"
+ "@babel/plugin-transform-regenerator" "^7.24.7"
+ "@babel/plugin-transform-reserved-words" "^7.24.7"
+ "@babel/plugin-transform-shorthand-properties" "^7.24.7"
+ "@babel/plugin-transform-spread" "^7.24.7"
+ "@babel/plugin-transform-sticky-regex" "^7.24.7"
+ "@babel/plugin-transform-template-literals" "^7.24.7"
+ "@babel/plugin-transform-typeof-symbol" "^7.24.8"
+ "@babel/plugin-transform-unicode-escapes" "^7.24.7"
+ "@babel/plugin-transform-unicode-property-regex" "^7.24.7"
+ "@babel/plugin-transform-unicode-regex" "^7.24.7"
+ "@babel/plugin-transform-unicode-sets-regex" "^7.24.7"
+ "@babel/preset-modules" "0.1.6-no-external-plugins"
+ babel-plugin-polyfill-corejs2 "^0.4.10"
+ babel-plugin-polyfill-corejs3 "^0.10.4"
+ babel-plugin-polyfill-regenerator "^0.6.1"
+ core-js-compat "^3.37.1"
+ semver "^6.3.1"
+
+"@babel/preset-env@^7.25.9":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.28.0.tgz#d23a6bc17b43227d11db77081a0779c706b5569c"
+ integrity sha512-VmaxeGOwuDqzLl5JUkIRM1X2Qu2uKGxHEQWh+cvvbl7JuJRgKGJSfsEF/bUaxFhJl/XAyxBe7q7qSuTbKFuCyg==
+ dependencies:
+ "@babel/compat-data" "^7.28.0"
+ "@babel/helper-compilation-targets" "^7.27.2"
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-validator-option" "^7.27.1"
+ "@babel/plugin-bugfix-firefox-class-in-computed-class-key" "^7.27.1"
+ "@babel/plugin-bugfix-safari-class-field-initializer-scope" "^7.27.1"
+ "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.27.1"
+ "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.27.1"
+ "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.27.1"
+ "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2"
+ "@babel/plugin-syntax-import-assertions" "^7.27.1"
+ "@babel/plugin-syntax-import-attributes" "^7.27.1"
+ "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6"
+ "@babel/plugin-transform-arrow-functions" "^7.27.1"
+ "@babel/plugin-transform-async-generator-functions" "^7.28.0"
+ "@babel/plugin-transform-async-to-generator" "^7.27.1"
+ "@babel/plugin-transform-block-scoped-functions" "^7.27.1"
+ "@babel/plugin-transform-block-scoping" "^7.28.0"
+ "@babel/plugin-transform-class-properties" "^7.27.1"
+ "@babel/plugin-transform-class-static-block" "^7.27.1"
+ "@babel/plugin-transform-classes" "^7.28.0"
+ "@babel/plugin-transform-computed-properties" "^7.27.1"
+ "@babel/plugin-transform-destructuring" "^7.28.0"
+ "@babel/plugin-transform-dotall-regex" "^7.27.1"
+ "@babel/plugin-transform-duplicate-keys" "^7.27.1"
+ "@babel/plugin-transform-duplicate-named-capturing-groups-regex" "^7.27.1"
+ "@babel/plugin-transform-dynamic-import" "^7.27.1"
+ "@babel/plugin-transform-explicit-resource-management" "^7.28.0"
+ "@babel/plugin-transform-exponentiation-operator" "^7.27.1"
+ "@babel/plugin-transform-export-namespace-from" "^7.27.1"
+ "@babel/plugin-transform-for-of" "^7.27.1"
+ "@babel/plugin-transform-function-name" "^7.27.1"
+ "@babel/plugin-transform-json-strings" "^7.27.1"
+ "@babel/plugin-transform-literals" "^7.27.1"
+ "@babel/plugin-transform-logical-assignment-operators" "^7.27.1"
+ "@babel/plugin-transform-member-expression-literals" "^7.27.1"
+ "@babel/plugin-transform-modules-amd" "^7.27.1"
+ "@babel/plugin-transform-modules-commonjs" "^7.27.1"
+ "@babel/plugin-transform-modules-systemjs" "^7.27.1"
+ "@babel/plugin-transform-modules-umd" "^7.27.1"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.27.1"
+ "@babel/plugin-transform-new-target" "^7.27.1"
+ "@babel/plugin-transform-nullish-coalescing-operator" "^7.27.1"
+ "@babel/plugin-transform-numeric-separator" "^7.27.1"
+ "@babel/plugin-transform-object-rest-spread" "^7.28.0"
+ "@babel/plugin-transform-object-super" "^7.27.1"
+ "@babel/plugin-transform-optional-catch-binding" "^7.27.1"
+ "@babel/plugin-transform-optional-chaining" "^7.27.1"
+ "@babel/plugin-transform-parameters" "^7.27.7"
+ "@babel/plugin-transform-private-methods" "^7.27.1"
+ "@babel/plugin-transform-private-property-in-object" "^7.27.1"
+ "@babel/plugin-transform-property-literals" "^7.27.1"
+ "@babel/plugin-transform-regenerator" "^7.28.0"
+ "@babel/plugin-transform-regexp-modifiers" "^7.27.1"
+ "@babel/plugin-transform-reserved-words" "^7.27.1"
+ "@babel/plugin-transform-shorthand-properties" "^7.27.1"
+ "@babel/plugin-transform-spread" "^7.27.1"
+ "@babel/plugin-transform-sticky-regex" "^7.27.1"
+ "@babel/plugin-transform-template-literals" "^7.27.1"
+ "@babel/plugin-transform-typeof-symbol" "^7.27.1"
+ "@babel/plugin-transform-unicode-escapes" "^7.27.1"
+ "@babel/plugin-transform-unicode-property-regex" "^7.27.1"
+ "@babel/plugin-transform-unicode-regex" "^7.27.1"
+ "@babel/plugin-transform-unicode-sets-regex" "^7.27.1"
+ "@babel/preset-modules" "0.1.6-no-external-plugins"
+ babel-plugin-polyfill-corejs2 "^0.4.14"
+ babel-plugin-polyfill-corejs3 "^0.13.0"
+ babel-plugin-polyfill-regenerator "^0.6.5"
+ core-js-compat "^3.43.0"
+ semver "^6.3.1"
+
+"@babel/preset-modules@0.1.6-no-external-plugins":
+ version "0.1.6-no-external-plugins"
+ resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a"
+ integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/types" "^7.4.4"
+ esutils "^2.0.2"
+
+"@babel/preset-react@^7.18.6":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.24.7.tgz#480aeb389b2a798880bf1f889199e3641cbb22dc"
+ integrity sha512-AAH4lEkpmzFWrGVlHaxJB7RLH21uPQ9+He+eFLWHmF9IuFQVugz8eAsamaW0DXRrTfco5zj1wWtpdcXJUOfsag==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-validator-option" "^7.24.7"
+ "@babel/plugin-transform-react-display-name" "^7.24.7"
+ "@babel/plugin-transform-react-jsx" "^7.24.7"
+ "@babel/plugin-transform-react-jsx-development" "^7.24.7"
+ "@babel/plugin-transform-react-pure-annotations" "^7.24.7"
+
+"@babel/preset-react@^7.25.9":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.27.1.tgz#86ea0a5ca3984663f744be2fd26cb6747c3fd0ec"
+ integrity sha512-oJHWh2gLhU9dW9HHr42q0cI0/iHHXTLGe39qvpAZZzagHy0MzYLCnCVV0symeRvzmjHyVU7mw2K06E6u/JwbhA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-validator-option" "^7.27.1"
+ "@babel/plugin-transform-react-display-name" "^7.27.1"
+ "@babel/plugin-transform-react-jsx" "^7.27.1"
+ "@babel/plugin-transform-react-jsx-development" "^7.27.1"
+ "@babel/plugin-transform-react-pure-annotations" "^7.27.1"
+
+"@babel/preset-typescript@^7.21.0":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz#66cd86ea8f8c014855671d5ea9a737139cbbfef1"
+ integrity sha512-SyXRe3OdWwIwalxDg5UtJnJQO+YPcTfwiIY2B0Xlddh9o7jpWLvv8X1RthIeDOxQ+O1ML5BLPCONToObyVQVuQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.24.7"
+ "@babel/helper-validator-option" "^7.24.7"
+ "@babel/plugin-syntax-jsx" "^7.24.7"
+ "@babel/plugin-transform-modules-commonjs" "^7.24.7"
+ "@babel/plugin-transform-typescript" "^7.24.7"
+
+"@babel/preset-typescript@^7.25.9":
+ version "7.27.1"
+ resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz#190742a6428d282306648a55b0529b561484f912"
+ integrity sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.27.1"
+ "@babel/helper-validator-option" "^7.27.1"
+ "@babel/plugin-syntax-jsx" "^7.27.1"
+ "@babel/plugin-transform-modules-commonjs" "^7.27.1"
+ "@babel/plugin-transform-typescript" "^7.27.1"
+
+"@babel/regjsgen@^0.8.0":
+ version "0.8.0"
+ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310"
+ integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
+
+"@babel/runtime-corejs3@^7.25.9":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.28.0.tgz#4d9938897f5a9aaa9e5f99408a4bf86daba40ec1"
+ integrity sha512-nlIXnSqLcBij8K8TtkxbBJgfzfvi75V1pAKSM7dUXejGw12vJAqez74jZrHTsJ3Z+Aczc5Q/6JgNjKRMsVU44g==
+ dependencies:
+ core-js-pure "^3.43.0"
+
+"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.23.9", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.3", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.25.0.tgz#3af9a91c1b739c569d5d80cc917280919c544ecb"
+ integrity sha512-7dRy4DwXwtzBrPbZflqxnvfxLF8kdZXPkhymtDeFoFqE6ldzjQFgYTtYIFARcLEYDrqfBfYcZt1WqFxRoyC9Rw==
+ dependencies:
+ regenerator-runtime "^0.14.0"
+
+"@babel/runtime@^7.25.9":
+ version "7.27.6"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.6.tgz#ec4070a04d76bae8ddbb10770ba55714a417b7c6"
+ integrity sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==
+
+"@babel/template@^7.24.7", "@babel/template@^7.25.0":
+ version "7.25.0"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.0.tgz#e733dc3134b4fede528c15bc95e89cb98c52592a"
+ integrity sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==
+ dependencies:
+ "@babel/code-frame" "^7.24.7"
+ "@babel/parser" "^7.25.0"
+ "@babel/types" "^7.25.0"
+
+"@babel/template@^7.27.1", "@babel/template@^7.27.2":
+ version "7.27.2"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.27.2.tgz#fa78ceed3c4e7b63ebf6cb39e5852fca45f6809d"
+ integrity sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==
+ dependencies:
+ "@babel/code-frame" "^7.27.1"
+ "@babel/parser" "^7.27.2"
+ "@babel/types" "^7.27.1"
+
+"@babel/traverse@^7.24.7", "@babel/traverse@^7.24.8", "@babel/traverse@^7.25.0", "@babel/traverse@^7.25.1", "@babel/traverse@^7.25.2", "@babel/traverse@^7.25.3":
+ version "7.25.3"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.25.3.tgz#f1b901951c83eda2f3e29450ce92743783373490"
+ integrity sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==
+ dependencies:
+ "@babel/code-frame" "^7.24.7"
+ "@babel/generator" "^7.25.0"
+ "@babel/parser" "^7.25.3"
+ "@babel/template" "^7.25.0"
+ "@babel/types" "^7.25.2"
+ debug "^4.3.1"
+ globals "^11.1.0"
+
+"@babel/traverse@^7.25.9", "@babel/traverse@^7.27.1", "@babel/traverse@^7.27.3", "@babel/traverse@^7.28.0":
+ version "7.28.0"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.28.0.tgz#518aa113359b062042379e333db18380b537e34b"
+ integrity sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==
+ dependencies:
+ "@babel/code-frame" "^7.27.1"
+ "@babel/generator" "^7.28.0"
+ "@babel/helper-globals" "^7.28.0"
+ "@babel/parser" "^7.28.0"
+ "@babel/template" "^7.27.2"
+ "@babel/types" "^7.28.0"
+ debug "^4.3.1"
+
+"@babel/types@^7.21.3", "@babel/types@^7.24.7", "@babel/types@^7.24.8", "@babel/types@^7.25.0", "@babel/types@^7.25.2", "@babel/types@^7.4.4":
+ version "7.25.2"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.25.2.tgz#55fb231f7dc958cd69ea141a4c2997e819646125"
+ integrity sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==
+ dependencies:
+ "@babel/helper-string-parser" "^7.24.8"
+ "@babel/helper-validator-identifier" "^7.24.7"
+ to-fast-properties "^2.0.0"
+
+"@babel/types@^7.27.1", "@babel/types@^7.27.3", "@babel/types@^7.27.6", "@babel/types@^7.28.0":
+ version "7.28.1"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.28.1.tgz#2aaf3c10b31ba03a77ac84f52b3912a0edef4cf9"
+ integrity sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ==
+ dependencies:
+ "@babel/helper-string-parser" "^7.27.1"
+ "@babel/helper-validator-identifier" "^7.27.1"
+
+"@braintree/sanitize-url@^7.0.4":
+ version "7.1.1"
+ resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz#15e19737d946559289b915e5dad3b4c28407735e"
+ integrity sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==
+
+"@chevrotain/cst-dts-gen@11.0.3":
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz#5e0863cc57dc45e204ccfee6303225d15d9d4783"
+ integrity sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==
+ dependencies:
+ "@chevrotain/gast" "11.0.3"
+ "@chevrotain/types" "11.0.3"
+ lodash-es "4.17.21"
+
+"@chevrotain/gast@11.0.3":
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/@chevrotain/gast/-/gast-11.0.3.tgz#e84d8880323fe8cbe792ef69ce3ffd43a936e818"
+ integrity sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==
+ dependencies:
+ "@chevrotain/types" "11.0.3"
+ lodash-es "4.17.21"
+
+"@chevrotain/regexp-to-ast@11.0.3":
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz#11429a81c74a8e6a829271ce02fc66166d56dcdb"
+ integrity sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==
+
+"@chevrotain/types@11.0.3":
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/@chevrotain/types/-/types-11.0.3.tgz#f8a03914f7b937f594f56eb89312b3b8f1c91848"
+ integrity sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==
+
+"@chevrotain/utils@11.0.3":
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/@chevrotain/utils/-/utils-11.0.3.tgz#e39999307b102cff3645ec4f5b3665f5297a2224"
+ integrity sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==
+
+"@colors/colors@1.5.0":
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9"
+ integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==
+
+"@csstools/cascade-layer-name-parser@^2.0.5":
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/@csstools/cascade-layer-name-parser/-/cascade-layer-name-parser-2.0.5.tgz#43f962bebead0052a9fed1a2deeb11f85efcbc72"
+ integrity sha512-p1ko5eHgV+MgXFVa4STPKpvPxr6ReS8oS2jzTukjR74i5zJNyWO1ZM1m8YKBXnzDKWfBN1ztLYlHxbVemDD88A==
+
+"@csstools/color-helpers@^5.0.2":
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/@csstools/color-helpers/-/color-helpers-5.0.2.tgz#82592c9a7c2b83c293d9161894e2a6471feb97b8"
+ integrity sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA==
+
+"@csstools/css-calc@^2.1.4":
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/@csstools/css-calc/-/css-calc-2.1.4.tgz#8473f63e2fcd6e459838dd412401d5948f224c65"
+ integrity sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==
+
+"@csstools/css-color-parser@^3.0.10":
+ version "3.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/css-color-parser/-/css-color-parser-3.0.10.tgz#79fc68864dd43c3b6782d2b3828bc0fa9d085c10"
+ integrity sha512-TiJ5Ajr6WRd1r8HSiwJvZBiJOqtH86aHpUjq5aEKWHiII2Qfjqd/HCWKPOW8EP4vcspXbHnXrwIDlu5savQipg==
+ dependencies:
+ "@csstools/color-helpers" "^5.0.2"
+ "@csstools/css-calc" "^2.1.4"
+
+"@csstools/css-parser-algorithms@^3.0.5":
+ version "3.0.5"
+ resolved "https://registry.yarnpkg.com/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz#5755370a9a29abaec5515b43c8b3f2cf9c2e3076"
+ integrity sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==
+
+"@csstools/css-tokenizer@^3.0.4":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz#333fedabc3fd1a8e5d0100013731cf19e6a8c5d3"
+ integrity sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==
+
+"@csstools/media-query-list-parser@^4.0.3":
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/@csstools/media-query-list-parser/-/media-query-list-parser-4.0.3.tgz#7aec77bcb89c2da80ef207e73f474ef9e1b3cdf1"
+ integrity sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==
+
+"@csstools/postcss-cascade-layers@^5.0.2":
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-5.0.2.tgz#dd2c70db3867b88975f2922da3bfbae7d7a2cae7"
+ integrity sha512-nWBE08nhO8uWl6kSAeCx4im7QfVko3zLrtgWZY4/bP87zrSPpSyN/3W3TDqz1jJuH+kbKOHXg5rJnK+ZVYcFFg==
+ dependencies:
+ "@csstools/selector-specificity" "^5.0.0"
+ postcss-selector-parser "^7.0.0"
+
+"@csstools/postcss-color-function@^4.0.10":
+ version "4.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-4.0.10.tgz#11ad43a66ef2cc794ab826a07df8b5fa9fb47a3a"
+ integrity sha512-4dY0NBu7NVIpzxZRgh/Q/0GPSz/jLSw0i/u3LTUor0BkQcz/fNhN10mSWBDsL0p9nDb0Ky1PD6/dcGbhACuFTQ==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-color-mix-function@^3.0.10":
+ version "3.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-color-mix-function/-/postcss-color-mix-function-3.0.10.tgz#8c9d0ccfae5c45a9870dd84807ea2995c7a3a514"
+ integrity sha512-P0lIbQW9I4ShE7uBgZRib/lMTf9XMjJkFl/d6w4EMNHu2qvQ6zljJGEcBkw/NsBtq/6q3WrmgxSS8kHtPMkK4Q==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-color-mix-variadic-function-arguments@^1.0.0":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-color-mix-variadic-function-arguments/-/postcss-color-mix-variadic-function-arguments-1.0.0.tgz#0b29cb9b4630d7ed68549db265662d41554a17ed"
+ integrity sha512-Z5WhouTyD74dPFPrVE7KydgNS9VvnjB8qcdes9ARpCOItb4jTnm7cHp4FhxCRUoyhabD0WVv43wbkJ4p8hLAlQ==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-content-alt-text@^2.0.6":
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-content-alt-text/-/postcss-content-alt-text-2.0.6.tgz#548862226eac54bab0ee5f1bf3a9981393ab204b"
+ integrity sha512-eRjLbOjblXq+byyaedQRSrAejKGNAFued+LcbzT+LCL78fabxHkxYjBbxkroONxHHYu2qxhFK2dBStTLPG3jpQ==
+ dependencies:
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-exponential-functions@^2.0.9":
+ version "2.0.9"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-exponential-functions/-/postcss-exponential-functions-2.0.9.tgz#fc03d1272888cb77e64cc1a7d8a33016e4f05c69"
+ integrity sha512-abg2W/PI3HXwS/CZshSa79kNWNZHdJPMBXeZNyPQFbbj8sKO3jXxOt/wF7juJVjyDTc6JrvaUZYFcSBZBhaxjw==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-font-format-keywords@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-4.0.0.tgz#6730836eb0153ff4f3840416cc2322f129c086e6"
+ integrity sha512-usBzw9aCRDvchpok6C+4TXC57btc4bJtmKQWOHQxOVKen1ZfVqBUuCZ/wuqdX5GHsD0NRSr9XTP+5ID1ZZQBXw==
+ dependencies:
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-gamut-mapping@^2.0.10":
+ version "2.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-gamut-mapping/-/postcss-gamut-mapping-2.0.10.tgz#f518d941231d721dbecf5b41e3c441885ff2f28b"
+ integrity sha512-QDGqhJlvFnDlaPAfCYPsnwVA6ze+8hhrwevYWlnUeSjkkZfBpcCO42SaUD8jiLlq7niouyLgvup5lh+f1qessg==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-gradients-interpolation-method@^5.0.10":
+ version "5.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-gradients-interpolation-method/-/postcss-gradients-interpolation-method-5.0.10.tgz#3146da352c31142a721fdba062ac3a6d11dbbec3"
+ integrity sha512-HHPauB2k7Oits02tKFUeVFEU2ox/H3OQVrP3fSOKDxvloOikSal+3dzlyTZmYsb9FlY9p5EUpBtz0//XBmy+aw==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-hwb-function@^4.0.10":
+ version "4.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-4.0.10.tgz#f93f3c457e6440ac37ef9b908feb5d901b417d50"
+ integrity sha512-nOKKfp14SWcdEQ++S9/4TgRKchooLZL0TUFdun3nI4KPwCjETmhjta1QT4ICQcGVWQTvrsgMM/aLB5We+kMHhQ==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-ic-unit@^4.0.2":
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-4.0.2.tgz#7561e09db65fac8304ceeab9dd3e5c6e43414587"
+ integrity sha512-lrK2jjyZwh7DbxaNnIUjkeDmU8Y6KyzRBk91ZkI5h8nb1ykEfZrtIVArdIjX4DHMIBGpdHrgP0n4qXDr7OHaKA==
+ dependencies:
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-initial@^2.0.1":
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-initial/-/postcss-initial-2.0.1.tgz#c385bd9d8ad31ad159edd7992069e97ceea4d09a"
+ integrity sha512-L1wLVMSAZ4wovznquK0xmC7QSctzO4D0Is590bxpGqhqjboLXYA16dWZpfwImkdOgACdQ9PqXsuRroW6qPlEsg==
+
+"@csstools/postcss-is-pseudo-class@^5.0.3":
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-5.0.3.tgz#d34e850bcad4013c2ed7abe948bfa0448aa8eb74"
+ integrity sha512-jS/TY4SpG4gszAtIg7Qnf3AS2pjcUM5SzxpApOrlndMeGhIbaTzWBzzP/IApXoNWEW7OhcjkRT48jnAUIFXhAQ==
+ dependencies:
+ "@csstools/selector-specificity" "^5.0.0"
+ postcss-selector-parser "^7.0.0"
+
+"@csstools/postcss-light-dark-function@^2.0.9":
+ version "2.0.9"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-light-dark-function/-/postcss-light-dark-function-2.0.9.tgz#9fb080188907539734a9d5311d2a1cb82531ef38"
+ integrity sha512-1tCZH5bla0EAkFAI2r0H33CDnIBeLUaJh1p+hvvsylJ4svsv2wOmJjJn+OXwUZLXef37GYbRIVKX+X+g6m+3CQ==
+ dependencies:
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-logical-float-and-clear@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-logical-float-and-clear/-/postcss-logical-float-and-clear-3.0.0.tgz#62617564182cf86ab5d4e7485433ad91e4c58571"
+ integrity sha512-SEmaHMszwakI2rqKRJgE+8rpotFfne1ZS6bZqBoQIicFyV+xT1UF42eORPxJkVJVrH9C0ctUgwMSn3BLOIZldQ==
+
+"@csstools/postcss-logical-overflow@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-logical-overflow/-/postcss-logical-overflow-2.0.0.tgz#c6de7c5f04e3d4233731a847f6c62819bcbcfa1d"
+ integrity sha512-spzR1MInxPuXKEX2csMamshR4LRaSZ3UXVaRGjeQxl70ySxOhMpP2252RAFsg8QyyBXBzuVOOdx1+bVO5bPIzA==
+
+"@csstools/postcss-logical-overscroll-behavior@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-logical-overscroll-behavior/-/postcss-logical-overscroll-behavior-2.0.0.tgz#43c03eaecdf34055ef53bfab691db6dc97a53d37"
+ integrity sha512-e/webMjoGOSYfqLunyzByZj5KKe5oyVg/YSbie99VEaSDE2kimFm0q1f6t/6Jo+VVCQ/jbe2Xy+uX+C4xzWs4w==
+
+"@csstools/postcss-logical-resize@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-logical-resize/-/postcss-logical-resize-3.0.0.tgz#4df0eeb1a61d7bd85395e56a5cce350b5dbfdca6"
+ integrity sha512-DFbHQOFW/+I+MY4Ycd/QN6Dg4Hcbb50elIJCfnwkRTCX05G11SwViI5BbBlg9iHRl4ytB7pmY5ieAFk3ws7yyg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-logical-viewport-units@^3.0.4":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-logical-viewport-units/-/postcss-logical-viewport-units-3.0.4.tgz#016d98a8b7b5f969e58eb8413447eb801add16fc"
+ integrity sha512-q+eHV1haXA4w9xBwZLKjVKAWn3W2CMqmpNpZUk5kRprvSiBEGMgrNH3/sJZ8UA3JgyHaOt3jwT9uFa4wLX4EqQ==
+ dependencies:
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-media-minmax@^2.0.9":
+ version "2.0.9"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-media-minmax/-/postcss-media-minmax-2.0.9.tgz#184252d5b93155ae526689328af6bdf3fc113987"
+ integrity sha512-af9Qw3uS3JhYLnCbqtZ9crTvvkR+0Se+bBqSr7ykAnl9yKhk6895z9rf+2F4dClIDJWxgn0iZZ1PSdkhrbs2ig==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/media-query-list-parser" "^4.0.3"
+
+"@csstools/postcss-media-queries-aspect-ratio-number-values@^3.0.5":
+ version "3.0.5"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-media-queries-aspect-ratio-number-values/-/postcss-media-queries-aspect-ratio-number-values-3.0.5.tgz#f485c31ec13d6b0fb5c528a3474334a40eff5f11"
+ integrity sha512-zhAe31xaaXOY2Px8IYfoVTB3wglbJUVigGphFLj6exb7cjZRH9A6adyE22XfFK3P2PzwRk0VDeTJmaxpluyrDg==
+ dependencies:
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/media-query-list-parser" "^4.0.3"
+
+"@csstools/postcss-nested-calc@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-nested-calc/-/postcss-nested-calc-4.0.0.tgz#754e10edc6958d664c11cde917f44ba144141c62"
+ integrity sha512-jMYDdqrQQxE7k9+KjstC3NbsmC063n1FTPLCgCRS2/qHUbHM0mNy9pIn4QIiQGs9I/Bg98vMqw7mJXBxa0N88A==
+ dependencies:
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-normalize-display-values@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.0.tgz#ecdde2daf4e192e5da0c6fd933b6d8aff32f2a36"
+ integrity sha512-HlEoG0IDRoHXzXnkV4in47dzsxdsjdz6+j7MLjaACABX2NfvjFS6XVAnpaDyGesz9gK2SC7MbNwdCHusObKJ9Q==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-oklab-function@^4.0.10":
+ version "4.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-4.0.10.tgz#d4c23c51dd0be45e6dedde22432d7d0003710780"
+ integrity sha512-ZzZUTDd0fgNdhv8UUjGCtObPD8LYxMH+MJsW9xlZaWTV8Ppr4PtxlHYNMmF4vVWGl0T6f8tyWAKjoI6vePSgAg==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-progressive-custom-properties@^4.1.0":
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-4.1.0.tgz#70c8d41b577f4023633b7e3791604e0b7f3775bc"
+ integrity sha512-YrkI9dx8U4R8Sz2EJaoeD9fI7s7kmeEBfmO+UURNeL6lQI7VxF6sBE+rSqdCBn4onwqmxFdBU3lTwyYb/lCmxA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-random-function@^2.0.1":
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-random-function/-/postcss-random-function-2.0.1.tgz#3191f32fe72936e361dadf7dbfb55a0209e2691e"
+ integrity sha512-q+FQaNiRBhnoSNo+GzqGOIBKoHQ43lYz0ICrV+UudfWnEF6ksS6DsBIJSISKQT2Bvu3g4k6r7t0zYrk5pDlo8w==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-relative-color-syntax@^3.0.10":
+ version "3.0.10"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-relative-color-syntax/-/postcss-relative-color-syntax-3.0.10.tgz#daa840583969461e1e06b12e9c591e52a790ec86"
+ integrity sha512-8+0kQbQGg9yYG8hv0dtEpOMLwB9M+P7PhacgIzVzJpixxV4Eq9AUQtQw8adMmAJU1RBBmIlpmtmm3XTRd/T00g==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+"@csstools/postcss-scope-pseudo-class@^4.0.1":
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-scope-pseudo-class/-/postcss-scope-pseudo-class-4.0.1.tgz#9fe60e9d6d91d58fb5fc6c768a40f6e47e89a235"
+ integrity sha512-IMi9FwtH6LMNuLea1bjVMQAsUhFxJnyLSgOp/cpv5hrzWmrUYU5fm0EguNDIIOHUqzXode8F/1qkC/tEo/qN8Q==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+"@csstools/postcss-sign-functions@^1.1.4":
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-sign-functions/-/postcss-sign-functions-1.1.4.tgz#a9ac56954014ae4c513475b3f1b3e3424a1e0c12"
+ integrity sha512-P97h1XqRPcfcJndFdG95Gv/6ZzxUBBISem0IDqPZ7WMvc/wlO+yU0c5D/OCpZ5TJoTt63Ok3knGk64N+o6L2Pg==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-stepped-value-functions@^4.0.9":
+ version "4.0.9"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-4.0.9.tgz#36036f1a0e5e5ee2308e72f3c9cb433567c387b9"
+ integrity sha512-h9btycWrsex4dNLeQfyU3y3w40LMQooJWFMm/SK9lrKguHDcFl4VMkncKKoXi2z5rM9YGWbUQABI8BT2UydIcA==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-text-decoration-shorthand@^4.0.2":
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.2.tgz#a3bcf80492e6dda36477538ab8e8943908c9f80a"
+ integrity sha512-8XvCRrFNseBSAGxeaVTaNijAu+FzUvjwFXtcrynmazGb/9WUdsPCpBX+mHEHShVRq47Gy4peYAoxYs8ltUnmzA==
+ dependencies:
+ "@csstools/color-helpers" "^5.0.2"
+ postcss-value-parser "^4.2.0"
+
+"@csstools/postcss-trigonometric-functions@^4.0.9":
+ version "4.0.9"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.9.tgz#3f94ed2e319b57f2c59720b64e4d0a8a6fb8c3b2"
+ integrity sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==
+ dependencies:
+ "@csstools/css-calc" "^2.1.4"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+
+"@csstools/postcss-unset-value@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-4.0.0.tgz#7caa981a34196d06a737754864baf77d64de4bba"
+ integrity sha512-cBz3tOCI5Fw6NIFEwU3RiwK6mn3nKegjpJuzCndoGq3BZPkUjnsq7uQmIeMNeMbMk7YD2MfKcgCpZwX5jyXqCA==
+
+"@csstools/selector-resolve-nested@^3.1.0":
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/@csstools/selector-resolve-nested/-/selector-resolve-nested-3.1.0.tgz#848c6f44cb65e3733e478319b9342b7aa436fac7"
+ integrity sha512-mf1LEW0tJLKfWyvn5KdDrhpxHyuxpbNwTIwOYLIvsTffeyOf85j5oIzfG0yosxDgx/sswlqBnESYUcQH0vgZ0g==
+
+"@csstools/selector-specificity@^5.0.0":
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz#037817b574262134cabd68fc4ec1a454f168407b"
+ integrity sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==
+
+"@csstools/utilities@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@csstools/utilities/-/utilities-2.0.0.tgz#f7ff0fee38c9ffb5646d47b6906e0bc8868bde60"
+ integrity sha512-5VdOr0Z71u+Yp3ozOx8T11N703wIFGVRgOWbOZMKgglPJsWA54MRIoMNVMa7shUToIhx5J8vX4sOZgD2XiihiQ==
+
+"@discoveryjs/json-ext@0.5.7":
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70"
+ integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==
+
+"@docsearch/css@3.9.0":
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.9.0.tgz#3bc29c96bf024350d73b0cfb7c2a7b71bf251cd5"
+ integrity sha512-cQbnVbq0rrBwNAKegIac/t6a8nWoUAn8frnkLFW6YARaRmAQr5/Eoe6Ln2fqkUCZ40KpdrKbpSAmgrkviOxuWA==
+
+"@docsearch/react@^3.9.0":
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.9.0.tgz#d0842b700c3ee26696786f3c8ae9f10c1a3f0db3"
+ integrity sha512-mb5FOZYZIkRQ6s/NWnM98k879vu5pscWqTLubLFBO87igYYT4VzVazh4h5o/zCvTIZgEt3PvsCOMOswOUo9yHQ==
+ dependencies:
+ "@algolia/autocomplete-core" "1.17.9"
+ "@algolia/autocomplete-preset-algolia" "1.17.9"
+ "@docsearch/css" "3.9.0"
+ algoliasearch "^5.14.2"
+
+"@docusaurus/babel@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/babel/-/babel-3.8.1.tgz#db329ac047184214e08e2dbc809832c696c18506"
+ integrity sha512-3brkJrml8vUbn9aeoZUlJfsI/GqyFcDgQJwQkmBtclJgWDEQBKKeagZfOgx0WfUQhagL1sQLNW0iBdxnI863Uw==
+ dependencies:
+ "@babel/core" "^7.25.9"
+ "@babel/generator" "^7.25.9"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.3"
+ "@babel/plugin-transform-runtime" "^7.25.9"
+ "@babel/preset-env" "^7.25.9"
+ "@babel/preset-react" "^7.25.9"
+ "@babel/preset-typescript" "^7.25.9"
+ "@babel/runtime" "^7.25.9"
+ "@babel/runtime-corejs3" "^7.25.9"
+ "@babel/traverse" "^7.25.9"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ babel-plugin-dynamic-import-node "^2.3.3"
+ fs-extra "^11.1.1"
+ tslib "^2.6.0"
+
+"@docusaurus/bundler@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/bundler/-/bundler-3.8.1.tgz#e2b11d615f09a6e470774bb36441b8d06736b94c"
+ integrity sha512-/z4V0FRoQ0GuSLToNjOSGsk6m2lQUG4FRn8goOVoZSRsTrU8YR2aJacX5K3RG18EaX9b+52pN4m1sL3MQZVsQA==
+ dependencies:
+ "@babel/core" "^7.25.9"
+ "@docusaurus/babel" "3.8.1"
+ "@docusaurus/cssnano-preset" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ babel-loader "^9.2.1"
+ clean-css "^5.3.3"
+ copy-webpack-plugin "^11.0.0"
+ css-loader "^6.11.0"
+ css-minimizer-webpack-plugin "^5.0.1"
+ cssnano "^6.1.2"
+ file-loader "^6.2.0"
+ html-minifier-terser "^7.2.0"
+ mini-css-extract-plugin "^2.9.2"
+ null-loader "^4.0.1"
+ postcss "^8.5.4"
+ postcss-loader "^7.3.4"
+ postcss-preset-env "^10.2.1"
+ terser-webpack-plugin "^5.3.9"
+ tslib "^2.6.0"
+ url-loader "^4.1.1"
+ webpack "^5.95.0"
+ webpackbar "^6.0.1"
+
+"@docusaurus/core@3.8.1", "@docusaurus/core@^3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-3.8.1.tgz#c22e47c16a22cb7d245306c64bc54083838ff3db"
+ integrity sha512-ENB01IyQSqI2FLtOzqSI3qxG2B/jP4gQPahl2C3XReiLebcVh5B5cB9KYFvdoOqOWPyr5gXK4sjgTKv7peXCrA==
+ dependencies:
+ "@docusaurus/babel" "3.8.1"
+ "@docusaurus/bundler" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ boxen "^6.2.1"
+ chalk "^4.1.2"
+ chokidar "^3.5.3"
+ cli-table3 "^0.6.3"
+ combine-promises "^1.1.0"
+ commander "^5.1.0"
+ core-js "^3.31.1"
+ detect-port "^1.5.1"
+ escape-html "^1.0.3"
+ eta "^2.2.0"
+ eval "^0.1.8"
+ execa "5.1.1"
+ fs-extra "^11.1.1"
+ html-tags "^3.3.1"
+ html-webpack-plugin "^5.6.0"
+ leven "^3.1.0"
+ lodash "^4.17.21"
+ open "^8.4.0"
+ p-map "^4.0.0"
+ prompts "^2.4.2"
+ react-helmet-async "npm:@slorber/react-helmet-async@1.3.0"
+ react-loadable "npm:@docusaurus/react-loadable@6.0.0"
+ react-loadable-ssr-addon-v5-slorber "^1.0.1"
+ react-router "^5.3.4"
+ react-router-config "^5.1.1"
+ react-router-dom "^5.3.4"
+ semver "^7.5.4"
+ serve-handler "^6.1.6"
+ tinypool "^1.0.2"
+ tslib "^2.6.0"
+ update-notifier "^6.0.2"
+ webpack "^5.95.0"
+ webpack-bundle-analyzer "^4.10.2"
+ webpack-dev-server "^4.15.2"
+ webpack-merge "^6.0.1"
+
+"@docusaurus/cssnano-preset@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-3.8.1.tgz#bd55026251a6ab8e2194839a2042458ef9880c44"
+ integrity sha512-G7WyR2N6SpyUotqhGznERBK+x84uyhfMQM2MmDLs88bw4Flom6TY46HzkRkSEzaP9j80MbTN8naiL1fR17WQug==
+ dependencies:
+ cssnano-preset-advanced "^6.1.2"
+ postcss "^8.5.4"
+ postcss-sort-media-queries "^5.2.0"
+ tslib "^2.6.0"
+
+"@docusaurus/logger@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-3.8.1.tgz#45321b2e2e14695d0dbd8b4104ea7b0fbaa98700"
+ integrity sha512-2wjeGDhKcExEmjX8k1N/MRDiPKXGF2Pg+df/bDDPnnJWHXnVEZxXj80d6jcxp1Gpnksl0hF8t/ZQw9elqj2+ww==
+ dependencies:
+ chalk "^4.1.2"
+ tslib "^2.6.0"
+
+"@docusaurus/mdx-loader@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-3.8.1.tgz#74309b3614bbcef1d55fb13e6cc339b7fb000b5f"
+ integrity sha512-DZRhagSFRcEq1cUtBMo4TKxSNo/W6/s44yhr8X+eoXqCLycFQUylebOMPseHi5tc4fkGJqwqpWJLz6JStU9L4w==
+ dependencies:
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ "@mdx-js/mdx" "^3.0.0"
+ "@slorber/remark-comment" "^1.0.0"
+ escape-html "^1.0.3"
+ estree-util-value-to-estree "^3.0.1"
+ file-loader "^6.2.0"
+ fs-extra "^11.1.1"
+ image-size "^2.0.2"
+ mdast-util-mdx "^3.0.0"
+ mdast-util-to-string "^4.0.0"
+ rehype-raw "^7.0.0"
+ remark-directive "^3.0.0"
+ remark-emoji "^4.0.0"
+ remark-frontmatter "^5.0.0"
+ remark-gfm "^4.0.0"
+ stringify-object "^3.3.0"
+ tslib "^2.6.0"
+ unified "^11.0.3"
+ unist-util-visit "^5.0.0"
+ url-loader "^4.1.1"
+ vfile "^6.0.1"
+ webpack "^5.88.1"
+
+"@docusaurus/module-type-aliases@3.8.1", "@docusaurus/module-type-aliases@^3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-3.8.1.tgz#454de577bd7f50b5eae16db0f76b49ca5e4e281a"
+ integrity sha512-6xhvAJiXzsaq3JdosS7wbRt/PwEPWHr9eM4YNYqVlbgG1hSK3uQDXTVvQktasp3VO6BmfYWPozueLWuj4gB+vg==
+ dependencies:
+ "@docusaurus/types" "3.8.1"
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router-config" "*"
+ "@types/react-router-dom" "*"
+ react-helmet-async "npm:@slorber/react-helmet-async@1.3.0"
+ react-loadable "npm:@docusaurus/react-loadable@6.0.0"
+
+"@docusaurus/plugin-content-blog@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.8.1.tgz#88d842b562b04cf59df900d9f6984b086f821525"
+ integrity sha512-vNTpMmlvNP9n3hGEcgPaXyvTljanAKIUkuG9URQ1DeuDup0OR7Ltvoc8yrmH+iMZJbcQGhUJF+WjHLwuk8HSdw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ cheerio "1.0.0-rc.12"
+ feed "^4.2.2"
+ fs-extra "^11.1.1"
+ lodash "^4.17.21"
+ schema-dts "^1.1.2"
+ srcset "^4.0.0"
+ tslib "^2.6.0"
+ unist-util-visit "^5.0.0"
+ utility-types "^3.10.0"
+ webpack "^5.88.1"
+
+"@docusaurus/plugin-content-docs@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.8.1.tgz#40686a206abb6373bee5638de100a2c312f112a4"
+ integrity sha512-oByRkSZzeGNQByCMaX+kif5Nl2vmtj2IHQI2fWjCfCootsdKZDPFLonhIp5s3IGJO7PLUfe0POyw0Xh/RrGXJA==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/module-type-aliases" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ "@types/react-router-config" "^5.0.7"
+ combine-promises "^1.1.0"
+ fs-extra "^11.1.1"
+ js-yaml "^4.1.0"
+ lodash "^4.17.21"
+ schema-dts "^1.1.2"
+ tslib "^2.6.0"
+ utility-types "^3.10.0"
+ webpack "^5.88.1"
+
+"@docusaurus/plugin-content-pages@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.8.1.tgz#41b684dbd15390b7bb6a627f78bf81b6324511ac"
+ integrity sha512-a+V6MS2cIu37E/m7nDJn3dcxpvXb6TvgdNI22vJX8iUTp8eoMoPa0VArEbWvCxMY/xdC26WzNv4wZ6y0iIni/w==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ fs-extra "^11.1.1"
+ tslib "^2.6.0"
+ webpack "^5.88.1"
+
+"@docusaurus/plugin-css-cascade-layers@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-css-cascade-layers/-/plugin-css-cascade-layers-3.8.1.tgz#cb414b4a82aa60fc64ef2a435ad0105e142a6c71"
+ integrity sha512-VQ47xRxfNKjHS5ItzaVXpxeTm7/wJLFMOPo1BkmoMG4Cuz4nuI+Hs62+RMk1OqVog68Swz66xVPK8g9XTrBKRw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-debug@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-3.8.1.tgz#45b107e46b627caaae66995f53197ace78af3491"
+ integrity sha512-nT3lN7TV5bi5hKMB7FK8gCffFTBSsBsAfV84/v293qAmnHOyg1nr9okEw8AiwcO3bl9vije5nsUvP0aRl2lpaw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ fs-extra "^11.1.1"
+ react-json-view-lite "^2.3.0"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-google-analytics@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.8.1.tgz#64a302e62fe5cb6e007367c964feeef7b056764a"
+ integrity sha512-Hrb/PurOJsmwHAsfMDH6oVpahkEGsx7F8CWMjyP/dw1qjqmdS9rcV1nYCGlM8nOtD3Wk/eaThzUB5TSZsGz+7Q==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-google-gtag@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.8.1.tgz#8c76f8a1d96448f2f0f7b10e6bde451c40672b95"
+ integrity sha512-tKE8j1cEZCh8KZa4aa80zpSTxsC2/ZYqjx6AAfd8uA8VHZVw79+7OTEP2PoWi0uL5/1Is0LF5Vwxd+1fz5HlKg==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ "@types/gtag.js" "^0.0.12"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-google-tag-manager@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.8.1.tgz#88241ffd06369f4a4d5fb982ff3ac2777561ae37"
+ integrity sha512-iqe3XKITBquZq+6UAXdb1vI0fPY5iIOitVjPQ581R1ZKpHr0qe+V6gVOrrcOHixPDD/BUKdYwkxFjpNiEN+vBw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-sitemap@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.8.1.tgz#3aebd39186dc30e53023f1aab44625bc0bdac892"
+ integrity sha512-+9YV/7VLbGTq8qNkjiugIelmfUEVkTyLe6X8bWq7K5qPvGXAjno27QAfFq63mYfFFbJc7z+pudL63acprbqGzw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ fs-extra "^11.1.1"
+ sitemap "^7.1.1"
+ tslib "^2.6.0"
+
+"@docusaurus/plugin-svgr@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/plugin-svgr/-/plugin-svgr-3.8.1.tgz#6f340be8eae418a2cce540d8ece096ffd9c9b6ab"
+ integrity sha512-rW0LWMDsdlsgowVwqiMb/7tANDodpy1wWPwCcamvhY7OECReN3feoFwLjd/U4tKjNY3encj0AJSTxJA+Fpe+Gw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ "@svgr/core" "8.1.0"
+ "@svgr/webpack" "^8.1.0"
+ tslib "^2.6.0"
+ webpack "^5.88.1"
+
+"@docusaurus/preset-classic@^3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-3.8.1.tgz#bb79fd12f3211363720c569a526c7e24d3aa966b"
+ integrity sha512-yJSjYNHXD8POMGc2mKQuj3ApPrN+eG0rO1UPgSx7jySpYU+n4WjBikbrA2ue5ad9A7aouEtMWUoiSRXTH/g7KQ==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/plugin-content-blog" "3.8.1"
+ "@docusaurus/plugin-content-docs" "3.8.1"
+ "@docusaurus/plugin-content-pages" "3.8.1"
+ "@docusaurus/plugin-css-cascade-layers" "3.8.1"
+ "@docusaurus/plugin-debug" "3.8.1"
+ "@docusaurus/plugin-google-analytics" "3.8.1"
+ "@docusaurus/plugin-google-gtag" "3.8.1"
+ "@docusaurus/plugin-google-tag-manager" "3.8.1"
+ "@docusaurus/plugin-sitemap" "3.8.1"
+ "@docusaurus/plugin-svgr" "3.8.1"
+ "@docusaurus/theme-classic" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/theme-search-algolia" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+
+"@docusaurus/theme-classic@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-3.8.1.tgz#1e45c66d89ded359225fcd29bf3258d9205765c1"
+ integrity sha512-bqDUCNqXeYypMCsE1VcTXSI1QuO4KXfx8Cvl6rYfY0bhhqN6d2WZlRkyLg/p6pm+DzvanqHOyYlqdPyP0iz+iw==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/module-type-aliases" "3.8.1"
+ "@docusaurus/plugin-content-blog" "3.8.1"
+ "@docusaurus/plugin-content-docs" "3.8.1"
+ "@docusaurus/plugin-content-pages" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/theme-translations" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ "@mdx-js/react" "^3.0.0"
+ clsx "^2.0.0"
+ copy-text-to-clipboard "^3.2.0"
+ infima "0.2.0-alpha.45"
+ lodash "^4.17.21"
+ nprogress "^0.2.0"
+ postcss "^8.5.4"
+ prism-react-renderer "^2.3.0"
+ prismjs "^1.29.0"
+ react-router-dom "^5.3.4"
+ rtlcss "^4.1.0"
+ tslib "^2.6.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-common@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-3.8.1.tgz#17c23316fbe3ee3f7e707c7298cb59a0fff38b4b"
+ integrity sha512-UswMOyTnPEVRvN5Qzbo+l8k4xrd5fTFu2VPPfD6FcW/6qUtVLmJTQCktbAL3KJ0BVXGm5aJXz/ZrzqFuZERGPw==
+ dependencies:
+ "@docusaurus/mdx-loader" "3.8.1"
+ "@docusaurus/module-type-aliases" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router-config" "*"
+ clsx "^2.0.0"
+ parse-numeric-range "^1.3.0"
+ prism-react-renderer "^2.3.0"
+ tslib "^2.6.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-mermaid@^3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/theme-mermaid/-/theme-mermaid-3.8.1.tgz#2b73b5e90057bd9fb46f267aeb2d3470b168a7c8"
+ integrity sha512-IWYqjyTPjkNnHsFFu9+4YkeXS7PD1xI3Bn2shOhBq+f95mgDfWInkpfBN4aYvx4fTT67Am6cPtohRdwh4Tidtg==
+ dependencies:
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/module-type-aliases" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ mermaid ">=11.6.0"
+ tslib "^2.6.0"
+
+"@docusaurus/theme-search-algolia@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.8.1.tgz#3aa3d99c35cc2d4b709fcddd4df875a9b536e29b"
+ integrity sha512-NBFH5rZVQRAQM087aYSRKQ9yGEK9eHd+xOxQjqNpxMiV85OhJDD4ZGz6YJIod26Fbooy54UWVdzNU0TFeUUUzQ==
+ dependencies:
+ "@docsearch/react" "^3.9.0"
+ "@docusaurus/core" "3.8.1"
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/plugin-content-docs" "3.8.1"
+ "@docusaurus/theme-common" "3.8.1"
+ "@docusaurus/theme-translations" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-validation" "3.8.1"
+ algoliasearch "^5.17.1"
+ algoliasearch-helper "^3.22.6"
+ clsx "^2.0.0"
+ eta "^2.2.0"
+ fs-extra "^11.1.1"
+ lodash "^4.17.21"
+ tslib "^2.6.0"
+ utility-types "^3.10.0"
+
+"@docusaurus/theme-translations@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/theme-translations/-/theme-translations-3.8.1.tgz#4b1d76973eb53861e167c7723485e059ba4ffd0a"
+ integrity sha512-OTp6eebuMcf2rJt4bqnvuwmm3NVXfzfYejL+u/Y1qwKhZPrjPoKWfk1CbOP5xH5ZOPkiAsx4dHdQBRJszK3z2g==
+ dependencies:
+ fs-extra "^11.1.1"
+ tslib "^2.6.0"
+
+"@docusaurus/types@3.8.1", "@docusaurus/types@^3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-3.8.1.tgz#83ab66c345464e003b576a49f78897482061fc26"
+ integrity sha512-ZPdW5AB+pBjiVrcLuw3dOS6BFlrG0XkS2lDGsj8TizcnREQg3J8cjsgfDviszOk4CweNfwo1AEELJkYaMUuOPg==
+ dependencies:
+ "@mdx-js/mdx" "^3.0.0"
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ commander "^5.1.0"
+ joi "^17.9.2"
+ react-helmet-async "npm:@slorber/react-helmet-async@1.3.0"
+ utility-types "^3.10.0"
+ webpack "^5.95.0"
+ webpack-merge "^5.9.0"
+
+"@docusaurus/utils-common@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-3.8.1.tgz#c369b8c3041afb7dcd595d4172beb1cc1015c85f"
+ integrity sha512-zTZiDlvpvoJIrQEEd71c154DkcriBecm4z94OzEE9kz7ikS3J+iSlABhFXM45mZ0eN5pVqqr7cs60+ZlYLewtg==
+ dependencies:
+ "@docusaurus/types" "3.8.1"
+ tslib "^2.6.0"
+
+"@docusaurus/utils-validation@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-3.8.1.tgz#0499c0d151a4098a0963237057993282cfbd538e"
+ integrity sha512-gs5bXIccxzEbyVecvxg6upTwaUbfa0KMmTj7HhHzc016AGyxH2o73k1/aOD0IFrdCsfJNt37MqNI47s2MgRZMA==
+ dependencies:
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/utils" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ fs-extra "^11.2.0"
+ joi "^17.9.2"
+ js-yaml "^4.1.0"
+ lodash "^4.17.21"
+ tslib "^2.6.0"
+
+"@docusaurus/utils@3.8.1":
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-3.8.1.tgz#2ac1e734106e2f73dbd0f6a8824d525f9064e9f0"
+ integrity sha512-P1ml0nvOmEFdmu0smSXOqTS1sxU5tqvnc0dA4MTKV39kye+bhQnjkIKEE18fNOvxjyB86k8esoCIFM3x4RykOQ==
+ dependencies:
+ "@docusaurus/logger" "3.8.1"
+ "@docusaurus/types" "3.8.1"
+ "@docusaurus/utils-common" "3.8.1"
+ escape-string-regexp "^4.0.0"
+ execa "5.1.1"
+ file-loader "^6.2.0"
+ fs-extra "^11.1.1"
+ github-slugger "^1.5.0"
+ globby "^11.1.0"
+ gray-matter "^4.0.3"
+ jiti "^1.20.0"
+ js-yaml "^4.1.0"
+ lodash "^4.17.21"
+ micromatch "^4.0.5"
+ p-queue "^6.6.2"
+ prompts "^2.4.2"
+ resolve-pathname "^3.0.0"
+ tslib "^2.6.0"
+ url-loader "^4.1.1"
+ utility-types "^3.10.0"
+ webpack "^5.88.1"
+
+"@emotion/babel-plugin@^11.12.0":
+ version "11.12.0"
+ resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.12.0.tgz#7b43debb250c313101b3f885eba634f1d723fcc2"
+ integrity sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw==
+ dependencies:
+ "@babel/helper-module-imports" "^7.16.7"
+ "@babel/runtime" "^7.18.3"
+ "@emotion/hash" "^0.9.2"
+ "@emotion/memoize" "^0.9.0"
+ "@emotion/serialize" "^1.2.0"
+ babel-plugin-macros "^3.1.0"
+ convert-source-map "^1.5.0"
+ escape-string-regexp "^4.0.0"
+ find-root "^1.1.0"
+ source-map "^0.5.7"
+ stylis "4.2.0"
+
+"@emotion/cache@^11.11.0", "@emotion/cache@^11.13.0":
+ version "11.13.1"
+ resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.13.1.tgz#fecfc54d51810beebf05bf2a161271a1a91895d7"
+ integrity sha512-iqouYkuEblRcXmylXIwwOodiEK5Ifl7JcX7o6V4jI3iW4mLXX3dmt5xwBtIkJiQEXFAI+pC8X0i67yiPkH9Ucw==
+ dependencies:
+ "@emotion/memoize" "^0.9.0"
+ "@emotion/sheet" "^1.4.0"
+ "@emotion/utils" "^1.4.0"
+ "@emotion/weak-memoize" "^0.4.0"
+ stylis "4.2.0"
+
+"@emotion/hash@^0.9.1", "@emotion/hash@^0.9.2":
+ version "0.9.2"
+ resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.9.2.tgz#ff9221b9f58b4dfe61e619a7788734bd63f6898b"
+ integrity sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==
+
+"@emotion/is-prop-valid@^1.3.0":
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.3.0.tgz#bd84ba972195e8a2d42462387581560ef780e4e2"
+ integrity sha512-SHetuSLvJDzuNbOdtPVbq6yMMMlLoW5Q94uDqJZqy50gcmAjxFkVqmzqSGEFq9gT2iMuIeKV1PXVWmvUhuZLlQ==
+ dependencies:
+ "@emotion/memoize" "^0.9.0"
+
+"@emotion/memoize@^0.9.0":
+ version "0.9.0"
+ resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.9.0.tgz#745969d649977776b43fc7648c556aaa462b4102"
+ integrity sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==
+
+"@emotion/react@^11.13.0":
+ version "11.13.0"
+ resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.13.0.tgz#a9ebf827b98220255e5760dac89fa2d38ca7b43d"
+ integrity sha512-WkL+bw1REC2VNV1goQyfxjx1GYJkcc23CRQkXX+vZNLINyfI7o+uUn/rTGPt/xJ3bJHd5GcljgnxHf4wRw5VWQ==
+ dependencies:
+ "@babel/runtime" "^7.18.3"
+ "@emotion/babel-plugin" "^11.12.0"
+ "@emotion/cache" "^11.13.0"
+ "@emotion/serialize" "^1.3.0"
+ "@emotion/use-insertion-effect-with-fallbacks" "^1.1.0"
+ "@emotion/utils" "^1.4.0"
+ "@emotion/weak-memoize" "^0.4.0"
+ hoist-non-react-statics "^3.3.1"
+
+"@emotion/serialize@^1.2.0", "@emotion/serialize@^1.3.0":
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.3.0.tgz#e07cadfc967a4e7816e0c3ffaff4c6ce05cb598d"
+ integrity sha512-jACuBa9SlYajnpIVXB+XOXnfJHyckDfe6fOpORIM6yhBDlqGuExvDdZYHDQGoDf3bZXGv7tNr+LpLjJqiEQ6EA==
+ dependencies:
+ "@emotion/hash" "^0.9.2"
+ "@emotion/memoize" "^0.9.0"
+ "@emotion/unitless" "^0.9.0"
+ "@emotion/utils" "^1.4.0"
+ csstype "^3.0.2"
+
+"@emotion/sheet@^1.4.0":
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.4.0.tgz#c9299c34d248bc26e82563735f78953d2efca83c"
+ integrity sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg==
+
+"@emotion/styled@^11.13.0":
+ version "11.13.0"
+ resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.13.0.tgz#633fd700db701472c7a5dbef54d6f9834e9fb190"
+ integrity sha512-tkzkY7nQhW/zC4hztlwucpT8QEZ6eUzpXDRhww/Eej4tFfO0FxQYWRyg/c5CCXa4d/f174kqeXYjuQRnhzf6dA==
+ dependencies:
+ "@babel/runtime" "^7.18.3"
+ "@emotion/babel-plugin" "^11.12.0"
+ "@emotion/is-prop-valid" "^1.3.0"
+ "@emotion/serialize" "^1.3.0"
+ "@emotion/use-insertion-effect-with-fallbacks" "^1.1.0"
+ "@emotion/utils" "^1.4.0"
+
+"@emotion/unitless@^0.9.0":
+ version "0.9.0"
+ resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.9.0.tgz#8e5548f072bd67b8271877e51c0f95c76a66cbe2"
+ integrity sha512-TP6GgNZtmtFaFcsOgExdnfxLLpRDla4Q66tnenA9CktvVSdNKDvMVuUah4QvWPIpNjrWsGg3qeGo9a43QooGZQ==
+
+"@emotion/use-insertion-effect-with-fallbacks@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.1.0.tgz#1a818a0b2c481efba0cf34e5ab1e0cb2dcb9dfaf"
+ integrity sha512-+wBOcIV5snwGgI2ya3u99D7/FJquOIniQT1IKyDsBmEgwvpxMNeS65Oib7OnE2d2aY+3BU4OiH+0Wchf8yk3Hw==
+
+"@emotion/utils@^1.4.0":
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.4.0.tgz#262f1d02aaedb2ec91c83a0955dd47822ad5fbdd"
+ integrity sha512-spEnrA1b6hDR/C68lC2M7m6ALPUHZC0lIY7jAS/B/9DuuO1ZP04eov8SMv/6fwRd8pzmsn2AuJEznRREWlQrlQ==
+
+"@emotion/weak-memoize@^0.4.0":
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz#5e13fac887f08c44f76b0ccaf3370eb00fec9bb6"
+ integrity sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==
+
+"@hapi/hoek@^9.0.0", "@hapi/hoek@^9.3.0":
+ version "9.3.0"
+ resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb"
+ integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==
+
+"@hapi/topo@^5.1.0":
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012"
+ integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==
+ dependencies:
+ "@hapi/hoek" "^9.0.0"
+
+"@iconify/react@^5.0.2":
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/@iconify/react/-/react-5.0.2.tgz#03d6aa43ed22737451a5d087b00b26b3e6e6895a"
+ integrity sha512-wtmstbYlEbo4NDxFxBJkhkf9gJBDqMGr7FaqLrAUMneRV3Z+fVHLJjOhWbkAF8xDQNFC/wcTYdrWo1lnRhmagQ==
+ dependencies:
+ "@iconify/types" "^2.0.0"
+
+"@iconify/types@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@iconify/types/-/types-2.0.0.tgz#ab0e9ea681d6c8a1214f30cd741fe3a20cc57f57"
+ integrity sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==
+
+"@iconify/utils@^2.1.33":
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/@iconify/utils/-/utils-2.3.0.tgz#1bbbf8c477ebe9a7cacaea78b1b7e8937f9cbfba"
+ integrity sha512-GmQ78prtwYW6EtzXRU1rY+KwOKfz32PD7iJh6Iyqw68GiKuoZ2A6pRtzWONz5VQJbp50mEjXh/7NkumtrAgRKA==
+ dependencies:
+ "@antfu/install-pkg" "^1.0.0"
+ "@antfu/utils" "^8.1.0"
+ "@iconify/types" "^2.0.0"
+ debug "^4.4.0"
+ globals "^15.14.0"
+ kolorist "^1.8.0"
+ local-pkg "^1.0.0"
+ mlly "^1.7.4"
+
+"@jest/schemas@^29.6.3":
+ version "29.6.3"
+ resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03"
+ integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==
+ dependencies:
+ "@sinclair/typebox" "^0.27.8"
+
+"@jest/types@^29.6.3":
+ version "29.6.3"
+ resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59"
+ integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==
+ dependencies:
+ "@jest/schemas" "^29.6.3"
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ "@types/istanbul-reports" "^3.0.0"
+ "@types/node" "*"
+ "@types/yargs" "^17.0.8"
+ chalk "^4.0.0"
+
+"@jridgewell/gen-mapping@^0.3.12":
+ version "0.3.12"
+ resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz#2234ce26c62889f03db3d7fea43c1932ab3e927b"
+ integrity sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==
+ dependencies:
+ "@jridgewell/sourcemap-codec" "^1.5.0"
+ "@jridgewell/trace-mapping" "^0.3.24"
+
+"@jridgewell/gen-mapping@^0.3.5":
+ version "0.3.5"
+ resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36"
+ integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==
+ dependencies:
+ "@jridgewell/set-array" "^1.2.1"
+ "@jridgewell/sourcemap-codec" "^1.4.10"
+ "@jridgewell/trace-mapping" "^0.3.24"
+
+"@jridgewell/resolve-uri@^3.1.0":
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6"
+ integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==
+
+"@jridgewell/set-array@^1.2.1":
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280"
+ integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==
+
+"@jridgewell/source-map@^0.3.3":
+ version "0.3.6"
+ resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a"
+ integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==
+ dependencies:
+ "@jridgewell/gen-mapping" "^0.3.5"
+ "@jridgewell/trace-mapping" "^0.3.25"
+
+"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14":
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a"
+ integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==
+
+"@jridgewell/sourcemap-codec@^1.5.0":
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz#7358043433b2e5da569aa02cbc4c121da3af27d7"
+ integrity sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==
+
+"@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25":
+ version "0.3.25"
+ resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0"
+ integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==
+ dependencies:
+ "@jridgewell/resolve-uri" "^3.1.0"
+ "@jridgewell/sourcemap-codec" "^1.4.14"
+
+"@jridgewell/trace-mapping@^0.3.28":
+ version "0.3.29"
+ resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz#a58d31eaadaf92c6695680b2e1d464a9b8fbf7fc"
+ integrity sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==
+ dependencies:
+ "@jridgewell/resolve-uri" "^3.1.0"
+ "@jridgewell/sourcemap-codec" "^1.4.14"
+
+"@leichtgewicht/ip-codec@^2.0.1":
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz#4fc56c15c580b9adb7dc3c333a134e540b44bfb1"
+ integrity sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==
+
+"@mdx-js/mdx@^3.0.0":
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-3.0.1.tgz#617bd2629ae561fdca1bb88e3badd947f5a82191"
+ integrity sha512-eIQ4QTrOWyL3LWEe/bu6Taqzq2HQvHcyTMaOrI95P2/LmJE7AsfPfgJGuFLPVqBUE1BC1rik3VIhU+s9u72arA==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ "@types/estree-jsx" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ "@types/mdx" "^2.0.0"
+ collapse-white-space "^2.0.0"
+ devlop "^1.0.0"
+ estree-util-build-jsx "^3.0.0"
+ estree-util-is-identifier-name "^3.0.0"
+ estree-util-to-js "^2.0.0"
+ estree-walker "^3.0.0"
+ hast-util-to-estree "^3.0.0"
+ hast-util-to-jsx-runtime "^2.0.0"
+ markdown-extensions "^2.0.0"
+ periscopic "^3.0.0"
+ remark-mdx "^3.0.0"
+ remark-parse "^11.0.0"
+ remark-rehype "^11.0.0"
+ source-map "^0.7.0"
+ unified "^11.0.0"
+ unist-util-position-from-estree "^2.0.0"
+ unist-util-stringify-position "^4.0.0"
+ unist-util-visit "^5.0.0"
+ vfile "^6.0.0"
+
+"@mdx-js/react@^3.0.0":
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/@mdx-js/react/-/react-3.0.1.tgz#997a19b3a5b783d936c75ae7c47cfe62f967f746"
+ integrity sha512-9ZrPIU4MGf6et1m1ov3zKf+q9+deetI51zprKB1D/z3NOb+rUxxtEl3mCjW5wTGh6VhRdwPueh1oRzi6ezkA8A==
+ dependencies:
+ "@types/mdx" "^2.0.0"
+
+"@mermaid-js/parser@^0.6.2":
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/@mermaid-js/parser/-/parser-0.6.2.tgz#6d505a33acb52ddeb592c596b14f9d92a30396a9"
+ integrity sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==
+ dependencies:
+ langium "3.3.1"
+
+"@mui/core-downloads-tracker@^5.16.7":
+ version "5.16.7"
+ resolved "https://registry.yarnpkg.com/@mui/core-downloads-tracker/-/core-downloads-tracker-5.16.7.tgz#182a325a520f7ebd75de051fceabfc0314cfd004"
+ integrity sha512-RtsCt4Geed2/v74sbihWzzRs+HsIQCfclHeORh5Ynu2fS4icIKozcSubwuG7vtzq2uW3fOR1zITSP84TNt2GoQ==
+
+"@mui/material@^5.16.7":
+ version "5.16.7"
+ resolved "https://registry.yarnpkg.com/@mui/material/-/material-5.16.7.tgz#6e814e2eefdaf065a769cecf549c3569e107a50b"
+ integrity sha512-cwwVQxBhK60OIOqZOVLFt55t01zmarKJiJUWbk0+8s/Ix5IaUzAShqlJchxsIQ4mSrWqgcKCCXKtIlG5H+/Jmg==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@mui/core-downloads-tracker" "^5.16.7"
+ "@mui/system" "^5.16.7"
+ "@mui/types" "^7.2.15"
+ "@mui/utils" "^5.16.6"
+ "@popperjs/core" "^2.11.8"
+ "@types/react-transition-group" "^4.4.10"
+ clsx "^2.1.0"
+ csstype "^3.1.3"
+ prop-types "^15.8.1"
+ react-is "^18.3.1"
+ react-transition-group "^4.4.5"
+
+"@mui/private-theming@^5.16.6":
+ version "5.16.6"
+ resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.16.6.tgz#547671e7ae3f86b68d1289a0b90af04dfcc1c8c9"
+ integrity sha512-rAk+Rh8Clg7Cd7shZhyt2HGTTE5wYKNSJ5sspf28Fqm/PZ69Er9o6KX25g03/FG2dfpg5GCwZh/xOojiTfm3hw==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@mui/utils" "^5.16.6"
+ prop-types "^15.8.1"
+
+"@mui/styled-engine@^5.16.6":
+ version "5.16.6"
+ resolved "https://registry.yarnpkg.com/@mui/styled-engine/-/styled-engine-5.16.6.tgz#60110c106dd482dfdb7e2aa94fd6490a0a3f8852"
+ integrity sha512-zaThmS67ZmtHSWToTiHslbI8jwrmITcN93LQaR2lKArbvS7Z3iLkwRoiikNWutx9MBs8Q6okKvbZq1RQYB3v7g==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@emotion/cache" "^11.11.0"
+ csstype "^3.1.3"
+ prop-types "^15.8.1"
+
+"@mui/styles@^5.16.7":
+ version "5.16.7"
+ resolved "https://registry.yarnpkg.com/@mui/styles/-/styles-5.16.7.tgz#cf052f0243d283fab837d2505f4901e5207a0575"
+ integrity sha512-FfXhHP/2MlqH+vLs2tIHMeCChmqSRgkOALVNLKkPrDsvtoq5J8OraOutCn1scpvRjr9mO8ZhW6jKx2t/vUDxtQ==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@emotion/hash" "^0.9.1"
+ "@mui/private-theming" "^5.16.6"
+ "@mui/types" "^7.2.15"
+ "@mui/utils" "^5.16.6"
+ clsx "^2.1.0"
+ csstype "^3.1.3"
+ hoist-non-react-statics "^3.3.2"
+ jss "^10.10.0"
+ jss-plugin-camel-case "^10.10.0"
+ jss-plugin-default-unit "^10.10.0"
+ jss-plugin-global "^10.10.0"
+ jss-plugin-nested "^10.10.0"
+ jss-plugin-props-sort "^10.10.0"
+ jss-plugin-rule-value-function "^10.10.0"
+ jss-plugin-vendor-prefixer "^10.10.0"
+ prop-types "^15.8.1"
+
+"@mui/system@^5.16.7":
+ version "5.16.7"
+ resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.16.7.tgz#4583ca5bf3b38942e02c15a1e622ba869ac51393"
+ integrity sha512-Jncvs/r/d/itkxh7O7opOunTqbbSSzMTHzZkNLM+FjAOg+cYAZHrPDlYe1ZGKUYORwwb2XexlWnpZp0kZ4AHuA==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@mui/private-theming" "^5.16.6"
+ "@mui/styled-engine" "^5.16.6"
+ "@mui/types" "^7.2.15"
+ "@mui/utils" "^5.16.6"
+ clsx "^2.1.0"
+ csstype "^3.1.3"
+ prop-types "^15.8.1"
+
+"@mui/types@^7.2.15":
+ version "7.2.15"
+ resolved "https://registry.yarnpkg.com/@mui/types/-/types-7.2.15.tgz#dadd232fe9a70be0d526630675dff3b110f30b53"
+ integrity sha512-nbo7yPhtKJkdf9kcVOF8JZHPZTmqXjJ/tI0bdWgHg5tp9AnIN4Y7f7wm9T+0SyGYJk76+GYZ8Q5XaTYAsUHN0Q==
+
+"@mui/utils@^5.16.6":
+ version "5.16.6"
+ resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.16.6.tgz#905875bbc58d3dcc24531c3314a6807aba22a711"
+ integrity sha512-tWiQqlhxAt3KENNiSRL+DIn9H5xNVK6Jjf70x3PnfQPz1MPBdh7yyIcAyVBT9xiw7hP3SomRhPR7hzBMBCjqEA==
+ dependencies:
+ "@babel/runtime" "^7.23.9"
+ "@mui/types" "^7.2.15"
+ "@types/prop-types" "^15.7.12"
+ clsx "^2.1.1"
+ prop-types "^15.8.1"
+ react-is "^18.3.1"
+
+"@nodelib/fs.scandir@2.1.5":
+ version "2.1.5"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
+ integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
+ dependencies:
+ "@nodelib/fs.stat" "2.0.5"
+ run-parallel "^1.1.9"
+
+"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2":
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
+ integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
+
+"@nodelib/fs.walk@^1.2.3":
+ version "1.2.8"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
+ integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
+ dependencies:
+ "@nodelib/fs.scandir" "2.1.5"
+ fastq "^1.6.0"
+
+"@pnpm/config.env-replace@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz#ab29da53df41e8948a00f2433f085f54de8b3a4c"
+ integrity sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==
+
+"@pnpm/network.ca-file@^1.0.1":
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz#2ab05e09c1af0cdf2fcf5035bea1484e222f7983"
+ integrity sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==
+ dependencies:
+ graceful-fs "4.2.10"
+
+"@pnpm/npm-conf@^2.1.0":
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/@pnpm/npm-conf/-/npm-conf-2.3.1.tgz#bb375a571a0bd63ab0a23bece33033c683e9b6b0"
+ integrity sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw==
+ dependencies:
+ "@pnpm/config.env-replace" "^1.1.0"
+ "@pnpm/network.ca-file" "^1.0.1"
+ config-chain "^1.1.11"
+
+"@polka/url@^1.0.0-next.24":
+ version "1.0.0-next.25"
+ resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.25.tgz#f077fdc0b5d0078d30893396ff4827a13f99e817"
+ integrity sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==
+
+"@popperjs/core@^2.11.8":
+ version "2.11.8"
+ resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.8.tgz#6b79032e760a0899cd4204710beede972a3a185f"
+ integrity sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==
+
+"@sideway/address@^4.1.5":
+ version "4.1.5"
+ resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.5.tgz#4bc149a0076623ced99ca8208ba780d65a99b9d5"
+ integrity sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==
+ dependencies:
+ "@hapi/hoek" "^9.0.0"
+
+"@sideway/formula@^3.0.1":
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f"
+ integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==
+
+"@sideway/pinpoint@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df"
+ integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==
+
+"@sinclair/typebox@^0.27.8":
+ version "0.27.8"
+ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e"
+ integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==
+
+"@sindresorhus/is@^4.6.0":
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f"
+ integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==
+
+"@sindresorhus/is@^5.2.0":
+ version "5.6.0"
+ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-5.6.0.tgz#41dd6093d34652cddb5d5bdeee04eafc33826668"
+ integrity sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==
+
+"@slorber/remark-comment@^1.0.0":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@slorber/remark-comment/-/remark-comment-1.0.0.tgz#2a020b3f4579c89dec0361673206c28d67e08f5a"
+ integrity sha512-RCE24n7jsOj1M0UPvIQCHTe7fI0sFL4S2nwKVWwHyVr/wI/H8GosgsJGyhnsZoGFnD/P2hLf1mSbrrgSLN93NA==
+ dependencies:
+ micromark-factory-space "^1.0.0"
+ micromark-util-character "^1.1.0"
+ micromark-util-symbol "^1.0.1"
+
+"@svgr/babel-plugin-add-jsx-attribute@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz#4001f5d5dd87fa13303e36ee106e3ff3a7eb8b22"
+ integrity sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==
+
+"@svgr/babel-plugin-remove-jsx-attribute@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186"
+ integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==
+
+"@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44"
+ integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==
+
+"@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz#8fbb6b2e91fa26ac5d4aa25c6b6e4f20f9c0ae27"
+ integrity sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==
+
+"@svgr/babel-plugin-svg-dynamic-title@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz#1d5ba1d281363fc0f2f29a60d6d936f9bbc657b0"
+ integrity sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==
+
+"@svgr/babel-plugin-svg-em-dimensions@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz#35e08df300ea8b1d41cb8f62309c241b0369e501"
+ integrity sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==
+
+"@svgr/babel-plugin-transform-react-native-svg@8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz#90a8b63998b688b284f255c6a5248abd5b28d754"
+ integrity sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==
+
+"@svgr/babel-plugin-transform-svg-component@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz#013b4bfca88779711f0ed2739f3f7efcefcf4f7e"
+ integrity sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==
+
+"@svgr/babel-preset@8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-8.1.0.tgz#0e87119aecdf1c424840b9d4565b7137cabf9ece"
+ integrity sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==
+ dependencies:
+ "@svgr/babel-plugin-add-jsx-attribute" "8.0.0"
+ "@svgr/babel-plugin-remove-jsx-attribute" "8.0.0"
+ "@svgr/babel-plugin-remove-jsx-empty-expression" "8.0.0"
+ "@svgr/babel-plugin-replace-jsx-attribute-value" "8.0.0"
+ "@svgr/babel-plugin-svg-dynamic-title" "8.0.0"
+ "@svgr/babel-plugin-svg-em-dimensions" "8.0.0"
+ "@svgr/babel-plugin-transform-react-native-svg" "8.1.0"
+ "@svgr/babel-plugin-transform-svg-component" "8.0.0"
+
+"@svgr/core@8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/core/-/core-8.1.0.tgz#41146f9b40b1a10beaf5cc4f361a16a3c1885e88"
+ integrity sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==
+ dependencies:
+ "@babel/core" "^7.21.3"
+ "@svgr/babel-preset" "8.1.0"
+ camelcase "^6.2.0"
+ cosmiconfig "^8.1.3"
+ snake-case "^3.0.4"
+
+"@svgr/hast-util-to-babel-ast@8.0.0":
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz#6952fd9ce0f470e1aded293b792a2705faf4ffd4"
+ integrity sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==
+ dependencies:
+ "@babel/types" "^7.21.3"
+ entities "^4.4.0"
+
+"@svgr/plugin-jsx@8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz#96969f04a24b58b174ee4cd974c60475acbd6928"
+ integrity sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==
+ dependencies:
+ "@babel/core" "^7.21.3"
+ "@svgr/babel-preset" "8.1.0"
+ "@svgr/hast-util-to-babel-ast" "8.0.0"
+ svg-parser "^2.0.4"
+
+"@svgr/plugin-svgo@8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz#b115b7b967b564f89ac58feae89b88c3decd0f00"
+ integrity sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA==
+ dependencies:
+ cosmiconfig "^8.1.3"
+ deepmerge "^4.3.1"
+ svgo "^3.0.2"
+
+"@svgr/webpack@^8.1.0":
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-8.1.0.tgz#16f1b5346f102f89fda6ec7338b96a701d8be0c2"
+ integrity sha512-LnhVjMWyMQV9ZmeEy26maJk+8HTIbd59cH4F2MJ439k9DqejRisfFNGAPvRYlKETuh9LrImlS8aKsBgKjMA8WA==
+ dependencies:
+ "@babel/core" "^7.21.3"
+ "@babel/plugin-transform-react-constant-elements" "^7.21.3"
+ "@babel/preset-env" "^7.20.2"
+ "@babel/preset-react" "^7.18.6"
+ "@babel/preset-typescript" "^7.21.0"
+ "@svgr/core" "8.1.0"
+ "@svgr/plugin-jsx" "8.1.0"
+ "@svgr/plugin-svgo" "8.1.0"
+
+"@szmarczak/http-timer@^5.0.1":
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-5.0.1.tgz#c7c1bf1141cdd4751b0399c8fc7b8b664cd5be3a"
+ integrity sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==
+ dependencies:
+ defer-to-connect "^2.0.1"
+
+"@trysound/sax@0.2.0":
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad"
+ integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==
+
+"@types/acorn@^4.0.0":
+ version "4.0.6"
+ resolved "https://registry.yarnpkg.com/@types/acorn/-/acorn-4.0.6.tgz#d61ca5480300ac41a7d973dd5b84d0a591154a22"
+ integrity sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==
+ dependencies:
+ "@types/estree" "*"
+
+"@types/body-parser@*":
+ version "1.19.5"
+ resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.5.tgz#04ce9a3b677dc8bd681a17da1ab9835dc9d3ede4"
+ integrity sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==
+ dependencies:
+ "@types/connect" "*"
+ "@types/node" "*"
+
+"@types/bonjour@^3.5.9":
+ version "3.5.13"
+ resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.13.tgz#adf90ce1a105e81dd1f9c61fdc5afda1bfb92956"
+ integrity sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/connect-history-api-fallback@^1.3.5":
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.4.tgz#7de71645a103056b48ac3ce07b3520b819c1d5b3"
+ integrity sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==
+ dependencies:
+ "@types/express-serve-static-core" "*"
+ "@types/node" "*"
+
+"@types/connect@*":
+ version "3.4.38"
+ resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.38.tgz#5ba7f3bc4fbbdeaff8dded952e5ff2cc53f8d858"
+ integrity sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==
+ dependencies:
+ "@types/node" "*"
+
+"@types/d3-array@*":
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.2.1.tgz#1f6658e3d2006c4fceac53fde464166859f8b8c5"
+ integrity sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==
+
+"@types/d3-axis@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-axis/-/d3-axis-3.0.6.tgz#e760e5765b8188b1defa32bc8bb6062f81e4c795"
+ integrity sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==
+ dependencies:
+ "@types/d3-selection" "*"
+
+"@types/d3-brush@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-brush/-/d3-brush-3.0.6.tgz#c2f4362b045d472e1b186cdbec329ba52bdaee6c"
+ integrity sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==
+ dependencies:
+ "@types/d3-selection" "*"
+
+"@types/d3-chord@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-chord/-/d3-chord-3.0.6.tgz#1706ca40cf7ea59a0add8f4456efff8f8775793d"
+ integrity sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==
+
+"@types/d3-color@*":
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.3.tgz#368c961a18de721da8200e80bf3943fb53136af2"
+ integrity sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==
+
+"@types/d3-contour@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-contour/-/d3-contour-3.0.6.tgz#9ada3fa9c4d00e3a5093fed0356c7ab929604231"
+ integrity sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==
+ dependencies:
+ "@types/d3-array" "*"
+ "@types/geojson" "*"
+
+"@types/d3-delaunay@*":
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz#185c1a80cc807fdda2a3fe960f7c11c4a27952e1"
+ integrity sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==
+
+"@types/d3-dispatch@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-dispatch/-/d3-dispatch-3.0.6.tgz#096efdf55eb97480e3f5621ff9a8da552f0961e7"
+ integrity sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==
+
+"@types/d3-drag@*":
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/@types/d3-drag/-/d3-drag-3.0.7.tgz#b13aba8b2442b4068c9a9e6d1d82f8bcea77fc02"
+ integrity sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==
+ dependencies:
+ "@types/d3-selection" "*"
+
+"@types/d3-dsv@*":
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/@types/d3-dsv/-/d3-dsv-3.0.7.tgz#0a351f996dc99b37f4fa58b492c2d1c04e3dac17"
+ integrity sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==
+
+"@types/d3-ease@*":
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.2.tgz#e28db1bfbfa617076f7770dd1d9a48eaa3b6c51b"
+ integrity sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==
+
+"@types/d3-fetch@*":
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/@types/d3-fetch/-/d3-fetch-3.0.7.tgz#c04a2b4f23181aa376f30af0283dbc7b3b569980"
+ integrity sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==
+ dependencies:
+ "@types/d3-dsv" "*"
+
+"@types/d3-force@*":
+ version "3.0.10"
+ resolved "https://registry.yarnpkg.com/@types/d3-force/-/d3-force-3.0.10.tgz#6dc8fc6e1f35704f3b057090beeeb7ac674bff1a"
+ integrity sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==
+
+"@types/d3-format@*":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.4.tgz#b1e4465644ddb3fdf3a263febb240a6cd616de90"
+ integrity sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==
+
+"@types/d3-geo@*":
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/@types/d3-geo/-/d3-geo-3.1.0.tgz#b9e56a079449174f0a2c8684a9a4df3f60522440"
+ integrity sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==
+ dependencies:
+ "@types/geojson" "*"
+
+"@types/d3-hierarchy@*":
+ version "3.1.7"
+ resolved "https://registry.yarnpkg.com/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz#6023fb3b2d463229f2d680f9ac4b47466f71f17b"
+ integrity sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==
+
+"@types/d3-interpolate@*":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz#412b90e84870285f2ff8a846c6eb60344f12a41c"
+ integrity sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==
+ dependencies:
+ "@types/d3-color" "*"
+
+"@types/d3-path@*":
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.1.1.tgz#f632b380c3aca1dba8e34aa049bcd6a4af23df8a"
+ integrity sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==
+
+"@types/d3-polygon@*":
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/@types/d3-polygon/-/d3-polygon-3.0.2.tgz#dfae54a6d35d19e76ac9565bcb32a8e54693189c"
+ integrity sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==
+
+"@types/d3-quadtree@*":
+ version "3.0.6"
+ resolved "https://registry.yarnpkg.com/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz#d4740b0fe35b1c58b66e1488f4e7ed02952f570f"
+ integrity sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==
+
+"@types/d3-random@*":
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/@types/d3-random/-/d3-random-3.0.3.tgz#ed995c71ecb15e0cd31e22d9d5d23942e3300cfb"
+ integrity sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==
+
+"@types/d3-scale-chromatic@*":
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz#dc6d4f9a98376f18ea50bad6c39537f1b5463c39"
+ integrity sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==
+
+"@types/d3-scale@*":
+ version "4.0.9"
+ resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.9.tgz#57a2f707242e6fe1de81ad7bfcccaaf606179afb"
+ integrity sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==
+ dependencies:
+ "@types/d3-time" "*"
+
+"@types/d3-selection@*":
+ version "3.0.11"
+ resolved "https://registry.yarnpkg.com/@types/d3-selection/-/d3-selection-3.0.11.tgz#bd7a45fc0a8c3167a631675e61bc2ca2b058d4a3"
+ integrity sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==
+
+"@types/d3-shape@*":
+ version "3.1.7"
+ resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.1.7.tgz#2b7b423dc2dfe69c8c93596e673e37443348c555"
+ integrity sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==
+ dependencies:
+ "@types/d3-path" "*"
+
+"@types/d3-time-format@*":
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-4.0.3.tgz#d6bc1e6b6a7db69cccfbbdd4c34b70632d9e9db2"
+ integrity sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==
+
+"@types/d3-time@*":
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.3.tgz#3c186bbd9d12b9d84253b6be6487ca56b54f88be"
+ integrity sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==
+
+"@types/d3-timer@*":
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.2.tgz#70bbda77dc23aa727413e22e214afa3f0e852f70"
+ integrity sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==
+
+"@types/d3-transition@*":
+ version "3.0.9"
+ resolved "https://registry.yarnpkg.com/@types/d3-transition/-/d3-transition-3.0.9.tgz#1136bc57e9ddb3c390dccc9b5ff3b7d2b8d94706"
+ integrity sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==
+ dependencies:
+ "@types/d3-selection" "*"
+
+"@types/d3-zoom@*":
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/@types/d3-zoom/-/d3-zoom-3.0.8.tgz#dccb32d1c56b1e1c6e0f1180d994896f038bc40b"
+ integrity sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==
+ dependencies:
+ "@types/d3-interpolate" "*"
+ "@types/d3-selection" "*"
+
+"@types/d3@^7.4.3":
+ version "7.4.3"
+ resolved "https://registry.yarnpkg.com/@types/d3/-/d3-7.4.3.tgz#d4550a85d08f4978faf0a4c36b848c61eaac07e2"
+ integrity sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==
+ dependencies:
+ "@types/d3-array" "*"
+ "@types/d3-axis" "*"
+ "@types/d3-brush" "*"
+ "@types/d3-chord" "*"
+ "@types/d3-color" "*"
+ "@types/d3-contour" "*"
+ "@types/d3-delaunay" "*"
+ "@types/d3-dispatch" "*"
+ "@types/d3-drag" "*"
+ "@types/d3-dsv" "*"
+ "@types/d3-ease" "*"
+ "@types/d3-fetch" "*"
+ "@types/d3-force" "*"
+ "@types/d3-format" "*"
+ "@types/d3-geo" "*"
+ "@types/d3-hierarchy" "*"
+ "@types/d3-interpolate" "*"
+ "@types/d3-path" "*"
+ "@types/d3-polygon" "*"
+ "@types/d3-quadtree" "*"
+ "@types/d3-random" "*"
+ "@types/d3-scale" "*"
+ "@types/d3-scale-chromatic" "*"
+ "@types/d3-selection" "*"
+ "@types/d3-shape" "*"
+ "@types/d3-time" "*"
+ "@types/d3-time-format" "*"
+ "@types/d3-timer" "*"
+ "@types/d3-transition" "*"
+ "@types/d3-zoom" "*"
+
+"@types/debug@^4.0.0":
+ version "4.1.12"
+ resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.12.tgz#a155f21690871953410df4b6b6f53187f0500917"
+ integrity sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==
+ dependencies:
+ "@types/ms" "*"
+
+"@types/eslint-scope@^3.7.3", "@types/eslint-scope@^3.7.7":
+ version "3.7.7"
+ resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5"
+ integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==
+ dependencies:
+ "@types/eslint" "*"
+ "@types/estree" "*"
+
+"@types/eslint@*":
+ version "9.6.0"
+ resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-9.6.0.tgz#51d4fe4d0316da9e9f2c80884f2c20ed5fb022ff"
+ integrity sha512-gi6WQJ7cHRgZxtkQEoyHMppPjq9Kxo5Tjn2prSKDSmZrCz8TZ3jSRCeTJm+WoM+oB0WG37bRqLzaaU3q7JypGg==
+ dependencies:
+ "@types/estree" "*"
+ "@types/json-schema" "*"
+
+"@types/estree-jsx@^1.0.0":
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/@types/estree-jsx/-/estree-jsx-1.0.5.tgz#858a88ea20f34fe65111f005a689fa1ebf70dc18"
+ integrity sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==
+ dependencies:
+ "@types/estree" "*"
+
+"@types/estree@*", "@types/estree@^1.0.0", "@types/estree@^1.0.5":
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4"
+ integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==
+
+"@types/estree@^1.0.8":
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.8.tgz#958b91c991b1867ced318bedea0e215ee050726e"
+ integrity sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==
+
+"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33":
+ version "4.19.5"
+ resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.19.5.tgz#218064e321126fcf9048d1ca25dd2465da55d9c6"
+ integrity sha512-y6W03tvrACO72aijJ5uF02FRq5cgDR9lUxddQ8vyF+GvmjJQqbzDcJngEjURc+ZsG31VI3hODNZJ2URj86pzmg==
+ dependencies:
+ "@types/node" "*"
+ "@types/qs" "*"
+ "@types/range-parser" "*"
+ "@types/send" "*"
+
+"@types/express@*", "@types/express@^4.17.13":
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.21.tgz#c26d4a151e60efe0084b23dc3369ebc631ed192d"
+ integrity sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==
+ dependencies:
+ "@types/body-parser" "*"
+ "@types/express-serve-static-core" "^4.17.33"
+ "@types/qs" "*"
+ "@types/serve-static" "*"
+
+"@types/geojson@*":
+ version "7946.0.16"
+ resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.16.tgz#8ebe53d69efada7044454e3305c19017d97ced2a"
+ integrity sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==
+
+"@types/gtag.js@^0.0.12":
+ version "0.0.12"
+ resolved "https://registry.yarnpkg.com/@types/gtag.js/-/gtag.js-0.0.12.tgz#095122edca896689bdfcdd73b057e23064d23572"
+ integrity sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg==
+
+"@types/hast@^3.0.0":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa"
+ integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==
+ dependencies:
+ "@types/unist" "*"
+
+"@types/history@^4.7.11":
+ version "4.7.11"
+ resolved "https://registry.yarnpkg.com/@types/history/-/history-4.7.11.tgz#56588b17ae8f50c53983a524fc3cc47437969d64"
+ integrity sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==
+
+"@types/html-minifier-terser@^6.0.0":
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35"
+ integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==
+
+"@types/http-cache-semantics@^4.0.2":
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz#b979ebad3919799c979b17c72621c0bc0a31c6c4"
+ integrity sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==
+
+"@types/http-errors@*":
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.4.tgz#7eb47726c391b7345a6ec35ad7f4de469cf5ba4f"
+ integrity sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==
+
+"@types/http-proxy@^1.17.8":
+ version "1.17.15"
+ resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.15.tgz#12118141ce9775a6499ecb4c01d02f90fc839d36"
+ integrity sha512-25g5atgiVNTIv0LBDTg1H74Hvayx0ajtJPLLcYE3whFv75J0pWNtOBzaXJQgDTmrX1bx5U9YC2w/n65BN1HwRQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0":
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7"
+ integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==
+
+"@types/istanbul-lib-report@*":
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf"
+ integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==
+ dependencies:
+ "@types/istanbul-lib-coverage" "*"
+
+"@types/istanbul-reports@^3.0.0":
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54"
+ integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==
+ dependencies:
+ "@types/istanbul-lib-report" "*"
+
+"@types/json-schema@*", "@types/json-schema@^7.0.15", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9":
+ version "7.0.15"
+ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
+ integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==
+
+"@types/mdast@^4.0.0", "@types/mdast@^4.0.2":
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-4.0.4.tgz#7ccf72edd2f1aa7dd3437e180c64373585804dd6"
+ integrity sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==
+ dependencies:
+ "@types/unist" "*"
+
+"@types/mdx@^2.0.0":
+ version "2.0.13"
+ resolved "https://registry.yarnpkg.com/@types/mdx/-/mdx-2.0.13.tgz#68f6877043d377092890ff5b298152b0a21671bd"
+ integrity sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==
+
+"@types/mime@^1":
+ version "1.3.5"
+ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.5.tgz#1ef302e01cf7d2b5a0fa526790c9123bf1d06690"
+ integrity sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==
+
+"@types/ms@*":
+ version "0.7.34"
+ resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.34.tgz#10964ba0dee6ac4cd462e2795b6bebd407303433"
+ integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==
+
+"@types/node-forge@^1.3.0":
+ version "1.3.11"
+ resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da"
+ integrity sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/node@*":
+ version "22.2.0"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-22.2.0.tgz#7cf046a99f0ba4d628ad3088cb21f790df9b0c5b"
+ integrity sha512-bm6EG6/pCpkxDf/0gDNDdtDILMOHgaQBVOJGdwsqClnxA3xL6jtMv76rLBc006RVMWbmaf0xbmom4Z/5o2nRkQ==
+ dependencies:
+ undici-types "~6.13.0"
+
+"@types/node@^17.0.5":
+ version "17.0.45"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190"
+ integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==
+
+"@types/parse-json@^4.0.0":
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239"
+ integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==
+
+"@types/prismjs@^1.26.0":
+ version "1.26.4"
+ resolved "https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.4.tgz#1a9e1074619ce1d7322669e5b46fbe823925103a"
+ integrity sha512-rlAnzkW2sZOjbqZ743IHUhFcvzaGbqijwOu8QZnZCjfQzBqFE3s4lOTJEsxikImav9uzz/42I+O7YUs1mWgMlg==
+
+"@types/prop-types@*", "@types/prop-types@^15.7.12":
+ version "15.7.12"
+ resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.12.tgz#12bb1e2be27293c1406acb6af1c3f3a1481d98c6"
+ integrity sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==
+
+"@types/qs@*":
+ version "6.9.15"
+ resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.15.tgz#adde8a060ec9c305a82de1babc1056e73bd64dce"
+ integrity sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg==
+
+"@types/range-parser@*":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.7.tgz#50ae4353eaaddc04044279812f52c8c65857dbcb"
+ integrity sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==
+
+"@types/react-router-config@*", "@types/react-router-config@^5.0.7":
+ version "5.0.11"
+ resolved "https://registry.yarnpkg.com/@types/react-router-config/-/react-router-config-5.0.11.tgz#2761a23acc7905a66a94419ee40294a65aaa483a"
+ integrity sha512-WmSAg7WgqW7m4x8Mt4N6ZyKz0BubSj/2tVUMsAHp+Yd2AMwcSbeFq9WympT19p5heCFmF97R9eD5uUR/t4HEqw==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router" "^5.1.0"
+
+"@types/react-router-dom@*":
+ version "5.3.3"
+ resolved "https://registry.yarnpkg.com/@types/react-router-dom/-/react-router-dom-5.3.3.tgz#e9d6b4a66fcdbd651a5f106c2656a30088cc1e83"
+ integrity sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+ "@types/react-router" "*"
+
+"@types/react-router@*", "@types/react-router@^5.1.0":
+ version "5.1.20"
+ resolved "https://registry.yarnpkg.com/@types/react-router/-/react-router-5.1.20.tgz#88eccaa122a82405ef3efbcaaa5dcdd9f021387c"
+ integrity sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q==
+ dependencies:
+ "@types/history" "^4.7.11"
+ "@types/react" "*"
+
+"@types/react-transition-group@^4.4.10":
+ version "4.4.11"
+ resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-4.4.11.tgz#d963253a611d757de01ebb241143b1017d5d63d5"
+ integrity sha512-RM05tAniPZ5DZPzzNFP+DmrcOdD0efDUxMy3145oljWSl3x9ZV5vhme98gTxFrj2lhXvmGNnUiuDyJgY9IKkNA==
+ dependencies:
+ "@types/react" "*"
+
+"@types/react@*":
+ version "18.3.3"
+ resolved "https://registry.yarnpkg.com/@types/react/-/react-18.3.3.tgz#9679020895318b0915d7a3ab004d92d33375c45f"
+ integrity sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw==
+ dependencies:
+ "@types/prop-types" "*"
+ csstype "^3.0.2"
+
+"@types/retry@0.12.0":
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d"
+ integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==
+
+"@types/sax@^1.2.1":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@types/sax/-/sax-1.2.7.tgz#ba5fe7df9aa9c89b6dff7688a19023dd2963091d"
+ integrity sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==
+ dependencies:
+ "@types/node" "*"
+
+"@types/send@*":
+ version "0.17.4"
+ resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.4.tgz#6619cd24e7270793702e4e6a4b958a9010cfc57a"
+ integrity sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==
+ dependencies:
+ "@types/mime" "^1"
+ "@types/node" "*"
+
+"@types/serve-index@^1.9.1":
+ version "1.9.4"
+ resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.4.tgz#e6ae13d5053cb06ed36392110b4f9a49ac4ec898"
+ integrity sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==
+ dependencies:
+ "@types/express" "*"
+
+"@types/serve-static@*", "@types/serve-static@^1.13.10":
+ version "1.15.7"
+ resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.7.tgz#22174bbd74fb97fe303109738e9b5c2f3064f714"
+ integrity sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==
+ dependencies:
+ "@types/http-errors" "*"
+ "@types/node" "*"
+ "@types/send" "*"
+
+"@types/sockjs@^0.3.33":
+ version "0.3.36"
+ resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.36.tgz#ce322cf07bcc119d4cbf7f88954f3a3bd0f67535"
+ integrity sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==
+ dependencies:
+ "@types/node" "*"
+
+"@types/trusted-types@^2.0.7":
+ version "2.0.7"
+ resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.7.tgz#baccb07a970b91707df3a3e8ba6896c57ead2d11"
+ integrity sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==
+
+"@types/unist@*", "@types/unist@^3.0.0":
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.2.tgz#6dd61e43ef60b34086287f83683a5c1b2dc53d20"
+ integrity sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==
+
+"@types/unist@^2.0.0":
+ version "2.0.10"
+ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.10.tgz#04ffa7f406ab628f7f7e97ca23e290cd8ab15efc"
+ integrity sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==
+
+"@types/ws@^8.5.5":
+ version "8.5.12"
+ resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.12.tgz#619475fe98f35ccca2a2f6c137702d85ec247b7e"
+ integrity sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/yargs-parser@*":
+ version "21.0.3"
+ resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15"
+ integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==
+
+"@types/yargs@^17.0.8":
+ version "17.0.33"
+ resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d"
+ integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==
+ dependencies:
+ "@types/yargs-parser" "*"
+
+"@ungap/structured-clone@^1.0.0":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406"
+ integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==
+
+"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb"
+ integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==
+ dependencies:
+ "@webassemblyjs/helper-numbers" "1.11.6"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.6"
+
+"@webassemblyjs/ast@1.14.1", "@webassemblyjs/ast@^1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.14.1.tgz#a9f6a07f2b03c95c8d38c4536a1fdfb521ff55b6"
+ integrity sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==
+ dependencies:
+ "@webassemblyjs/helper-numbers" "1.13.2"
+ "@webassemblyjs/helper-wasm-bytecode" "1.13.2"
+
+"@webassemblyjs/floating-point-hex-parser@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431"
+ integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==
+
+"@webassemblyjs/floating-point-hex-parser@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz#fcca1eeddb1cc4e7b6eed4fc7956d6813b21b9fb"
+ integrity sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==
+
+"@webassemblyjs/helper-api-error@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768"
+ integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==
+
+"@webassemblyjs/helper-api-error@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz#e0a16152248bc38daee76dd7e21f15c5ef3ab1e7"
+ integrity sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==
+
+"@webassemblyjs/helper-buffer@1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6"
+ integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==
+
+"@webassemblyjs/helper-buffer@1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz#822a9bc603166531f7d5df84e67b5bf99b72b96b"
+ integrity sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==
+
+"@webassemblyjs/helper-numbers@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5"
+ integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==
+ dependencies:
+ "@webassemblyjs/floating-point-hex-parser" "1.11.6"
+ "@webassemblyjs/helper-api-error" "1.11.6"
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/helper-numbers@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz#dbd932548e7119f4b8a7877fd5a8d20e63490b2d"
+ integrity sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==
+ dependencies:
+ "@webassemblyjs/floating-point-hex-parser" "1.13.2"
+ "@webassemblyjs/helper-api-error" "1.13.2"
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/helper-wasm-bytecode@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9"
+ integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==
+
+"@webassemblyjs/helper-wasm-bytecode@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz#e556108758f448aae84c850e593ce18a0eb31e0b"
+ integrity sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==
+
+"@webassemblyjs/helper-wasm-section@1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf"
+ integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@webassemblyjs/helper-buffer" "1.12.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.6"
+ "@webassemblyjs/wasm-gen" "1.12.1"
+
+"@webassemblyjs/helper-wasm-section@1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz#9629dda9c4430eab54b591053d6dc6f3ba050348"
+ integrity sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@webassemblyjs/helper-buffer" "1.14.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.13.2"
+ "@webassemblyjs/wasm-gen" "1.14.1"
+
+"@webassemblyjs/ieee754@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a"
+ integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==
+ dependencies:
+ "@xtuc/ieee754" "^1.2.0"
+
+"@webassemblyjs/ieee754@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz#1c5eaace1d606ada2c7fd7045ea9356c59ee0dba"
+ integrity sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==
+ dependencies:
+ "@xtuc/ieee754" "^1.2.0"
+
+"@webassemblyjs/leb128@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7"
+ integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==
+ dependencies:
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/leb128@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.13.2.tgz#57c5c3deb0105d02ce25fa3fd74f4ebc9fd0bbb0"
+ integrity sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==
+ dependencies:
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/utf8@1.11.6":
+ version "1.11.6"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a"
+ integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==
+
+"@webassemblyjs/utf8@1.13.2":
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.13.2.tgz#917a20e93f71ad5602966c2d685ae0c6c21f60f1"
+ integrity sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==
+
+"@webassemblyjs/wasm-edit@^1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b"
+ integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@webassemblyjs/helper-buffer" "1.12.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.6"
+ "@webassemblyjs/helper-wasm-section" "1.12.1"
+ "@webassemblyjs/wasm-gen" "1.12.1"
+ "@webassemblyjs/wasm-opt" "1.12.1"
+ "@webassemblyjs/wasm-parser" "1.12.1"
+ "@webassemblyjs/wast-printer" "1.12.1"
+
+"@webassemblyjs/wasm-edit@^1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz#ac6689f502219b59198ddec42dcd496b1004d597"
+ integrity sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@webassemblyjs/helper-buffer" "1.14.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.13.2"
+ "@webassemblyjs/helper-wasm-section" "1.14.1"
+ "@webassemblyjs/wasm-gen" "1.14.1"
+ "@webassemblyjs/wasm-opt" "1.14.1"
+ "@webassemblyjs/wasm-parser" "1.14.1"
+ "@webassemblyjs/wast-printer" "1.14.1"
+
+"@webassemblyjs/wasm-gen@1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547"
+ integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.6"
+ "@webassemblyjs/ieee754" "1.11.6"
+ "@webassemblyjs/leb128" "1.11.6"
+ "@webassemblyjs/utf8" "1.11.6"
+
+"@webassemblyjs/wasm-gen@1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz#991e7f0c090cb0bb62bbac882076e3d219da9570"
+ integrity sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@webassemblyjs/helper-wasm-bytecode" "1.13.2"
+ "@webassemblyjs/ieee754" "1.13.2"
+ "@webassemblyjs/leb128" "1.13.2"
+ "@webassemblyjs/utf8" "1.13.2"
+
+"@webassemblyjs/wasm-opt@1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5"
+ integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@webassemblyjs/helper-buffer" "1.12.1"
+ "@webassemblyjs/wasm-gen" "1.12.1"
+ "@webassemblyjs/wasm-parser" "1.12.1"
+
+"@webassemblyjs/wasm-opt@1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz#e6f71ed7ccae46781c206017d3c14c50efa8106b"
+ integrity sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@webassemblyjs/helper-buffer" "1.14.1"
+ "@webassemblyjs/wasm-gen" "1.14.1"
+ "@webassemblyjs/wasm-parser" "1.14.1"
+
+"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937"
+ integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@webassemblyjs/helper-api-error" "1.11.6"
+ "@webassemblyjs/helper-wasm-bytecode" "1.11.6"
+ "@webassemblyjs/ieee754" "1.11.6"
+ "@webassemblyjs/leb128" "1.11.6"
+ "@webassemblyjs/utf8" "1.11.6"
+
+"@webassemblyjs/wasm-parser@1.14.1", "@webassemblyjs/wasm-parser@^1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz#b3e13f1893605ca78b52c68e54cf6a865f90b9fb"
+ integrity sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@webassemblyjs/helper-api-error" "1.13.2"
+ "@webassemblyjs/helper-wasm-bytecode" "1.13.2"
+ "@webassemblyjs/ieee754" "1.13.2"
+ "@webassemblyjs/leb128" "1.13.2"
+ "@webassemblyjs/utf8" "1.13.2"
+
+"@webassemblyjs/wast-printer@1.12.1":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac"
+ integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==
+ dependencies:
+ "@webassemblyjs/ast" "1.12.1"
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/wast-printer@1.14.1":
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz#3bb3e9638a8ae5fdaf9610e7a06b4d9f9aa6fe07"
+ integrity sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==
+ dependencies:
+ "@webassemblyjs/ast" "1.14.1"
+ "@xtuc/long" "4.2.2"
+
+"@xtuc/ieee754@^1.2.0":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790"
+ integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==
+
+"@xtuc/long@4.2.2":
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d"
+ integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==
+
+accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8:
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
+ integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==
+ dependencies:
+ mime-types "~2.1.34"
+ negotiator "0.6.3"
+
+acorn-import-attributes@^1.9.5:
+ version "1.9.5"
+ resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef"
+ integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==
+
+acorn-import-phases@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz#16eb850ba99a056cb7cbfe872ffb8972e18c8bd7"
+ integrity sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==
+
+acorn-jsx@^5.0.0:
+ version "5.3.2"
+ resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
+ integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
+
+acorn-walk@^8.0.0:
+ version "8.3.3"
+ resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e"
+ integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==
+ dependencies:
+ acorn "^8.11.0"
+
+acorn@^8.0.0, acorn@^8.0.4, acorn@^8.11.0, acorn@^8.7.1, acorn@^8.8.2:
+ version "8.12.1"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248"
+ integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==
+
+acorn@^8.14.0, acorn@^8.15.0:
+ version "8.15.0"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816"
+ integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==
+
+address@^1.0.1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e"
+ integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==
+
+aggregate-error@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a"
+ integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==
+ dependencies:
+ clean-stack "^2.0.0"
+ indent-string "^4.0.0"
+
+ajv-formats@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520"
+ integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==
+ dependencies:
+ ajv "^8.0.0"
+
+ajv-keywords@^3.5.2:
+ version "3.5.2"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d"
+ integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==
+
+ajv-keywords@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16"
+ integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==
+ dependencies:
+ fast-deep-equal "^3.1.3"
+
+ajv@^6.12.5:
+ version "6.12.6"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
+ integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ fast-json-stable-stringify "^2.0.0"
+ json-schema-traverse "^0.4.1"
+ uri-js "^4.2.2"
+
+ajv@^8.0.0, ajv@^8.9.0:
+ version "8.17.1"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6"
+ integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==
+ dependencies:
+ fast-deep-equal "^3.1.3"
+ fast-uri "^3.0.1"
+ json-schema-traverse "^1.0.0"
+ require-from-string "^2.0.2"
+
+algoliasearch-helper@^3.22.6:
+ version "3.26.0"
+ resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.26.0.tgz#d6e283396a9fc5bf944f365dc3b712570314363f"
+ integrity sha512-Rv2x3GXleQ3ygwhkhJubhhYGsICmShLAiqtUuJTUkr9uOCOXyF2E71LVT4XDnVffbknv8XgScP4U0Oxtgm+hIw==
+ dependencies:
+ "@algolia/events" "^4.0.1"
+
+algoliasearch@^5.14.2, algoliasearch@^5.17.1:
+ version "5.34.0"
+ resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-5.34.0.tgz#eb71f52b254059bfe40342ff47a2d3641b93c68b"
+ integrity sha512-wioVnf/8uuG8Bmywhk5qKIQ3wzCCtmdvicPRb0fa3kKYGGoewfgDqLEaET1MV2NbTc3WGpPv+AgauLVBp1nB9A==
+ dependencies:
+ "@algolia/client-abtesting" "5.34.0"
+ "@algolia/client-analytics" "5.34.0"
+ "@algolia/client-common" "5.34.0"
+ "@algolia/client-insights" "5.34.0"
+ "@algolia/client-personalization" "5.34.0"
+ "@algolia/client-query-suggestions" "5.34.0"
+ "@algolia/client-search" "5.34.0"
+ "@algolia/ingestion" "1.34.0"
+ "@algolia/monitoring" "1.34.0"
+ "@algolia/recommend" "5.34.0"
+ "@algolia/requester-browser-xhr" "5.34.0"
+ "@algolia/requester-fetch" "5.34.0"
+ "@algolia/requester-node-http" "5.34.0"
+
+ansi-align@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.1.tgz#0cdf12e111ace773a86e9a1fad1225c43cb19a59"
+ integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==
+ dependencies:
+ string-width "^4.1.0"
+
+ansi-escapes@^4.3.2:
+ version "4.3.2"
+ resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e"
+ integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==
+ dependencies:
+ type-fest "^0.21.3"
+
+ansi-html-community@^0.0.8:
+ version "0.0.8"
+ resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41"
+ integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==
+
+ansi-regex@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
+ integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+
+ansi-regex@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a"
+ integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==
+
+ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ dependencies:
+ color-convert "^1.9.0"
+
+ansi-styles@^4.0.0, ansi-styles@^4.1.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+ integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+ dependencies:
+ color-convert "^2.0.1"
+
+ansi-styles@^6.1.0:
+ version "6.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5"
+ integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==
+
+anymatch@~3.1.2:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e"
+ integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==
+ dependencies:
+ normalize-path "^3.0.0"
+ picomatch "^2.0.4"
+
+arg@^5.0.0:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
+ integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
+argparse@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
+ integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
+
+array-flatten@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+ integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==
+
+array-union@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d"
+ integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==
+
+astring@^1.8.0:
+ version "1.8.6"
+ resolved "https://registry.yarnpkg.com/astring/-/astring-1.8.6.tgz#2c9c157cf1739d67561c56ba896e6948f6b93731"
+ integrity sha512-ISvCdHdlTDlH5IpxQJIex7BWBywFWgjJSVdwst+/iQCoEYnyOaQ95+X1JGshuBjGp6nxKUy1jMgE3zPqN7fQdg==
+
+autoprefixer@^10.4.19:
+ version "10.4.20"
+ resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.20.tgz#5caec14d43976ef42e32dcb4bd62878e96be5b3b"
+ integrity sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==
+ dependencies:
+ browserslist "^4.23.3"
+ caniuse-lite "^1.0.30001646"
+ fraction.js "^4.3.7"
+ normalize-range "^0.1.2"
+ picocolors "^1.0.1"
+ postcss-value-parser "^4.2.0"
+
+autoprefixer@^10.4.21:
+ version "10.4.21"
+ resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.21.tgz#77189468e7a8ad1d9a37fbc08efc9f480cf0a95d"
+ integrity sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==
+ dependencies:
+ browserslist "^4.24.4"
+ caniuse-lite "^1.0.30001702"
+ fraction.js "^4.3.7"
+ normalize-range "^0.1.2"
+ picocolors "^1.1.1"
+ postcss-value-parser "^4.2.0"
+
+babel-loader@^9.2.1:
+ version "9.2.1"
+ resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-9.2.1.tgz#04c7835db16c246dd19ba0914418f3937797587b"
+ integrity sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==
+ dependencies:
+ find-cache-dir "^4.0.0"
+ schema-utils "^4.0.0"
+
+babel-plugin-dynamic-import-node@^2.3.3:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
+ integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
+ dependencies:
+ object.assign "^4.1.0"
+
+babel-plugin-macros@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1"
+ integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==
+ dependencies:
+ "@babel/runtime" "^7.12.5"
+ cosmiconfig "^7.0.0"
+ resolve "^1.19.0"
+
+babel-plugin-polyfill-corejs2@^0.4.10:
+ version "0.4.11"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz#30320dfe3ffe1a336c15afdcdafd6fd615b25e33"
+ integrity sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q==
+ dependencies:
+ "@babel/compat-data" "^7.22.6"
+ "@babel/helper-define-polyfill-provider" "^0.6.2"
+ semver "^6.3.1"
+
+babel-plugin-polyfill-corejs2@^0.4.14:
+ version "0.4.14"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.14.tgz#8101b82b769c568835611542488d463395c2ef8f"
+ integrity sha512-Co2Y9wX854ts6U8gAAPXfn0GmAyctHuK8n0Yhfjd6t30g7yvKjspvvOo9yG+z52PZRgFErt7Ka2pYnXCjLKEpg==
+ dependencies:
+ "@babel/compat-data" "^7.27.7"
+ "@babel/helper-define-polyfill-provider" "^0.6.5"
+ semver "^6.3.1"
+
+babel-plugin-polyfill-corejs3@^0.10.4:
+ version "0.10.6"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.6.tgz#2deda57caef50f59c525aeb4964d3b2f867710c7"
+ integrity sha512-b37+KR2i/khY5sKmWNVQAnitvquQbNdWy6lJdsr0kmquCKEEUgMKK4SboVM3HtfnZilfjr4MMQ7vY58FVWDtIA==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.6.2"
+ core-js-compat "^3.38.0"
+
+babel-plugin-polyfill-corejs3@^0.13.0:
+ version "0.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.13.0.tgz#bb7f6aeef7addff17f7602a08a6d19a128c30164"
+ integrity sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.6.5"
+ core-js-compat "^3.43.0"
+
+babel-plugin-polyfill-regenerator@^0.6.1:
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz#addc47e240edd1da1058ebda03021f382bba785e"
+ integrity sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.6.2"
+
+babel-plugin-polyfill-regenerator@^0.6.5:
+ version "0.6.5"
+ resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.5.tgz#32752e38ab6f6767b92650347bf26a31b16ae8c5"
+ integrity sha512-ISqQ2frbiNU9vIJkzg7dlPpznPZ4jOiUQ1uSmB0fEHeowtN3COYRsXr/xexn64NpU13P06jc/L5TgiJXOgrbEg==
+ dependencies:
+ "@babel/helper-define-polyfill-provider" "^0.6.5"
+
+bail@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/bail/-/bail-2.0.2.tgz#d26f5cd8fe5d6f832a31517b9f7c356040ba6d5d"
+ integrity sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==
+
+balanced-match@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+ integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
+
+batch@0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
+ integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==
+
+big.js@^5.2.2:
+ version "5.2.2"
+ resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
+ integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==
+
+binary-extensions@^2.0.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522"
+ integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==
+
+body-parser@1.20.2:
+ version "1.20.2"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
+ integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
+ dependencies:
+ bytes "3.1.2"
+ content-type "~1.0.5"
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ on-finished "2.4.1"
+ qs "6.11.0"
+ raw-body "2.5.2"
+ type-is "~1.6.18"
+ unpipe "1.0.0"
+
+bonjour-service@^1.0.11:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.2.1.tgz#eb41b3085183df3321da1264719fbada12478d02"
+ integrity sha512-oSzCS2zV14bh2kji6vNe7vrpJYCHGvcZnlffFQ1MEoX/WOeQ/teD8SYWKR942OI3INjq8OMNJlbPK5LLLUxFDw==
+ dependencies:
+ fast-deep-equal "^3.1.3"
+ multicast-dns "^7.2.5"
+
+boolbase@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
+ integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==
+
+boxen@^6.2.1:
+ version "6.2.1"
+ resolved "https://registry.yarnpkg.com/boxen/-/boxen-6.2.1.tgz#b098a2278b2cd2845deef2dff2efc38d329b434d"
+ integrity sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==
+ dependencies:
+ ansi-align "^3.0.1"
+ camelcase "^6.2.0"
+ chalk "^4.1.2"
+ cli-boxes "^3.0.0"
+ string-width "^5.0.1"
+ type-fest "^2.5.0"
+ widest-line "^4.0.1"
+ wrap-ansi "^8.0.1"
+
+boxen@^7.0.0:
+ version "7.1.1"
+ resolved "https://registry.yarnpkg.com/boxen/-/boxen-7.1.1.tgz#f9ba525413c2fec9cdb88987d835c4f7cad9c8f4"
+ integrity sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog==
+ dependencies:
+ ansi-align "^3.0.1"
+ camelcase "^7.0.1"
+ chalk "^5.2.0"
+ cli-boxes "^3.0.0"
+ string-width "^5.1.2"
+ type-fest "^2.13.0"
+ widest-line "^4.0.1"
+ wrap-ansi "^8.1.0"
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+braces@^3.0.3, braces@~3.0.2:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
+ integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
+ dependencies:
+ fill-range "^7.1.1"
+
+browserslist@^4.0.0, browserslist@^4.21.10, browserslist@^4.23.0, browserslist@^4.23.1, browserslist@^4.23.3:
+ version "4.23.3"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800"
+ integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==
+ dependencies:
+ caniuse-lite "^1.0.30001646"
+ electron-to-chromium "^1.5.4"
+ node-releases "^2.0.18"
+ update-browserslist-db "^1.1.0"
+
+browserslist@^4.24.0, browserslist@^4.24.4, browserslist@^4.25.0, browserslist@^4.25.1:
+ version "4.25.1"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.25.1.tgz#ba9e8e6f298a1d86f829c9b975e07948967bb111"
+ integrity sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==
+ dependencies:
+ caniuse-lite "^1.0.30001726"
+ electron-to-chromium "^1.5.173"
+ node-releases "^2.0.19"
+ update-browserslist-db "^1.1.3"
+
+buffer-from@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
+ integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
+
+bytes@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
+ integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==
+
+bytes@3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
+ integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
+
+cacheable-lookup@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz#3476a8215d046e5a3202a9209dd13fec1f933a27"
+ integrity sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==
+
+cacheable-request@^10.2.8:
+ version "10.2.14"
+ resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-10.2.14.tgz#eb915b665fda41b79652782df3f553449c406b9d"
+ integrity sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==
+ dependencies:
+ "@types/http-cache-semantics" "^4.0.2"
+ get-stream "^6.0.1"
+ http-cache-semantics "^4.1.1"
+ keyv "^4.5.3"
+ mimic-response "^4.0.0"
+ normalize-url "^8.0.0"
+ responselike "^3.0.0"
+
+call-bind@^1.0.5, call-bind@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9"
+ integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==
+ dependencies:
+ es-define-property "^1.0.0"
+ es-errors "^1.3.0"
+ function-bind "^1.1.2"
+ get-intrinsic "^1.2.4"
+ set-function-length "^1.2.1"
+
+callsites@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
+ integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+
+camel-case@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a"
+ integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==
+ dependencies:
+ pascal-case "^3.1.2"
+ tslib "^2.0.3"
+
+camelcase@^6.2.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
+ integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
+
+camelcase@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-7.0.1.tgz#f02e50af9fd7782bc8b88a3558c32fd3a388f048"
+ integrity sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==
+
+caniuse-api@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0"
+ integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==
+ dependencies:
+ browserslist "^4.0.0"
+ caniuse-lite "^1.0.0"
+ lodash.memoize "^4.1.2"
+ lodash.uniq "^4.5.0"
+
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001646:
+ version "1.0.30001651"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz#52de59529e8b02b1aedcaaf5c05d9e23c0c28138"
+ integrity sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==
+
+caniuse-lite@^1.0.30001702, caniuse-lite@^1.0.30001726:
+ version "1.0.30001727"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz#22e9706422ad37aa50556af8c10e40e2d93a8b85"
+ integrity sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==
+
+ccount@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5"
+ integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==
+
+chalk@^2.4.2:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+ integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+chalk@^4.0.0, chalk@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+ integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
+
+chalk@^5.0.1, chalk@^5.2.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
+ integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==
+
+char-regex@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
+ integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
+
+character-entities-html4@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-2.1.0.tgz#1f1adb940c971a4b22ba39ddca6b618dc6e56b2b"
+ integrity sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==
+
+character-entities-legacy@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz#76bc83a90738901d7bc223a9e93759fdd560125b"
+ integrity sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==
+
+character-entities@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-2.0.2.tgz#2d09c2e72cd9523076ccb21157dff66ad43fcc22"
+ integrity sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==
+
+character-reference-invalid@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz#85c66b041e43b47210faf401278abf808ac45cb9"
+ integrity sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==
+
+cheerio-select@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4"
+ integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==
+ dependencies:
+ boolbase "^1.0.0"
+ css-select "^5.1.0"
+ css-what "^6.1.0"
+ domelementtype "^2.3.0"
+ domhandler "^5.0.3"
+ domutils "^3.0.1"
+
+cheerio@1.0.0-rc.12:
+ version "1.0.0-rc.12"
+ resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683"
+ integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==
+ dependencies:
+ cheerio-select "^2.1.0"
+ dom-serializer "^2.0.0"
+ domhandler "^5.0.3"
+ domutils "^3.0.1"
+ htmlparser2 "^8.0.1"
+ parse5 "^7.0.0"
+ parse5-htmlparser2-tree-adapter "^7.0.0"
+
+chevrotain-allstar@~0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz#b7412755f5d83cc139ab65810cdb00d8db40e6ca"
+ integrity sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==
+ dependencies:
+ lodash-es "^4.17.21"
+
+chevrotain@~11.0.3:
+ version "11.0.3"
+ resolved "https://registry.yarnpkg.com/chevrotain/-/chevrotain-11.0.3.tgz#88ffc1fb4b5739c715807eaeedbbf200e202fc1b"
+ integrity sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==
+ dependencies:
+ "@chevrotain/cst-dts-gen" "11.0.3"
+ "@chevrotain/gast" "11.0.3"
+ "@chevrotain/regexp-to-ast" "11.0.3"
+ "@chevrotain/types" "11.0.3"
+ "@chevrotain/utils" "11.0.3"
+ lodash-es "4.17.21"
+
+chokidar@^3.5.3:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b"
+ integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==
+ dependencies:
+ anymatch "~3.1.2"
+ braces "~3.0.2"
+ glob-parent "~5.1.2"
+ is-binary-path "~2.1.0"
+ is-glob "~4.0.1"
+ normalize-path "~3.0.0"
+ readdirp "~3.6.0"
+ optionalDependencies:
+ fsevents "~2.3.2"
+
+chrome-trace-event@^1.0.2:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz#05bffd7ff928465093314708c93bdfa9bd1f0f5b"
+ integrity sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==
+
+ci-info@^3.2.0:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4"
+ integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==
+
+clean-css@^5.2.2, clean-css@^5.3.3, clean-css@~5.3.2:
+ version "5.3.3"
+ resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.3.tgz#b330653cd3bd6b75009cc25c714cae7b93351ccd"
+ integrity sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==
+ dependencies:
+ source-map "~0.6.0"
+
+clean-stack@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
+ integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
+
+cli-boxes@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-3.0.0.tgz#71a10c716feeba005e4504f36329ef0b17cf3145"
+ integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==
+
+cli-table3@^0.6.3:
+ version "0.6.5"
+ resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.5.tgz#013b91351762739c16a9567c21a04632e449bf2f"
+ integrity sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==
+ dependencies:
+ string-width "^4.2.0"
+ optionalDependencies:
+ "@colors/colors" "1.5.0"
+
+clone-deep@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387"
+ integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==
+ dependencies:
+ is-plain-object "^2.0.4"
+ kind-of "^6.0.2"
+ shallow-clone "^3.0.0"
+
+clsx@^2.0.0, clsx@^2.1.0, clsx@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
+ integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==
+
+collapse-white-space@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-2.1.0.tgz#640257174f9f42c740b40f3b55ee752924feefca"
+ integrity sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==
+
+color-convert@^1.9.0:
+ version "1.9.3"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+ integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+ dependencies:
+ color-name "1.1.3"
+
+color-convert@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+ integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+ dependencies:
+ color-name "~1.1.4"
+
+color-name@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+ integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+
+color-name@~1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+ integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+colord@^2.9.3:
+ version "2.9.3"
+ resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43"
+ integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==
+
+colorette@^2.0.10:
+ version "2.0.20"
+ resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a"
+ integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==
+
+combine-promises@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/combine-promises/-/combine-promises-1.2.0.tgz#5f2e68451862acf85761ded4d9e2af7769c2ca6a"
+ integrity sha512-VcQB1ziGD0NXrhKxiwyNbCDmRzs/OShMs2GqW2DlU2A/Sd0nQxE1oWDAE5O0ygSx5mgQOn9eIFh7yKPgFRVkPQ==
+
+comma-separated-tokens@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee"
+ integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==
+
+commander@7, commander@^7.2.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
+ integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==
+
+commander@^10.0.0:
+ version "10.0.1"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
+ integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
+
+commander@^2.20.0:
+ version "2.20.3"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
+ integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
+
+commander@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae"
+ integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==
+
+commander@^8.3.0:
+ version "8.3.0"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66"
+ integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==
+
+common-path-prefix@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0"
+ integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==
+
+compressible@~2.0.16:
+ version "2.0.18"
+ resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
+ integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
+ dependencies:
+ mime-db ">= 1.43.0 < 2"
+
+compression@^1.7.4:
+ version "1.7.4"
+ resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
+ integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
+ dependencies:
+ accepts "~1.3.5"
+ bytes "3.0.0"
+ compressible "~2.0.16"
+ debug "2.6.9"
+ on-headers "~1.0.2"
+ safe-buffer "5.1.2"
+ vary "~1.1.2"
+
+concat-map@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+ integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
+
+confbox@^0.1.8:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/confbox/-/confbox-0.1.8.tgz#820d73d3b3c82d9bd910652c5d4d599ef8ff8b06"
+ integrity sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==
+
+confbox@^0.2.2:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/confbox/-/confbox-0.2.2.tgz#8652f53961c74d9e081784beed78555974a9c110"
+ integrity sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==
+
+config-chain@^1.1.11:
+ version "1.1.13"
+ resolved "https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.13.tgz#fad0795aa6a6cdaff9ed1b68e9dff94372c232f4"
+ integrity sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==
+ dependencies:
+ ini "^1.3.4"
+ proto-list "~1.2.1"
+
+configstore@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/configstore/-/configstore-6.0.0.tgz#49eca2ebc80983f77e09394a1a56e0aca8235566"
+ integrity sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==
+ dependencies:
+ dot-prop "^6.0.1"
+ graceful-fs "^4.2.6"
+ unique-string "^3.0.0"
+ write-file-atomic "^3.0.3"
+ xdg-basedir "^5.0.1"
+
+connect-history-api-fallback@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8"
+ integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==
+
+consola@^3.2.3:
+ version "3.4.2"
+ resolved "https://registry.yarnpkg.com/consola/-/consola-3.4.2.tgz#5af110145397bb67afdab77013fdc34cae590ea7"
+ integrity sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==
+
+content-disposition@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4"
+ integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==
+
+content-disposition@0.5.4:
+ version "0.5.4"
+ resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
+ integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==
+ dependencies:
+ safe-buffer "5.2.1"
+
+content-type@~1.0.4, content-type@~1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918"
+ integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
+
+convert-source-map@^1.5.0:
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f"
+ integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==
+
+convert-source-map@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a"
+ integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==
+
+cookie-signature@1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
+ integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==
+
+cookie@0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
+ integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
+
+copy-text-to-clipboard@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.2.0.tgz#0202b2d9bdae30a49a53f898626dcc3b49ad960b"
+ integrity sha512-RnJFp1XR/LOBDckxTib5Qjr/PMfkatD0MUCQgdpqS8MdKiNUzBjAQBEN6oUy+jW7LI93BBG3DtMB2KOOKpGs2Q==
+
+copy-webpack-plugin@^11.0.0:
+ version "11.0.0"
+ resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz#96d4dbdb5f73d02dd72d0528d1958721ab72e04a"
+ integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==
+ dependencies:
+ fast-glob "^3.2.11"
+ glob-parent "^6.0.1"
+ globby "^13.1.1"
+ normalize-path "^3.0.0"
+ schema-utils "^4.0.0"
+ serialize-javascript "^6.0.0"
+
+core-js-compat@^3.37.1, core-js-compat@^3.38.0:
+ version "3.38.0"
+ resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.38.0.tgz#d93393b1aa346b6ee683377b0c31172ccfe607aa"
+ integrity sha512-75LAicdLa4OJVwFxFbQR3NdnZjNgX6ILpVcVzcC4T2smerB5lELMrJQQQoWV6TiuC/vlaFqgU2tKQx9w5s0e0A==
+ dependencies:
+ browserslist "^4.23.3"
+
+core-js-compat@^3.43.0:
+ version "3.44.0"
+ resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.44.0.tgz#62b9165b97e4cbdb8bca16b14818e67428b4a0f8"
+ integrity sha512-JepmAj2zfl6ogy34qfWtcE7nHKAJnKsQFRn++scjVS2bZFllwptzw61BZcZFYBPpUznLfAvh0LGhxKppk04ClA==
+ dependencies:
+ browserslist "^4.25.1"
+
+core-js-pure@^3.43.0:
+ version "3.44.0"
+ resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.44.0.tgz#6e9d6c128c8b967c5eac4f181c2b654d85c28090"
+ integrity sha512-gvMQAGB4dfVUxpYD0k3Fq8J+n5bB6Ytl15lqlZrOIXFzxOhtPaObfkQGHtMRdyjIf7z2IeNULwi1jEwyS+ltKQ==
+
+core-js@^3.31.1:
+ version "3.38.0"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.38.0.tgz#8acb7c050bf2ccbb35f938c0d040132f6110f636"
+ integrity sha512-XPpwqEodRljce9KswjZShh95qJ1URisBeKCjUdq27YdenkslVe7OO0ZJhlYXAChW7OhXaRLl8AAba7IBfoIHug==
+
+core-util-is@~1.0.0:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
+ integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
+
+cose-base@^1.0.0:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/cose-base/-/cose-base-1.0.3.tgz#650334b41b869578a543358b80cda7e0abe0a60a"
+ integrity sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==
+ dependencies:
+ layout-base "^1.0.0"
+
+cose-base@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/cose-base/-/cose-base-2.2.0.tgz#1c395c35b6e10bb83f9769ca8b817d614add5c01"
+ integrity sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==
+ dependencies:
+ layout-base "^2.0.0"
+
+cosmiconfig@^7.0.0:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6"
+ integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==
+ dependencies:
+ "@types/parse-json" "^4.0.0"
+ import-fresh "^3.2.1"
+ parse-json "^5.0.0"
+ path-type "^4.0.0"
+ yaml "^1.10.0"
+
+cosmiconfig@^8.1.3, cosmiconfig@^8.3.5:
+ version "8.3.6"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3"
+ integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==
+ dependencies:
+ import-fresh "^3.3.0"
+ js-yaml "^4.1.0"
+ parse-json "^5.2.0"
+ path-type "^4.0.0"
+
+cross-spawn@^7.0.3:
+ version "7.0.3"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
+ integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+ dependencies:
+ path-key "^3.1.0"
+ shebang-command "^2.0.0"
+ which "^2.0.1"
+
+crypto-random-string@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-4.0.0.tgz#5a3cc53d7dd86183df5da0312816ceeeb5bb1fc2"
+ integrity sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==
+ dependencies:
+ type-fest "^1.0.1"
+
+css-blank-pseudo@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-7.0.1.tgz#32020bff20a209a53ad71b8675852b49e8d57e46"
+ integrity sha512-jf+twWGDf6LDoXDUode+nc7ZlrqfaNphrBIBrcmeP3D8yw1uPaix1gCC8LUQUGQ6CycuK2opkbFFWFuq/a94ag==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+css-declaration-sorter@^7.2.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz#6dec1c9523bc4a643e088aab8f09e67a54961024"
+ integrity sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow==
+
+css-has-pseudo@^7.0.2:
+ version "7.0.2"
+ resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-7.0.2.tgz#fb42e8de7371f2896961e1f6308f13c2c7019b72"
+ integrity sha512-nzol/h+E0bId46Kn2dQH5VElaknX2Sr0hFuB/1EomdC7j+OISt2ZzK7EHX9DZDY53WbIVAR7FYKSO2XnSf07MQ==
+ dependencies:
+ "@csstools/selector-specificity" "^5.0.0"
+ postcss-selector-parser "^7.0.0"
+ postcss-value-parser "^4.2.0"
+
+css-loader@^6.11.0:
+ version "6.11.0"
+ resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.11.0.tgz#33bae3bf6363d0a7c2cf9031c96c744ff54d85ba"
+ integrity sha512-CTJ+AEQJjq5NzLga5pE39qdiSV56F8ywCIsqNIRF0r7BDgWsN25aazToqAFg7ZrtA/U016xudB3ffgweORxX7g==
+ dependencies:
+ icss-utils "^5.1.0"
+ postcss "^8.4.33"
+ postcss-modules-extract-imports "^3.1.0"
+ postcss-modules-local-by-default "^4.0.5"
+ postcss-modules-scope "^3.2.0"
+ postcss-modules-values "^4.0.0"
+ postcss-value-parser "^4.2.0"
+ semver "^7.5.4"
+
+css-minimizer-webpack-plugin@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz#33effe662edb1a0bf08ad633c32fa75d0f7ec565"
+ integrity sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg==
+ dependencies:
+ "@jridgewell/trace-mapping" "^0.3.18"
+ cssnano "^6.0.1"
+ jest-worker "^29.4.3"
+ postcss "^8.4.24"
+ schema-utils "^4.0.1"
+ serialize-javascript "^6.0.1"
+
+css-prefers-color-scheme@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-10.0.0.tgz#ba001b99b8105b8896ca26fc38309ddb2278bd3c"
+ integrity sha512-VCtXZAWivRglTZditUfB4StnsWr6YVZ2PRtuxQLKTNRdtAf8tpzaVPE9zXIF3VaSc7O70iK/j1+NXxyQCqdPjQ==
+
+css-select@^4.1.3:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b"
+ integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==
+ dependencies:
+ boolbase "^1.0.0"
+ css-what "^6.0.1"
+ domhandler "^4.3.1"
+ domutils "^2.8.0"
+ nth-check "^2.0.1"
+
+css-select@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6"
+ integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==
+ dependencies:
+ boolbase "^1.0.0"
+ css-what "^6.1.0"
+ domhandler "^5.0.2"
+ domutils "^3.0.1"
+ nth-check "^2.0.1"
+
+css-tree@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.3.1.tgz#10264ce1e5442e8572fc82fbe490644ff54b5c20"
+ integrity sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==
+ dependencies:
+ mdn-data "2.0.30"
+ source-map-js "^1.0.1"
+
+css-tree@~2.2.0:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.2.1.tgz#36115d382d60afd271e377f9c5f67d02bd48c032"
+ integrity sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==
+ dependencies:
+ mdn-data "2.0.28"
+ source-map-js "^1.0.1"
+
+css-vendor@^2.0.8:
+ version "2.0.8"
+ resolved "https://registry.yarnpkg.com/css-vendor/-/css-vendor-2.0.8.tgz#e47f91d3bd3117d49180a3c935e62e3d9f7f449d"
+ integrity sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ==
+ dependencies:
+ "@babel/runtime" "^7.8.3"
+ is-in-browser "^1.0.2"
+
+css-what@^6.0.1, css-what@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
+ integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==
+
+cssdb@^8.3.0:
+ version "8.3.1"
+ resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-8.3.1.tgz#0ac96395b7092ffee14563e948cf43c2019b051e"
+ integrity sha512-XnDRQMXucLueX92yDe0LPKupXetWoFOgawr4O4X41l5TltgK2NVbJJVDnnOywDYfW1sTJ28AcXGKOqdRKwCcmQ==
+
+cssesc@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
+ integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
+
+cssnano-preset-advanced@^6.1.2:
+ version "6.1.2"
+ resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz#82b090872b8f98c471f681d541c735acf8b94d3f"
+ integrity sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ==
+ dependencies:
+ autoprefixer "^10.4.19"
+ browserslist "^4.23.0"
+ cssnano-preset-default "^6.1.2"
+ postcss-discard-unused "^6.0.5"
+ postcss-merge-idents "^6.0.3"
+ postcss-reduce-idents "^6.0.3"
+ postcss-zindex "^6.0.2"
+
+cssnano-preset-default@^6.1.2:
+ version "6.1.2"
+ resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz#adf4b89b975aa775f2750c89dbaf199bbd9da35e"
+ integrity sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg==
+ dependencies:
+ browserslist "^4.23.0"
+ css-declaration-sorter "^7.2.0"
+ cssnano-utils "^4.0.2"
+ postcss-calc "^9.0.1"
+ postcss-colormin "^6.1.0"
+ postcss-convert-values "^6.1.0"
+ postcss-discard-comments "^6.0.2"
+ postcss-discard-duplicates "^6.0.3"
+ postcss-discard-empty "^6.0.3"
+ postcss-discard-overridden "^6.0.2"
+ postcss-merge-longhand "^6.0.5"
+ postcss-merge-rules "^6.1.1"
+ postcss-minify-font-values "^6.1.0"
+ postcss-minify-gradients "^6.0.3"
+ postcss-minify-params "^6.1.0"
+ postcss-minify-selectors "^6.0.4"
+ postcss-normalize-charset "^6.0.2"
+ postcss-normalize-display-values "^6.0.2"
+ postcss-normalize-positions "^6.0.2"
+ postcss-normalize-repeat-style "^6.0.2"
+ postcss-normalize-string "^6.0.2"
+ postcss-normalize-timing-functions "^6.0.2"
+ postcss-normalize-unicode "^6.1.0"
+ postcss-normalize-url "^6.0.2"
+ postcss-normalize-whitespace "^6.0.2"
+ postcss-ordered-values "^6.0.2"
+ postcss-reduce-initial "^6.1.0"
+ postcss-reduce-transforms "^6.0.2"
+ postcss-svgo "^6.0.3"
+ postcss-unique-selectors "^6.0.4"
+
+cssnano-utils@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-4.0.2.tgz#56f61c126cd0f11f2eef1596239d730d9fceff3c"
+ integrity sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==
+
+cssnano@^6.0.1, cssnano@^6.1.2:
+ version "6.1.2"
+ resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-6.1.2.tgz#4bd19e505bd37ee7cf0dc902d3d869f6d79c66b8"
+ integrity sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA==
+ dependencies:
+ cssnano-preset-default "^6.1.2"
+ lilconfig "^3.1.1"
+
+csso@^5.0.5:
+ version "5.0.5"
+ resolved "https://registry.yarnpkg.com/csso/-/csso-5.0.5.tgz#f9b7fe6cc6ac0b7d90781bb16d5e9874303e2ca6"
+ integrity sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==
+ dependencies:
+ css-tree "~2.2.0"
+
+csstype@^3.0.2, csstype@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81"
+ integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==
+
+cytoscape-cose-bilkent@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz#762fa121df9930ffeb51a495d87917c570ac209b"
+ integrity sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==
+ dependencies:
+ cose-base "^1.0.0"
+
+cytoscape-fcose@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz#e4d6f6490df4fab58ae9cea9e5c3ab8d7472f471"
+ integrity sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==
+ dependencies:
+ cose-base "^2.2.0"
+
+cytoscape@^3.29.3:
+ version "3.32.1"
+ resolved "https://registry.yarnpkg.com/cytoscape/-/cytoscape-3.32.1.tgz#8475a892a43c79a26825b4f16946fa1491020798"
+ integrity sha512-dbeqFTLYEwlFg7UGtcZhCCG/2WayX72zK3Sq323CEX29CY81tYfVhw1MIdduCtpstB0cTOhJswWlM/OEB3Xp+Q==
+
+"d3-array@1 - 2":
+ version "2.12.1"
+ resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81"
+ integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==
+ dependencies:
+ internmap "^1.0.0"
+
+"d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3, d3-array@^3.2.0:
+ version "3.2.4"
+ resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5"
+ integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==
+ dependencies:
+ internmap "1 - 2"
+
+d3-axis@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-3.0.0.tgz#c42a4a13e8131d637b745fc2973824cfeaf93322"
+ integrity sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==
+
+d3-brush@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-3.0.0.tgz#6f767c4ed8dcb79de7ede3e1c0f89e63ef64d31c"
+ integrity sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==
+ dependencies:
+ d3-dispatch "1 - 3"
+ d3-drag "2 - 3"
+ d3-interpolate "1 - 3"
+ d3-selection "3"
+ d3-transition "3"
+
+d3-chord@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-3.0.1.tgz#d156d61f485fce8327e6abf339cb41d8cbba6966"
+ integrity sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==
+ dependencies:
+ d3-path "1 - 3"
+
+"d3-color@1 - 3", d3-color@3:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2"
+ integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==
+
+d3-contour@4:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-4.0.2.tgz#bb92063bc8c5663acb2422f99c73cbb6c6ae3bcc"
+ integrity sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==
+ dependencies:
+ d3-array "^3.2.0"
+
+d3-delaunay@6:
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.4.tgz#98169038733a0a5babbeda55054f795bb9e4a58b"
+ integrity sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==
+ dependencies:
+ delaunator "5"
+
+"d3-dispatch@1 - 3", d3-dispatch@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-3.0.1.tgz#5fc75284e9c2375c36c839411a0cf550cbfc4d5e"
+ integrity sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==
+
+"d3-drag@2 - 3", d3-drag@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-3.0.0.tgz#994aae9cd23c719f53b5e10e3a0a6108c69607ba"
+ integrity sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==
+ dependencies:
+ d3-dispatch "1 - 3"
+ d3-selection "3"
+
+"d3-dsv@1 - 3", d3-dsv@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73"
+ integrity sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==
+ dependencies:
+ commander "7"
+ iconv-lite "0.6"
+ rw "1"
+
+"d3-ease@1 - 3", d3-ease@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4"
+ integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==
+
+d3-fetch@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-3.0.1.tgz#83141bff9856a0edb5e38de89cdcfe63d0a60a22"
+ integrity sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==
+ dependencies:
+ d3-dsv "1 - 3"
+
+d3-force@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-3.0.0.tgz#3e2ba1a61e70888fe3d9194e30d6d14eece155c4"
+ integrity sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==
+ dependencies:
+ d3-dispatch "1 - 3"
+ d3-quadtree "1 - 3"
+ d3-timer "1 - 3"
+
+"d3-format@1 - 3", d3-format@3:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641"
+ integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==
+
+d3-geo@3:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-3.1.1.tgz#6027cf51246f9b2ebd64f99e01dc7c3364033a4d"
+ integrity sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==
+ dependencies:
+ d3-array "2.5.0 - 3"
+
+d3-hierarchy@3:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz#b01cd42c1eed3d46db77a5966cf726f8c09160c6"
+ integrity sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==
+
+"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d"
+ integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==
+ dependencies:
+ d3-color "1 - 3"
+
+d3-path@1:
+ version "1.0.9"
+ resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf"
+ integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==
+
+"d3-path@1 - 3", d3-path@3, d3-path@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526"
+ integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==
+
+d3-polygon@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-3.0.1.tgz#0b45d3dd1c48a29c8e057e6135693ec80bf16398"
+ integrity sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==
+
+"d3-quadtree@1 - 3", d3-quadtree@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-3.0.1.tgz#6dca3e8be2b393c9a9d514dabbd80a92deef1a4f"
+ integrity sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==
+
+d3-random@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-3.0.1.tgz#d4926378d333d9c0bfd1e6fa0194d30aebaa20f4"
+ integrity sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==
+
+d3-sankey@^0.12.3:
+ version "0.12.3"
+ resolved "https://registry.yarnpkg.com/d3-sankey/-/d3-sankey-0.12.3.tgz#b3c268627bd72e5d80336e8de6acbfec9d15d01d"
+ integrity sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==
+ dependencies:
+ d3-array "1 - 2"
+ d3-shape "^1.2.0"
+
+d3-scale-chromatic@3:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz#34c39da298b23c20e02f1a4b239bd0f22e7f1314"
+ integrity sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==
+ dependencies:
+ d3-color "1 - 3"
+ d3-interpolate "1 - 3"
+
+d3-scale@4:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396"
+ integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==
+ dependencies:
+ d3-array "2.10.0 - 3"
+ d3-format "1 - 3"
+ d3-interpolate "1.2.0 - 3"
+ d3-time "2.1.1 - 3"
+ d3-time-format "2 - 4"
+
+"d3-selection@2 - 3", d3-selection@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-3.0.0.tgz#c25338207efa72cc5b9bd1458a1a41901f1e1b31"
+ integrity sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==
+
+d3-shape@3:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5"
+ integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==
+ dependencies:
+ d3-path "^3.1.0"
+
+d3-shape@^1.2.0:
+ version "1.3.7"
+ resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.3.7.tgz#df63801be07bc986bc54f63789b4fe502992b5d7"
+ integrity sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==
+ dependencies:
+ d3-path "1"
+
+"d3-time-format@2 - 4", d3-time-format@4:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a"
+ integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==
+ dependencies:
+ d3-time "1 - 3"
+
+"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7"
+ integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==
+ dependencies:
+ d3-array "2 - 3"
+
+"d3-timer@1 - 3", d3-timer@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0"
+ integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==
+
+"d3-transition@2 - 3", d3-transition@3:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-3.0.1.tgz#6869fdde1448868077fdd5989200cb61b2a1645f"
+ integrity sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==
+ dependencies:
+ d3-color "1 - 3"
+ d3-dispatch "1 - 3"
+ d3-ease "1 - 3"
+ d3-interpolate "1 - 3"
+ d3-timer "1 - 3"
+
+d3-zoom@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-3.0.0.tgz#d13f4165c73217ffeaa54295cd6969b3e7aee8f3"
+ integrity sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==
+ dependencies:
+ d3-dispatch "1 - 3"
+ d3-drag "2 - 3"
+ d3-interpolate "1 - 3"
+ d3-selection "2 - 3"
+ d3-transition "2 - 3"
+
+d3@^7.9.0:
+ version "7.9.0"
+ resolved "https://registry.yarnpkg.com/d3/-/d3-7.9.0.tgz#579e7acb3d749caf8860bd1741ae8d371070cd5d"
+ integrity sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==
+ dependencies:
+ d3-array "3"
+ d3-axis "3"
+ d3-brush "3"
+ d3-chord "3"
+ d3-color "3"
+ d3-contour "4"
+ d3-delaunay "6"
+ d3-dispatch "3"
+ d3-drag "3"
+ d3-dsv "3"
+ d3-ease "3"
+ d3-fetch "3"
+ d3-force "3"
+ d3-format "3"
+ d3-geo "3"
+ d3-hierarchy "3"
+ d3-interpolate "3"
+ d3-path "3"
+ d3-polygon "3"
+ d3-quadtree "3"
+ d3-random "3"
+ d3-scale "4"
+ d3-scale-chromatic "3"
+ d3-selection "3"
+ d3-shape "3"
+ d3-time "3"
+ d3-time-format "4"
+ d3-timer "3"
+ d3-transition "3"
+ d3-zoom "3"
+
+dagre-d3-es@7.0.11:
+ version "7.0.11"
+ resolved "https://registry.yarnpkg.com/dagre-d3-es/-/dagre-d3-es-7.0.11.tgz#2237e726c0577bfe67d1a7cfd2265b9ab2c15c40"
+ integrity sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==
+ dependencies:
+ d3 "^7.9.0"
+ lodash-es "^4.17.21"
+
+dayjs@^1.11.13:
+ version "1.11.13"
+ resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.13.tgz#92430b0139055c3ebb60150aa13e860a4b5a366c"
+ integrity sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==
+
+debounce@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5"
+ integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==
+
+debug@2.6.9:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1:
+ version "4.3.6"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.6.tgz#2ab2c38fbaffebf8aa95fdfe6d88438c7a13c52b"
+ integrity sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==
+ dependencies:
+ ms "2.1.2"
+
+debug@^4.4.0, debug@^4.4.1:
+ version "4.4.1"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b"
+ integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==
+ dependencies:
+ ms "^2.1.3"
+
+decode-named-character-reference@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz#daabac9690874c394c81e4162a0304b35d824f0e"
+ integrity sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==
+ dependencies:
+ character-entities "^2.0.0"
+
+decompress-response@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc"
+ integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==
+ dependencies:
+ mimic-response "^3.1.0"
+
+deep-extend@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
+ integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
+
+deepmerge@^4.3.1:
+ version "4.3.1"
+ resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a"
+ integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==
+
+default-gateway@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71"
+ integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==
+ dependencies:
+ execa "^5.0.0"
+
+defer-to-connect@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587"
+ integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==
+
+define-data-property@^1.0.1, define-data-property@^1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e"
+ integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==
+ dependencies:
+ es-define-property "^1.0.0"
+ es-errors "^1.3.0"
+ gopd "^1.0.1"
+
+define-lazy-prop@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f"
+ integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==
+
+define-properties@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c"
+ integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==
+ dependencies:
+ define-data-property "^1.0.1"
+ has-property-descriptors "^1.0.0"
+ object-keys "^1.1.1"
+
+delaunator@5:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-5.0.1.tgz#39032b08053923e924d6094fe2cde1a99cc51278"
+ integrity sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==
+ dependencies:
+ robust-predicates "^3.0.2"
+
+depd@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
+ integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
+
+depd@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
+ integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==
+
+dequal@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be"
+ integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==
+
+destroy@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
+ integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==
+
+detect-node@^2.0.4:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1"
+ integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==
+
+detect-port@^1.5.1:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.6.1.tgz#45e4073997c5f292b957cb678fb0bb8ed4250a67"
+ integrity sha512-CmnVc+Hek2egPx1PeTFVta2W78xy2K/9Rkf6cC4T59S50tVnzKj+tnx5mmx5lwvCkujZ4uRrpRSuV+IVs3f90Q==
+ dependencies:
+ address "^1.0.1"
+ debug "4"
+
+devlop@^1.0.0, devlop@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018"
+ integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==
+ dependencies:
+ dequal "^2.0.0"
+
+dir-glob@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
+ integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==
+ dependencies:
+ path-type "^4.0.0"
+
+dns-packet@^5.2.2:
+ version "5.6.1"
+ resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.1.tgz#ae888ad425a9d1478a0674256ab866de1012cf2f"
+ integrity sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==
+ dependencies:
+ "@leichtgewicht/ip-codec" "^2.0.1"
+
+dom-converter@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768"
+ integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==
+ dependencies:
+ utila "~0.4"
+
+dom-helpers@^5.0.1:
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902"
+ integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==
+ dependencies:
+ "@babel/runtime" "^7.8.7"
+ csstype "^3.0.2"
+
+dom-serializer@^1.0.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30"
+ integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==
+ dependencies:
+ domelementtype "^2.0.1"
+ domhandler "^4.2.0"
+ entities "^2.0.0"
+
+dom-serializer@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53"
+ integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==
+ dependencies:
+ domelementtype "^2.3.0"
+ domhandler "^5.0.2"
+ entities "^4.2.0"
+
+domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d"
+ integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==
+
+domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1:
+ version "4.3.1"
+ resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c"
+ integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==
+ dependencies:
+ domelementtype "^2.2.0"
+
+domhandler@^5.0.2, domhandler@^5.0.3:
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31"
+ integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==
+ dependencies:
+ domelementtype "^2.3.0"
+
+dompurify@^3.2.5:
+ version "3.2.6"
+ resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.2.6.tgz#ca040a6ad2b88e2a92dc45f38c79f84a714a1cad"
+ integrity sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==
+ optionalDependencies:
+ "@types/trusted-types" "^2.0.7"
+
+domutils@^2.5.2, domutils@^2.8.0:
+ version "2.8.0"
+ resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135"
+ integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==
+ dependencies:
+ dom-serializer "^1.0.1"
+ domelementtype "^2.2.0"
+ domhandler "^4.2.0"
+
+domutils@^3.0.1:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.1.0.tgz#c47f551278d3dc4b0b1ab8cbb42d751a6f0d824e"
+ integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==
+ dependencies:
+ dom-serializer "^2.0.0"
+ domelementtype "^2.3.0"
+ domhandler "^5.0.3"
+
+dot-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751"
+ integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+dot-prop@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-6.0.1.tgz#fc26b3cf142b9e59b74dbd39ed66ce620c681083"
+ integrity sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==
+ dependencies:
+ is-obj "^2.0.0"
+
+duplexer@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
+ integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==
+
+eastasianwidth@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb"
+ integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==
+
+ee-first@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+ integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==
+
+electron-to-chromium@^1.5.173:
+ version "1.5.187"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.187.tgz#8c58854e065962351dc87e95614dd78d50425966"
+ integrity sha512-cl5Jc9I0KGUoOoSbxvTywTa40uspGJt/BDBoDLoxJRSBpWh4FFXBsjNRHfQrONsV/OoEjDfHUmZQa2d6Ze4YgA==
+
+electron-to-chromium@^1.5.4:
+ version "1.5.6"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.6.tgz#c81d9938b5a877314ad370feb73b4e5409b36abd"
+ integrity sha512-jwXWsM5RPf6j9dPYzaorcBSUg6AiqocPEyMpkchkvntaH9HGfOOMZwxMJjDY/XEs3T5dM7uyH1VhRMkqUU9qVw==
+
+emoji-regex@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
+ integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
+
+emoji-regex@^9.2.2:
+ version "9.2.2"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72"
+ integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==
+
+emojilib@^2.4.0:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/emojilib/-/emojilib-2.4.0.tgz#ac518a8bb0d5f76dda57289ccb2fdf9d39ae721e"
+ integrity sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==
+
+emojis-list@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
+ integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
+
+emoticon@^4.0.1:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/emoticon/-/emoticon-4.1.0.tgz#d5a156868ee173095627a33de3f1e914c3dde79e"
+ integrity sha512-VWZfnxqwNcc51hIy/sbOdEem6D+cVtpPzEEtVAFdaas30+1dgkyaOQ4sQ6Bp0tOMqWO1v+HQfYaoodOkdhK6SQ==
+
+encodeurl@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
+ integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==
+
+enhanced-resolve@^5.17.0:
+ version "5.17.1"
+ resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15"
+ integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==
+ dependencies:
+ graceful-fs "^4.2.4"
+ tapable "^2.2.0"
+
+enhanced-resolve@^5.17.2:
+ version "5.18.2"
+ resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.18.2.tgz#7903c5b32ffd4b2143eeb4b92472bd68effd5464"
+ integrity sha512-6Jw4sE1maoRJo3q8MsSIn2onJFbLTOjY9hlx4DZXmOKvLRd1Ok2kXmAGXaafL2+ijsJZ1ClYbl/pmqr9+k4iUQ==
+ dependencies:
+ graceful-fs "^4.2.4"
+ tapable "^2.2.0"
+
+entities@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
+ integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==
+
+entities@^4.2.0, entities@^4.4.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
+ integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
+
+error-ex@^1.3.1:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
+ integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
+ dependencies:
+ is-arrayish "^0.2.1"
+
+es-define-property@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845"
+ integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==
+ dependencies:
+ get-intrinsic "^1.2.4"
+
+es-errors@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f"
+ integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==
+
+es-module-lexer@^1.2.1:
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78"
+ integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==
+
+escalade@^3.1.1, escalade@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27"
+ integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==
+
+escalade@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5"
+ integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==
+
+escape-goat@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-4.0.0.tgz#9424820331b510b0666b98f7873fe11ac4aa8081"
+ integrity sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==
+
+escape-html@^1.0.3, escape-html@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
+ integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==
+
+escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+ integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
+
+escape-string-regexp@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
+ integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
+
+escape-string-regexp@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8"
+ integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==
+
+eslint-scope@5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c"
+ integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==
+ dependencies:
+ esrecurse "^4.3.0"
+ estraverse "^4.1.1"
+
+esprima@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
+ integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
+
+esrecurse@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
+ integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
+ dependencies:
+ estraverse "^5.2.0"
+
+estraverse@^4.1.1:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
+ integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
+
+estraverse@^5.2.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
+ integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
+
+estree-util-attach-comments@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz#344bde6a64c8a31d15231e5ee9e297566a691c2d"
+ integrity sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==
+ dependencies:
+ "@types/estree" "^1.0.0"
+
+estree-util-build-jsx@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz#b6d0bced1dcc4f06f25cf0ceda2b2dcaf98168f1"
+ integrity sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ devlop "^1.0.0"
+ estree-util-is-identifier-name "^3.0.0"
+ estree-walker "^3.0.0"
+
+estree-util-is-identifier-name@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz#0b5ef4c4ff13508b34dcd01ecfa945f61fce5dbd"
+ integrity sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==
+
+estree-util-to-js@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz#10a6fb924814e6abb62becf0d2bc4dea51d04f17"
+ integrity sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ astring "^1.8.0"
+ source-map "^0.7.0"
+
+estree-util-value-to-estree@^3.0.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/estree-util-value-to-estree/-/estree-util-value-to-estree-3.1.2.tgz#d2f0e5d350a6c181673eb7299743325b86a9bf5c"
+ integrity sha512-S0gW2+XZkmsx00tU2uJ4L9hUT7IFabbml9pHh2WQqFmAbxit++YGZne0sKJbNwkj9Wvg9E4uqWl4nCIFQMmfag==
+ dependencies:
+ "@types/estree" "^1.0.0"
+
+estree-util-visit@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/estree-util-visit/-/estree-util-visit-2.0.0.tgz#13a9a9f40ff50ed0c022f831ddf4b58d05446feb"
+ integrity sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ "@types/unist" "^3.0.0"
+
+estree-walker@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d"
+ integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==
+ dependencies:
+ "@types/estree" "^1.0.0"
+
+esutils@^2.0.2:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+ integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+
+eta@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/eta/-/eta-2.2.0.tgz#eb8b5f8c4e8b6306561a455e62cd7492fe3a9b8a"
+ integrity sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==
+
+etag@~1.8.1:
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
+ integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==
+
+eval@^0.1.8:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/eval/-/eval-0.1.8.tgz#2b903473b8cc1d1989b83a1e7923f883eb357f85"
+ integrity sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==
+ dependencies:
+ "@types/node" "*"
+ require-like ">= 0.1.1"
+
+eventemitter3@^4.0.0, eventemitter3@^4.0.4:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
+ integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
+
+events@^3.2.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
+ integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
+
+execa@5.1.1, execa@^5.0.0:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
+ integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
+ dependencies:
+ cross-spawn "^7.0.3"
+ get-stream "^6.0.0"
+ human-signals "^2.1.0"
+ is-stream "^2.0.0"
+ merge-stream "^2.0.0"
+ npm-run-path "^4.0.1"
+ onetime "^5.1.2"
+ signal-exit "^3.0.3"
+ strip-final-newline "^2.0.0"
+
+express@^4.17.3:
+ version "4.19.2"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
+ integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
+ dependencies:
+ accepts "~1.3.8"
+ array-flatten "1.1.1"
+ body-parser "1.20.2"
+ content-disposition "0.5.4"
+ content-type "~1.0.4"
+ cookie "0.6.0"
+ cookie-signature "1.0.6"
+ debug "2.6.9"
+ depd "2.0.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ finalhandler "1.2.0"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ merge-descriptors "1.0.1"
+ methods "~1.1.2"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ path-to-regexp "0.1.7"
+ proxy-addr "~2.0.7"
+ qs "6.11.0"
+ range-parser "~1.2.1"
+ safe-buffer "5.2.1"
+ send "0.18.0"
+ serve-static "1.15.0"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ type-is "~1.6.18"
+ utils-merge "1.0.1"
+ vary "~1.1.2"
+
+exsolve@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/exsolve/-/exsolve-1.0.7.tgz#3b74e4c7ca5c5f9a19c3626ca857309fa99f9e9e"
+ integrity sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==
+
+extend-shallow@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
+ integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==
+ dependencies:
+ is-extendable "^0.1.0"
+
+extend@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
+ integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
+
+fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
+ integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
+
+fast-glob@^3.2.11, fast-glob@^3.2.9, fast-glob@^3.3.0:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129"
+ integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==
+ dependencies:
+ "@nodelib/fs.stat" "^2.0.2"
+ "@nodelib/fs.walk" "^1.2.3"
+ glob-parent "^5.1.2"
+ merge2 "^1.3.0"
+ micromatch "^4.0.4"
+
+fast-json-stable-stringify@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
+ integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
+
+fast-uri@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.1.tgz#cddd2eecfc83a71c1be2cc2ef2061331be8a7134"
+ integrity sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==
+
+fastq@^1.6.0:
+ version "1.17.1"
+ resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.1.tgz#2a523f07a4e7b1e81a42b91b8bf2254107753b47"
+ integrity sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==
+ dependencies:
+ reusify "^1.0.4"
+
+fault@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/fault/-/fault-2.0.1.tgz#d47ca9f37ca26e4bd38374a7c500b5a384755b6c"
+ integrity sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==
+ dependencies:
+ format "^0.2.0"
+
+faye-websocket@^0.11.3:
+ version "0.11.4"
+ resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da"
+ integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==
+ dependencies:
+ websocket-driver ">=0.5.1"
+
+feed@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/feed/-/feed-4.2.2.tgz#865783ef6ed12579e2c44bbef3c9113bc4956a7e"
+ integrity sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==
+ dependencies:
+ xml-js "^1.6.11"
+
+figures@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af"
+ integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==
+ dependencies:
+ escape-string-regexp "^1.0.5"
+
+file-loader@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d"
+ integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==
+ dependencies:
+ loader-utils "^2.0.0"
+ schema-utils "^3.0.0"
+
+fill-range@^7.1.1:
+ version "7.1.1"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
+ integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
+ dependencies:
+ to-regex-range "^5.0.1"
+
+finalhandler@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
+ integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
+ dependencies:
+ debug "2.6.9"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ on-finished "2.4.1"
+ parseurl "~1.3.3"
+ statuses "2.0.1"
+ unpipe "~1.0.0"
+
+find-cache-dir@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-4.0.0.tgz#a30ee0448f81a3990708f6453633c733e2f6eec2"
+ integrity sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==
+ dependencies:
+ common-path-prefix "^3.0.0"
+ pkg-dir "^7.0.0"
+
+find-root@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4"
+ integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==
+
+find-up@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-6.3.0.tgz#2abab3d3280b2dc7ac10199ef324c4e002c8c790"
+ integrity sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==
+ dependencies:
+ locate-path "^7.1.0"
+ path-exists "^5.0.0"
+
+flat@^5.0.2:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241"
+ integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==
+
+follow-redirects@^1.0.0:
+ version "1.15.6"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
+ integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
+
+form-data-encoder@^2.1.2:
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-2.1.4.tgz#261ea35d2a70d48d30ec7a9603130fa5515e9cd5"
+ integrity sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==
+
+format@^0.2.0:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b"
+ integrity sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==
+
+forwarded@0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
+ integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
+
+fraction.js@^4.3.7:
+ version "4.3.7"
+ resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7"
+ integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==
+
+fresh@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
+ integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==
+
+fs-extra@^11.1.1, fs-extra@^11.2.0:
+ version "11.2.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b"
+ integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==
+ dependencies:
+ graceful-fs "^4.2.0"
+ jsonfile "^6.0.1"
+ universalify "^2.0.0"
+
+fs-monkey@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.6.tgz#8ead082953e88d992cf3ff844faa907b26756da2"
+ integrity sha512-b1FMfwetIKymC0eioW7mTywihSQE4oLzQn1dB6rZB5fx/3NpNEdAWeCSMB+60/AeT0TCXsxzAlcYVEFCTAksWg==
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+ integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
+
+fsevents@~2.3.2:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+ integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
+
+function-bind@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
+ integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
+
+gensync@^1.0.0-beta.2:
+ version "1.0.0-beta.2"
+ resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
+ integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==
+
+get-intrinsic@^1.1.3, get-intrinsic@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd"
+ integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==
+ dependencies:
+ es-errors "^1.3.0"
+ function-bind "^1.1.2"
+ has-proto "^1.0.1"
+ has-symbols "^1.0.3"
+ hasown "^2.0.0"
+
+get-own-enumerable-property-symbols@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
+ integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==
+
+get-stream@^6.0.0, get-stream@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7"
+ integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==
+
+github-slugger@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.5.0.tgz#17891bbc73232051474d68bd867a34625c955f7d"
+ integrity sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==
+
+glob-parent@^5.1.2, glob-parent@~5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+ integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
+ dependencies:
+ is-glob "^4.0.1"
+
+glob-parent@^6.0.1:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
+ integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
+ dependencies:
+ is-glob "^4.0.3"
+
+glob-to-regexp@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e"
+ integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
+
+glob@^7.1.3:
+ version "7.2.3"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+ integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.1.1"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+global-dirs@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.1.tgz#0c488971f066baceda21447aecb1a8b911d22485"
+ integrity sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==
+ dependencies:
+ ini "2.0.0"
+
+globals@^11.1.0:
+ version "11.12.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
+ integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
+
+globals@^15.14.0:
+ version "15.15.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-15.15.0.tgz#7c4761299d41c32b075715a4ce1ede7897ff72a8"
+ integrity sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==
+
+globby@^11.1.0:
+ version "11.1.0"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b"
+ integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==
+ dependencies:
+ array-union "^2.1.0"
+ dir-glob "^3.0.1"
+ fast-glob "^3.2.9"
+ ignore "^5.2.0"
+ merge2 "^1.4.1"
+ slash "^3.0.0"
+
+globby@^13.1.1:
+ version "13.2.2"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592"
+ integrity sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==
+ dependencies:
+ dir-glob "^3.0.1"
+ fast-glob "^3.3.0"
+ ignore "^5.2.4"
+ merge2 "^1.4.1"
+ slash "^4.0.0"
+
+gopd@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
+ integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==
+ dependencies:
+ get-intrinsic "^1.1.3"
+
+got@^12.1.0:
+ version "12.6.1"
+ resolved "https://registry.yarnpkg.com/got/-/got-12.6.1.tgz#8869560d1383353204b5a9435f782df9c091f549"
+ integrity sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==
+ dependencies:
+ "@sindresorhus/is" "^5.2.0"
+ "@szmarczak/http-timer" "^5.0.1"
+ cacheable-lookup "^7.0.0"
+ cacheable-request "^10.2.8"
+ decompress-response "^6.0.0"
+ form-data-encoder "^2.1.2"
+ get-stream "^6.0.1"
+ http2-wrapper "^2.1.10"
+ lowercase-keys "^3.0.0"
+ p-cancelable "^3.0.0"
+ responselike "^3.0.0"
+
+graceful-fs@4.2.10:
+ version "4.2.10"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
+ integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
+
+graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9:
+ version "4.2.11"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
+ integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
+
+gray-matter@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/gray-matter/-/gray-matter-4.0.3.tgz#e893c064825de73ea1f5f7d88c7a9f7274288798"
+ integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==
+ dependencies:
+ js-yaml "^3.13.1"
+ kind-of "^6.0.2"
+ section-matter "^1.0.0"
+ strip-bom-string "^1.0.0"
+
+gzip-size@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462"
+ integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==
+ dependencies:
+ duplexer "^0.1.2"
+
+hachure-fill@^0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/hachure-fill/-/hachure-fill-0.5.2.tgz#d19bc4cc8750a5962b47fb1300557a85fcf934cc"
+ integrity sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==
+
+handle-thing@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e"
+ integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+ integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
+
+has-flag@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+ integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
+has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854"
+ integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==
+ dependencies:
+ es-define-property "^1.0.0"
+
+has-proto@^1.0.1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd"
+ integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==
+
+has-symbols@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8"
+ integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==
+
+has-yarn@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-3.0.0.tgz#c3c21e559730d1d3b57e28af1f30d06fac38147d"
+ integrity sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==
+
+hasown@^2.0.0, hasown@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
+ integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
+ dependencies:
+ function-bind "^1.1.2"
+
+hast-util-from-parse5@^8.0.0:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz#654a5676a41211e14ee80d1b1758c399a0327651"
+ integrity sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/unist" "^3.0.0"
+ devlop "^1.0.0"
+ hastscript "^8.0.0"
+ property-information "^6.0.0"
+ vfile "^6.0.0"
+ vfile-location "^5.0.0"
+ web-namespaces "^2.0.0"
+
+hast-util-parse-selector@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz#352879fa86e25616036037dd8931fb5f34cb4a27"
+ integrity sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==
+ dependencies:
+ "@types/hast" "^3.0.0"
+
+hast-util-raw@^9.0.0:
+ version "9.0.4"
+ resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-9.0.4.tgz#2da03e37c46eb1a6f1391f02f9b84ae65818f7ed"
+ integrity sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/unist" "^3.0.0"
+ "@ungap/structured-clone" "^1.0.0"
+ hast-util-from-parse5 "^8.0.0"
+ hast-util-to-parse5 "^8.0.0"
+ html-void-elements "^3.0.0"
+ mdast-util-to-hast "^13.0.0"
+ parse5 "^7.0.0"
+ unist-util-position "^5.0.0"
+ unist-util-visit "^5.0.0"
+ vfile "^6.0.0"
+ web-namespaces "^2.0.0"
+ zwitch "^2.0.0"
+
+hast-util-to-estree@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/hast-util-to-estree/-/hast-util-to-estree-3.1.0.tgz#f2afe5e869ddf0cf690c75f9fc699f3180b51b19"
+ integrity sha512-lfX5g6hqVh9kjS/B9E2gSkvHH4SZNiQFiqWS0x9fENzEl+8W12RqdRxX6d/Cwxi30tPQs3bIO+aolQJNp1bIyw==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ "@types/estree-jsx" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ devlop "^1.0.0"
+ estree-util-attach-comments "^3.0.0"
+ estree-util-is-identifier-name "^3.0.0"
+ hast-util-whitespace "^3.0.0"
+ mdast-util-mdx-expression "^2.0.0"
+ mdast-util-mdx-jsx "^3.0.0"
+ mdast-util-mdxjs-esm "^2.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+ style-to-object "^0.4.0"
+ unist-util-position "^5.0.0"
+ zwitch "^2.0.0"
+
+hast-util-to-jsx-runtime@^2.0.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.0.tgz#3ed27caf8dc175080117706bf7269404a0aa4f7c"
+ integrity sha512-H/y0+IWPdsLLS738P8tDnrQ8Z+dj12zQQ6WC11TIM21C8WFVoIxcqWXf2H3hiTVZjF1AWqoimGwrTWecWrnmRQ==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ "@types/unist" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ devlop "^1.0.0"
+ estree-util-is-identifier-name "^3.0.0"
+ hast-util-whitespace "^3.0.0"
+ mdast-util-mdx-expression "^2.0.0"
+ mdast-util-mdx-jsx "^3.0.0"
+ mdast-util-mdxjs-esm "^2.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+ style-to-object "^1.0.0"
+ unist-util-position "^5.0.0"
+ vfile-message "^4.0.0"
+
+hast-util-to-parse5@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz#477cd42d278d4f036bc2ea58586130f6f39ee6ed"
+ integrity sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ devlop "^1.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+ web-namespaces "^2.0.0"
+ zwitch "^2.0.0"
+
+hast-util-whitespace@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621"
+ integrity sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==
+ dependencies:
+ "@types/hast" "^3.0.0"
+
+hastscript@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-8.0.0.tgz#4ef795ec8dee867101b9f23cc830d4baf4fd781a"
+ integrity sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ comma-separated-tokens "^2.0.0"
+ hast-util-parse-selector "^4.0.0"
+ property-information "^6.0.0"
+ space-separated-tokens "^2.0.0"
+
+he@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
+ integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+
+history@^4.9.0:
+ version "4.10.1"
+ resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3"
+ integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+ loose-envify "^1.2.0"
+ resolve-pathname "^3.0.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+ value-equal "^1.0.1"
+
+hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45"
+ integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==
+ dependencies:
+ react-is "^16.7.0"
+
+hpack.js@^2.1.6:
+ version "2.1.6"
+ resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2"
+ integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==
+ dependencies:
+ inherits "^2.0.1"
+ obuf "^1.0.0"
+ readable-stream "^2.0.1"
+ wbuf "^1.1.0"
+
+html-entities@^2.3.2:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.5.2.tgz#201a3cf95d3a15be7099521620d19dfb4f65359f"
+ integrity sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==
+
+html-escaper@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
+ integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
+
+html-minifier-terser@^6.0.2:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab"
+ integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==
+ dependencies:
+ camel-case "^4.1.2"
+ clean-css "^5.2.2"
+ commander "^8.3.0"
+ he "^1.2.0"
+ param-case "^3.0.4"
+ relateurl "^0.2.7"
+ terser "^5.10.0"
+
+html-minifier-terser@^7.2.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz#18752e23a2f0ed4b0f550f217bb41693e975b942"
+ integrity sha512-tXgn3QfqPIpGl9o+K5tpcj3/MN4SfLtsx2GWwBC3SSd0tXQGyF3gsSqad8loJgKZGM3ZxbYDd5yhiBIdWpmvLA==
+ dependencies:
+ camel-case "^4.1.2"
+ clean-css "~5.3.2"
+ commander "^10.0.0"
+ entities "^4.4.0"
+ param-case "^3.0.4"
+ relateurl "^0.2.7"
+ terser "^5.15.1"
+
+html-tags@^3.3.1:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.3.1.tgz#a04026a18c882e4bba8a01a3d39cfe465d40b5ce"
+ integrity sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==
+
+html-void-elements@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7"
+ integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==
+
+html-webpack-plugin@^5.6.0:
+ version "5.6.3"
+ resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.6.3.tgz#a31145f0fee4184d53a794f9513147df1e653685"
+ integrity sha512-QSf1yjtSAsmf7rYBV7XX86uua4W/vkhIt0xNXKbsi2foEeW7vjJQz4bhnpL3xH+l1ryl1680uNv968Z+X6jSYg==
+ dependencies:
+ "@types/html-minifier-terser" "^6.0.0"
+ html-minifier-terser "^6.0.2"
+ lodash "^4.17.21"
+ pretty-error "^4.0.0"
+ tapable "^2.0.0"
+
+htmlparser2@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
+ integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==
+ dependencies:
+ domelementtype "^2.0.1"
+ domhandler "^4.0.0"
+ domutils "^2.5.2"
+ entities "^2.0.0"
+
+htmlparser2@^8.0.1:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.2.tgz#f002151705b383e62433b5cf466f5b716edaec21"
+ integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==
+ dependencies:
+ domelementtype "^2.3.0"
+ domhandler "^5.0.3"
+ domutils "^3.0.1"
+ entities "^4.4.0"
+
+http-cache-semantics@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
+ integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
+
+http-deceiver@^1.2.7:
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
+ integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==
+
+http-errors@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
+ integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
+ dependencies:
+ depd "2.0.0"
+ inherits "2.0.4"
+ setprototypeof "1.2.0"
+ statuses "2.0.1"
+ toidentifier "1.0.1"
+
+http-errors@~1.6.2:
+ version "1.6.3"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d"
+ integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==
+ dependencies:
+ depd "~1.1.2"
+ inherits "2.0.3"
+ setprototypeof "1.1.0"
+ statuses ">= 1.4.0 < 2"
+
+http-parser-js@>=0.5.1:
+ version "0.5.8"
+ resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3"
+ integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==
+
+http-proxy-middleware@^2.0.3:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f"
+ integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==
+ dependencies:
+ "@types/http-proxy" "^1.17.8"
+ http-proxy "^1.18.1"
+ is-glob "^4.0.1"
+ is-plain-obj "^3.0.0"
+ micromatch "^4.0.2"
+
+http-proxy@^1.18.1:
+ version "1.18.1"
+ resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549"
+ integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==
+ dependencies:
+ eventemitter3 "^4.0.0"
+ follow-redirects "^1.0.0"
+ requires-port "^1.0.0"
+
+http2-wrapper@^2.1.10:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-2.2.1.tgz#310968153dcdedb160d8b72114363ef5fce1f64a"
+ integrity sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==
+ dependencies:
+ quick-lru "^5.1.1"
+ resolve-alpn "^1.2.0"
+
+human-signals@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
+ integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
+
+hyphenate-style-name@^1.0.3:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz#1797bf50369588b47b72ca6d5e65374607cf4436"
+ integrity sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==
+
+iconv-lite@0.4.24:
+ version "0.4.24"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
+ integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
+ dependencies:
+ safer-buffer ">= 2.1.2 < 3"
+
+iconv-lite@0.6:
+ version "0.6.3"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501"
+ integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==
+ dependencies:
+ safer-buffer ">= 2.1.2 < 3.0.0"
+
+icss-utils@^5.0.0, icss-utils@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae"
+ integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==
+
+ignore@^5.2.0, ignore@^5.2.4:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef"
+ integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==
+
+image-size@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/image-size/-/image-size-2.0.2.tgz#84a7b43704db5736f364bf0d1b029821299b4bdc"
+ integrity sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w==
+
+import-fresh@^3.2.1, import-fresh@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
+ integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
+ dependencies:
+ parent-module "^1.0.0"
+ resolve-from "^4.0.0"
+
+import-lazy@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153"
+ integrity sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==
+
+imurmurhash@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+ integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
+
+indent-string@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
+ integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
+
+infima@0.2.0-alpha.45:
+ version "0.2.0-alpha.45"
+ resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.45.tgz#542aab5a249274d81679631b492973dd2c1e7466"
+ integrity sha512-uyH0zfr1erU1OohLk0fT4Rrb94AOhguWNOcD9uGrSpRvNB+6gZXUoJX5J0NtvzBO10YZ9PgvA4NFgt+fYg8ojw==
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+ integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
+inherits@2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
+ integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==
+
+ini@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
+ integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
+
+ini@^1.3.4, ini@~1.3.0:
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
+ integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
+
+inline-style-parser@0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1"
+ integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==
+
+inline-style-parser@0.2.3:
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.2.3.tgz#e35c5fb45f3a83ed7849fe487336eb7efa25971c"
+ integrity sha512-qlD8YNDqyTKTyuITrDOffsl6Tdhv+UC4hcdAVuQsK4IMQ99nSgd1MIA/Q+jQYoh9r3hVUXhYh7urSRmXPkW04g==
+
+"internmap@1 - 2":
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009"
+ integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==
+
+internmap@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95"
+ integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==
+
+invariant@^2.2.4:
+ version "2.2.4"
+ resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6"
+ integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==
+ dependencies:
+ loose-envify "^1.0.0"
+
+ipaddr.js@1.9.1:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
+ integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
+
+ipaddr.js@^2.0.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.2.0.tgz#d33fa7bac284f4de7af949638c9d68157c6b92e8"
+ integrity sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==
+
+is-alphabetical@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-2.0.1.tgz#01072053ea7c1036df3c7d19a6daaec7f19e789b"
+ integrity sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==
+
+is-alphanumerical@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz#7c03fbe96e3e931113e57f964b0a368cc2dfd875"
+ integrity sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==
+ dependencies:
+ is-alphabetical "^2.0.0"
+ is-decimal "^2.0.0"
+
+is-arrayish@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
+ integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
+
+is-binary-path@~2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
+ integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
+ dependencies:
+ binary-extensions "^2.0.0"
+
+is-ci@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-3.0.1.tgz#db6ecbed1bd659c43dac0f45661e7674103d1867"
+ integrity sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==
+ dependencies:
+ ci-info "^3.2.0"
+
+is-core-module@^2.13.0:
+ version "2.15.0"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.15.0.tgz#71c72ec5442ace7e76b306e9d48db361f22699ea"
+ integrity sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==
+ dependencies:
+ hasown "^2.0.2"
+
+is-core-module@^2.16.0:
+ version "2.16.1"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4"
+ integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==
+ dependencies:
+ hasown "^2.0.2"
+
+is-decimal@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-2.0.1.tgz#9469d2dc190d0214fd87d78b78caecc0cc14eef7"
+ integrity sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==
+
+is-docker@^2.0.0, is-docker@^2.1.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa"
+ integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==
+
+is-extendable@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
+ integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==
+
+is-extglob@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+ integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
+
+is-fullwidth-code-point@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
+ integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
+
+is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
+ integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
+ dependencies:
+ is-extglob "^2.1.1"
+
+is-hexadecimal@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027"
+ integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==
+
+is-in-browser@^1.0.2, is-in-browser@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/is-in-browser/-/is-in-browser-1.1.3.tgz#56ff4db683a078c6082eb95dad7dc62e1d04f835"
+ integrity sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g==
+
+is-installed-globally@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520"
+ integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==
+ dependencies:
+ global-dirs "^3.0.0"
+ is-path-inside "^3.0.2"
+
+is-npm@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-6.0.0.tgz#b59e75e8915543ca5d881ecff864077cba095261"
+ integrity sha512-JEjxbSmtPSt1c8XTkVrlujcXdKV1/tvuQ7GwKcAlyiVLeYFQ2VHat8xfrDJsIkhCdF/tZ7CiIR3sy141c6+gPQ==
+
+is-number@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+ integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
+is-obj@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f"
+ integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==
+
+is-obj@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
+ integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
+
+is-path-inside@^3.0.2:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
+ integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
+
+is-plain-obj@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7"
+ integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==
+
+is-plain-obj@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0"
+ integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==
+
+is-plain-object@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
+ integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
+ dependencies:
+ isobject "^3.0.1"
+
+is-reference@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-3.0.2.tgz#154747a01f45cd962404ee89d43837af2cba247c"
+ integrity sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==
+ dependencies:
+ "@types/estree" "*"
+
+is-regexp@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069"
+ integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==
+
+is-stream@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
+ integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
+
+is-typedarray@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
+ integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==
+
+is-wsl@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271"
+ integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==
+ dependencies:
+ is-docker "^2.0.0"
+
+is-yarn-global@^0.4.0:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.4.1.tgz#b312d902b313f81e4eaf98b6361ba2b45cd694bb"
+ integrity sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==
+
+isarray@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+ integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==
+
+isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+ integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
+
+isobject@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
+ integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==
+
+jest-util@^29.7.0:
+ version "29.7.0"
+ resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc"
+ integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==
+ dependencies:
+ "@jest/types" "^29.6.3"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ ci-info "^3.2.0"
+ graceful-fs "^4.2.9"
+ picomatch "^2.2.3"
+
+jest-worker@^27.4.5:
+ version "27.5.1"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0"
+ integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==
+ dependencies:
+ "@types/node" "*"
+ merge-stream "^2.0.0"
+ supports-color "^8.0.0"
+
+jest-worker@^29.4.3:
+ version "29.7.0"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a"
+ integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==
+ dependencies:
+ "@types/node" "*"
+ jest-util "^29.7.0"
+ merge-stream "^2.0.0"
+ supports-color "^8.0.0"
+
+jiti@^1.20.0:
+ version "1.21.6"
+ resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.6.tgz#6c7f7398dd4b3142767f9a168af2f317a428d268"
+ integrity sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==
+
+joi@^17.9.2:
+ version "17.13.3"
+ resolved "https://registry.yarnpkg.com/joi/-/joi-17.13.3.tgz#0f5cc1169c999b30d344366d384b12d92558bcec"
+ integrity sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==
+ dependencies:
+ "@hapi/hoek" "^9.3.0"
+ "@hapi/topo" "^5.1.0"
+ "@sideway/address" "^4.1.5"
+ "@sideway/formula" "^3.0.1"
+ "@sideway/pinpoint" "^2.0.0"
+
+"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
+ integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+
+js-yaml@^3.13.1:
+ version "3.14.1"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
+ integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+js-yaml@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
+ integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
+ dependencies:
+ argparse "^2.0.1"
+
+jsesc@^2.5.1:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
+ integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
+
+jsesc@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.1.0.tgz#74d335a234f67ed19907fdadfac7ccf9d409825d"
+ integrity sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==
+
+jsesc@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
+ integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==
+
+jsesc@~3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e"
+ integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==
+
+json-buffer@3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
+ integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
+
+json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
+ integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
+
+json-schema-traverse@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
+ integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
+
+json-schema-traverse@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
+ integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
+
+json5@^2.1.2, json5@^2.2.3:
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
+ integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==
+
+jsonfile@^6.0.1:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
+ integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
+ dependencies:
+ universalify "^2.0.0"
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
+jss-plugin-camel-case@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-camel-case/-/jss-plugin-camel-case-10.10.0.tgz#27ea159bab67eb4837fa0260204eb7925d4daa1c"
+ integrity sha512-z+HETfj5IYgFxh1wJnUAU8jByI48ED+v0fuTuhKrPR+pRBYS2EDwbusU8aFOpCdYhtRc9zhN+PJ7iNE8pAWyPw==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ hyphenate-style-name "^1.0.3"
+ jss "10.10.0"
+
+jss-plugin-default-unit@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-default-unit/-/jss-plugin-default-unit-10.10.0.tgz#db3925cf6a07f8e1dd459549d9c8aadff9804293"
+ integrity sha512-SvpajxIECi4JDUbGLefvNckmI+c2VWmP43qnEy/0eiwzRUsafg5DVSIWSzZe4d2vFX1u9nRDP46WCFV/PXVBGQ==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ jss "10.10.0"
+
+jss-plugin-global@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-global/-/jss-plugin-global-10.10.0.tgz#1c55d3c35821fab67a538a38918292fc9c567efd"
+ integrity sha512-icXEYbMufiNuWfuazLeN+BNJO16Ge88OcXU5ZDC2vLqElmMybA31Wi7lZ3lf+vgufRocvPj8443irhYRgWxP+A==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ jss "10.10.0"
+
+jss-plugin-nested@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-nested/-/jss-plugin-nested-10.10.0.tgz#db872ed8925688806e77f1fc87f6e62264513219"
+ integrity sha512-9R4JHxxGgiZhurDo3q7LdIiDEgtA1bTGzAbhSPyIOWb7ZubrjQe8acwhEQ6OEKydzpl8XHMtTnEwHXCARLYqYA==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ jss "10.10.0"
+ tiny-warning "^1.0.2"
+
+jss-plugin-props-sort@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-props-sort/-/jss-plugin-props-sort-10.10.0.tgz#67f4dd4c70830c126f4ec49b4b37ccddb680a5d7"
+ integrity sha512-5VNJvQJbnq/vRfje6uZLe/FyaOpzP/IH1LP+0fr88QamVrGJa0hpRRyAa0ea4U/3LcorJfBFVyC4yN2QC73lJg==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ jss "10.10.0"
+
+jss-plugin-rule-value-function@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-rule-value-function/-/jss-plugin-rule-value-function-10.10.0.tgz#7d99e3229e78a3712f78ba50ab342e881d26a24b"
+ integrity sha512-uEFJFgaCtkXeIPgki8ICw3Y7VMkL9GEan6SqmT9tqpwM+/t+hxfMUdU4wQ0MtOiMNWhwnckBV0IebrKcZM9C0g==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ jss "10.10.0"
+ tiny-warning "^1.0.2"
+
+jss-plugin-vendor-prefixer@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss-plugin-vendor-prefixer/-/jss-plugin-vendor-prefixer-10.10.0.tgz#c01428ef5a89f2b128ec0af87a314d0c767931c7"
+ integrity sha512-UY/41WumgjW8r1qMCO8l1ARg7NHnfRVWRhZ2E2m0DMYsr2DD91qIXLyNhiX83hHswR7Wm4D+oDYNC1zWCJWtqg==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ css-vendor "^2.0.8"
+ jss "10.10.0"
+
+jss@10.10.0, jss@^10.10.0:
+ version "10.10.0"
+ resolved "https://registry.yarnpkg.com/jss/-/jss-10.10.0.tgz#a75cc85b0108c7ac8c7b7d296c520a3e4fbc6ccc"
+ integrity sha512-cqsOTS7jqPsPMjtKYDUpdFC0AbhYFLTcuGRqymgmdJIeQ8cH7+AgX7YSgQy79wXloZq2VvATYxUOUQEvS1V/Zw==
+ dependencies:
+ "@babel/runtime" "^7.3.1"
+ csstype "^3.0.2"
+ is-in-browser "^1.1.3"
+ tiny-warning "^1.0.2"
+
+katex@^0.16.22:
+ version "0.16.22"
+ resolved "https://registry.yarnpkg.com/katex/-/katex-0.16.22.tgz#d2b3d66464b1e6d69e6463b28a86ced5a02c5ccd"
+ integrity sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==
+ dependencies:
+ commander "^8.3.0"
+
+keyv@^4.5.3:
+ version "4.5.4"
+ resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93"
+ integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==
+ dependencies:
+ json-buffer "3.0.1"
+
+khroma@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/khroma/-/khroma-2.1.0.tgz#45f2ce94ce231a437cf5b63c2e886e6eb42bbbb1"
+ integrity sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==
+
+kind-of@^6.0.0, kind-of@^6.0.2:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
+ integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
+
+kleur@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
+ integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
+
+kolorist@^1.8.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/kolorist/-/kolorist-1.8.0.tgz#edddbbbc7894bc13302cdf740af6374d4a04743c"
+ integrity sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==
+
+langium@3.3.1:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/langium/-/langium-3.3.1.tgz#da745a40d5ad8ee565090fed52eaee643be4e591"
+ integrity sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==
+ dependencies:
+ chevrotain "~11.0.3"
+ chevrotain-allstar "~0.3.0"
+ vscode-languageserver "~9.0.1"
+ vscode-languageserver-textdocument "~1.0.11"
+ vscode-uri "~3.0.8"
+
+latest-version@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-7.0.0.tgz#843201591ea81a4d404932eeb61240fe04e9e5da"
+ integrity sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==
+ dependencies:
+ package-json "^8.1.0"
+
+launch-editor@^2.6.0:
+ version "2.8.1"
+ resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.8.1.tgz#3bda72af213ec9b46b170e39661916ec66c2f463"
+ integrity sha512-elBx2l/tp9z99X5H/qev8uyDywVh0VXAwEbjk8kJhnc5grOFkGh7aW6q55me9xnYbss261XtnUrysZ+XvGbhQA==
+ dependencies:
+ picocolors "^1.0.0"
+ shell-quote "^1.8.1"
+
+layout-base@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/layout-base/-/layout-base-1.0.2.tgz#1291e296883c322a9dd4c5dd82063721b53e26e2"
+ integrity sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==
+
+layout-base@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/layout-base/-/layout-base-2.0.1.tgz#d0337913586c90f9c2c075292069f5c2da5dd285"
+ integrity sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==
+
+leven@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
+ integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
+
+lilconfig@^3.1.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.2.tgz#e4a7c3cb549e3a606c8dcc32e5ae1005e62c05cb"
+ integrity sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==
+
+lines-and-columns@^1.1.6:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
+ integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
+
+loader-runner@^4.2.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1"
+ integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==
+
+loader-utils@^2.0.0:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c"
+ integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^3.0.0"
+ json5 "^2.1.2"
+
+local-pkg@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/local-pkg/-/local-pkg-1.1.1.tgz#f5fe74a97a3bd3c165788ee08ca9fbe998dc58dd"
+ integrity sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg==
+ dependencies:
+ mlly "^1.7.4"
+ pkg-types "^2.0.1"
+ quansync "^0.2.8"
+
+locate-path@^7.1.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.2.0.tgz#69cb1779bd90b35ab1e771e1f2f89a202c2a8a8a"
+ integrity sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==
+ dependencies:
+ p-locate "^6.0.0"
+
+lodash-es@4.17.21, lodash-es@^4.17.21:
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee"
+ integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==
+
+lodash.debounce@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
+ integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==
+
+lodash.memoize@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
+ integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==
+
+lodash.uniq@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
+ integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
+
+lodash@^4.17.20, lodash@^4.17.21:
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
+ integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
+
+longest-streak@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4"
+ integrity sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==
+
+loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
+ integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==
+ dependencies:
+ js-tokens "^3.0.0 || ^4.0.0"
+
+lower-case@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28"
+ integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==
+ dependencies:
+ tslib "^2.0.3"
+
+lowercase-keys@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2"
+ integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==
+
+lru-cache@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920"
+ integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==
+ dependencies:
+ yallist "^3.0.2"
+
+markdown-extensions@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/markdown-extensions/-/markdown-extensions-2.0.0.tgz#34bebc83e9938cae16e0e017e4a9814a8330d3c4"
+ integrity sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==
+
+markdown-table@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-2.0.0.tgz#194a90ced26d31fe753d8b9434430214c011865b"
+ integrity sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==
+ dependencies:
+ repeat-string "^1.0.0"
+
+markdown-table@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-3.0.3.tgz#e6331d30e493127e031dd385488b5bd326e4a6bd"
+ integrity sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==
+
+marked@^16.0.0:
+ version "16.1.1"
+ resolved "https://registry.yarnpkg.com/marked/-/marked-16.1.1.tgz#a7839dcf19fa5e349cad12c561f231320690acd4"
+ integrity sha512-ij/2lXfCRT71L6u0M29tJPhP0bM5shLL3u5BePhFwPELj2blMJ6GDtD7PfJhRLhJ/c2UwrK17ySVcDzy2YHjHQ==
+
+mdast-util-directive@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-directive/-/mdast-util-directive-3.0.0.tgz#3fb1764e705bbdf0afb0d3f889e4404c3e82561f"
+ integrity sha512-JUpYOqKI4mM3sZcNxmF/ox04XYFFkNwr0CFlrQIkCwbvH0xzMCqkMqAde9wRd80VAhaUrwFwKm2nxretdT1h7Q==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ "@types/unist" "^3.0.0"
+ devlop "^1.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+ parse-entities "^4.0.0"
+ stringify-entities "^4.0.0"
+ unist-util-visit-parents "^6.0.0"
+
+mdast-util-find-and-replace@^3.0.0, mdast-util-find-and-replace@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz#a6fc7b62f0994e973490e45262e4bc07607b04e0"
+ integrity sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ escape-string-regexp "^5.0.0"
+ unist-util-is "^6.0.0"
+ unist-util-visit-parents "^6.0.0"
+
+mdast-util-from-markdown@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz#32a6e8f512b416e1f51eb817fc64bd867ebcd9cc"
+ integrity sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ "@types/unist" "^3.0.0"
+ decode-named-character-reference "^1.0.0"
+ devlop "^1.0.0"
+ mdast-util-to-string "^4.0.0"
+ micromark "^4.0.0"
+ micromark-util-decode-numeric-character-reference "^2.0.0"
+ micromark-util-decode-string "^2.0.0"
+ micromark-util-normalize-identifier "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ unist-util-stringify-position "^4.0.0"
+
+mdast-util-frontmatter@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz#f5f929eb1eb36c8a7737475c7eb438261f964ee8"
+ integrity sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ devlop "^1.0.0"
+ escape-string-regexp "^5.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+ micromark-extension-frontmatter "^2.0.0"
+
+mdast-util-gfm-autolink-literal@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.0.tgz#5baf35407421310a08e68c15e5d8821e8898ba2a"
+ integrity sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ ccount "^2.0.0"
+ devlop "^1.0.0"
+ mdast-util-find-and-replace "^3.0.0"
+ micromark-util-character "^2.0.0"
+
+mdast-util-gfm-footnote@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz#25a1753c7d16db8bfd53cd84fe50562bd1e6d6a9"
+ integrity sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ devlop "^1.1.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+ micromark-util-normalize-identifier "^2.0.0"
+
+mdast-util-gfm-strikethrough@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz#d44ef9e8ed283ac8c1165ab0d0dfd058c2764c16"
+ integrity sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-gfm-table@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz#7a435fb6223a72b0862b33afbd712b6dae878d38"
+ integrity sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ devlop "^1.0.0"
+ markdown-table "^3.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-gfm-task-list-item@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz#e68095d2f8a4303ef24094ab642e1047b991a936"
+ integrity sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ devlop "^1.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-gfm@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz#3f2aecc879785c3cb6a81ff3a243dc11eca61095"
+ integrity sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==
+ dependencies:
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-gfm-autolink-literal "^2.0.0"
+ mdast-util-gfm-footnote "^2.0.0"
+ mdast-util-gfm-strikethrough "^2.0.0"
+ mdast-util-gfm-table "^2.0.0"
+ mdast-util-gfm-task-list-item "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-mdx-expression@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz#4968b73724d320a379110d853e943a501bfd9d87"
+ integrity sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ devlop "^1.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-mdx-jsx@^3.0.0:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz#daae777c72f9c4a106592e3025aa50fb26068e1b"
+ integrity sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ "@types/unist" "^3.0.0"
+ ccount "^2.0.0"
+ devlop "^1.1.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+ parse-entities "^4.0.0"
+ stringify-entities "^4.0.0"
+ unist-util-remove-position "^5.0.0"
+ unist-util-stringify-position "^4.0.0"
+ vfile-message "^4.0.0"
+
+mdast-util-mdx@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz#792f9cf0361b46bee1fdf1ef36beac424a099c41"
+ integrity sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==
+ dependencies:
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-mdx-expression "^2.0.0"
+ mdast-util-mdx-jsx "^3.0.0"
+ mdast-util-mdxjs-esm "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-mdxjs-esm@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz#019cfbe757ad62dd557db35a695e7314bcc9fa97"
+ integrity sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==
+ dependencies:
+ "@types/estree-jsx" "^1.0.0"
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ devlop "^1.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ mdast-util-to-markdown "^2.0.0"
+
+mdast-util-phrasing@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz#7cc0a8dec30eaf04b7b1a9661a92adb3382aa6e3"
+ integrity sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ unist-util-is "^6.0.0"
+
+mdast-util-to-hast@^13.0.0:
+ version "13.2.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz#5ca58e5b921cc0a3ded1bc02eed79a4fe4fe41f4"
+ integrity sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ "@ungap/structured-clone" "^1.0.0"
+ devlop "^1.0.0"
+ micromark-util-sanitize-uri "^2.0.0"
+ trim-lines "^3.0.0"
+ unist-util-position "^5.0.0"
+ unist-util-visit "^5.0.0"
+ vfile "^6.0.0"
+
+mdast-util-to-markdown@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz#9813f1d6e0cdaac7c244ec8c6dabfdb2102ea2b4"
+ integrity sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ "@types/unist" "^3.0.0"
+ longest-streak "^3.0.0"
+ mdast-util-phrasing "^4.0.0"
+ mdast-util-to-string "^4.0.0"
+ micromark-util-decode-string "^2.0.0"
+ unist-util-visit "^5.0.0"
+ zwitch "^2.0.0"
+
+mdast-util-to-string@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz#7a5121475556a04e7eddeb67b264aae79d312814"
+ integrity sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+
+mdn-data@2.0.28:
+ version "2.0.28"
+ resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.28.tgz#5ec48e7bef120654539069e1ae4ddc81ca490eba"
+ integrity sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==
+
+mdn-data@2.0.30:
+ version "2.0.30"
+ resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.30.tgz#ce4df6f80af6cfbe218ecd5c552ba13c4dfa08cc"
+ integrity sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==
+
+media-typer@0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
+ integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
+
+memfs@^3.4.3:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6"
+ integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ==
+ dependencies:
+ fs-monkey "^1.0.4"
+
+merge-descriptors@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
+ integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==
+
+merge-stream@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"
+ integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
+merge2@^1.3.0, merge2@^1.4.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
+ integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
+
+mermaid@>=11.6.0:
+ version "11.9.0"
+ resolved "https://registry.yarnpkg.com/mermaid/-/mermaid-11.9.0.tgz#fdc055d0f2a7f2afc13a78cb3e3c9b1374614e2e"
+ integrity sha512-YdPXn9slEwO0omQfQIsW6vS84weVQftIyyTGAZCwM//MGhPzL1+l6vO6bkf0wnP4tHigH1alZ5Ooy3HXI2gOag==
+ dependencies:
+ "@braintree/sanitize-url" "^7.0.4"
+ "@iconify/utils" "^2.1.33"
+ "@mermaid-js/parser" "^0.6.2"
+ "@types/d3" "^7.4.3"
+ cytoscape "^3.29.3"
+ cytoscape-cose-bilkent "^4.1.0"
+ cytoscape-fcose "^2.2.0"
+ d3 "^7.9.0"
+ d3-sankey "^0.12.3"
+ dagre-d3-es "7.0.11"
+ dayjs "^1.11.13"
+ dompurify "^3.2.5"
+ katex "^0.16.22"
+ khroma "^2.1.0"
+ lodash-es "^4.17.21"
+ marked "^16.0.0"
+ roughjs "^4.6.6"
+ stylis "^4.3.6"
+ ts-dedent "^2.2.0"
+ uuid "^11.1.0"
+
+methods@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+ integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==
+
+micromark-core-commonmark@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-2.0.1.tgz#9a45510557d068605c6e9a80f282b2bb8581e43d"
+ integrity sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA==
+ dependencies:
+ decode-named-character-reference "^1.0.0"
+ devlop "^1.0.0"
+ micromark-factory-destination "^2.0.0"
+ micromark-factory-label "^2.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-factory-title "^2.0.0"
+ micromark-factory-whitespace "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-chunked "^2.0.0"
+ micromark-util-classify-character "^2.0.0"
+ micromark-util-html-tag-name "^2.0.0"
+ micromark-util-normalize-identifier "^2.0.0"
+ micromark-util-resolve-all "^2.0.0"
+ micromark-util-subtokenize "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-directive@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/micromark-extension-directive/-/micromark-extension-directive-3.0.1.tgz#67b3985bb991a69dbcae52664c57ee54b22f635a"
+ integrity sha512-VGV2uxUzhEZmaP7NSFo2vtq7M2nUD+WfmYQD+d8i/1nHbzE+rMy9uzTvUybBbNiVbrhOZibg3gbyoARGqgDWyg==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-factory-whitespace "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ parse-entities "^4.0.0"
+
+micromark-extension-frontmatter@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz#651c52ffa5d7a8eeed687c513cd869885882d67a"
+ integrity sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==
+ dependencies:
+ fault "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-autolink-literal@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz#6286aee9686c4462c1e3552a9d505feddceeb935"
+ integrity sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-sanitize-uri "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-footnote@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz#4dab56d4e398b9853f6fe4efac4fc9361f3e0750"
+ integrity sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-core-commonmark "^2.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-normalize-identifier "^2.0.0"
+ micromark-util-sanitize-uri "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-strikethrough@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz#86106df8b3a692b5f6a92280d3879be6be46d923"
+ integrity sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-util-chunked "^2.0.0"
+ micromark-util-classify-character "^2.0.0"
+ micromark-util-resolve-all "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-table@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.0.tgz#5cadedfbb29fca7abf752447967003dc3b6583c9"
+ integrity sha512-Ub2ncQv+fwD70/l4ou27b4YzfNaCJOvyX4HxXU15m7mpYY+rjuWzsLIPZHJL253Z643RpbcP1oeIJlQ/SKW67g==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-tagfilter@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz#f26d8a7807b5985fba13cf61465b58ca5ff7dc57"
+ integrity sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==
+ dependencies:
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm-task-list-item@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz#bcc34d805639829990ec175c3eea12bb5b781f2c"
+ integrity sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-gfm@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz#3e13376ab95dd7a5cfd0e29560dfe999657b3c5b"
+ integrity sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==
+ dependencies:
+ micromark-extension-gfm-autolink-literal "^2.0.0"
+ micromark-extension-gfm-footnote "^2.0.0"
+ micromark-extension-gfm-strikethrough "^2.0.0"
+ micromark-extension-gfm-table "^2.0.0"
+ micromark-extension-gfm-tagfilter "^2.0.0"
+ micromark-extension-gfm-task-list-item "^2.0.0"
+ micromark-util-combine-extensions "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-mdx-expression@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.0.tgz#1407b9ce69916cf5e03a196ad9586889df25302a"
+ integrity sha512-sI0nwhUDz97xyzqJAbHQhp5TfaxEvZZZ2JDqUo+7NvyIYG6BZ5CPPqj2ogUoPJlmXHBnyZUzISg9+oUmU6tUjQ==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ devlop "^1.0.0"
+ micromark-factory-mdx-expression "^2.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-events-to-acorn "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-extension-mdx-jsx@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.0.tgz#4aba0797c25efb2366a3fd2d367c6b1c1159f4f5"
+ integrity sha512-uvhhss8OGuzR4/N17L1JwvmJIpPhAd8oByMawEKx6NVdBCbesjH4t+vjEp3ZXft9DwvlKSD07fCeI44/N0Vf2w==
+ dependencies:
+ "@types/acorn" "^4.0.0"
+ "@types/estree" "^1.0.0"
+ devlop "^1.0.0"
+ estree-util-is-identifier-name "^3.0.0"
+ micromark-factory-mdx-expression "^2.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ vfile-message "^4.0.0"
+
+micromark-extension-mdx-md@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz#1d252881ea35d74698423ab44917e1f5b197b92d"
+ integrity sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==
+ dependencies:
+ micromark-util-types "^2.0.0"
+
+micromark-extension-mdxjs-esm@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz#de21b2b045fd2059bd00d36746081de38390d54a"
+ integrity sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ devlop "^1.0.0"
+ micromark-core-commonmark "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-events-to-acorn "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ unist-util-position-from-estree "^2.0.0"
+ vfile-message "^4.0.0"
+
+micromark-extension-mdxjs@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz#b5a2e0ed449288f3f6f6c544358159557549de18"
+ integrity sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==
+ dependencies:
+ acorn "^8.0.0"
+ acorn-jsx "^5.0.0"
+ micromark-extension-mdx-expression "^3.0.0"
+ micromark-extension-mdx-jsx "^3.0.0"
+ micromark-extension-mdx-md "^2.0.0"
+ micromark-extension-mdxjs-esm "^3.0.0"
+ micromark-util-combine-extensions "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-factory-destination@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz#857c94debd2c873cba34e0445ab26b74f6a6ec07"
+ integrity sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-factory-label@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz#17c5c2e66ce39ad6f4fc4cbf40d972f9096f726a"
+ integrity sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-factory-mdx-expression@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.1.tgz#f2a9724ce174f1751173beb2c1f88062d3373b1b"
+ integrity sha512-F0ccWIUHRLRrYp5TC9ZYXmZo+p2AM13ggbsW4T0b5CRKP8KHVRB8t4pwtBgTxtjRmwrK0Irwm7vs2JOZabHZfg==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ devlop "^1.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-events-to-acorn "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ unist-util-position-from-estree "^2.0.0"
+ vfile-message "^4.0.0"
+
+micromark-factory-space@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz#c8f40b0640a0150751d3345ed885a080b0d15faf"
+ integrity sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==
+ dependencies:
+ micromark-util-character "^1.0.0"
+ micromark-util-types "^1.0.0"
+
+micromark-factory-space@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz#5e7afd5929c23b96566d0e1ae018ae4fcf81d030"
+ integrity sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-factory-title@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz#726140fc77892af524705d689e1cf06c8a83ea95"
+ integrity sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==
+ dependencies:
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-factory-whitespace@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz#9e92eb0f5468083381f923d9653632b3cfb5f763"
+ integrity sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==
+ dependencies:
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-util-character@^1.0.0, micromark-util-character@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-1.2.0.tgz#4fedaa3646db249bc58caeb000eb3549a8ca5dcc"
+ integrity sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==
+ dependencies:
+ micromark-util-symbol "^1.0.0"
+ micromark-util-types "^1.0.0"
+
+micromark-util-character@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.0.tgz#31320ace16b4644316f6bf057531689c71e2aee1"
+ integrity sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==
+ dependencies:
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-util-chunked@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz#e51f4db85fb203a79dbfef23fd41b2f03dc2ef89"
+ integrity sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==
+ dependencies:
+ micromark-util-symbol "^2.0.0"
+
+micromark-util-classify-character@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz#8c7537c20d0750b12df31f86e976d1d951165f34"
+ integrity sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-util-combine-extensions@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz#75d6ab65c58b7403616db8d6b31315013bfb7ee5"
+ integrity sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==
+ dependencies:
+ micromark-util-chunked "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-util-decode-numeric-character-reference@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz#2698bbb38f2a9ba6310e359f99fcb2b35a0d2bd5"
+ integrity sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==
+ dependencies:
+ micromark-util-symbol "^2.0.0"
+
+micromark-util-decode-string@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz#7dfa3a63c45aecaa17824e656bcdb01f9737154a"
+ integrity sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA==
+ dependencies:
+ decode-named-character-reference "^1.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-decode-numeric-character-reference "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+
+micromark-util-encode@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz#0921ac7953dc3f1fd281e3d1932decfdb9382ab1"
+ integrity sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==
+
+micromark-util-events-to-acorn@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.2.tgz#4275834f5453c088bd29cd72dfbf80e3327cec07"
+ integrity sha512-Fk+xmBrOv9QZnEDguL9OI9/NQQp6Hz4FuQ4YmCb/5V7+9eAh1s6AYSvL20kHkD67YIg7EpE54TiSlcsf3vyZgA==
+ dependencies:
+ "@types/acorn" "^4.0.0"
+ "@types/estree" "^1.0.0"
+ "@types/unist" "^3.0.0"
+ devlop "^1.0.0"
+ estree-util-visit "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+ vfile-message "^4.0.0"
+
+micromark-util-html-tag-name@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz#ae34b01cbe063363847670284c6255bb12138ec4"
+ integrity sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==
+
+micromark-util-normalize-identifier@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz#91f9a4e65fe66cc80c53b35b0254ad67aa431d8b"
+ integrity sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==
+ dependencies:
+ micromark-util-symbol "^2.0.0"
+
+micromark-util-resolve-all@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz#189656e7e1a53d0c86a38a652b284a252389f364"
+ integrity sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==
+ dependencies:
+ micromark-util-types "^2.0.0"
+
+micromark-util-sanitize-uri@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz#ec8fbf0258e9e6d8f13d9e4770f9be64342673de"
+ integrity sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==
+ dependencies:
+ micromark-util-character "^2.0.0"
+ micromark-util-encode "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+
+micromark-util-subtokenize@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.1.tgz#76129c49ac65da6e479c09d0ec4b5f29ec6eace5"
+ integrity sha512-jZNtiFl/1aY73yS3UGQkutD0UbhTt68qnRpw2Pifmz5wV9h8gOVsN70v+Lq/f1rKaU/W8pxRe8y8Q9FX1AOe1Q==
+ dependencies:
+ devlop "^1.0.0"
+ micromark-util-chunked "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromark-util-symbol@^1.0.0, micromark-util-symbol@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz#813cd17837bdb912d069a12ebe3a44b6f7063142"
+ integrity sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==
+
+micromark-util-symbol@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz#12225c8f95edf8b17254e47080ce0862d5db8044"
+ integrity sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==
+
+micromark-util-types@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-1.1.0.tgz#e6676a8cae0bb86a2171c498167971886cb7e283"
+ integrity sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==
+
+micromark-util-types@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.0.tgz#63b4b7ffeb35d3ecf50d1ca20e68fc7caa36d95e"
+ integrity sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==
+
+micromark@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/micromark/-/micromark-4.0.0.tgz#84746a249ebd904d9658cfabc1e8e5f32cbc6249"
+ integrity sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==
+ dependencies:
+ "@types/debug" "^4.0.0"
+ debug "^4.0.0"
+ decode-named-character-reference "^1.0.0"
+ devlop "^1.0.0"
+ micromark-core-commonmark "^2.0.0"
+ micromark-factory-space "^2.0.0"
+ micromark-util-character "^2.0.0"
+ micromark-util-chunked "^2.0.0"
+ micromark-util-combine-extensions "^2.0.0"
+ micromark-util-decode-numeric-character-reference "^2.0.0"
+ micromark-util-encode "^2.0.0"
+ micromark-util-normalize-identifier "^2.0.0"
+ micromark-util-resolve-all "^2.0.0"
+ micromark-util-sanitize-uri "^2.0.0"
+ micromark-util-subtokenize "^2.0.0"
+ micromark-util-symbol "^2.0.0"
+ micromark-util-types "^2.0.0"
+
+micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.7.tgz#33e8190d9fe474a9895525f5618eee136d46c2e5"
+ integrity sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==
+ dependencies:
+ braces "^3.0.3"
+ picomatch "^2.3.1"
+
+mime-db@1.52.0:
+ version "1.52.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+ integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
+
+"mime-db@>= 1.43.0 < 2":
+ version "1.53.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.53.0.tgz#3cb63cd820fc29896d9d4e8c32ab4fcd74ccb447"
+ integrity sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==
+
+mime-db@~1.33.0:
+ version "1.33.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db"
+ integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==
+
+mime-types@2.1.18:
+ version "2.1.18"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8"
+ integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==
+ dependencies:
+ mime-db "~1.33.0"
+
+mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34:
+ version "2.1.35"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+ integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
+ dependencies:
+ mime-db "1.52.0"
+
+mime@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+ integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
+
+mimic-fn@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
+ integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
+
+mimic-response@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9"
+ integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==
+
+mimic-response@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-4.0.0.tgz#35468b19e7c75d10f5165ea25e75a5ceea7cf70f"
+ integrity sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==
+
+mini-css-extract-plugin@^2.9.2:
+ version "2.9.2"
+ resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.2.tgz#966031b468917a5446f4c24a80854b2947503c5b"
+ integrity sha512-GJuACcS//jtq4kCtd5ii/M0SZf7OZRH+BxdqXZHaJfb8TJiVl+NgQRPwiYt2EuqeSkNydn/7vP+bcE27C5mb9w==
+ dependencies:
+ schema-utils "^4.0.0"
+ tapable "^2.2.1"
+
+minimalistic-assert@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7"
+ integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==
+
+minimatch@3.1.2, minimatch@^3.1.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+ integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist@^1.2.0:
+ version "1.2.8"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
+ integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
+
+mlly@^1.7.4:
+ version "1.7.4"
+ resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.7.4.tgz#3d7295ea2358ec7a271eaa5d000a0f84febe100f"
+ integrity sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==
+ dependencies:
+ acorn "^8.14.0"
+ pathe "^2.0.1"
+ pkg-types "^1.3.0"
+ ufo "^1.5.4"
+
+mrmime@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-2.0.0.tgz#151082a6e06e59a9a39b46b3e14d5cfe92b3abb4"
+ integrity sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+ integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==
+
+ms@2.1.2:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+ integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+ms@2.1.3, ms@^2.1.3:
+ version "2.1.3"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
+ integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
+
+multicast-dns@^7.2.5:
+ version "7.2.5"
+ resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced"
+ integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==
+ dependencies:
+ dns-packet "^5.2.2"
+ thunky "^1.0.2"
+
+nanoid@^3.3.11:
+ version "3.3.11"
+ resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b"
+ integrity sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==
+
+nanoid@^3.3.7:
+ version "3.3.7"
+ resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
+ integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
+
+negotiator@0.6.3:
+ version "0.6.3"
+ resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
+ integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
+
+neo-async@^2.6.2:
+ version "2.6.2"
+ resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
+ integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
+
+no-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d"
+ integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==
+ dependencies:
+ lower-case "^2.0.2"
+ tslib "^2.0.3"
+
+node-emoji@^2.1.0:
+ version "2.1.3"
+ resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-2.1.3.tgz#93cfabb5cc7c3653aa52f29d6ffb7927d8047c06"
+ integrity sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA==
+ dependencies:
+ "@sindresorhus/is" "^4.6.0"
+ char-regex "^1.0.2"
+ emojilib "^2.4.0"
+ skin-tone "^2.0.0"
+
+node-forge@^1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
+ integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==
+
+node-releases@^2.0.18:
+ version "2.0.18"
+ resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f"
+ integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==
+
+node-releases@^2.0.19:
+ version "2.0.19"
+ resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314"
+ integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==
+
+normalize-path@^3.0.0, normalize-path@~3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
+ integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
+
+normalize-range@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
+ integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==
+
+normalize-url@^8.0.0:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-8.0.1.tgz#9b7d96af9836577c58f5883e939365fa15623a4a"
+ integrity sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==
+
+npm-run-path@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea"
+ integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==
+ dependencies:
+ path-key "^3.0.0"
+
+nprogress@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1"
+ integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==
+
+nth-check@^2.0.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d"
+ integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==
+ dependencies:
+ boolbase "^1.0.0"
+
+null-loader@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-4.0.1.tgz#8e63bd3a2dd3c64236a4679428632edd0a6dbc6a"
+ integrity sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg==
+ dependencies:
+ loader-utils "^2.0.0"
+ schema-utils "^3.0.0"
+
+object-assign@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
+ integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
+
+object-inspect@^1.13.1:
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff"
+ integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==
+
+object-keys@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
+ integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
+
+object.assign@^4.1.0:
+ version "4.1.5"
+ resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0"
+ integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==
+ dependencies:
+ call-bind "^1.0.5"
+ define-properties "^1.2.1"
+ has-symbols "^1.0.3"
+ object-keys "^1.1.1"
+
+obuf@^1.0.0, obuf@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e"
+ integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==
+
+on-finished@2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f"
+ integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==
+ dependencies:
+ ee-first "1.1.1"
+
+on-headers@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
+ integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
+
+once@^1.3.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
+ dependencies:
+ wrappy "1"
+
+onetime@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
+ integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
+ dependencies:
+ mimic-fn "^2.1.0"
+
+open@^8.0.9, open@^8.4.0:
+ version "8.4.2"
+ resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9"
+ integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==
+ dependencies:
+ define-lazy-prop "^2.0.0"
+ is-docker "^2.1.1"
+ is-wsl "^2.2.0"
+
+opener@^1.5.2:
+ version "1.5.2"
+ resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
+ integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==
+
+p-cancelable@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050"
+ integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==
+
+p-finally@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+ integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
+
+p-limit@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-4.0.0.tgz#914af6544ed32bfa54670b061cafcbd04984b644"
+ integrity sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==
+ dependencies:
+ yocto-queue "^1.0.0"
+
+p-locate@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-6.0.0.tgz#3da9a49d4934b901089dca3302fa65dc5a05c04f"
+ integrity sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==
+ dependencies:
+ p-limit "^4.0.0"
+
+p-map@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b"
+ integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==
+ dependencies:
+ aggregate-error "^3.0.0"
+
+p-queue@^6.6.2:
+ version "6.6.2"
+ resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-6.6.2.tgz#2068a9dcf8e67dd0ec3e7a2bcb76810faa85e426"
+ integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==
+ dependencies:
+ eventemitter3 "^4.0.4"
+ p-timeout "^3.2.0"
+
+p-retry@^4.5.0:
+ version "4.6.2"
+ resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16"
+ integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==
+ dependencies:
+ "@types/retry" "0.12.0"
+ retry "^0.13.1"
+
+p-timeout@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
+ integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
+ dependencies:
+ p-finally "^1.0.0"
+
+package-json@^8.1.0:
+ version "8.1.1"
+ resolved "https://registry.yarnpkg.com/package-json/-/package-json-8.1.1.tgz#3e9948e43df40d1e8e78a85485f1070bf8f03dc8"
+ integrity sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==
+ dependencies:
+ got "^12.1.0"
+ registry-auth-token "^5.0.1"
+ registry-url "^6.0.0"
+ semver "^7.3.7"
+
+package-manager-detector@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/package-manager-detector/-/package-manager-detector-1.3.0.tgz#b42d641c448826e03c2b354272456a771ce453c0"
+ integrity sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==
+
+param-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
+ integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
+parent-module@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
+ integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
+ dependencies:
+ callsites "^3.0.0"
+
+parse-entities@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-4.0.1.tgz#4e2a01111fb1c986549b944af39eeda258fc9e4e"
+ integrity sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==
+ dependencies:
+ "@types/unist" "^2.0.0"
+ character-entities "^2.0.0"
+ character-entities-legacy "^3.0.0"
+ character-reference-invalid "^2.0.0"
+ decode-named-character-reference "^1.0.0"
+ is-alphanumerical "^2.0.0"
+ is-decimal "^2.0.0"
+ is-hexadecimal "^2.0.0"
+
+parse-json@^5.0.0, parse-json@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
+ integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ error-ex "^1.3.1"
+ json-parse-even-better-errors "^2.3.0"
+ lines-and-columns "^1.1.6"
+
+parse-numeric-range@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz#7c63b61190d61e4d53a1197f0c83c47bb670ffa3"
+ integrity sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==
+
+parse5-htmlparser2-tree-adapter@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz#23c2cc233bcf09bb7beba8b8a69d46b08c62c2f1"
+ integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==
+ dependencies:
+ domhandler "^5.0.2"
+ parse5 "^7.0.0"
+
+parse5@^7.0.0:
+ version "7.1.2"
+ resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32"
+ integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==
+ dependencies:
+ entities "^4.4.0"
+
+parseurl@~1.3.2, parseurl@~1.3.3:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
+ integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
+
+pascal-case@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb"
+ integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+path-data-parser@0.1.0, path-data-parser@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/path-data-parser/-/path-data-parser-0.1.0.tgz#8f5ba5cc70fc7becb3dcefaea08e2659aba60b8c"
+ integrity sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==
+
+path-exists@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-5.0.0.tgz#a6aad9489200b21fab31e49cf09277e5116fb9e7"
+ integrity sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==
+
+path-is-absolute@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+ integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
+
+path-is-inside@1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
+ integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==
+
+path-key@^3.0.0, path-key@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
+ integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-parse@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
+ integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+
+path-to-regexp@0.1.7:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
+ integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==
+
+path-to-regexp@3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-3.3.0.tgz#f7f31d32e8518c2660862b644414b6d5c63a611b"
+ integrity sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==
+
+path-to-regexp@^1.7.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
+ integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==
+ dependencies:
+ isarray "0.0.1"
+
+path-type@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
+ integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
+
+pathe@^2.0.1, pathe@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/pathe/-/pathe-2.0.3.tgz#3ecbec55421685b70a9da872b2cff3e1cbed1716"
+ integrity sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==
+
+periscopic@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/periscopic/-/periscopic-3.1.0.tgz#7e9037bf51c5855bd33b48928828db4afa79d97a"
+ integrity sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==
+ dependencies:
+ "@types/estree" "^1.0.0"
+ estree-walker "^3.0.0"
+ is-reference "^3.0.0"
+
+picocolors@^1.0.0, picocolors@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1"
+ integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==
+
+picocolors@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b"
+ integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==
+
+picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+ integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
+
+pkg-dir@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-7.0.0.tgz#8f0c08d6df4476756c5ff29b3282d0bab7517d11"
+ integrity sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==
+ dependencies:
+ find-up "^6.3.0"
+
+pkg-types@^1.3.0:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.3.1.tgz#bd7cc70881192777eef5326c19deb46e890917df"
+ integrity sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==
+ dependencies:
+ confbox "^0.1.8"
+ mlly "^1.7.4"
+ pathe "^2.0.1"
+
+pkg-types@^2.0.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-2.2.0.tgz#049bf404f82a66c465200149457acf0c5fb0fb2d"
+ integrity sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==
+ dependencies:
+ confbox "^0.2.2"
+ exsolve "^1.0.7"
+ pathe "^2.0.3"
+
+points-on-curve@0.2.0, points-on-curve@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/points-on-curve/-/points-on-curve-0.2.0.tgz#7dbb98c43791859434284761330fa893cb81b4d1"
+ integrity sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==
+
+points-on-path@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/points-on-path/-/points-on-path-0.2.1.tgz#553202b5424c53bed37135b318858eacff85dd52"
+ integrity sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==
+ dependencies:
+ path-data-parser "0.1.0"
+ points-on-curve "0.2.0"
+
+postcss-attribute-case-insensitive@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-7.0.1.tgz#0c4500e3bcb2141848e89382c05b5a31c23033a3"
+ integrity sha512-Uai+SupNSqzlschRyNx3kbCTWgY/2hcwtHEI/ej2LJWc9JJ77qKgGptd8DHwY1mXtZ7Aoh4z4yxfwMBue9eNgw==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-calc@^9.0.1:
+ version "9.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-9.0.1.tgz#a744fd592438a93d6de0f1434c572670361eb6c6"
+ integrity sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==
+ dependencies:
+ postcss-selector-parser "^6.0.11"
+ postcss-value-parser "^4.2.0"
+
+postcss-clamp@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363"
+ integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-color-functional-notation@^7.0.10:
+ version "7.0.10"
+ resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-7.0.10.tgz#f1e9c3e4371889dcdfeabfa8515464fd8338cedc"
+ integrity sha512-k9qX+aXHBiLTRrWoCJuUFI6F1iF6QJQUXNVWJVSbqZgj57jDhBlOvD8gNUGl35tgqDivbGLhZeW3Ongz4feuKA==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+postcss-color-hex-alpha@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-10.0.0.tgz#5dd3eba1f8facb4ea306cba6e3f7712e876b0c76"
+ integrity sha512-1kervM2cnlgPs2a8Vt/Qbe5cQ++N7rkYo/2rz2BkqJZIHQwaVuJgQH38REHrAi4uM0b1fqxMkWYmese94iMp3w==
+ dependencies:
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-color-rebeccapurple@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-10.0.0.tgz#5ada28406ac47e0796dff4056b0a9d5a6ecead98"
+ integrity sha512-JFta737jSP+hdAIEhk1Vs0q0YF5P8fFcj+09pweS8ktuGuZ8pPlykHsk6mPxZ8awDl4TrcxUqJo9l1IhVr/OjQ==
+ dependencies:
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-colormin@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-6.1.0.tgz#076e8d3fb291fbff7b10e6b063be9da42ff6488d"
+ integrity sha512-x9yX7DOxeMAR+BgGVnNSAxmAj98NX/YxEMNFP+SDCEeNLb2r3i6Hh1ksMsnW8Ub5SLCpbescQqn9YEbE9554Sw==
+ dependencies:
+ browserslist "^4.23.0"
+ caniuse-api "^3.0.0"
+ colord "^2.9.3"
+ postcss-value-parser "^4.2.0"
+
+postcss-convert-values@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-6.1.0.tgz#3498387f8efedb817cbc63901d45bd1ceaa40f48"
+ integrity sha512-zx8IwP/ts9WvUM6NkVSkiU902QZL1bwPhaVaLynPtCsOTqp+ZKbNi+s6XJg3rfqpKGA/oc7Oxk5t8pOQJcwl/w==
+ dependencies:
+ browserslist "^4.23.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-custom-media@^11.0.6:
+ version "11.0.6"
+ resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-11.0.6.tgz#6b450e5bfa209efb736830066682e6567bd04967"
+ integrity sha512-C4lD4b7mUIw+RZhtY7qUbf4eADmb7Ey8BFA2px9jUbwg7pjTZDl4KY4bvlUV+/vXQvzQRfiGEVJyAbtOsCMInw==
+ dependencies:
+ "@csstools/cascade-layer-name-parser" "^2.0.5"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/media-query-list-parser" "^4.0.3"
+
+postcss-custom-properties@^14.0.6:
+ version "14.0.6"
+ resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-14.0.6.tgz#1af73a650bf115ba052cf915287c9982825fc90e"
+ integrity sha512-fTYSp3xuk4BUeVhxCSJdIPhDLpJfNakZKoiTDx7yRGCdlZrSJR7mWKVOBS4sBF+5poPQFMj2YdXx1VHItBGihQ==
+ dependencies:
+ "@csstools/cascade-layer-name-parser" "^2.0.5"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-custom-selectors@^8.0.5:
+ version "8.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-8.0.5.tgz#9448ed37a12271d7ab6cb364b6f76a46a4a323e8"
+ integrity sha512-9PGmckHQswiB2usSO6XMSswO2yFWVoCAuih1yl9FVcwkscLjRKjwsjM3t+NIWpSU2Jx3eOiK2+t4vVTQaoCHHg==
+ dependencies:
+ "@csstools/cascade-layer-name-parser" "^2.0.5"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ postcss-selector-parser "^7.0.0"
+
+postcss-dir-pseudo-class@^9.0.1:
+ version "9.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-9.0.1.tgz#80d9e842c9ae9d29f6bf5fd3cf9972891d6cc0ca"
+ integrity sha512-tRBEK0MHYvcMUrAuYMEOa0zg9APqirBcgzi6P21OhxtJyJADo/SWBwY1CAwEohQ/6HDaa9jCjLRG7K3PVQYHEA==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-discard-comments@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-6.0.2.tgz#e768dcfdc33e0216380623652b0a4f69f4678b6c"
+ integrity sha512-65w/uIqhSBBfQmYnG92FO1mWZjJ4GL5b8atm5Yw2UgrwD7HiNiSSNwJor1eCFGzUgYnN/iIknhNRVqjrrpuglw==
+
+postcss-discard-duplicates@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.3.tgz#d121e893c38dc58a67277f75bb58ba43fce4c3eb"
+ integrity sha512-+JA0DCvc5XvFAxwx6f/e68gQu/7Z9ud584VLmcgto28eB8FqSFZwtrLwB5Kcp70eIoWP/HXqz4wpo8rD8gpsTw==
+
+postcss-discard-empty@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-6.0.3.tgz#ee39c327219bb70473a066f772621f81435a79d9"
+ integrity sha512-znyno9cHKQsK6PtxL5D19Fj9uwSzC2mB74cpT66fhgOadEUPyXFkbgwm5tvc3bt3NAy8ltE5MrghxovZRVnOjQ==
+
+postcss-discard-overridden@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.2.tgz#4e9f9c62ecd2df46e8fdb44dc17e189776572e2d"
+ integrity sha512-j87xzI4LUggC5zND7KdjsI25APtyMuynXZSujByMaav2roV6OZX+8AaCUcZSWqckZpjAjRyFDdpqybgjFO0HJQ==
+
+postcss-discard-unused@^6.0.5:
+ version "6.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-6.0.5.tgz#c1b0e8c032c6054c3fbd22aaddba5b248136f338"
+ integrity sha512-wHalBlRHkaNnNwfC8z+ppX57VhvS+HWgjW508esjdaEYr3Mx7Gnn2xA4R/CKf5+Z9S5qsqC+Uzh4ueENWwCVUA==
+ dependencies:
+ postcss-selector-parser "^6.0.16"
+
+postcss-double-position-gradients@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-6.0.2.tgz#185f8eab2db9cf4e34be69b5706c905895bb52ae"
+ integrity sha512-7qTqnL7nfLRyJK/AHSVrrXOuvDDzettC+wGoienURV8v2svNbu6zJC52ruZtHaO6mfcagFmuTGFdzRsJKB3k5Q==
+ dependencies:
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-focus-visible@^10.0.1:
+ version "10.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-10.0.1.tgz#1f7904904368a2d1180b220595d77b6f8a957868"
+ integrity sha512-U58wyjS/I1GZgjRok33aE8juW9qQgQUNwTSdxQGuShHzwuYdcklnvK/+qOWX1Q9kr7ysbraQ6ht6r+udansalA==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-focus-within@^9.0.1:
+ version "9.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-9.0.1.tgz#ac01ce80d3f2e8b2b3eac4ff84f8e15cd0057bc7"
+ integrity sha512-fzNUyS1yOYa7mOjpci/bR+u+ESvdar6hk8XNK/TRR0fiGTp2QT5N+ducP0n3rfH/m9I7H/EQU6lsa2BrgxkEjw==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-font-variant@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66"
+ integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA==
+
+postcss-gap-properties@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-6.0.0.tgz#d5ff0bdf923c06686499ed2b12e125fe64054fed"
+ integrity sha512-Om0WPjEwiM9Ru+VhfEDPZJAKWUd0mV1HmNXqp2C29z80aQ2uP9UVhLc7e3aYMIor/S5cVhoPgYQ7RtfeZpYTRw==
+
+postcss-image-set-function@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-7.0.0.tgz#538e94e16716be47f9df0573b56bbaca86e1da53"
+ integrity sha512-QL7W7QNlZuzOwBTeXEmbVckNt1FSmhQtbMRvGGqqU4Nf4xk6KUEQhAoWuMzwbSv5jxiRiSZ5Tv7eiDB9U87znA==
+ dependencies:
+ "@csstools/utilities" "^2.0.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-lab-function@^7.0.10:
+ version "7.0.10"
+ resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-7.0.10.tgz#0537bd7245b935fc133298c8896bcbd160540cae"
+ integrity sha512-tqs6TCEv9tC1Riq6fOzHuHcZyhg4k3gIAMB8GGY/zA1ssGdm6puHMVE7t75aOSoFg7UD2wyrFFhbldiCMyyFTQ==
+ dependencies:
+ "@csstools/css-color-parser" "^3.0.10"
+ "@csstools/css-parser-algorithms" "^3.0.5"
+ "@csstools/css-tokenizer" "^3.0.4"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/utilities" "^2.0.0"
+
+postcss-loader@^7.3.4:
+ version "7.3.4"
+ resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-7.3.4.tgz#aed9b79ce4ed7e9e89e56199d25ad1ec8f606209"
+ integrity sha512-iW5WTTBSC5BfsBJ9daFMPVrLT36MrNiC6fqOZTTaHjBNX6Pfd5p+hSBqe/fEeNd7pc13QiAyGt7VdGMw4eRC4A==
+ dependencies:
+ cosmiconfig "^8.3.5"
+ jiti "^1.20.0"
+ semver "^7.5.4"
+
+postcss-logical@^8.1.0:
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-8.1.0.tgz#4092b16b49e3ecda70c4d8945257da403d167228"
+ integrity sha512-pL1hXFQ2fEXNKiNiAgtfA005T9FBxky5zkX6s4GZM2D8RkVgRqz3f4g1JUoq925zXv495qk8UNldDwh8uGEDoA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-merge-idents@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-6.0.3.tgz#7b9c31c7bc823c94bec50f297f04e3c2b838ea65"
+ integrity sha512-1oIoAsODUs6IHQZkLQGO15uGEbK3EAl5wi9SS8hs45VgsxQfMnxvt+L+zIr7ifZFIH14cfAeVe2uCTa+SPRa3g==
+ dependencies:
+ cssnano-utils "^4.0.2"
+ postcss-value-parser "^4.2.0"
+
+postcss-merge-longhand@^6.0.5:
+ version "6.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.5.tgz#ba8a8d473617c34a36abbea8dda2b215750a065a"
+ integrity sha512-5LOiordeTfi64QhICp07nzzuTDjNSO8g5Ksdibt44d+uvIIAE1oZdRn8y/W5ZtYgRH/lnLDlvi9F8btZcVzu3w==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+ stylehacks "^6.1.1"
+
+postcss-merge-rules@^6.1.1:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-6.1.1.tgz#7aa539dceddab56019469c0edd7d22b64c3dea9d"
+ integrity sha512-KOdWF0gju31AQPZiD+2Ar9Qjowz1LTChSjFFbS+e2sFgc4uHOp3ZvVX4sNeTlk0w2O31ecFGgrFzhO0RSWbWwQ==
+ dependencies:
+ browserslist "^4.23.0"
+ caniuse-api "^3.0.0"
+ cssnano-utils "^4.0.2"
+ postcss-selector-parser "^6.0.16"
+
+postcss-minify-font-values@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-6.1.0.tgz#a0e574c02ee3f299be2846369211f3b957ea4c59"
+ integrity sha512-gklfI/n+9rTh8nYaSJXlCo3nOKqMNkxuGpTn/Qm0gstL3ywTr9/WRKznE+oy6fvfolH6dF+QM4nCo8yPLdvGJg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-gradients@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-6.0.3.tgz#ca3eb55a7bdb48a1e187a55c6377be918743dbd6"
+ integrity sha512-4KXAHrYlzF0Rr7uc4VrfwDJ2ajrtNEpNEuLxFgwkhFZ56/7gaE4Nr49nLsQDZyUe+ds+kEhf+YAUolJiYXF8+Q==
+ dependencies:
+ colord "^2.9.3"
+ cssnano-utils "^4.0.2"
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-params@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-6.1.0.tgz#54551dec77b9a45a29c3cb5953bf7325a399ba08"
+ integrity sha512-bmSKnDtyyE8ujHQK0RQJDIKhQ20Jq1LYiez54WiaOoBtcSuflfK3Nm596LvbtlFcpipMjgClQGyGr7GAs+H1uA==
+ dependencies:
+ browserslist "^4.23.0"
+ cssnano-utils "^4.0.2"
+ postcss-value-parser "^4.2.0"
+
+postcss-minify-selectors@^6.0.4:
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-6.0.4.tgz#197f7d72e6dd19eed47916d575d69dc38b396aff"
+ integrity sha512-L8dZSwNLgK7pjTto9PzWRoMbnLq5vsZSTu8+j1P/2GB8qdtGQfn+K1uSvFgYvgh83cbyxT5m43ZZhUMTJDSClQ==
+ dependencies:
+ postcss-selector-parser "^6.0.16"
+
+postcss-modules-extract-imports@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.1.0.tgz#b4497cb85a9c0c4b5aabeb759bb25e8d89f15002"
+ integrity sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==
+
+postcss-modules-local-by-default@^4.0.5:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.5.tgz#f1b9bd757a8edf4d8556e8d0f4f894260e3df78f"
+ integrity sha512-6MieY7sIfTK0hYfafw1OMEG+2bg8Q1ocHCpoWLqOKj3JXlKu4G7btkmM/B7lFubYkYWmRSPLZi5chid63ZaZYw==
+ dependencies:
+ icss-utils "^5.0.0"
+ postcss-selector-parser "^6.0.2"
+ postcss-value-parser "^4.1.0"
+
+postcss-modules-scope@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.2.0.tgz#a43d28289a169ce2c15c00c4e64c0858e43457d5"
+ integrity sha512-oq+g1ssrsZOsx9M96c5w8laRmvEu9C3adDSjI8oTcbfkrTE8hx/zfyobUoWIxaKPO8bt6S62kxpw5GqypEw1QQ==
+ dependencies:
+ postcss-selector-parser "^6.0.4"
+
+postcss-modules-values@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c"
+ integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==
+ dependencies:
+ icss-utils "^5.0.0"
+
+postcss-nesting@^13.0.2:
+ version "13.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-13.0.2.tgz#fde0d4df772b76d03b52eccc84372e8d1ca1402e"
+ integrity sha512-1YCI290TX+VP0U/K/aFxzHzQWHWURL+CtHMSbex1lCdpXD1SoR2sYuxDu5aNI9lPoXpKTCggFZiDJbwylU0LEQ==
+ dependencies:
+ "@csstools/selector-resolve-nested" "^3.1.0"
+ "@csstools/selector-specificity" "^5.0.0"
+ postcss-selector-parser "^7.0.0"
+
+postcss-normalize-charset@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-6.0.2.tgz#1ec25c435057a8001dac942942a95ffe66f721e1"
+ integrity sha512-a8N9czmdnrjPHa3DeFlwqst5eaL5W8jYu3EBbTTkI5FHkfMhFZh1EGbku6jhHhIzTA6tquI2P42NtZ59M/H/kQ==
+
+postcss-normalize-display-values@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.2.tgz#54f02764fed0b288d5363cbb140d6950dbbdd535"
+ integrity sha512-8H04Mxsb82ON/aAkPeq8kcBbAtI5Q2a64X/mnRRfPXBq7XeogoQvReqxEfc0B4WPq1KimjezNC8flUtC3Qz6jg==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-positions@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-6.0.2.tgz#e982d284ec878b9b819796266f640852dbbb723a"
+ integrity sha512-/JFzI441OAB9O7VnLA+RtSNZvQ0NCFZDOtp6QPFo1iIyawyXg0YI3CYM9HBy1WvwCRHnPep/BvI1+dGPKoXx/Q==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-repeat-style@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.2.tgz#f8006942fd0617c73f049dd8b6201c3a3040ecf3"
+ integrity sha512-YdCgsfHkJ2jEXwR4RR3Tm/iOxSfdRt7jplS6XRh9Js9PyCR/aka/FCb6TuHT2U8gQubbm/mPmF6L7FY9d79VwQ==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-string@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-6.0.2.tgz#e3cc6ad5c95581acd1fc8774b309dd7c06e5e363"
+ integrity sha512-vQZIivlxlfqqMp4L9PZsFE4YUkWniziKjQWUtsxUiVsSSPelQydwS8Wwcuw0+83ZjPWNTl02oxlIvXsmmG+CiQ==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-timing-functions@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.2.tgz#40cb8726cef999de984527cbd9d1db1f3e9062c0"
+ integrity sha512-a+YrtMox4TBtId/AEwbA03VcJgtyW4dGBizPl7e88cTFULYsprgHWTbfyjSLyHeBcK/Q9JhXkt2ZXiwaVHoMzA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-unicode@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-6.1.0.tgz#aaf8bbd34c306e230777e80f7f12a4b7d27ce06e"
+ integrity sha512-QVC5TQHsVj33otj8/JD869Ndr5Xcc/+fwRh4HAsFsAeygQQXm+0PySrKbr/8tkDKzW+EVT3QkqZMfFrGiossDg==
+ dependencies:
+ browserslist "^4.23.0"
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-url@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-6.0.2.tgz#292792386be51a8de9a454cb7b5c58ae22db0f79"
+ integrity sha512-kVNcWhCeKAzZ8B4pv/DnrU1wNh458zBNp8dh4y5hhxih5RZQ12QWMuQrDgPRw3LRl8mN9vOVfHl7uhvHYMoXsQ==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-normalize-whitespace@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.2.tgz#fbb009e6ebd312f8b2efb225c2fcc7cf32b400cd"
+ integrity sha512-sXZ2Nj1icbJOKmdjXVT9pnyHQKiSAyuNQHSgRCUgThn2388Y9cGVDR+E9J9iAYbSbLHI+UUwLVl1Wzco/zgv0Q==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-opacity-percentage@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-3.0.0.tgz#0b0db5ed5db5670e067044b8030b89c216e1eb0a"
+ integrity sha512-K6HGVzyxUxd/VgZdX04DCtdwWJ4NGLG212US4/LA1TLAbHgmAsTWVR86o+gGIbFtnTkfOpb9sCRBx8K7HO66qQ==
+
+postcss-ordered-values@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-6.0.2.tgz#366bb663919707093451ab70c3f99c05672aaae5"
+ integrity sha512-VRZSOB+JU32RsEAQrO94QPkClGPKJEL/Z9PCBImXMhIeK5KAYo6slP/hBYlLgrCjFxyqvn5VC81tycFEDBLG1Q==
+ dependencies:
+ cssnano-utils "^4.0.2"
+ postcss-value-parser "^4.2.0"
+
+postcss-overflow-shorthand@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-6.0.0.tgz#f5252b4a2ee16c68cd8a9029edb5370c4a9808af"
+ integrity sha512-BdDl/AbVkDjoTofzDQnwDdm/Ym6oS9KgmO7Gr+LHYjNWJ6ExORe4+3pcLQsLA9gIROMkiGVjjwZNoL/mpXHd5Q==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-page-break@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f"
+ integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ==
+
+postcss-place@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-10.0.0.tgz#ba36ee4786ca401377ced17a39d9050ed772e5a9"
+ integrity sha512-5EBrMzat2pPAxQNWYavwAfoKfYcTADJ8AXGVPcUZ2UkNloUTWzJQExgrzrDkh3EKzmAx1evfTAzF9I8NGcc+qw==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-preset-env@^10.2.1:
+ version "10.2.4"
+ resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-10.2.4.tgz#17d386b5a86b136dfbca89b52ef03a95ad9e32fa"
+ integrity sha512-q+lXgqmTMdB0Ty+EQ31SuodhdfZetUlwCA/F0zRcd/XdxjzI+Rl2JhZNz5US2n/7t9ePsvuhCnEN4Bmu86zXlA==
+ dependencies:
+ "@csstools/postcss-cascade-layers" "^5.0.2"
+ "@csstools/postcss-color-function" "^4.0.10"
+ "@csstools/postcss-color-mix-function" "^3.0.10"
+ "@csstools/postcss-color-mix-variadic-function-arguments" "^1.0.0"
+ "@csstools/postcss-content-alt-text" "^2.0.6"
+ "@csstools/postcss-exponential-functions" "^2.0.9"
+ "@csstools/postcss-font-format-keywords" "^4.0.0"
+ "@csstools/postcss-gamut-mapping" "^2.0.10"
+ "@csstools/postcss-gradients-interpolation-method" "^5.0.10"
+ "@csstools/postcss-hwb-function" "^4.0.10"
+ "@csstools/postcss-ic-unit" "^4.0.2"
+ "@csstools/postcss-initial" "^2.0.1"
+ "@csstools/postcss-is-pseudo-class" "^5.0.3"
+ "@csstools/postcss-light-dark-function" "^2.0.9"
+ "@csstools/postcss-logical-float-and-clear" "^3.0.0"
+ "@csstools/postcss-logical-overflow" "^2.0.0"
+ "@csstools/postcss-logical-overscroll-behavior" "^2.0.0"
+ "@csstools/postcss-logical-resize" "^3.0.0"
+ "@csstools/postcss-logical-viewport-units" "^3.0.4"
+ "@csstools/postcss-media-minmax" "^2.0.9"
+ "@csstools/postcss-media-queries-aspect-ratio-number-values" "^3.0.5"
+ "@csstools/postcss-nested-calc" "^4.0.0"
+ "@csstools/postcss-normalize-display-values" "^4.0.0"
+ "@csstools/postcss-oklab-function" "^4.0.10"
+ "@csstools/postcss-progressive-custom-properties" "^4.1.0"
+ "@csstools/postcss-random-function" "^2.0.1"
+ "@csstools/postcss-relative-color-syntax" "^3.0.10"
+ "@csstools/postcss-scope-pseudo-class" "^4.0.1"
+ "@csstools/postcss-sign-functions" "^1.1.4"
+ "@csstools/postcss-stepped-value-functions" "^4.0.9"
+ "@csstools/postcss-text-decoration-shorthand" "^4.0.2"
+ "@csstools/postcss-trigonometric-functions" "^4.0.9"
+ "@csstools/postcss-unset-value" "^4.0.0"
+ autoprefixer "^10.4.21"
+ browserslist "^4.25.0"
+ css-blank-pseudo "^7.0.1"
+ css-has-pseudo "^7.0.2"
+ css-prefers-color-scheme "^10.0.0"
+ cssdb "^8.3.0"
+ postcss-attribute-case-insensitive "^7.0.1"
+ postcss-clamp "^4.1.0"
+ postcss-color-functional-notation "^7.0.10"
+ postcss-color-hex-alpha "^10.0.0"
+ postcss-color-rebeccapurple "^10.0.0"
+ postcss-custom-media "^11.0.6"
+ postcss-custom-properties "^14.0.6"
+ postcss-custom-selectors "^8.0.5"
+ postcss-dir-pseudo-class "^9.0.1"
+ postcss-double-position-gradients "^6.0.2"
+ postcss-focus-visible "^10.0.1"
+ postcss-focus-within "^9.0.1"
+ postcss-font-variant "^5.0.0"
+ postcss-gap-properties "^6.0.0"
+ postcss-image-set-function "^7.0.0"
+ postcss-lab-function "^7.0.10"
+ postcss-logical "^8.1.0"
+ postcss-nesting "^13.0.2"
+ postcss-opacity-percentage "^3.0.0"
+ postcss-overflow-shorthand "^6.0.0"
+ postcss-page-break "^3.0.4"
+ postcss-place "^10.0.0"
+ postcss-pseudo-class-any-link "^10.0.1"
+ postcss-replace-overflow-wrap "^4.0.0"
+ postcss-selector-not "^8.0.1"
+
+postcss-pseudo-class-any-link@^10.0.1:
+ version "10.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-10.0.1.tgz#06455431171bf44b84d79ebaeee9fd1c05946544"
+ integrity sha512-3el9rXlBOqTFaMFkWDOkHUTQekFIYnaQY55Rsp8As8QQkpiSgIYEcF/6Ond93oHiDsGb4kad8zjt+NPlOC1H0Q==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-reduce-idents@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-6.0.3.tgz#b0d9c84316d2a547714ebab523ec7d13704cd486"
+ integrity sha512-G3yCqZDpsNPoQgbDUy3T0E6hqOQ5xigUtBQyrmq3tn2GxlyiL0yyl7H+T8ulQR6kOcHJ9t7/9H4/R2tv8tJbMA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-reduce-initial@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-6.1.0.tgz#4401297d8e35cb6e92c8e9586963e267105586ba"
+ integrity sha512-RarLgBK/CrL1qZags04oKbVbrrVK2wcxhvta3GCxrZO4zveibqbRPmm2VI8sSgCXwoUHEliRSbOfpR0b/VIoiw==
+ dependencies:
+ browserslist "^4.23.0"
+ caniuse-api "^3.0.0"
+
+postcss-reduce-transforms@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.2.tgz#6fa2c586bdc091a7373caeee4be75a0f3e12965d"
+ integrity sha512-sB+Ya++3Xj1WaT9+5LOOdirAxP7dJZms3GRcYheSPi1PiTMigsxHAdkrbItHxwYHr4kt1zL7mmcHstgMYT+aiA==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+
+postcss-replace-overflow-wrap@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319"
+ integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw==
+
+postcss-selector-not@^8.0.1:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-8.0.1.tgz#f2df9c6ac9f95e9fe4416ca41a957eda16130172"
+ integrity sha512-kmVy/5PYVb2UOhy0+LqUYAhKj7DUGDpSWa5LZqlkWJaaAV+dxxsOG3+St0yNLu6vsKD7Dmqx+nWQt0iil89+WA==
+ dependencies:
+ postcss-selector-parser "^7.0.0"
+
+postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.16, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.1.tgz#5be94b277b8955904476a2400260002ce6c56e38"
+ integrity sha512-b4dlw/9V8A71rLIDsSwVmak9z2DuBUB7CA1/wSdelNEzqsjoSPeADTWNO09lpH49Diy3/JIZ2bSPB1dI3LJCHg==
+ dependencies:
+ cssesc "^3.0.0"
+ util-deprecate "^1.0.2"
+
+postcss-selector-parser@^7.0.0:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz#4d6af97eba65d73bc4d84bcb343e865d7dd16262"
+ integrity sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==
+ dependencies:
+ cssesc "^3.0.0"
+ util-deprecate "^1.0.2"
+
+postcss-sort-media-queries@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-5.2.0.tgz#4556b3f982ef27d3bac526b99b6c0d3359a6cf97"
+ integrity sha512-AZ5fDMLD8SldlAYlvi8NIqo0+Z8xnXU2ia0jxmuhxAU+Lqt9K+AlmLNJ/zWEnE9x+Zx3qL3+1K20ATgNOr3fAA==
+ dependencies:
+ sort-css-media-queries "2.2.0"
+
+postcss-svgo@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-6.0.3.tgz#1d6e180d6df1fa8a3b30b729aaa9161e94f04eaa"
+ integrity sha512-dlrahRmxP22bX6iKEjOM+c8/1p+81asjKT+V5lrgOH944ryx/OHpclnIbGsKVd3uWOXFLYJwCVf0eEkJGvO96g==
+ dependencies:
+ postcss-value-parser "^4.2.0"
+ svgo "^3.2.0"
+
+postcss-unique-selectors@^6.0.4:
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-6.0.4.tgz#983ab308896b4bf3f2baaf2336e14e52c11a2088"
+ integrity sha512-K38OCaIrO8+PzpArzkLKB42dSARtC2tmG6PvD4b1o1Q2E9Os8jzfWFfSy/rixsHwohtsDdFtAWGjFVFUdwYaMg==
+ dependencies:
+ postcss-selector-parser "^6.0.16"
+
+postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514"
+ integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==
+
+postcss-zindex@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-6.0.2.tgz#e498304b83a8b165755f53db40e2ea65a99b56e1"
+ integrity sha512-5BxW9l1evPB/4ZIc+2GobEBoKC+h8gPGCMi+jxsYvd2x0mjq7wazk6DrP71pStqxE9Foxh5TVnonbWpFZzXaYg==
+
+postcss@^8.4.21, postcss@^8.4.24, postcss@^8.4.33:
+ version "8.4.41"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.41.tgz#d6104d3ba272d882fe18fc07d15dc2da62fa2681"
+ integrity sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==
+ dependencies:
+ nanoid "^3.3.7"
+ picocolors "^1.0.1"
+ source-map-js "^1.2.0"
+
+postcss@^8.5.4:
+ version "8.5.6"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.5.6.tgz#2825006615a619b4f62a9e7426cc120b349a8f3c"
+ integrity sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==
+ dependencies:
+ nanoid "^3.3.11"
+ picocolors "^1.1.1"
+ source-map-js "^1.2.1"
+
+pretty-error@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6"
+ integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==
+ dependencies:
+ lodash "^4.17.20"
+ renderkid "^3.0.0"
+
+pretty-time@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/pretty-time/-/pretty-time-1.1.0.tgz#ffb7429afabb8535c346a34e41873adf3d74dd0e"
+ integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==
+
+prism-react-renderer@^2.3.0, prism-react-renderer@^2.3.1:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/prism-react-renderer/-/prism-react-renderer-2.3.1.tgz#e59e5450052ede17488f6bc85de1553f584ff8d5"
+ integrity sha512-Rdf+HzBLR7KYjzpJ1rSoxT9ioO85nZngQEoFIhL07XhtJHlCU3SOz0GJ6+qvMyQe0Se+BV3qpe6Yd/NmQF5Juw==
+ dependencies:
+ "@types/prismjs" "^1.26.0"
+ clsx "^2.0.0"
+
+prismjs@^1.29.0:
+ version "1.29.0"
+ resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12"
+ integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==
+
+process-nextick-args@~2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
+ integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+
+prompts@^2.4.2:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
+ integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==
+ dependencies:
+ kleur "^3.0.3"
+ sisteransi "^1.0.5"
+
+prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1:
+ version "15.8.1"
+ resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
+ integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==
+ dependencies:
+ loose-envify "^1.4.0"
+ object-assign "^4.1.1"
+ react-is "^16.13.1"
+
+property-information@^6.0.0:
+ version "6.5.0"
+ resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.5.0.tgz#6212fbb52ba757e92ef4fb9d657563b933b7ffec"
+ integrity sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==
+
+proto-list@~1.2.1:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849"
+ integrity sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==
+
+proxy-addr@~2.0.7:
+ version "2.0.7"
+ resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025"
+ integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
+ dependencies:
+ forwarded "0.2.0"
+ ipaddr.js "1.9.1"
+
+punycode@^2.1.0:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5"
+ integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
+
+pupa@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/pupa/-/pupa-3.1.0.tgz#f15610274376bbcc70c9a3aa8b505ea23f41c579"
+ integrity sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug==
+ dependencies:
+ escape-goat "^4.0.0"
+
+qs@6.11.0:
+ version "6.11.0"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
+ integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
+ dependencies:
+ side-channel "^1.0.4"
+
+quansync@^0.2.8:
+ version "0.2.10"
+ resolved "https://registry.yarnpkg.com/quansync/-/quansync-0.2.10.tgz#32053cf166fa36511aae95fc49796116f2dc20e1"
+ integrity sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A==
+
+queue-microtask@^1.2.2:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
+ integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
+
+quick-lru@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932"
+ integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==
+
+randombytes@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a"
+ integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==
+ dependencies:
+ safe-buffer "^5.1.0"
+
+range-parser@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e"
+ integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==
+
+range-parser@^1.2.1, range-parser@~1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
+ integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
+
+raw-body@2.5.2:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
+ integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
+ dependencies:
+ bytes "3.1.2"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ unpipe "1.0.0"
+
+rc@1.2.8:
+ version "1.2.8"
+ resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
+ integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
+ dependencies:
+ deep-extend "^0.6.0"
+ ini "~1.3.0"
+ minimist "^1.2.0"
+ strip-json-comments "~2.0.1"
+
+react-dom@^18.0.0:
+ version "18.3.1"
+ resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4"
+ integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==
+ dependencies:
+ loose-envify "^1.1.0"
+ scheduler "^0.23.2"
+
+react-fast-compare@^3.2.0:
+ version "3.2.2"
+ resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.2.tgz#929a97a532304ce9fee4bcae44234f1ce2c21d49"
+ integrity sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==
+
+"react-helmet-async@npm:@slorber/react-helmet-async@1.3.0":
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/@slorber/react-helmet-async/-/react-helmet-async-1.3.0.tgz#11fbc6094605cf60aa04a28c17e0aab894b4ecff"
+ integrity sha512-e9/OK8VhwUSc67diWI8Rb3I0YgI9/SBQtnhe9aEuK6MhZm7ntZZimXgwXnd8W96YTmSOb9M4d8LwhRZyhWr/1A==
+ dependencies:
+ "@babel/runtime" "^7.12.5"
+ invariant "^2.2.4"
+ prop-types "^15.7.2"
+ react-fast-compare "^3.2.0"
+ shallowequal "^1.1.0"
+
+react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0:
+ version "16.13.1"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
+ integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
+
+react-is@^18.3.1:
+ version "18.3.1"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e"
+ integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==
+
+react-json-view-lite@^2.3.0:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/react-json-view-lite/-/react-json-view-lite-2.4.1.tgz#0d06696a06aaf4a74e890302b76cf8cddcc45d60"
+ integrity sha512-fwFYknRIBxjbFm0kBDrzgBy1xa5tDg2LyXXBepC5f1b+MY3BUClMCsvanMPn089JbV1Eg3nZcrp0VCuH43aXnA==
+
+react-loadable-ssr-addon-v5-slorber@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz#2cdc91e8a744ffdf9e3556caabeb6e4278689883"
+ integrity sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==
+ dependencies:
+ "@babel/runtime" "^7.10.3"
+
+"react-loadable@npm:@docusaurus/react-loadable@6.0.0":
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/@docusaurus/react-loadable/-/react-loadable-6.0.0.tgz#de6c7f73c96542bd70786b8e522d535d69069dc4"
+ integrity sha512-YMMxTUQV/QFSnbgrP3tjDzLHRg7vsbMn8e9HAa8o/1iXoiomo48b7sk/kkmWEuWNDPJVlKSJRB6Y2fHqdJk+SQ==
+ dependencies:
+ "@types/react" "*"
+
+react-router-config@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/react-router-config/-/react-router-config-5.1.1.tgz#0f4263d1a80c6b2dc7b9c1902c9526478194a988"
+ integrity sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+
+react-router-dom@^5.3.4:
+ version "5.3.4"
+ resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.4.tgz#2ed62ffd88cae6db134445f4a0c0ae8b91d2e5e6"
+ integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ==
+ dependencies:
+ "@babel/runtime" "^7.12.13"
+ history "^4.9.0"
+ loose-envify "^1.3.1"
+ prop-types "^15.6.2"
+ react-router "5.3.4"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
+react-router@5.3.4, react-router@^5.3.4:
+ version "5.3.4"
+ resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.4.tgz#8ca252d70fcc37841e31473c7a151cf777887bb5"
+ integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA==
+ dependencies:
+ "@babel/runtime" "^7.12.13"
+ history "^4.9.0"
+ hoist-non-react-statics "^3.1.0"
+ loose-envify "^1.3.1"
+ path-to-regexp "^1.7.0"
+ prop-types "^15.6.2"
+ react-is "^16.6.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
+react-transition-group@^4.4.5:
+ version "4.4.5"
+ resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.5.tgz#e53d4e3f3344da8521489fbef8f2581d42becdd1"
+ integrity sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==
+ dependencies:
+ "@babel/runtime" "^7.5.5"
+ dom-helpers "^5.0.1"
+ loose-envify "^1.4.0"
+ prop-types "^15.6.2"
+
+react@^18.0.0:
+ version "18.3.1"
+ resolved "https://registry.yarnpkg.com/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891"
+ integrity sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==
+ dependencies:
+ loose-envify "^1.1.0"
+
+readable-stream@^2.0.1:
+ version "2.3.8"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
+ integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
+readable-stream@^3.0.6:
+ version "3.6.2"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
+ integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
+ dependencies:
+ inherits "^2.0.3"
+ string_decoder "^1.1.1"
+ util-deprecate "^1.0.1"
+
+readdirp@~3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
+ integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
+ dependencies:
+ picomatch "^2.2.1"
+
+regenerate-unicode-properties@^10.1.0:
+ version "10.1.1"
+ resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz#6b0e05489d9076b04c436f318d9b067bba459480"
+ integrity sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==
+ dependencies:
+ regenerate "^1.4.2"
+
+regenerate-unicode-properties@^10.2.0:
+ version "10.2.0"
+ resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz#626e39df8c372338ea9b8028d1f99dc3fd9c3db0"
+ integrity sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==
+ dependencies:
+ regenerate "^1.4.2"
+
+regenerate@^1.4.2:
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
+ integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
+
+regenerator-runtime@^0.14.0:
+ version "0.14.1"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
+ integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
+
+regenerator-transform@^0.15.2:
+ version "0.15.2"
+ resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4"
+ integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==
+ dependencies:
+ "@babel/runtime" "^7.8.4"
+
+regexpu-core@^5.3.1:
+ version "5.3.2"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b"
+ integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==
+ dependencies:
+ "@babel/regjsgen" "^0.8.0"
+ regenerate "^1.4.2"
+ regenerate-unicode-properties "^10.1.0"
+ regjsparser "^0.9.1"
+ unicode-match-property-ecmascript "^2.0.0"
+ unicode-match-property-value-ecmascript "^2.1.0"
+
+regexpu-core@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-6.2.0.tgz#0e5190d79e542bf294955dccabae04d3c7d53826"
+ integrity sha512-H66BPQMrv+V16t8xtmq+UC0CBpiTBA60V8ibS1QVReIp8T1z8hwFxqcGzm9K6lgsN7sB5edVH8a+ze6Fqm4weA==
+ dependencies:
+ regenerate "^1.4.2"
+ regenerate-unicode-properties "^10.2.0"
+ regjsgen "^0.8.0"
+ regjsparser "^0.12.0"
+ unicode-match-property-ecmascript "^2.0.0"
+ unicode-match-property-value-ecmascript "^2.1.0"
+
+registry-auth-token@^5.0.1:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-5.0.2.tgz#8b026cc507c8552ebbe06724136267e63302f756"
+ integrity sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==
+ dependencies:
+ "@pnpm/npm-conf" "^2.1.0"
+
+registry-url@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-6.0.1.tgz#056d9343680f2f64400032b1e199faa692286c58"
+ integrity sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==
+ dependencies:
+ rc "1.2.8"
+
+regjsgen@^0.8.0:
+ version "0.8.0"
+ resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.8.0.tgz#df23ff26e0c5b300a6470cad160a9d090c3a37ab"
+ integrity sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==
+
+regjsparser@^0.12.0:
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.12.0.tgz#0e846df6c6530586429377de56e0475583b088dc"
+ integrity sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==
+ dependencies:
+ jsesc "~3.0.2"
+
+regjsparser@^0.9.1:
+ version "0.9.1"
+ resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709"
+ integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==
+ dependencies:
+ jsesc "~0.5.0"
+
+rehype-raw@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-7.0.0.tgz#59d7348fd5dbef3807bbaa1d443efd2dd85ecee4"
+ integrity sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ hast-util-raw "^9.0.0"
+ vfile "^6.0.0"
+
+relateurl@^0.2.7:
+ version "0.2.7"
+ resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9"
+ integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==
+
+remark-directive@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/remark-directive/-/remark-directive-3.0.0.tgz#34452d951b37e6207d2e2a4f830dc33442923268"
+ integrity sha512-l1UyWJ6Eg1VPU7Hm/9tt0zKtReJQNOA4+iDMAxTyZNWnJnFlbS/7zhiel/rogTLQ2vMYwDzSJa4BiVNqGlqIMA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-directive "^3.0.0"
+ micromark-extension-directive "^3.0.0"
+ unified "^11.0.0"
+
+remark-emoji@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/remark-emoji/-/remark-emoji-4.0.1.tgz#671bfda668047689e26b2078c7356540da299f04"
+ integrity sha512-fHdvsTR1dHkWKev9eNyhTo4EFwbUvJ8ka9SgeWkMPYFX4WoI7ViVBms3PjlQYgw5TLvNQso3GUB/b/8t3yo+dg==
+ dependencies:
+ "@types/mdast" "^4.0.2"
+ emoticon "^4.0.1"
+ mdast-util-find-and-replace "^3.0.1"
+ node-emoji "^2.1.0"
+ unified "^11.0.4"
+
+remark-frontmatter@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz#b68d61552a421ec412c76f4f66c344627dc187a2"
+ integrity sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-frontmatter "^2.0.0"
+ micromark-extension-frontmatter "^2.0.0"
+ unified "^11.0.0"
+
+remark-gfm@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-4.0.0.tgz#aea777f0744701aa288b67d28c43565c7e8c35de"
+ integrity sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-gfm "^3.0.0"
+ micromark-extension-gfm "^3.0.0"
+ remark-parse "^11.0.0"
+ remark-stringify "^11.0.0"
+ unified "^11.0.0"
+
+remark-mdx@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/remark-mdx/-/remark-mdx-3.0.1.tgz#8f73dd635c1874e44426e243f72c0977cf60e212"
+ integrity sha512-3Pz3yPQ5Rht2pM5R+0J2MrGoBSrzf+tJG94N+t/ilfdh8YLyyKYtidAYwTveB20BoHAcwIopOUqhcmh2F7hGYA==
+ dependencies:
+ mdast-util-mdx "^3.0.0"
+ micromark-extension-mdxjs "^3.0.0"
+
+remark-parse@^11.0.0:
+ version "11.0.0"
+ resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-11.0.0.tgz#aa60743fcb37ebf6b069204eb4da304e40db45a1"
+ integrity sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-from-markdown "^2.0.0"
+ micromark-util-types "^2.0.0"
+ unified "^11.0.0"
+
+remark-rehype@^11.0.0:
+ version "11.1.0"
+ resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-11.1.0.tgz#d5f264f42bcbd4d300f030975609d01a1697ccdc"
+ integrity sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g==
+ dependencies:
+ "@types/hast" "^3.0.0"
+ "@types/mdast" "^4.0.0"
+ mdast-util-to-hast "^13.0.0"
+ unified "^11.0.0"
+ vfile "^6.0.0"
+
+remark-stringify@^11.0.0:
+ version "11.0.0"
+ resolved "https://registry.yarnpkg.com/remark-stringify/-/remark-stringify-11.0.0.tgz#4c5b01dd711c269df1aaae11743eb7e2e7636fd3"
+ integrity sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==
+ dependencies:
+ "@types/mdast" "^4.0.0"
+ mdast-util-to-markdown "^2.0.0"
+ unified "^11.0.0"
+
+renderkid@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a"
+ integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==
+ dependencies:
+ css-select "^4.1.3"
+ dom-converter "^0.2.0"
+ htmlparser2 "^6.1.0"
+ lodash "^4.17.21"
+ strip-ansi "^6.0.1"
+
+repeat-string@^1.0.0:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
+ integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==
+
+require-from-string@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
+ integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
+
+"require-like@>= 0.1.1":
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa"
+ integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==
+
+requires-port@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+ integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==
+
+resolve-alpn@^1.2.0:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9"
+ integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==
+
+resolve-from@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
+ integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
+
+resolve-pathname@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd"
+ integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==
+
+resolve@^1.14.2, resolve@^1.19.0:
+ version "1.22.8"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
+ integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
+ dependencies:
+ is-core-module "^2.13.0"
+ path-parse "^1.0.7"
+ supports-preserve-symlinks-flag "^1.0.0"
+
+resolve@^1.22.10:
+ version "1.22.10"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39"
+ integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==
+ dependencies:
+ is-core-module "^2.16.0"
+ path-parse "^1.0.7"
+ supports-preserve-symlinks-flag "^1.0.0"
+
+responselike@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/responselike/-/responselike-3.0.0.tgz#20decb6c298aff0dbee1c355ca95461d42823626"
+ integrity sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==
+ dependencies:
+ lowercase-keys "^3.0.0"
+
+retry@^0.13.1:
+ version "0.13.1"
+ resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658"
+ integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==
+
+reusify@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
+ integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
+
+rimraf@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
+ integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
+ dependencies:
+ glob "^7.1.3"
+
+robust-predicates@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771"
+ integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==
+
+roughjs@^4.6.6:
+ version "4.6.6"
+ resolved "https://registry.yarnpkg.com/roughjs/-/roughjs-4.6.6.tgz#1059f49a5e0c80dee541a005b20cc322b222158b"
+ integrity sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==
+ dependencies:
+ hachure-fill "^0.5.2"
+ path-data-parser "^0.1.0"
+ points-on-curve "^0.2.0"
+ points-on-path "^0.2.1"
+
+rtlcss@^4.1.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/rtlcss/-/rtlcss-4.2.0.tgz#627b08806bd6851adb4d0670b63919fb6a3ea038"
+ integrity sha512-AV+V3oOVvCrqyH5Q/6RuT1IDH1Xy5kJTkEWTWZPN5rdQ3HCFOd8SrbC7c6N5Y8bPpCfZSR6yYbUATXslvfvu5g==
+ dependencies:
+ escalade "^3.1.1"
+ picocolors "^1.0.0"
+ postcss "^8.4.21"
+ strip-json-comments "^3.1.1"
+
+run-parallel@^1.1.9:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
+ integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
+ dependencies:
+ queue-microtask "^1.2.2"
+
+rw@1:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4"
+ integrity sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==
+
+safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
+safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0:
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
+ integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
+
+"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0":
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
+ integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
+
+sax@^1.2.4:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f"
+ integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==
+
+scheduler@^0.23.2:
+ version "0.23.2"
+ resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3"
+ integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==
+ dependencies:
+ loose-envify "^1.1.0"
+
+schema-dts@^1.1.2:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/schema-dts/-/schema-dts-1.1.5.tgz#9237725d305bac3469f02b292a035107595dc324"
+ integrity sha512-RJr9EaCmsLzBX2NDiO5Z3ux2BVosNZN5jo0gWgsyKvxKIUL5R3swNvoorulAeL9kLB0iTSX7V6aokhla2m7xbg==
+
+schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe"
+ integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==
+ dependencies:
+ "@types/json-schema" "^7.0.8"
+ ajv "^6.12.5"
+ ajv-keywords "^3.5.2"
+
+schema-utils@^4.0.0, schema-utils@^4.0.1:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b"
+ integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==
+ dependencies:
+ "@types/json-schema" "^7.0.9"
+ ajv "^8.9.0"
+ ajv-formats "^2.1.1"
+ ajv-keywords "^5.1.0"
+
+schema-utils@^4.3.0, schema-utils@^4.3.2:
+ version "4.3.2"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.3.2.tgz#0c10878bf4a73fd2b1dfd14b9462b26788c806ae"
+ integrity sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==
+ dependencies:
+ "@types/json-schema" "^7.0.9"
+ ajv "^8.9.0"
+ ajv-formats "^2.1.1"
+ ajv-keywords "^5.1.0"
+
+section-matter@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/section-matter/-/section-matter-1.0.0.tgz#e9041953506780ec01d59f292a19c7b850b84167"
+ integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==
+ dependencies:
+ extend-shallow "^2.0.1"
+ kind-of "^6.0.0"
+
+select-hose@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
+ integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==
+
+selfsigned@^2.1.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.4.1.tgz#560d90565442a3ed35b674034cec4e95dceb4ae0"
+ integrity sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==
+ dependencies:
+ "@types/node-forge" "^1.3.0"
+ node-forge "^1"
+
+semver-diff@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-4.0.0.tgz#3afcf5ed6d62259f5c72d0d5d50dffbdc9680df5"
+ integrity sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==
+ dependencies:
+ semver "^7.3.5"
+
+semver@^6.3.1:
+ version "6.3.1"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
+ integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
+
+semver@^7.3.5, semver@^7.3.7, semver@^7.5.4:
+ version "7.6.3"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
+ integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
+
+send@0.18.0:
+ version "0.18.0"
+ resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
+ integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
+ dependencies:
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ fresh "0.5.2"
+ http-errors "2.0.0"
+ mime "1.6.0"
+ ms "2.1.3"
+ on-finished "2.4.1"
+ range-parser "~1.2.1"
+ statuses "2.0.1"
+
+serialize-javascript@^6.0.0, serialize-javascript@^6.0.1, serialize-javascript@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2"
+ integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==
+ dependencies:
+ randombytes "^2.1.0"
+
+serve-handler@^6.1.6:
+ version "6.1.6"
+ resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-6.1.6.tgz#50803c1d3e947cd4a341d617f8209b22bd76cfa1"
+ integrity sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ==
+ dependencies:
+ bytes "3.0.0"
+ content-disposition "0.5.2"
+ mime-types "2.1.18"
+ minimatch "3.1.2"
+ path-is-inside "1.0.2"
+ path-to-regexp "3.3.0"
+ range-parser "1.2.0"
+
+serve-index@^1.9.1:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239"
+ integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==
+ dependencies:
+ accepts "~1.3.4"
+ batch "0.6.1"
+ debug "2.6.9"
+ escape-html "~1.0.3"
+ http-errors "~1.6.2"
+ mime-types "~2.1.17"
+ parseurl "~1.3.2"
+
+serve-static@1.15.0:
+ version "1.15.0"
+ resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540"
+ integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
+ dependencies:
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ parseurl "~1.3.3"
+ send "0.18.0"
+
+set-function-length@^1.2.1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449"
+ integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==
+ dependencies:
+ define-data-property "^1.1.4"
+ es-errors "^1.3.0"
+ function-bind "^1.1.2"
+ get-intrinsic "^1.2.4"
+ gopd "^1.0.1"
+ has-property-descriptors "^1.0.2"
+
+setprototypeof@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
+ integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==
+
+setprototypeof@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
+ integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
+
+shallow-clone@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3"
+ integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==
+ dependencies:
+ kind-of "^6.0.2"
+
+shallowequal@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8"
+ integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==
+
+shebang-command@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
+ integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+ dependencies:
+ shebang-regex "^3.0.0"
+
+shebang-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
+ integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
+
+shell-quote@^1.8.1:
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680"
+ integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==
+
+side-channel@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2"
+ integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==
+ dependencies:
+ call-bind "^1.0.7"
+ es-errors "^1.3.0"
+ get-intrinsic "^1.2.4"
+ object-inspect "^1.13.1"
+
+signal-exit@^3.0.2, signal-exit@^3.0.3:
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
+ integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
+
+sirv@^2.0.3:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.4.tgz#5dd9a725c578e34e449f332703eb2a74e46a29b0"
+ integrity sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==
+ dependencies:
+ "@polka/url" "^1.0.0-next.24"
+ mrmime "^2.0.0"
+ totalist "^3.0.0"
+
+sisteransi@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
+ integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==
+
+sitemap@^7.1.1:
+ version "7.1.2"
+ resolved "https://registry.yarnpkg.com/sitemap/-/sitemap-7.1.2.tgz#6ce1deb43f6f177c68bc59cf93632f54e3ae6b72"
+ integrity sha512-ARCqzHJ0p4gWt+j7NlU5eDlIO9+Rkr/JhPFZKKQ1l5GCus7rJH4UdrlVAh0xC/gDS/Qir2UMxqYNHtsKr2rpCw==
+ dependencies:
+ "@types/node" "^17.0.5"
+ "@types/sax" "^1.2.1"
+ arg "^5.0.0"
+ sax "^1.2.4"
+
+skin-tone@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/skin-tone/-/skin-tone-2.0.0.tgz#4e3933ab45c0d4f4f781745d64b9f4c208e41237"
+ integrity sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==
+ dependencies:
+ unicode-emoji-modifier-base "^1.0.0"
+
+slash@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
+ integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
+
+slash@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7"
+ integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==
+
+snake-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c"
+ integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
+sockjs@^0.3.24:
+ version "0.3.24"
+ resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce"
+ integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==
+ dependencies:
+ faye-websocket "^0.11.3"
+ uuid "^8.3.2"
+ websocket-driver "^0.7.4"
+
+sort-css-media-queries@2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.2.0.tgz#aa33cf4a08e0225059448b6c40eddbf9f1c8334c"
+ integrity sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA==
+
+source-map-js@^1.0.1, source-map-js@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
+ integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==
+
+source-map-js@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
+ integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==
+
+source-map-support@~0.5.20:
+ version "0.5.21"
+ resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+ integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
+ dependencies:
+ buffer-from "^1.0.0"
+ source-map "^0.6.0"
+
+source-map@^0.5.7:
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
+ integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==
+
+source-map@^0.6.0, source-map@~0.6.0:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+ integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
+source-map@^0.7.0:
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656"
+ integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==
+
+space-separated-tokens@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f"
+ integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==
+
+spdy-transport@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31"
+ integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==
+ dependencies:
+ debug "^4.1.0"
+ detect-node "^2.0.4"
+ hpack.js "^2.1.6"
+ obuf "^1.1.2"
+ readable-stream "^3.0.6"
+ wbuf "^1.7.3"
+
+spdy@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b"
+ integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==
+ dependencies:
+ debug "^4.1.0"
+ handle-thing "^2.0.0"
+ http-deceiver "^1.2.7"
+ select-hose "^2.0.0"
+ spdy-transport "^3.0.0"
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
+
+srcset@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/srcset/-/srcset-4.0.0.tgz#336816b665b14cd013ba545b6fe62357f86e65f4"
+ integrity sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw==
+
+statuses@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
+ integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
+
+"statuses@>= 1.4.0 < 2":
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
+ integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==
+
+std-env@^3.7.0:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.9.0.tgz#1a6f7243b339dca4c9fd55e1c7504c77ef23e8f1"
+ integrity sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==
+
+string-width@^4.1.0, string-width@^4.2.0:
+ version "4.2.3"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+ integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+ dependencies:
+ emoji-regex "^8.0.0"
+ is-fullwidth-code-point "^3.0.0"
+ strip-ansi "^6.0.1"
+
+string-width@^5.0.1, string-width@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794"
+ integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==
+ dependencies:
+ eastasianwidth "^0.2.0"
+ emoji-regex "^9.2.2"
+ strip-ansi "^7.0.1"
+
+string_decoder@^1.1.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+ integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
+ dependencies:
+ safe-buffer "~5.2.0"
+
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
+stringify-entities@^4.0.0:
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-4.0.4.tgz#b3b79ef5f277cc4ac73caeb0236c5ba939b3a4f3"
+ integrity sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==
+ dependencies:
+ character-entities-html4 "^2.0.0"
+ character-entities-legacy "^3.0.0"
+
+stringify-object@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629"
+ integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==
+ dependencies:
+ get-own-enumerable-property-symbols "^3.0.0"
+ is-obj "^1.0.1"
+ is-regexp "^1.0.0"
+
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+ integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+ dependencies:
+ ansi-regex "^5.0.1"
+
+strip-ansi@^7.0.1:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45"
+ integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==
+ dependencies:
+ ansi-regex "^6.0.1"
+
+strip-bom-string@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92"
+ integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==
+
+strip-final-newline@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
+ integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
+
+strip-json-comments@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
+ integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+strip-json-comments@~2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
+ integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==
+
+style-to-object@^0.4.0:
+ version "0.4.4"
+ resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.4.4.tgz#266e3dfd56391a7eefb7770423612d043c3f33ec"
+ integrity sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==
+ dependencies:
+ inline-style-parser "0.1.1"
+
+style-to-object@^1.0.0:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-1.0.6.tgz#0c28aed8be1813d166c60d962719b2907c26547b"
+ integrity sha512-khxq+Qm3xEyZfKd/y9L3oIWQimxuc4STrQKtQn8aSDRHb8mFgpukgX1hdzfrMEW6JCjyJ8p89x+IUMVnCBI1PA==
+ dependencies:
+ inline-style-parser "0.2.3"
+
+stylehacks@^6.1.1:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-6.1.1.tgz#543f91c10d17d00a440430362d419f79c25545a6"
+ integrity sha512-gSTTEQ670cJNoaeIp9KX6lZmm8LJ3jPB5yJmX8Zq/wQxOsAFXV3qjWzHas3YYk1qesuVIyYWWUpZ0vSE/dTSGg==
+ dependencies:
+ browserslist "^4.23.0"
+ postcss-selector-parser "^6.0.16"
+
+stylis@4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.2.0.tgz#79daee0208964c8fe695a42fcffcac633a211a51"
+ integrity sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==
+
+stylis@^4.3.6:
+ version "4.3.6"
+ resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.3.6.tgz#7c7b97191cb4f195f03ecab7d52f7902ed378320"
+ integrity sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==
+
+supports-color@^5.3.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
+ integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^7.1.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+ integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-color@^8.0.0:
+ version "8.1.1"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c"
+ integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-preserve-symlinks-flag@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
+ integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+
+svg-parser@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5"
+ integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==
+
+svgo@^3.0.2, svgo@^3.2.0:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.3.2.tgz#ad58002652dffbb5986fc9716afe52d869ecbda8"
+ integrity sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==
+ dependencies:
+ "@trysound/sax" "0.2.0"
+ commander "^7.2.0"
+ css-select "^5.1.0"
+ css-tree "^2.3.1"
+ css-what "^6.1.0"
+ csso "^5.0.5"
+ picocolors "^1.0.0"
+
+tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0, tapable@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0"
+ integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==
+
+terser-webpack-plugin@^5.3.10, terser-webpack-plugin@^5.3.9:
+ version "5.3.10"
+ resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199"
+ integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==
+ dependencies:
+ "@jridgewell/trace-mapping" "^0.3.20"
+ jest-worker "^27.4.5"
+ schema-utils "^3.1.1"
+ serialize-javascript "^6.0.1"
+ terser "^5.26.0"
+
+terser-webpack-plugin@^5.3.11:
+ version "5.3.14"
+ resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.14.tgz#9031d48e57ab27567f02ace85c7d690db66c3e06"
+ integrity sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==
+ dependencies:
+ "@jridgewell/trace-mapping" "^0.3.25"
+ jest-worker "^27.4.5"
+ schema-utils "^4.3.0"
+ serialize-javascript "^6.0.2"
+ terser "^5.31.1"
+
+terser@^5.10.0, terser@^5.15.1, terser@^5.26.0:
+ version "5.31.5"
+ resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.5.tgz#e48b7c65f32d2808e7dad803e4586a0bc3829b87"
+ integrity sha512-YPmas0L0rE1UyLL/llTWA0SiDOqIcAQYLeUj7cJYzXHlRTAnMSg9pPe4VJ5PlKvTrPQsdVFuiRiwyeNlYgwh2Q==
+ dependencies:
+ "@jridgewell/source-map" "^0.3.3"
+ acorn "^8.8.2"
+ commander "^2.20.0"
+ source-map-support "~0.5.20"
+
+terser@^5.31.1:
+ version "5.43.1"
+ resolved "https://registry.yarnpkg.com/terser/-/terser-5.43.1.tgz#88387f4f9794ff1a29e7ad61fb2932e25b4fdb6d"
+ integrity sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg==
+ dependencies:
+ "@jridgewell/source-map" "^0.3.3"
+ acorn "^8.14.0"
+ commander "^2.20.0"
+ source-map-support "~0.5.20"
+
+thunky@^1.0.2:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
+ integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==
+
+tiny-invariant@^1.0.2:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.3.tgz#46680b7a873a0d5d10005995eb90a70d74d60127"
+ integrity sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==
+
+tiny-warning@^1.0.0, tiny-warning@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754"
+ integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
+
+tinyexec@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/tinyexec/-/tinyexec-1.0.1.tgz#70c31ab7abbb4aea0a24f55d120e5990bfa1e0b1"
+ integrity sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==
+
+tinypool@^1.0.2:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/tinypool/-/tinypool-1.1.1.tgz#059f2d042bd37567fbc017d3d426bdd2a2612591"
+ integrity sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==
+
+to-fast-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
+ integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==
+
+to-regex-range@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+ integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+ dependencies:
+ is-number "^7.0.0"
+
+toidentifier@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
+ integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
+
+totalist@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8"
+ integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==
+
+trim-lines@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338"
+ integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==
+
+trough@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/trough/-/trough-2.2.0.tgz#94a60bd6bd375c152c1df911a4b11d5b0256f50f"
+ integrity sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==
+
+ts-dedent@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/ts-dedent/-/ts-dedent-2.2.0.tgz#39e4bd297cd036292ae2394eb3412be63f563bb5"
+ integrity sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==
+
+tslib@^2.0.3, tslib@^2.6.0:
+ version "2.6.3"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0"
+ integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==
+
+type-fest@^0.21.3:
+ version "0.21.3"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37"
+ integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==
+
+type-fest@^1.0.1:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1"
+ integrity sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==
+
+type-fest@^2.13.0, type-fest@^2.5.0:
+ version "2.19.0"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b"
+ integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==
+
+type-is@~1.6.18:
+ version "1.6.18"
+ resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
+ integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
+ dependencies:
+ media-typer "0.3.0"
+ mime-types "~2.1.24"
+
+typedarray-to-buffer@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
+ integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
+ dependencies:
+ is-typedarray "^1.0.0"
+
+ufo@^1.5.4:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.6.1.tgz#ac2db1d54614d1b22c1d603e3aef44a85d8f146b"
+ integrity sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==
+
+undici-types@~6.13.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.13.0.tgz#e3e79220ab8c81ed1496b5812471afd7cf075ea5"
+ integrity sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==
+
+unicode-canonical-property-names-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc"
+ integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==
+
+unicode-emoji-modifier-base@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-emoji-modifier-base/-/unicode-emoji-modifier-base-1.0.0.tgz#dbbd5b54ba30f287e2a8d5a249da6c0cef369459"
+ integrity sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==
+
+unicode-match-property-ecmascript@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3"
+ integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==
+ dependencies:
+ unicode-canonical-property-names-ecmascript "^2.0.0"
+ unicode-property-aliases-ecmascript "^2.0.0"
+
+unicode-match-property-value-ecmascript@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0"
+ integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==
+
+unicode-property-aliases-ecmascript@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd"
+ integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==
+
+unified@^11.0.0, unified@^11.0.3, unified@^11.0.4:
+ version "11.0.5"
+ resolved "https://registry.yarnpkg.com/unified/-/unified-11.0.5.tgz#f66677610a5c0a9ee90cab2b8d4d66037026d9e1"
+ integrity sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ bail "^2.0.0"
+ devlop "^1.0.0"
+ extend "^3.0.0"
+ is-plain-obj "^4.0.0"
+ trough "^2.0.0"
+ vfile "^6.0.0"
+
+unique-string@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-3.0.0.tgz#84a1c377aff5fd7a8bc6b55d8244b2bd90d75b9a"
+ integrity sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==
+ dependencies:
+ crypto-random-string "^4.0.0"
+
+unist-util-is@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424"
+ integrity sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
+unist-util-position-from-estree@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz#d94da4df596529d1faa3de506202f0c9a23f2200"
+ integrity sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
+unist-util-position@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4"
+ integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
+unist-util-remove-position@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz#fea68a25658409c9460408bc6b4991b965b52163"
+ integrity sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-visit "^5.0.0"
+
+unist-util-stringify-position@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2"
+ integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==
+ dependencies:
+ "@types/unist" "^3.0.0"
+
+unist-util-visit-parents@^6.0.0:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815"
+ integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-is "^6.0.0"
+
+unist-util-visit@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6"
+ integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-is "^6.0.0"
+ unist-util-visit-parents "^6.0.0"
+
+universalify@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d"
+ integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==
+
+unpipe@1.0.0, unpipe@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
+ integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
+
+update-browserslist-db@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e"
+ integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==
+ dependencies:
+ escalade "^3.1.2"
+ picocolors "^1.0.1"
+
+update-browserslist-db@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz#348377dd245216f9e7060ff50b15a1b740b75420"
+ integrity sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==
+ dependencies:
+ escalade "^3.2.0"
+ picocolors "^1.1.1"
+
+update-notifier@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-6.0.2.tgz#a6990253dfe6d5a02bd04fbb6a61543f55026b60"
+ integrity sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==
+ dependencies:
+ boxen "^7.0.0"
+ chalk "^5.0.1"
+ configstore "^6.0.0"
+ has-yarn "^3.0.0"
+ import-lazy "^4.0.0"
+ is-ci "^3.0.1"
+ is-installed-globally "^0.4.0"
+ is-npm "^6.0.0"
+ is-yarn-global "^0.4.0"
+ latest-version "^7.0.0"
+ pupa "^3.1.0"
+ semver "^7.3.7"
+ semver-diff "^4.0.0"
+ xdg-basedir "^5.1.0"
+
+uri-js@^4.2.2:
+ version "4.4.1"
+ resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
+ integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
+ dependencies:
+ punycode "^2.1.0"
+
+url-loader@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.1.tgz#28505e905cae158cf07c92ca622d7f237e70a4e2"
+ integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==
+ dependencies:
+ loader-utils "^2.0.0"
+ mime-types "^2.1.27"
+ schema-utils "^3.0.0"
+
+util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+ integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
+
+utila@~0.4:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
+ integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==
+
+utility-types@^3.10.0:
+ version "3.11.0"
+ resolved "https://registry.yarnpkg.com/utility-types/-/utility-types-3.11.0.tgz#607c40edb4f258915e901ea7995607fdf319424c"
+ integrity sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==
+
+utils-merge@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+ integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
+
+uuid@^11.1.0:
+ version "11.1.0"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.1.0.tgz#9549028be1753bb934fc96e2bca09bb4105ae912"
+ integrity sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==
+
+uuid@^8.3.2:
+ version "8.3.2"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
+ integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+
+value-equal@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c"
+ integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==
+
+vary@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
+ integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
+
+vfile-location@^5.0.0:
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.3.tgz#cb9eacd20f2b6426d19451e0eafa3d0a846225c3"
+ integrity sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ vfile "^6.0.0"
+
+vfile-message@^4.0.0:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181"
+ integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-stringify-position "^4.0.0"
+
+vfile@^6.0.0, vfile@^6.0.1:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.2.tgz#ef49548ea3d270097a67011921411130ceae7deb"
+ integrity sha512-zND7NlS8rJYb/sPqkb13ZvbbUoExdbi4w3SfRrMq6R3FvnLQmmfpajJNITuuYm6AZ5uao9vy4BAos3EXBPf2rg==
+ dependencies:
+ "@types/unist" "^3.0.0"
+ unist-util-stringify-position "^4.0.0"
+ vfile-message "^4.0.0"
+
+vscode-jsonrpc@8.2.0:
+ version "8.2.0"
+ resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz#f43dfa35fb51e763d17cd94dcca0c9458f35abf9"
+ integrity sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==
+
+vscode-languageserver-protocol@3.17.5:
+ version "3.17.5"
+ resolved "https://registry.yarnpkg.com/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz#864a8b8f390835572f4e13bd9f8313d0e3ac4bea"
+ integrity sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==
+ dependencies:
+ vscode-jsonrpc "8.2.0"
+ vscode-languageserver-types "3.17.5"
+
+vscode-languageserver-textdocument@~1.0.11:
+ version "1.0.12"
+ resolved "https://registry.yarnpkg.com/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz#457ee04271ab38998a093c68c2342f53f6e4a631"
+ integrity sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==
+
+vscode-languageserver-types@3.17.5:
+ version "3.17.5"
+ resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz#3273676f0cf2eab40b3f44d085acbb7f08a39d8a"
+ integrity sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==
+
+vscode-languageserver@~9.0.1:
+ version "9.0.1"
+ resolved "https://registry.yarnpkg.com/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz#500aef82097eb94df90d008678b0b6b5f474015b"
+ integrity sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==
+ dependencies:
+ vscode-languageserver-protocol "3.17.5"
+
+vscode-uri@~3.0.8:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/vscode-uri/-/vscode-uri-3.0.8.tgz#1770938d3e72588659a172d0fd4642780083ff9f"
+ integrity sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==
+
+watchpack@^2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.1.tgz#29308f2cac150fa8e4c92f90e0ec954a9fed7fff"
+ integrity sha512-8wrBCMtVhqcXP2Sup1ctSkga6uc2Bx0IIvKyT7yTFier5AXHooSI+QyQQAtTb7+E0IUCCKyTFmXqdqgum2XWGg==
+ dependencies:
+ glob-to-regexp "^0.4.1"
+ graceful-fs "^4.1.2"
+
+wbuf@^1.1.0, wbuf@^1.7.3:
+ version "1.7.3"
+ resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df"
+ integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==
+ dependencies:
+ minimalistic-assert "^1.0.0"
+
+web-namespaces@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-2.0.1.tgz#1010ff7c650eccb2592cebeeaf9a1b253fd40692"
+ integrity sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==
+
+webpack-bundle-analyzer@^4.10.2:
+ version "4.10.2"
+ resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.10.2.tgz#633af2862c213730be3dbdf40456db171b60d5bd"
+ integrity sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw==
+ dependencies:
+ "@discoveryjs/json-ext" "0.5.7"
+ acorn "^8.0.4"
+ acorn-walk "^8.0.0"
+ commander "^7.2.0"
+ debounce "^1.2.1"
+ escape-string-regexp "^4.0.0"
+ gzip-size "^6.0.0"
+ html-escaper "^2.0.2"
+ opener "^1.5.2"
+ picocolors "^1.0.0"
+ sirv "^2.0.3"
+ ws "^7.3.1"
+
+webpack-dev-middleware@^5.3.4:
+ version "5.3.4"
+ resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517"
+ integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==
+ dependencies:
+ colorette "^2.0.10"
+ memfs "^3.4.3"
+ mime-types "^2.1.31"
+ range-parser "^1.2.1"
+ schema-utils "^4.0.0"
+
+webpack-dev-server@^4.15.2:
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.2.tgz#9e0c70a42a012560860adb186986da1248333173"
+ integrity sha512-0XavAZbNJ5sDrCbkpWL8mia0o5WPOd2YGtxrEiZkBK9FjLppIUK2TgxK6qGD2P3hUXTJNNPVibrerKcx5WkR1g==
+ dependencies:
+ "@types/bonjour" "^3.5.9"
+ "@types/connect-history-api-fallback" "^1.3.5"
+ "@types/express" "^4.17.13"
+ "@types/serve-index" "^1.9.1"
+ "@types/serve-static" "^1.13.10"
+ "@types/sockjs" "^0.3.33"
+ "@types/ws" "^8.5.5"
+ ansi-html-community "^0.0.8"
+ bonjour-service "^1.0.11"
+ chokidar "^3.5.3"
+ colorette "^2.0.10"
+ compression "^1.7.4"
+ connect-history-api-fallback "^2.0.0"
+ default-gateway "^6.0.3"
+ express "^4.17.3"
+ graceful-fs "^4.2.6"
+ html-entities "^2.3.2"
+ http-proxy-middleware "^2.0.3"
+ ipaddr.js "^2.0.1"
+ launch-editor "^2.6.0"
+ open "^8.0.9"
+ p-retry "^4.5.0"
+ rimraf "^3.0.2"
+ schema-utils "^4.0.0"
+ selfsigned "^2.1.1"
+ serve-index "^1.9.1"
+ sockjs "^0.3.24"
+ spdy "^4.0.2"
+ webpack-dev-middleware "^5.3.4"
+ ws "^8.13.0"
+
+webpack-merge@^5.9.0:
+ version "5.10.0"
+ resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.10.0.tgz#a3ad5d773241e9c682803abf628d4cd62b8a4177"
+ integrity sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==
+ dependencies:
+ clone-deep "^4.0.1"
+ flat "^5.0.2"
+ wildcard "^2.0.0"
+
+webpack-merge@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-6.0.1.tgz#50c776868e080574725abc5869bd6e4ef0a16c6a"
+ integrity sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==
+ dependencies:
+ clone-deep "^4.0.1"
+ flat "^5.0.2"
+ wildcard "^2.0.1"
+
+webpack-sources@^3.2.3:
+ version "3.2.3"
+ resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde"
+ integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==
+
+webpack-sources@^3.3.3:
+ version "3.3.3"
+ resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.3.3.tgz#d4bf7f9909675d7a070ff14d0ef2a4f3c982c723"
+ integrity sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==
+
+webpack@^5.88.1:
+ version "5.93.0"
+ resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.93.0.tgz#2e89ec7035579bdfba9760d26c63ac5c3462a5e5"
+ integrity sha512-Y0m5oEY1LRuwly578VqluorkXbvXKh7U3rLoQCEO04M97ScRr44afGVkI0FQFsXzysk5OgFAxjZAb9rsGQVihA==
+ dependencies:
+ "@types/eslint-scope" "^3.7.3"
+ "@types/estree" "^1.0.5"
+ "@webassemblyjs/ast" "^1.12.1"
+ "@webassemblyjs/wasm-edit" "^1.12.1"
+ "@webassemblyjs/wasm-parser" "^1.12.1"
+ acorn "^8.7.1"
+ acorn-import-attributes "^1.9.5"
+ browserslist "^4.21.10"
+ chrome-trace-event "^1.0.2"
+ enhanced-resolve "^5.17.0"
+ es-module-lexer "^1.2.1"
+ eslint-scope "5.1.1"
+ events "^3.2.0"
+ glob-to-regexp "^0.4.1"
+ graceful-fs "^4.2.11"
+ json-parse-even-better-errors "^2.3.1"
+ loader-runner "^4.2.0"
+ mime-types "^2.1.27"
+ neo-async "^2.6.2"
+ schema-utils "^3.2.0"
+ tapable "^2.1.1"
+ terser-webpack-plugin "^5.3.10"
+ watchpack "^2.4.1"
+ webpack-sources "^3.2.3"
+
+webpack@^5.95.0:
+ version "5.100.2"
+ resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.100.2.tgz#e2341facf9f7de1d702147c91bcb65b693adf9e8"
+ integrity sha512-QaNKAvGCDRh3wW1dsDjeMdDXwZm2vqq3zn6Pvq4rHOEOGSaUMgOOjG2Y9ZbIGzpfkJk9ZYTHpDqgDfeBDcnLaw==
+ dependencies:
+ "@types/eslint-scope" "^3.7.7"
+ "@types/estree" "^1.0.8"
+ "@types/json-schema" "^7.0.15"
+ "@webassemblyjs/ast" "^1.14.1"
+ "@webassemblyjs/wasm-edit" "^1.14.1"
+ "@webassemblyjs/wasm-parser" "^1.14.1"
+ acorn "^8.15.0"
+ acorn-import-phases "^1.0.3"
+ browserslist "^4.24.0"
+ chrome-trace-event "^1.0.2"
+ enhanced-resolve "^5.17.2"
+ es-module-lexer "^1.2.1"
+ eslint-scope "5.1.1"
+ events "^3.2.0"
+ glob-to-regexp "^0.4.1"
+ graceful-fs "^4.2.11"
+ json-parse-even-better-errors "^2.3.1"
+ loader-runner "^4.2.0"
+ mime-types "^2.1.27"
+ neo-async "^2.6.2"
+ schema-utils "^4.3.2"
+ tapable "^2.1.1"
+ terser-webpack-plugin "^5.3.11"
+ watchpack "^2.4.1"
+ webpack-sources "^3.3.3"
+
+webpackbar@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/webpackbar/-/webpackbar-6.0.1.tgz#5ef57d3bf7ced8b19025477bc7496ea9d502076b"
+ integrity sha512-TnErZpmuKdwWBdMoexjio3KKX6ZtoKHRVvLIU0A47R0VVBDtx3ZyOJDktgYixhoJokZTYTt1Z37OkO9pnGJa9Q==
+ dependencies:
+ ansi-escapes "^4.3.2"
+ chalk "^4.1.2"
+ consola "^3.2.3"
+ figures "^3.2.0"
+ markdown-table "^2.0.0"
+ pretty-time "^1.1.0"
+ std-env "^3.7.0"
+ wrap-ansi "^7.0.0"
+
+websocket-driver@>=0.5.1, websocket-driver@^0.7.4:
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760"
+ integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==
+ dependencies:
+ http-parser-js ">=0.5.1"
+ safe-buffer ">=5.1.0"
+ websocket-extensions ">=0.1.1"
+
+websocket-extensions@>=0.1.1:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42"
+ integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==
+
+which@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
+ integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+ dependencies:
+ isexe "^2.0.0"
+
+widest-line@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-4.0.1.tgz#a0fc673aaba1ea6f0a0d35b3c2795c9a9cc2ebf2"
+ integrity sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==
+ dependencies:
+ string-width "^5.0.1"
+
+wildcard@^2.0.0, wildcard@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67"
+ integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==
+
+wrap-ansi@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+ integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
+wrap-ansi@^8.0.1, wrap-ansi@^8.1.0:
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"
+ integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==
+ dependencies:
+ ansi-styles "^6.1.0"
+ string-width "^5.0.1"
+ strip-ansi "^7.0.1"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+ integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
+
+write-file-atomic@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
+ integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
+ dependencies:
+ imurmurhash "^0.1.4"
+ is-typedarray "^1.0.0"
+ signal-exit "^3.0.2"
+ typedarray-to-buffer "^3.1.5"
+
+ws@^7.3.1:
+ version "7.5.10"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
+ integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
+
+ws@^8.13.0:
+ version "8.18.0"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc"
+ integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==
+
+xdg-basedir@^5.0.1, xdg-basedir@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9"
+ integrity sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==
+
+xml-js@^1.6.11:
+ version "1.6.11"
+ resolved "https://registry.yarnpkg.com/xml-js/-/xml-js-1.6.11.tgz#927d2f6947f7f1c19a316dd8eea3614e8b18f8e9"
+ integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==
+ dependencies:
+ sax "^1.2.4"
+
+yallist@^3.0.2:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
+ integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
+
+yaml@^1.10.0:
+ version "1.10.2"
+ resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"
+ integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==
+
+yocto-queue@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.1.1.tgz#fef65ce3ac9f8a32ceac5a634f74e17e5b232110"
+ integrity sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==
+
+zwitch@^2.0.0:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7"
+ integrity sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==
diff --git a/src/app.rs b/src/app.rs
new file mode 100644
index 0000000..01a9b12
--- /dev/null
+++ b/src/app.rs
@@ -0,0 +1,128 @@
+// app.rs
+
+//! # StackQL Deploy Application Constants
+//!
+//! This module defines various constants and configuration values for the StackQL Deploy application.
+//! It includes general application metadata, default settings, supported providers, and paths to templates.
+//!
+//! ## Usage Example
+//! ```rust
+//! use crate::app::{APP_NAME, APP_VERSION, DEFAULT_SERVER_HOST, DEFAULT_SERVER_PORT};
+//!
+//! println!("{} v{} running on {}:{}",
+//! APP_NAME, APP_VERSION, DEFAULT_SERVER_HOST, DEFAULT_SERVER_PORT
+//! );
+//! ```
+//!
+//! This module also contains sub-modules for template-related constants specific to
+//! AWS, Azure, and Google platforms.
+
+/// Application name
+pub const APP_NAME: &str = "stackql-deploy";
+
+/// Application version
+pub const APP_VERSION: &str = "0.1.0";
+
+/// Application author
+pub const APP_AUTHOR: &str = "Jeffrey Aven ";
+
+/// Application description
+pub const APP_DESCRIPTION: &str = "Model driven IaC using stackql";
+
+/// Default server host
+pub const DEFAULT_SERVER_HOST: &str = "localhost";
+
+/// Default StackQL (PostgreSQL protocol) server port
+pub const DEFAULT_SERVER_PORT: u16 = 5444;
+
+/// Default StackQL (PostgreSQL protocol) server port as a string
+pub const DEFAULT_SERVER_PORT_STR: &str = "5444";
+
+/// Local server addresses
+pub const LOCAL_SERVER_ADDRESSES: [&str; 3] = ["localhost", "0.0.0.0", "127.0.0.1"];
+
+/// Default log file name
+pub const DEFAULT_LOG_FILE: &str = "stackql.log";
+
+/// Default log level
+pub const LOG_LEVELS: &[&str] = &["trace", "debug", "info", "warn", "error"];
+
+/// Default log level for the application
+pub const DEFAULT_LOG_LEVEL: &str = "info";
+
+/// Supported cloud providers for the `--provider` argument in the `init` command
+pub const SUPPORTED_PROVIDERS: [&str; 3] = ["aws", "google", "azure"];
+
+/// Default provider for `init` command
+pub const DEFAULT_PROVIDER: &str = "azure";
+
+/// StackQL binary name (platform dependent)
+#[cfg_attr(
+ target_os = "windows",
+ doc = "StackQL binary name (platform dependent)"
+)]
+#[cfg(target_os = "windows")]
+pub const STACKQL_BINARY_NAME: &str = "stackql.exe";
+
+#[cfg_attr(
+ not(target_os = "windows"),
+ doc = "StackQL binary name (platform dependent)"
+)]
+#[cfg(not(target_os = "windows"))]
+pub const STACKQL_BINARY_NAME: &str = "stackql";
+
+/// StackQL download URLs by platform
+#[cfg_attr(
+ target_os = "windows",
+ doc = "StackQL download URL (platform dependent)"
+)]
+#[cfg(target_os = "windows")]
+pub const STACKQL_DOWNLOAD_URL: &str =
+ "https://releases.stackql.io/stackql/latest/stackql_windows_amd64.zip";
+
+#[cfg_attr(target_os = "linux", doc = "StackQL download URL (platform dependent)")]
+#[cfg(target_os = "linux")]
+pub const STACKQL_DOWNLOAD_URL: &str =
+ "https://releases.stackql.io/stackql/latest/stackql_linux_amd64.zip";
+
+#[cfg_attr(target_os = "macos", doc = "StackQL download URL (platform dependent)")]
+#[cfg(target_os = "macos")]
+pub const STACKQL_DOWNLOAD_URL: &str =
+ "https://storage.googleapis.com/stackql-public-releases/latest/stackql_darwin_multiarch.pkg";
+
+/// Commands exempt from binary check
+pub const EXEMPT_COMMANDS: [&str; 1] = ["init"];
+
+/// The base URL for GitHub template repository
+pub const GITHUB_TEMPLATE_BASE: &str =
+ "https://raw.githubusercontent.com/stackql/stackql-deploy-rust/main/template-hub/";
+
+/// Template constants for AWS
+pub mod aws_templates {
+ pub const RESOURCE_TEMPLATE: &str =
+ include_str!("../template-hub/aws/starter/resources/example_vpc.iql.template");
+ pub const MANIFEST_TEMPLATE: &str =
+ include_str!("../template-hub/aws/starter/stackql_manifest.yml.template");
+ pub const README_TEMPLATE: &str =
+ include_str!("../template-hub/aws/starter/README.md.template");
+}
+
+/// Template constants for Azure
+pub mod azure_templates {
+ pub const RESOURCE_TEMPLATE: &str =
+ include_str!("../template-hub/azure/starter/resources/example_res_grp.iql.template");
+ pub const MANIFEST_TEMPLATE: &str =
+ include_str!("../template-hub/azure/starter/stackql_manifest.yml.template");
+ pub const README_TEMPLATE: &str =
+ include_str!("../template-hub/azure/starter/README.md.template");
+}
+
+/// Template constants for Google
+pub mod google_templates {
+ pub const RESOURCE_TEMPLATE: &str =
+ include_str!("../template-hub/google/starter/resources/example_vpc.iql.template");
+ pub const MANIFEST_TEMPLATE: &str =
+ include_str!("../template-hub/google/starter/stackql_manifest.yml.template");
+ pub const README_TEMPLATE: &str =
+ include_str!("../template-hub/google/starter/README.md.template");
+}
diff --git a/src/commands/build.rs b/src/commands/build.rs
index 84440e7..69c0619 100644
--- a/src/commands/build.rs
+++ b/src/commands/build.rs
@@ -1,26 +1,81 @@
+// commands/build.rs
+
+//! # Build Command Module
+//!
+//! This module handles the `build` command, which is responsible for creating or updating resources
+//! within a specified stack environment.
+//!
+//! ## Features
+//! - Accepts a stack directory and environment as input arguments.
+//! - Displays a deployment message with the provided inputs.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy build /path/to/stack/production prod
+//! ```
+//! The above command deploys resources from the specified stack directory to the `prod` environment.
+
+use clap::{ArgMatches, Command};
+
+use crate::commands::common_args::{
+ dry_run, env_file, env_var, log_level, on_failure, show_queries, stack_dir, stack_env,
+ FailureAction,
+};
use crate::utils::display::print_unicode_box;
-use clap::{Arg, ArgMatches, Command};
+use crate::utils::logging::initialize_logger;
+use log::{debug, info};
+/// Defines the `build` command for the CLI application.
pub fn command() -> Command {
Command::new("build")
.about("Create or update resources")
- .arg(
- Arg::new("stack_dir")
- .required(true)
- .help("Path to stack directory"),
- )
- .arg(
- Arg::new("stack_env")
- .required(true)
- .help("Environment to deploy"),
- )
+ .arg(stack_dir())
+ .arg(stack_env())
+ .arg(log_level())
+ .arg(env_file())
+ .arg(env_var())
+ .arg(dry_run())
+ .arg(show_queries())
+ .arg(on_failure())
}
+/// Executes the `build` command.
pub fn execute(matches: &ArgMatches) {
    let stack_dir = matches.get_one::<String>("stack_dir").unwrap();
    let stack_env = matches.get_one::<String>("stack_env").unwrap();
+
+ // Extract the common arguments
+    let log_level = matches.get_one::<String>("log-level").unwrap();
+    let env_file = matches.get_one::<String>("env-file").unwrap();
+    let env_vars = matches.get_many::<String>("env");
+    let dry_run = matches.get_flag("dry-run");
+    let show_queries = matches.get_flag("show-queries");
+    let on_failure = matches.get_one::<FailureAction>("on-failure").unwrap();
+
+ // Initialize the logger
+ initialize_logger(log_level);
+
print_unicode_box(&format!(
- "Deploying stack: [{}] to environment: [{}]",
+ "š Deploying stack: [{}] to environment: [{}]",
stack_dir, stack_env
));
+
+ info!("Stack Directory: {}", stack_dir);
+
+ println!("Log Level: {}", log_level);
+ debug!("Log Level: {}", log_level);
+ println!("Environment File: {}", env_file);
+
+ if let Some(vars) = env_vars {
+ println!("Environment Variables:");
+ for var in vars {
+ println!(" - {}", var);
+ }
+ }
+
+ println!("Dry Run: {}", dry_run);
+ println!("Show Queries: {}", show_queries);
+ println!("On Failure: {:?}", on_failure);
+
+ // Actual implementation would go here
}
diff --git a/src/commands/common_args.rs b/src/commands/common_args.rs
new file mode 100644
index 0000000..93d9424
--- /dev/null
+++ b/src/commands/common_args.rs
@@ -0,0 +1,129 @@
+// commands/common_args.rs
+
+//! # Common Command Arguments
+//!
+//! This module defines common command-line arguments that can be reused across
+//! different commands in the application.
+
+use clap::{value_parser, Arg, ArgAction, ArgMatches};
+use std::str::FromStr;
+
+/// Possible actions to take on failure
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum FailureAction {
+ Rollback,
+ Ignore,
+ Error,
+}
+
+impl FromStr for FailureAction {
+ type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s.to_lowercase().as_str() {
+ "rollback" => Ok(FailureAction::Rollback),
+ "ignore" => Ok(FailureAction::Ignore),
+ "error" => Ok(FailureAction::Error),
+ _ => Err(format!("Unknown failure action: {}", s)),
+ }
+ }
+}
+
+// Positional arguments
+/// Common positional argument for the stack directory
+pub fn stack_dir() -> Arg {
+ Arg::new("stack_dir")
+ .required(true)
+ .help("Path to the stack directory containing resources")
+}
+
+/// Common positional argument for the stack environment
+pub fn stack_env() -> Arg {
+ Arg::new("stack_env")
+ .required(true)
+ .help("Environment to deploy to (e.g., `prod`, `dev`, `test`)")
+}
+
+// Optional arguments
+/// Common argument for setting the log level
+pub fn log_level() -> Arg {
+ Arg::new("log-level")
+ .long("log-level")
+ .help("Set the logging level")
+ .default_value("INFO")
+ .value_parser(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
+}
+
+/// Common argument for specifying an environment file
+pub fn env_file() -> Arg {
+ Arg::new("env-file")
+ .long("env-file")
+ .help("Environment variables file")
+ .default_value(".env")
+}
+
+/// Common argument for setting additional environment variables
+pub fn env_var() -> Arg {
+ Arg::new("env")
+ .short('e')
+ .long("env")
+ .help("Set additional environment variables (format: KEY=VALUE)")
+ .action(ArgAction::Append)
+}
+
+/// Common argument for performing a dry run
+pub fn dry_run() -> Arg {
+ Arg::new("dry-run")
+ .long("dry-run")
+ .help("Perform a dry run of the operation")
+ .action(ArgAction::SetTrue)
+}
+
+/// Common argument for showing queries in the output logs
+pub fn show_queries() -> Arg {
+ Arg::new("show-queries")
+ .long("show-queries")
+ .help("Show queries run in the output logs")
+ .action(ArgAction::SetTrue)
+}
+
+/// Common argument for specifying the action on failure
+pub fn on_failure() -> Arg {
+ Arg::new("on-failure")
+ .long("on-failure")
+ .help("Action to take on failure")
+ .value_parser(value_parser!(FailureAction))
+ .default_value("error")
+}
+
+/// Structure to hold common command arguments
+#[derive(Debug)]
+pub struct CommonCommandArgs<'a> {
+ /// Directory containing stack configuration
+ pub stack_dir: &'a str,
+ /// Environment to operate on
+ pub stack_env: &'a str,
+ /// Logging level
+ pub log_level: &'a str,
+ /// Environment file path
+ pub env_file: &'a str,
+ /// Whether to run in dry-run mode
+ pub dry_run: bool,
+ /// Whether to show queries
+ pub show_queries: bool,
+ /// What to do on failure
+ pub on_failure: &'a FailureAction,
+}
+
+/// Create CommonCommandArgs from ArgMatches
+pub fn args_from_matches(matches: &ArgMatches) -> CommonCommandArgs {
+ CommonCommandArgs {
+        stack_dir: matches.get_one::<String>("stack_dir").unwrap(),
+        stack_env: matches.get_one::<String>("stack_env").unwrap(),
+        log_level: matches.get_one::<String>("log-level").unwrap(),
+        env_file: matches.get_one::<String>("env-file").unwrap(),
+        dry_run: matches.get_flag("dry-run"),
+        show_queries: matches.get_flag("show-queries"),
+        on_failure: matches.get_one::<FailureAction>("on-failure").unwrap(),
+ }
+}
diff --git a/src/commands/info.rs b/src/commands/info.rs
index bb0fb82..b2f0a83 100644
--- a/src/commands/info.rs
+++ b/src/commands/info.rs
@@ -1,15 +1,40 @@
+// commands/info.rs
+
+//! # Info Command Module
+//!
+//! This module handles the `info` command, which displays detailed version and configuration information
+//! about the StackQL Deploy application. It also lists installed providers and running servers.
+//!
+//! ## Features
+//! - Displays version information for the StackQL Deploy CLI.
+//! - Retrieves and displays StackQL binary version, SHA, platform, and binary path.
+//! - Lists all running local StackQL servers by PID and port.
+//! - Displays installed providers and their versions.
+//! - Lists contributors if available via the `CONTRIBUTORS` environment variable.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy info
+//! ```
+//! This command will output various details about the application, library, providers, and contributors.
+
+use std::process;
+
+use clap::Command;
+use colored::*;
+use log::error;
+
use crate::utils::display::print_unicode_box;
use crate::utils::platform::get_platform;
-use crate::utils::server::{get_server_pid, is_server_running};
+use crate::utils::server::find_all_running_servers;
use crate::utils::stackql::{get_installed_providers, get_stackql_path, get_version};
-use clap::Command;
-use colored::*;
-use std::process;
+/// Defines the `info` command for the CLI application.
pub fn command() -> Command {
Command::new("info").about("Display version information")
}
+/// Executes the `info` command.
pub fn execute() {
print_unicode_box("š Getting program information...");
@@ -17,7 +42,7 @@ pub fn execute() {
let version_info = match get_version() {
Ok(info) => info,
Err(e) => {
- eprintln!("{}", format!("Error: {}", e).red());
+ error!("Failed to retrieve version info: {}", e);
process::exit(1);
}
};
@@ -31,14 +56,8 @@ pub fn execute() {
_none => "Not found".to_string(),
};
- // Check server status
- let default_port = 5444;
- let server_running = is_server_running(default_port);
- let server_pid = if server_running {
- get_server_pid(default_port).unwrap_or(0)
- } else {
- 0
- };
+ // Get all running StackQL servers
+ let running_servers = find_all_running_servers();
// Get installed providers
let providers = get_installed_providers().unwrap_or_default();
@@ -53,16 +72,17 @@ pub fn execute() {
println!(" Platform: {:?}", platform);
println!(" Binary Path: {}", binary_path);
- println!("\n{}", "StackQL Server".green().bold());
- if server_running {
- println!(" Status: {}", "Running".green());
- println!(" PID: {}", server_pid);
- println!(" Port: {}", default_port);
+ // Display running servers
+ println!("\n{}", "Local StackQL Servers".green().bold());
+ if running_servers.is_empty() {
+ println!(" None");
} else {
- println!(" Status: {}", "Not Running".yellow());
+ for server in running_servers {
+ println!(" PID: {}, Port: {}", server.pid, server.port);
+ }
}
- // Update the providers display section
+ // Display installed providers
println!("\n{}", "Installed Providers".green().bold());
if providers.is_empty() {
println!(" No providers installed");
@@ -72,7 +92,7 @@ pub fn execute() {
}
}
- // Display contributors
+ // Display contributors if available
let raw_contributors = option_env!("CONTRIBUTORS").unwrap_or("");
let contributors: Vec<&str> = raw_contributors
.split(',')
diff --git a/src/commands/init.rs b/src/commands/init.rs
index 75d4bda..423bdc4 100644
--- a/src/commands/init.rs
+++ b/src/commands/init.rs
@@ -1,52 +1,45 @@
-use crate::utils::display::print_unicode_box;
-use clap::{Arg, ArgAction, ArgMatches, Command};
-use colored::*;
-use reqwest::blocking::Client;
-use reqwest::StatusCode;
+// commands/init.rs
+
+//! # Init Command Module
+//!
+//! This module handles the `init` command, which initializes a new StackQL Deploy project structure.
+//! It supports built-in templates for major providers (AWS, Azure, Google) as well as custom templates via URL or file path.
+//!
+//! ## Features
+//! - Initializes project directory structure.
+//! - Supports both embedded templates and custom templates.
+//! - Fetches templates from URLs or uses built-in ones.
+//! - Validates supported providers and applies default providers when necessary.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy init my-project --provider aws
+//! ./stackql-deploy init my-project --template https://github.com/user/template-repo
+//! ```
+
use std::collections::HashSet;
use std::fs;
use std::io::Write;
use std::path::Path;
+
+use clap::{Arg, ArgAction, ArgMatches, Command};
+use reqwest::blocking::Client;
+use reqwest::StatusCode;
use tera::{Context, Tera};
-// The base URL for GitHub template repository
-const GITHUB_TEMPLATE_BASE: &str =
- "https://raw.githubusercontent.com/stackql/stackql-deploy-rust/main/template-hub/";
-
-// AWS templates
-const AWS_RESOURCE_TEMPLATE: &str =
- include_str!("../../template-hub/aws/starter/resources/example_vpc.iql.template");
-const AWS_MANIFEST_TEMPLATE: &str =
- include_str!("../../template-hub/aws/starter/stackql_manifest.yml.template");
-const AWS_README_TEMPLATE: &str = include_str!("../../template-hub/aws/starter/README.md.template");
-
-// Azure templates
-const AZURE_RESOURCE_TEMPLATE: &str =
- include_str!("../../template-hub/azure/starter/resources/example_res_grp.iql.template");
-const AZURE_MANIFEST_TEMPLATE: &str =
- include_str!("../../template-hub/azure/starter/stackql_manifest.yml.template");
-const AZURE_README_TEMPLATE: &str =
- include_str!("../../template-hub/azure/starter/README.md.template");
-
-// Google templates
-const GOOGLE_RESOURCE_TEMPLATE: &str =
- include_str!("../../template-hub/google/starter/resources/example_vpc.iql.template");
-const GOOGLE_MANIFEST_TEMPLATE: &str =
- include_str!("../../template-hub/google/starter/stackql_manifest.yml.template");
-const GOOGLE_README_TEMPLATE: &str =
- include_str!("../../template-hub/google/starter/README.md.template");
-
-const DEFAULT_PROVIDER: &str = "azure";
-const SUPPORTED_PROVIDERS: [&str; 3] = ["aws", "google", "azure"];
-
-// Define template sources
+use crate::app::{
+ aws_templates, azure_templates, google_templates, DEFAULT_PROVIDER, GITHUB_TEMPLATE_BASE,
+ SUPPORTED_PROVIDERS,
+};
+use crate::utils::display::print_unicode_box;
+use crate::{print_error, print_info, print_success};
+
enum TemplateSource {
Embedded(String), // Built-in template using one of the supported providers
Custom(String), // Custom template path or URL
}
impl TemplateSource {
- // Get provider name (for embedded) or template path (for custom)
#[allow(dead_code)]
fn provider_or_path(&self) -> &str {
match self {
@@ -55,7 +48,6 @@ impl TemplateSource {
}
}
- // Determine sample resource name based on provider or template
fn get_sample_res_name(&self) -> &str {
match self {
TemplateSource::Embedded(provider) => match provider.as_str() {
@@ -80,6 +72,7 @@ impl TemplateSource {
}
}
+/// Configures the `init` command for the CLI application.
pub fn command() -> Command {
Command::new("init")
.about("Initialize a new stackql-deploy project structure")
@@ -105,16 +98,9 @@ pub fn command() -> Command {
.action(ArgAction::Set)
.conflicts_with("provider"),
)
- .arg(
- Arg::new("env")
- .short('e')
- .long("env")
- .help("Environment name (dev, test, prod)")
- .default_value("dev")
- .action(ArgAction::Set),
- )
}
+/// Executes the `init` command to initialize a new project structure.
pub fn execute(matches: &ArgMatches) {
print_unicode_box("š Initializing new project...");
@@ -141,27 +127,24 @@ pub fn execute(matches: &ArgMatches) {
// Create project structure
match create_project_structure(&stack_name, &template_source, &env) {
Ok(_) => {
- println!(
- "{}",
- format!("Project {} initialized successfully.", stack_name).green()
- );
+ print_success!("Project '{}' initialized successfully.", stack_name);
}
Err(e) => {
- eprintln!("{}", format!("Error initializing project: {}", e).red());
+ print_error!("Error initializing project: {}", e);
}
}
}
+/// Validates the provided provider and returns the appropriate string value.
fn validate_provider(provider: Option<&str>) -> String {
let supported: HashSet<&str> = SUPPORTED_PROVIDERS.iter().cloned().collect();
match provider {
Some(p) if supported.contains(p) => p.to_string(),
Some(p) => {
- println!("{}", format!(
- "Provider '{}' is not supported for `init`, supported providers are: {}, defaulting to `{}`",
+ print_info!("Provider '{}' is not supported for `init`, supported providers are: {}, defaulting to `{}`",
p, SUPPORTED_PROVIDERS.join(", "), DEFAULT_PROVIDER
- ).yellow());
+ );
DEFAULT_PROVIDER.to_string()
}
_none => {
@@ -171,7 +154,7 @@ fn validate_provider(provider: Option<&str>) -> String {
}
}
-// Function to fetch template content from URL
+/// Fetches template content from a given URL.
fn fetch_template(url: &str) -> Result<String, String> {
let client = Client::new();
let response = client
@@ -197,7 +180,7 @@ fn fetch_template(url: &str) -> Result {
.map_err(|e| format!("Failed to read template content: {}", e))
}
-// Normalize GitHub URL to raw content URL
+/// Normalizes GitHub URL to raw content URL
fn normalize_github_url(url: &str) -> String {
if url.starts_with("https://github.com") {
// Convert github.com URL to raw.githubusercontent.com
@@ -208,7 +191,7 @@ fn normalize_github_url(url: &str) -> String {
}
}
-// Build full URL or path for templates
+/// Builds full URL or path for templates
fn build_template_url(template_path: &str, resource_name: &str, file_type: &str) -> String {
// Check if template_path is an absolute URL
if template_path.starts_with("http://") || template_path.starts_with("https://") {
@@ -233,6 +216,7 @@ fn build_template_url(template_path: &str, resource_name: &str, file_type: &str)
}
}
+/// Gets the template content based on the source and type
fn get_template_content(
template_source: &TemplateSource,
template_type: &str,
@@ -242,15 +226,15 @@ fn get_template_content(
TemplateSource::Embedded(provider) => {
// Use embedded templates
match (provider.as_str(), template_type) {
- ("aws", "resource") => Ok(AWS_RESOURCE_TEMPLATE.to_string()),
- ("aws", "manifest") => Ok(AWS_MANIFEST_TEMPLATE.to_string()),
- ("aws", "readme") => Ok(AWS_README_TEMPLATE.to_string()),
- ("azure", "resource") => Ok(AZURE_RESOURCE_TEMPLATE.to_string()),
- ("azure", "manifest") => Ok(AZURE_MANIFEST_TEMPLATE.to_string()),
- ("azure", "readme") => Ok(AZURE_README_TEMPLATE.to_string()),
- ("google", "resource") => Ok(GOOGLE_RESOURCE_TEMPLATE.to_string()),
- ("google", "manifest") => Ok(GOOGLE_MANIFEST_TEMPLATE.to_string()),
- ("google", "readme") => Ok(GOOGLE_README_TEMPLATE.to_string()),
+ ("aws", "resource") => Ok(aws_templates::RESOURCE_TEMPLATE.to_string()),
+ ("aws", "manifest") => Ok(aws_templates::MANIFEST_TEMPLATE.to_string()),
+ ("aws", "readme") => Ok(aws_templates::README_TEMPLATE.to_string()),
+ ("azure", "resource") => Ok(azure_templates::RESOURCE_TEMPLATE.to_string()),
+ ("azure", "manifest") => Ok(azure_templates::MANIFEST_TEMPLATE.to_string()),
+ ("azure", "readme") => Ok(azure_templates::README_TEMPLATE.to_string()),
+ ("google", "resource") => Ok(google_templates::RESOURCE_TEMPLATE.to_string()),
+ ("google", "manifest") => Ok(google_templates::MANIFEST_TEMPLATE.to_string()),
+ ("google", "readme") => Ok(google_templates::README_TEMPLATE.to_string()),
_ => Err(format!(
"Unsupported provider or template type: {}, {}",
provider, template_type
@@ -262,15 +246,13 @@ fn get_template_content(
let template_url = build_template_url(path, resource_name, template_type);
// Fetch content from URL
- println!(
- "{}",
- format!("Fetching template from: {}", template_url).blue()
- );
+ print_info!("Fetching template from: {}", template_url);
fetch_template(&template_url)
}
}
}
+/// Creates the project structure for a new StackQL Deploy project.
fn create_project_structure(
stack_name: &str,
template_source: &TemplateSource,
@@ -311,6 +293,7 @@ fn create_project_structure(
Ok(())
}
+/// Creates a resource file in the specified directory using the provided template and context.
fn create_resource_file(
resource_dir: &Path,
sample_res_name: &str,
@@ -331,6 +314,7 @@ fn create_resource_file(
Ok(())
}
+/// Creates a manifest file in the specified directory using the provided template and context.
fn create_manifest_file(
base_path: &Path,
template_str: &str,
@@ -350,6 +334,7 @@ fn create_manifest_file(
Ok(())
}
+/// Creates a README file in the specified directory using the provided template and context.
fn create_readme_file(
base_path: &Path,
template_str: &str,
@@ -369,6 +354,7 @@ fn create_readme_file(
Ok(())
}
+/// Renders a template string using Tera with the provided context.
fn render_template(template_str: &str, context: &Context) -> Result<String, String> {
// Create a one-off Tera instance for rendering a single template
let mut tera = Tera::default();
diff --git a/src/commands/mod.rs b/src/commands/mod.rs
index f576dd4..fcb7d93 100644
--- a/src/commands/mod.rs
+++ b/src/commands/mod.rs
@@ -1,4 +1,5 @@
pub mod build;
+pub mod common_args;
pub mod info;
pub mod init;
pub mod plan;
diff --git a/src/commands/plan.rs b/src/commands/plan.rs
index 48d6baf..55819b4 100644
--- a/src/commands/plan.rs
+++ b/src/commands/plan.rs
@@ -1,11 +1,75 @@
+// commands/plan.rs
+
+//! # Plan Command Module
+//!
+//! This module provides the `plan` command for the StackQL Deploy application.
+//! The `plan` command compares the current state of infrastructure (live, not from a state file)
+//! against the desired state defined by configuration files. It outputs the necessary queries
+//! that would need to be run to achieve the desired state.
+//!
+//! ## Features
+//! - Compare live infrastructure state against desired state.
+//! - Generate queries required to achieve the desired state.
+//! - Provide dry-run capability for previewing changes before applying.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy plan path/to/stack dev
+//! ```
+
+use clap::{ArgMatches, Command};
+
+use crate::commands::common_args::{
+ dry_run, env_file, env_var, log_level, on_failure, show_queries, stack_dir, stack_env,
+ FailureAction,
+};
use crate::utils::display::print_unicode_box;
-use clap::Command;
+/// Configures the `plan` command for the CLI application.
pub fn command() -> Command {
- Command::new("plan").about("Plan infrastructure changes (coming soon)")
+ Command::new("plan")
+ .about("Plan infrastructure changes (coming soon)")
+ .arg(stack_dir())
+ .arg(stack_env())
+ .arg(log_level())
+ .arg(env_file())
+ .arg(env_var())
+ .arg(dry_run())
+ .arg(show_queries())
+ .arg(on_failure())
}
-pub fn execute() {
- print_unicode_box("š® Infrastructure planning (coming soon)...");
- println!("The 'plan' feature is coming soon!");
+/// Executes the `plan` command.
+pub fn execute(matches: &ArgMatches) {
+    let stack_dir = matches.get_one::<String>("stack_dir").unwrap();
+    let stack_env = matches.get_one::<String>("stack_env").unwrap();
+
+ // Extract the common arguments
+    let log_level = matches.get_one::<String>("log-level").unwrap();
+    let env_file = matches.get_one::<String>("env-file").unwrap();
+    let env_vars = matches.get_many::<String>("env");
+    let dry_run = matches.get_flag("dry-run");
+    let show_queries = matches.get_flag("show-queries");
+    let on_failure = matches.get_one::<FailureAction>("on-failure").unwrap();
+
+ print_unicode_box(&format!(
+ "š® Planning changes for stack: [{}] in environment: [{}]",
+ stack_dir, stack_env
+ ));
+
+ println!("Log Level: {}", log_level);
+ println!("Environment File: {}", env_file);
+
+ if let Some(vars) = env_vars {
+ println!("Environment Variables:");
+ for var in vars {
+ println!(" - {}", var);
+ }
+ }
+
+ println!("Dry Run: {}", dry_run);
+ println!("Show Queries: {}", show_queries);
+ println!("On Failure: {:?}", on_failure);
+
+ println!("š plan complete (dry run: {})", dry_run);
}
diff --git a/src/commands/shell.rs b/src/commands/shell.rs
index d477534..6bdbedb 100644
--- a/src/commands/shell.rs
+++ b/src/commands/shell.rs
@@ -1,125 +1,124 @@
-use crate::utils::display::print_unicode_box;
-use crate::utils::query::{execute_query, QueryResult};
-use crate::utils::server::{is_server_running, start_server, ServerOptions};
-use clap::{Arg, ArgAction, ArgMatches, Command};
+// commands/shell.rs
+
+//! # Shell Command Module
+//!
+//! This module provides the `shell` command for the StackQL Deploy application.
+//! The `shell` command launches an interactive shell where users can execute queries
+//! against a StackQL server. Queries can be entered across multiple lines and are
+//! only executed when terminated with a semicolon (`;`).
+//!
+//! ## Features
+//! - Interactive query input with line history support.
+//! - Multi-line query handling using a semicolon (`;`) to indicate query completion.
+//! - Automatic server startup if not running.
+//! - Connection handling using a global connection function (`create_client`).
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy shell
+//! ```
+//!
+
+use clap::{ArgMatches, Command};
use colored::*;
-use postgres::Client;
-use postgres::NoTls;
use rustyline::error::ReadlineError;
use rustyline::Editor;
-use std::process;
+use crate::globals::{server_host, server_port};
+use crate::utils::connection::create_client;
+use crate::utils::display::print_unicode_box;
+use crate::utils::query::{execute_query, QueryResult};
+use crate::utils::server::check_and_start_server;
+
+/// Configures the `shell` command for the CLI application.
pub fn command() -> Command {
- Command::new("shell")
- .about("Launch the interactive shell")
- .arg(
- Arg::new("port")
- .short('p')
- .long("port")
- .help("Port to connect to")
- .default_value("5444")
- .action(ArgAction::Set),
- )
- .arg(
- Arg::new("host")
- .short('h')
- .long("host")
- .help("Host to connect to")
- .default_value("localhost")
- .action(ArgAction::Set),
- )
+ Command::new("shell").about("Launch the interactive shell")
}
-pub fn execute(matches: &ArgMatches) {
+/// Executes the `shell` command, launching an interactive query interface.
+pub fn execute(_matches: &ArgMatches) {
print_unicode_box("š Launching interactive shell...");
- let port = matches
- .get_one::<String>("port")
- .unwrap_or(&"5444".to_string())
- .parse::<u16>()
- .unwrap_or(5444);
-
- let localhost = String::from("localhost");
- let host = matches.get_one::("host").unwrap_or(&localhost);
+ let host = server_host();
+ let port = server_port();
- if host == "localhost" && !is_server_running(port) {
- println!("{}", "Server not running. Starting server...".yellow());
- let options = ServerOptions {
- port,
- ..Default::default()
- };
+ check_and_start_server();
- match start_server(&options) {
- Ok(_) => {
- println!("{}", "Server started successfully".green());
- }
- Err(e) => {
- eprintln!("{}", format!("Failed to start server: {}", e).red());
- process::exit(1);
- }
- }
- }
+ // Connect to the server using the global host and port
+ let mut stackql_client_conn = create_client();
- let connection_string = format!(
- "host={} port={} user=postgres dbname=stackql application_name=stackql",
- host, port
- );
- let _client = match Client::connect(&connection_string, NoTls) {
- Ok(client) => client,
- Err(e) => {
- eprintln!("{}", format!("Failed to connect to server: {}", e).red());
- process::exit(1);
- }
- };
-
- println!("Connected to stackql server at {}:{}", host, port);
println!("Type 'exit' to quit the shell");
println!("---");
let mut rl = Editor::<()>::new().unwrap();
let _ = rl.load_history("stackql_history.txt");
+ let mut query_buffer = String::new(); // Accumulates input until a semicolon is found
+
loop {
- let prompt = format!("stackql ({}:{})=> ", host, port);
+ let prompt = if query_buffer.is_empty() {
+ format!("stackql ({}:{})=> ", host, port)
+ } else {
+ "... ".to_string()
+ };
+
let readline = rl.readline(&prompt);
match readline {
Ok(line) => {
let input = line.trim();
- if input.is_empty() {
- continue;
- }
-
- rl.add_history_entry(input);
if input.eq_ignore_ascii_case("exit") || input.eq_ignore_ascii_case("quit") {
println!("Goodbye");
break;
}
- match execute_query(input, port) {
- Ok(result) => match result {
- QueryResult::Data {
- columns,
- rows,
- notices: _,
- } => {
- print_table(columns, rows);
+ // Accumulate the query
+ query_buffer.push_str(input);
+ query_buffer.push(' ');
+
+ if input.ends_with(';') {
+ let normalized_input = normalize_query(&query_buffer);
+ rl.add_history_entry(&normalized_input);
+
+ match execute_query(&normalized_input, &mut stackql_client_conn) {
+ Ok(result) => match result {
+ QueryResult::Data {
+ columns,
+ rows,
+ notices,
+ } => {
+ print_table(columns, rows);
+
+ // Display notices if any
+ if !notices.is_empty() {
+ println!("\n{}", "Notices:".yellow().bold());
+ for notice in notices {
+ // Split notice text by newlines to format each line
+ for line in notice.lines() {
+ println!(" {}", line.yellow());
+ }
+ }
+ }
+ }
+ QueryResult::Command(cmd) => {
+ println!("{}", cmd.green());
+ }
+ QueryResult::Empty => {
+ println!("{}", "Query executed successfully. No results.".green());
+ }
+ },
+ Err(e) => {
+ eprintln!("{}", format!("Error: {}", e).red());
}
- QueryResult::Command(cmd) => {
- println!("{}", cmd.green());
- }
- QueryResult::Empty => {
- println!("{}", "Query executed successfully. No results.".green());
- }
- },
- Err(e) => {
- eprintln!("{}", format!("Error: {}", e).red());
}
+
+ query_buffer.clear();
}
}
Err(ReadlineError::Interrupted) => {
println!("CTRL-C");
+ query_buffer.clear();
continue;
}
Err(ReadlineError::Eof) => {
@@ -136,6 +135,17 @@ pub fn execute(matches: &ArgMatches) {
let _ = rl.save_history("stackql_history.txt");
}
+/// Normalizes a query by trimming whitespace and combining lines.
+fn normalize_query(input: &str) -> String {
+ input
+ .split('\n')
+ .map(|line| line.trim())
+ .filter(|line| !line.is_empty())
+ .collect::<Vec<&str>>()
+ .join(" ")
+}
+
+/// Prints the query result in a tabular format.
fn print_table(
columns: Vec<String>,
rows: Vec<Vec<String>>,
diff --git a/src/commands/start_server.rs b/src/commands/start_server.rs
index ab47d96..759e02e 100644
--- a/src/commands/start_server.rs
+++ b/src/commands/start_server.rs
@@ -1,63 +1,120 @@
-use crate::utils::display::print_unicode_box;
-use crate::utils::server::{start_server, ServerOptions};
+// commands/start_server.rs
+
+//! # Start Server Command Module
+//!
+//! This module provides the `start-server` command for the StackQL Deploy application.
+//! The `start-server` command initializes and starts a local StackQL server based on the
+//! specified configuration options such as mTLS, custom authentication, and logging levels.
+//!
+//! ## Features
+//! - Validates if the server is already running before attempting to start a new instance.
+//! - Supports configuration of mTLS and custom authentication via JSON inputs.
+//! - Allows setting of logging levels for better observability.
+//! - Uses global configuration for host and port.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy start-server --registry "http://localhost:8000" --log-level INFO
+//! ```
+
+use std::process;
+
use clap::{Arg, ArgAction, ArgMatches, Command};
use colored::*;
-use std::process;
+use crate::app::LOCAL_SERVER_ADDRESSES;
+use crate::globals::{server_host, server_port};
+use crate::utils::display::print_unicode_box;
+use crate::utils::server::{is_server_running, start_server, StartServerOptions};
+
+/// Configures the `start-server` command for the CLI application.
pub fn command() -> Command {
Command::new("start-server")
.about("Start the stackql server")
- .arg(
- Arg::new("port")
- .short('p')
- .long("port")
- .help("Port to listen on")
- .default_value("5444")
- .action(ArgAction::Set),
- )
.arg(
Arg::new("registry")
.short('r')
.long("registry")
- .help("Custom registry URL")
+ .help("[OPTIONAL] Custom registry URL")
.action(ArgAction::Set),
)
.arg(
- Arg::new("arg")
+ Arg::new("mtls_config")
+ .short('m')
+ .long("mtls-config")
+ .help("[OPTIONAL] mTLS configuration for the server (JSON object)")
+ .action(ArgAction::Set),
+ )
+ .arg(
+ Arg::new("custom_auth_config")
.short('a')
- .long("arg")
- .help("Additional arguments to pass to stackql")
- .action(ArgAction::Append),
+ .long("custom-auth-config")
+ .help("[OPTIONAL] Custom provider authentication configuration for the server (JSON object)")
+ .action(ArgAction::Set),
+ )
+ .arg(
+ Arg::new("log_level")
+ .short('l')
+ .long("log-level")
+ .help("[OPTIONAL] Server log level (default: WARN)")
+ .value_parser(["TRACE", "DEBUG", "INFO", "WARN", "ERROR", "FATAL"])
+ .action(ArgAction::Set),
)
}
+/// Executes the `start-server` command.
pub fn execute(matches: &ArgMatches) {
print_unicode_box("š Starting stackql server...");
- let port = matches
- .get_one::<String>("port")
- .unwrap_or(&"5444".to_string())
- .parse::<u16>()
- .unwrap_or(5444);
+ // Use global vars for host and port
+ let port = server_port();
+ let host = server_host().to_string();
- let registry = matches.get_one::("registry").cloned();
+ // Validate host - must be localhost or 0.0.0.0
+ if !LOCAL_SERVER_ADDRESSES.contains(&host.as_str()) {
+ eprintln!(
+ "{}",
+ "Error: Host must be 'localhost' or '0.0.0.0' for local server setup.".red()
+ );
+ eprintln!("The start-server command is only for starting a local server instance.");
+ process::exit(1);
+ }
- let additional_args = matches
- .get_many::("arg")
- .map(|vals| vals.cloned().collect())
- .unwrap_or_default();
+ // Check if server is already running
+ if is_server_running(port) {
+ println!(
+ "{}",
+ format!(
+ "Server is already running on port {}. No action needed.",
+ port
+ )
+ .yellow()
+ );
+ process::exit(0);
+ }
+
+ // Get optional settings
+ let registry = matches.get_one::<String>("registry").cloned();
+ let mtls_config = matches.get_one::<String>("mtls_config").cloned();
+ let custom_auth_config = matches.get_one::<String>("custom_auth_config").cloned();
+ let log_level = matches.get_one::<String>("log_level").cloned();
- let options = ServerOptions {
+ // Create server options
+ let options = StartServerOptions {
+ host: host.clone(),
port,
registry,
- additional_args,
+ mtls_config,
+ custom_auth_config,
+ log_level,
};
+ // Start the server
match start_server(&options) {
- Ok(pid) => {
+ Ok(_pid) => {
println!(
"{}",
- format!("Stackql server started with PID: {}", pid).green()
+ format!("Server is listening on {}:{}", options.host, options.port).green()
);
}
Err(e) => {
diff --git a/src/commands/stop_server.rs b/src/commands/stop_server.rs
index cd69f6c..9853cd5 100644
--- a/src/commands/stop_server.rs
+++ b/src/commands/stop_server.rs
@@ -1,34 +1,49 @@
+// commands/stop_server.rs
+
+//! # Stop Server Command Module
+//!
+//! This module provides the `stop-server` command for the StackQL Deploy application.
+//! The `stop-server` command stops a running StackQL server by communicating with it
+//! over the specified port. This command only applies to local server instances.
+//!
+//! ## Features
+//! - Graceful shutdown of the StackQL server.
+//! - Provides feedback on successful or unsuccessful termination attempts.
+//! - Uses global port configuration to identify the server to stop.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy stop-server
+//! ```
+
+use std::process;
+
+use clap::{ArgMatches, Command};
+use colored::*;
+
+use crate::globals::server_port;
use crate::utils::display::print_unicode_box;
use crate::utils::server::stop_server;
-use clap::{Arg, ArgAction, ArgMatches, Command};
-use colored::*;
-use std::process;
+/// Configures the `stop-server` command for the CLI application.
pub fn command() -> Command {
- Command::new("stop-server")
- .about("Stop the stackql server")
- .arg(
- Arg::new("port")
- .short('p')
- .long("port")
- .help("Port the server is running on")
- .default_value("5444")
- .action(ArgAction::Set),
- )
+ Command::new("stop-server").about("Stop the stackql server")
}
-pub fn execute(matches: &ArgMatches) {
+/// Executes the `stop-server` command.
+pub fn execute(_matches: &ArgMatches) {
+ let port = server_port();
+
print_unicode_box("š Stopping stackql server...");
- let port = matches
- .get_one::<String>("port")
- .unwrap_or(&"5444".to_string())
- .parse::<u16>()
- .unwrap_or(5444);
+ println!(
+ "{}",
+ format!("Processing request to stop server on port {}", port).yellow()
+ );
match stop_server(port) {
Ok(_) => {
- println!("{}", "Stackql server stopped successfully".green());
+ println!("{}", "stackql server stopped successfully".green());
}
Err(e) => {
eprintln!("{}", format!("Failed to stop server: {}", e).red());
diff --git a/src/commands/teardown.rs b/src/commands/teardown.rs
index cf3e34d..1c02cdf 100644
--- a/src/commands/teardown.rs
+++ b/src/commands/teardown.rs
@@ -1,18 +1,77 @@
+// commands/teardown.rs
+
+//! # Teardown Command Module
+//!
+//! This module provides the `teardown` command for the StackQL Deploy application.
+//! The `teardown` command deprovisions resources for a given stack in a specified environment.
+//! It accepts the same arguments as the `build` and `plan` commands and is intended to
+//! reverse all operations performed during provisioning.
+//!
+//! ## Features
+//! - Deprovisioning of a specified stack in a given environment.
+//! - Uses a declarative approach to identify resources that should be destroyed.
+//! - Intended to be used as a cleanup or rollback mechanism.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy teardown /path/to/stack dev
+//! ```
+
+use clap::{ArgMatches, Command};
+
+use crate::commands::common_args::{
+ dry_run, env_file, env_var, log_level, on_failure, show_queries, stack_dir, stack_env,
+ FailureAction,
+};
use crate::utils::display::print_unicode_box;
-use clap::{Arg, ArgMatches, Command};
+/// Configures the `teardown` command for the CLI application.
pub fn command() -> Command {
Command::new("teardown")
.about("Teardown a provisioned stack")
- .arg(Arg::new("stack_dir").required(true))
- .arg(Arg::new("stack_env").required(true))
+ .arg(stack_dir())
+ .arg(stack_env())
+ .arg(log_level())
+ .arg(env_file())
+ .arg(env_var())
+ .arg(dry_run())
+ .arg(show_queries())
+ .arg(on_failure())
}
+/// Executes the `teardown` command.
pub fn execute(matches: &ArgMatches) {
let stack_dir = matches.get_one::<String>("stack_dir").unwrap();
let stack_env = matches.get_one::<String>("stack_env").unwrap();
+
+ // Extract the common arguments
+ let log_level = matches.get_one::<String>("log-level").unwrap();
+ let env_file = matches.get_one::<String>("env-file").unwrap();
+ let env_vars = matches.get_many::<String>("env");
+ let dry_run = matches.get_flag("dry-run");
+ let show_queries = matches.get_flag("show-queries");
+ let on_failure = matches.get_one::<FailureAction>("on-failure").unwrap();
+
print_unicode_box(&format!(
"Tearing down stack: [{}] in environment: [{}]",
stack_dir, stack_env
));
+
+ println!("Log Level: {}", log_level);
+ println!("Environment File: {}", env_file);
+
+ if let Some(vars) = env_vars {
+ println!("Environment Variables:");
+ for var in vars {
+ println!(" - {}", var);
+ }
+ }
+
+ println!("Dry Run: {}", dry_run);
+ println!("Show Queries: {}", show_queries);
+ println!("On Failure: {:?}", on_failure);
+
+ // Here you would implement the actual teardown functionality
+
+ println!("š§ teardown complete (dry run: {})", dry_run);
}
diff --git a/src/commands/test.rs b/src/commands/test.rs
index 28b6f33..9c0b0b7 100644
--- a/src/commands/test.rs
+++ b/src/commands/test.rs
@@ -1,18 +1,105 @@
-use crate::utils::display::print_unicode_box;
-use clap::{Arg, ArgMatches, Command};
+// commands/test.rs
+//! # Test Command Module
+//!
+//! This module provides the `test` command for the StackQL Deploy application.
+//! The `test` command checks whether a specified stack is in the correct desired state
+//! within a given environment. It validates the current state against expected outputs
+//! defined in the stack configuration.
+//!
+//! ## Features
+//! - Validates the current infrastructure state against the desired state.
+//! - Ensures all resources are correctly provisioned and meet specified requirements.
+//! - Uses the same positional arguments as `build`, `plan`, and `teardown` commands.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy test /path/to/stack dev
+//! ```
+
+use clap::{ArgMatches, Command};
+use log::{debug, info};
+
+use crate::commands::common_args::{
+ args_from_matches, dry_run, env_file, env_var, log_level, on_failure, show_queries, stack_dir,
+ stack_env,
+};
+use crate::resource::manifest::Manifest;
+use crate::utils::display::{log_common_command_args, print_unicode_box};
+
+/// Configures the `test` command for the CLI application.
pub fn command() -> Command {
Command::new("test")
.about("Run test queries for the stack")
- .arg(Arg::new("stack_dir").required(true))
- .arg(Arg::new("stack_env").required(true))
+ .arg(stack_dir())
+ .arg(stack_env())
+ .arg(log_level())
+ .arg(env_file())
+ .arg(env_var())
+ .arg(dry_run())
+ .arg(show_queries())
+ .arg(on_failure())
}
+/// Executes the `test` command.
pub fn execute(matches: &ArgMatches) {
- let stack_dir = matches.get_one::<String>("stack_dir").unwrap();
- let stack_env = matches.get_one::<String>("stack_env").unwrap();
+ // Create the CommonCommandArgs struct directly from matches
+ let args = args_from_matches(matches);
+
+ // Log the command arguments
+ log_common_command_args(&args, matches);
+
print_unicode_box(&format!(
- "Testing stack: [{}] in environment: [{}]",
- stack_dir, stack_env
+ "Testing stack: [{}] in environment: [{}] (dry run: {})",
+ args.stack_dir, args.stack_env, args.dry_run
));
+
+ // Load the manifest using the reusable function
+ let manifest = Manifest::load_from_dir_or_exit(args.stack_dir);
+
+ // Process resources
+ info!("Testing {} resources...", manifest.resources.len());
+
+ for resource in &manifest.resources {
+ debug!("Processing resource: {}", resource.name);
+
+ // Skip resources that have a condition (if) that evaluates to false
+ if let Some(condition) = &resource.r#if {
+ debug!("Resource has condition: {}", condition);
+ // TODO: evaluate the condition here
+ }
+
+ // Get environment-specific property values
+ debug!("Properties for resource {}:", resource.name);
+ for prop in &resource.props {
+ let value = Manifest::get_property_value(prop, args.stack_env);
+ match value {
+ Some(val) => debug!(
+ " [prop] {}: {}",
+ prop.name,
+ serde_json::to_string(val)
+ .unwrap_or_else(|_| "Error serializing value".to_string())
+ ),
+ None => debug!(
+ "[prop] {}: <no value for environment: {}>",
+ prop.name, args.stack_env
+ ),
+ }
+ }
+
+ // Get the query file path
+ let query_path =
+ manifest.get_resource_query_path(std::path::Path::new(args.stack_dir), resource);
+ debug!("Query file path: {:?}", query_path);
+
+ // In a real implementation, you would:
+ // 1. Read the query file
+ // 2. Replace property placeholders with actual values
+ // 3. Execute the query against the infrastructure
+ // 4. Verify the results match expectations
+
+ info!("ā Resource {} passed tests", resource.name);
+ }
+
+ info!("š tests complete (dry run: {})", args.dry_run);
}
diff --git a/src/commands/upgrade.rs b/src/commands/upgrade.rs
index 9018f43..1d0e25a 100644
--- a/src/commands/upgrade.rs
+++ b/src/commands/upgrade.rs
@@ -1,16 +1,39 @@
+// commands/upgrade.rs
+
+//! # Upgrade Command Module
+//!
+//! This module provides the `upgrade` command for the StackQL Deploy application.
+//! The `upgrade` command downloads and installs the latest version of the StackQL binary.
+//! It verifies the version of the newly installed binary to ensure the upgrade was successful.
+//!
+//! ## Features
+//! - Automatically fetches the latest version of the StackQL binary from the official repository.
+//! - Verifies the version after installation.
+//! - Provides user feedback on successful or failed upgrades.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy upgrade
+//! ```
+
+use std::process;
+
+use clap::Command;
+use colored::*;
+use log::{error, info};
+
use crate::utils::display::print_unicode_box;
use crate::utils::download::download_binary;
use crate::utils::stackql::get_version;
-use clap::Command;
-use colored::*;
-use std::process;
+/// Configures the `upgrade` command for the CLI application.
pub fn command() -> Command {
Command::new("upgrade").about("Upgrade stackql to the latest version")
}
+/// Executes the `upgrade` command.
pub fn execute() {
- print_unicode_box("š¦ Upgrading stackql...");
+ print_unicode_box("š¦ Installing or upgrading stackql...");
// Download the latest version of stackql binary
match download_binary() {
@@ -18,20 +41,19 @@ pub fn execute() {
// Get the version of the newly installed binary
match get_version() {
Ok(version_info) => {
- println!(
- "Successfully upgraded stackql binary to the latest version ({}) at:",
- version_info.version
+ info!(
+ "Successfully installed the latest stackql binary, version ({}) at: {}",
+ version_info.version,
+ path.display().to_string().green()
);
}
- Err(_) => {
- println!("Successfully upgraded stackql binary to the latest version at:");
+ Err(e) => {
+ error!("Failed to get stackql version: {}", e);
}
}
- println!("{}", path.display().to_string().green());
- println!("Upgrade complete!");
}
Err(e) => {
- eprintln!("{}", format!("Error upgrading stackql binary: {}", e).red());
+ error!("Error upgrading stackql binary: {}", e);
process::exit(1);
}
}
diff --git a/src/error.rs b/src/error.rs
index 2bfc61f..a5852ca 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,15 +1,59 @@
+// error.rs
+
+//! # Error Handling Module
+//!
+//! This module provides custom error handling for the StackQL Deploy application.
+//! It defines a comprehensive `AppError` enum that encapsulates various error conditions
+//! the application may encounter. Implementations of standard traits like `Display` and `Error`
+//! are provided to allow seamless integration with Rust's error handling ecosystem.
+//!
+//! # Usage Example
+//! ```rust
+//! use crate::error::AppError;
+//!
+//! fn example_function() -> Result<(), AppError> {
+//! Err(AppError::BinaryNotFound)
+//! }
+//! ```
+
use std::error::Error;
use std::fmt;
use std::path::PathBuf;
+// ============================
+// Application Error Definitions
+// ============================
+
+/// Represents errors that may occur within the application.
+///
+/// This enum provides a common error type that encapsulates various issues such as:
+/// - Missing binary files
+/// - Failed command execution
+/// - I/O errors
#[derive(Debug)]
pub enum AppError {
+ /// Error returned when the stackql binary is not found.
BinaryNotFound,
+
+ /// Error returned when a command fails to execute.
+ ///
+ /// The error message is stored as a `String` for detailed reporting.
CommandFailed(String),
+
+ /// Wrapper for standard I/O errors.
+ ///
+ /// This variant allows propagating errors originating from `std::io` operations.
IoError(std::io::Error),
}
+// ============================
+// Display Trait Implementation
+// ============================
+
impl fmt::Display for AppError {
+ /// Formats the `AppError` for user-friendly output.
+ ///
+ /// This implementation converts each variant into a descriptive error message.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::BinaryNotFound => write!(f, "The stackql binary was not found"),
@@ -19,15 +63,44 @@ impl fmt::Display for AppError {
}
}
+// ============================
+// Error Trait Implementation
+// ============================
+
impl Error for AppError {}
+// ============================
+// Conversion From std::io::Error
+// ============================
+
impl From<std::io::Error> for AppError {
+ /// Converts a standard I/O error into an `AppError::IoError`.
fn from(error: std::io::Error) -> Self {
Self::IoError(error)
}
}
-// New helper function
+// ============================
+// Utility Functions
+// ============================
+
+/// Attempts to retrieve the binary path, returning an `AppError` if not found.
+///
+/// This function calls `get_binary_path()` from the `utils::binary` module and converts
+/// an `Option` to a `Result`.
+///
+/// # Errors
+/// - Returns `AppError::BinaryNotFound` if the binary path cannot be located.
+///
+/// # Example
+/// ```rust
+/// use crate::error::{get_binary_path_with_error, AppError};
+///
+/// match get_binary_path_with_error() {
+/// Ok(path) => println!("Binary found at: {:?}", path),
+/// Err(e) => eprintln!("Error: {}", e),
+/// }
+/// ```
pub fn get_binary_path_with_error() -> Result<PathBuf, AppError> {
crate::utils::binary::get_binary_path().ok_or(AppError::BinaryNotFound)
}
diff --git a/src/globals.rs b/src/globals.rs
new file mode 100644
index 0000000..83b3a1a
--- /dev/null
+++ b/src/globals.rs
@@ -0,0 +1,139 @@
+// globals.rs
+
+//! # Global Configuration Module
+//!
+//! This module provides global variables for the StackQL server configuration.
+//! It manages the global host, port, and connection string settings using `OnceCell` for safe, single initialization.
+//!
+//! ## Features
+//! - Stores global server configuration values (`host`, `port`, `connection_string`) using `OnceCell`.
+//! - Provides initialization functions to set global values (`init_globals`).
+//! - Exposes getter functions for retrieving configured global values from other modules.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::globals::{init_globals, server_host, server_port, connection_string};
+//!
+//! fn setup() {
+//! init_globals("localhost".to_string(), 5444);
+//! println!("Host: {}", server_host());
+//! println!("Port: {}", server_port());
+//! println!("Connection String: {}", connection_string());
+//! }
+//! ```
+
+use once_cell::sync::OnceCell;
+
+use crate::app::{DEFAULT_SERVER_HOST, DEFAULT_SERVER_PORT};
+
+// ============================
+// Global Static Variables
+// ============================
+
+/// Stores the global server host.
+///
+/// The server host is initialized via the `init_globals` function and is only set once per application lifetime.
+static STACKQL_SERVER_HOST: OnceCell<String> = OnceCell::new();
+
+/// Stores the global server port.
+///
+/// The server port is initialized via the `init_globals` function and is only set once per application lifetime.
+static STACKQL_SERVER_PORT: OnceCell<u16> = OnceCell::new();
+
+/// Stores the global connection string used for database connections.
+///
+/// This string is generated using the `init_globals` function based on the provided host and port.
+static STACKQL_CONNECTION_STRING: OnceCell<String> = OnceCell::new();
+
+// ============================
+// Initialization Function
+// ============================
+
+/// Initializes the global variables for host, port, and connection string.
+///
+/// This function must be called once before accessing global values via getter functions.
+/// It uses `OnceCell` to ensure each value is only initialized once.
+///
+/// # Arguments
+/// - `host` - The server host address as a `String`.
+/// - `port` - The server port as a `u16`.
+///
+/// # Example
+/// ```rust
+/// use crate::globals::init_globals;
+/// init_globals("localhost".to_string(), 5444);
+/// ```
+pub fn init_globals(host: String, port: u16) {
+ // Only set if not already set (first initialization wins)
+ STACKQL_SERVER_HOST.set(host.clone()).ok();
+ STACKQL_SERVER_PORT.set(port).ok();
+
+ // Create a connection string and store it globally
+ let connection_string = format!(
+ "host={} port={} user=stackql dbname=stackql application_name=stackql",
+ host, port
+ );
+ STACKQL_CONNECTION_STRING.set(connection_string).ok();
+}
+
+// ============================
+// Getter Functions
+// ============================
+
+/// Retrieves the configured global server host.
+///
+/// If the host is not set via `init_globals`, it returns the default value from `app`.
+///
+/// # Returns
+/// - `&'static str` - The configured server host or the default host.
+///
+/// # Example
+/// ```rust
+/// use crate::globals::{init_globals, server_host};
+/// init_globals("localhost".to_string(), 5444);
+/// assert_eq!(server_host(), "localhost");
+/// ```
+pub fn server_host() -> &'static str {
+ STACKQL_SERVER_HOST
+ .get()
+ .map_or(DEFAULT_SERVER_HOST, |s| s.as_str())
+}
+
+/// Retrieves the configured global server port.
+///
+/// If the port is not set via `init_globals`, it returns the default value from `app`.
+///
+/// # Returns
+/// - `u16` - The configured server port or the default port.
+///
+/// # Example
+/// ```rust
+/// use crate::globals::{init_globals, server_port};
+/// init_globals("localhost".to_string(), 5444);
+/// assert_eq!(server_port(), 5444);
+/// ```
+pub fn server_port() -> u16 {
+ STACKQL_SERVER_PORT
+ .get()
+ .copied()
+ .unwrap_or(DEFAULT_SERVER_PORT)
+}
+
+/// Retrieves the configured global connection string.
+///
+/// The connection string is generated during initialization via `init_globals`.
+/// If not initialized, it returns an empty string.
+///
+/// # Returns
+/// - `&'static str` - The configured connection string or an empty string if not initialized.
+///
+/// # Example
+/// ```rust
+/// use crate::globals::{init_globals, connection_string};
+/// init_globals("localhost".to_string(), 5444);
+/// println!("Connection String: {}", connection_string());
+/// ```
+#[allow(dead_code)]
+pub fn connection_string() -> &'static str {
+ STACKQL_CONNECTION_STRING.get().map_or("", |s| s.as_str())
+}
diff --git a/src/main.rs b/src/main.rs
index c118846..e7c1f68 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,20 +1,92 @@
+// main.rs
+
+//! # StackQL Deploy - Main Entry Point
+//!
+//! This is the main entry point for the StackQL Deploy application.
+//! It initializes the CLI, configures global settings, and handles user commands (e.g., `build`, `teardown`, `test`, `info`, `shell`, etc.).
+//!
+//! ## Global Arguments
+//!
+//! These arguments can be specified for **any command**.
+//!
+//! - `--server`, `-h` - The server host to connect to (default: `localhost`).
+//! - `--port`, `-p` - The server port to connect to (default: `5444`).
+//! - `--log-level` - The logging level (default: `info`). Possible values: `error`, `warn`, `info`, `debug`, `trace`.
+//!
+//! ## Example Usage
+//! ```bash
+//! ./stackql-deploy --server myserver.com --port 1234 build
+//! ./stackql-deploy shell -h localhost -p 5444
+//! ./stackql-deploy info
+//! ```
+//!
+//! For detailed help, use `--help` or `-h` flags.
+
+mod app;
mod commands;
mod error;
+mod globals;
+mod resource;
mod utils;
+// mod template;
-use crate::utils::display::{print_error, print_info};
-use crate::utils::server::stop_server;
-use clap::Command;
-use error::{get_binary_path_with_error, AppError};
use std::process;
+use clap::{Arg, ArgAction, Command};
+
+use error::{get_binary_path_with_error, AppError};
+use log::{debug, error, info};
+
+use crate::app::{
+ APP_AUTHOR, APP_DESCRIPTION, APP_NAME, APP_VERSION, DEFAULT_LOG_LEVEL, DEFAULT_SERVER_HOST,
+ DEFAULT_SERVER_PORT, DEFAULT_SERVER_PORT_STR, EXEMPT_COMMANDS, LOG_LEVELS,
+};
+use crate::utils::logging::initialize_logger;
+
+/// Main function that initializes the CLI and handles command execution.
fn main() {
- let matches = Command::new("stackql-deploy")
- .version("0.1.0")
- .author("Jeffrey Aven ")
- .about("Model driven IaC using stackql")
+ let matches = Command::new(APP_NAME)
+ .version(APP_VERSION)
+ .author(APP_AUTHOR)
+ .about(APP_DESCRIPTION)
+ // ====================
+ // Global Flags
+ // ====================
+ .arg(
+ Arg::new("server")
+ .long("server")
+ .alias("host")
+ .short('h')
+ .help("StackQL server host to connect to")
+ .global(true)
+ .default_value(DEFAULT_SERVER_HOST)
+ .action(ArgAction::Set),
+ )
+ .arg(
+ Arg::new("port")
+ .short('p')
+ .long("port")
+ .help("StackQL server port to connect to")
+ .value_parser(clap::value_parser!(u16).range(1024..=65535))
+ .global(true)
+ .default_value(DEFAULT_SERVER_PORT_STR)
+ .action(ArgAction::Set),
+ )
+ .arg(
+ Arg::new("log-level")
+ .long("log-level")
+ .help("Set the logging level")
+ .global(true)
+ .value_parser(clap::builder::PossibleValuesParser::new(LOG_LEVELS))
+ .ignore_case(true)
+ .default_value(DEFAULT_LOG_LEVEL)
+ .action(ArgAction::Set),
+ )
.subcommand_required(true)
.arg_required_else_help(true)
+ // ====================
+ // Subcommand Definitions
+ // ====================
.subcommand(commands::build::command())
.subcommand(commands::teardown::command())
.subcommand(commands::test::command())
@@ -27,60 +99,63 @@ fn main() {
.subcommand(commands::plan::command())
.get_matches();
- // Check for binary existence except for init and server management commands
- let exempt_commands = ["init"];
- if !exempt_commands.contains(&matches.subcommand_name().unwrap_or("")) {
- if let Err(AppError::BinaryNotFound) = get_binary_path_with_error() {
- print_info("stackql binary not found in the current directory or in the PATH. Downloading the latest version...");
- // Call your download code here
- process::exit(1);
- }
- // if let None = get_binary_path() {
- // print_info("stackql binary not found in the current directory or in the PATH. Downloading the latest version...");
- // // Call your download code here
- // process::exit(1);
- // }
- }
+ // ====================
+ // Initialize Logger
+ // ====================
+ let log_level = matches.get_one::("log-level").unwrap();
+ initialize_logger(log_level);
- // Define which commands need server management
- let server_commands = ["build", "test", "plan", "teardown", "shell"];
- let needs_server = server_commands.contains(&matches.subcommand_name().unwrap_or(""));
- let default_port = 5444;
+ debug!("Logger initialized with level: {}", log_level);
- // Handle command execution
- match matches.subcommand() {
- Some(("build", sub_matches)) => {
- commands::build::execute(sub_matches);
- if needs_server {
- stop_server(default_port).ok();
- }
- }
- Some(("teardown", sub_matches)) => {
- commands::teardown::execute(sub_matches);
- if needs_server {
- stop_server(default_port).ok();
- }
- }
- Some(("test", sub_matches)) => {
- commands::test::execute(sub_matches);
- if needs_server {
- stop_server(default_port).ok();
+ // Get the server and port values from command-line arguments
+ let server_host = matches
+ .get_one::("server")
+ .unwrap_or(&DEFAULT_SERVER_HOST.to_string())
+ .clone();
+
+ let server_port = *matches
+ .get_one::("port")
+ .unwrap_or(&DEFAULT_SERVER_PORT);
+
+ debug!("Server Host: {}", server_host);
+ debug!("Server Port: {}", server_port);
+
+ // Initialize the global values
+ globals::init_globals(server_host, server_port);
+
+ // Check for binary existence except for exempt commands
+ if !EXEMPT_COMMANDS.contains(&matches.subcommand_name().unwrap_or("")) {
+ match get_binary_path_with_error() {
+ Ok(path) => debug!("StackQL binary found at: {:?}", path),
+ Err(_e) => {
+ info!("StackQL binary not found. Downloading the latest version...");
+ commands::upgrade::execute();
+
+ // Re-check for binary existence after upgrade attempt
+ if let Err(AppError::BinaryNotFound) = get_binary_path_with_error() {
+ error!("Failed to download StackQL binary. Please try again or check your network connection.");
+ process::exit(1);
+ }
}
}
+ }
+
+ // ====================
+ // Command Execution
+ // ====================
+ match matches.subcommand() {
+ Some(("build", sub_matches)) => commands::build::execute(sub_matches),
+ Some(("test", sub_matches)) => commands::test::execute(sub_matches),
+ Some(("plan", sub_matches)) => commands::plan::execute(sub_matches),
+ Some(("teardown", sub_matches)) => commands::teardown::execute(sub_matches),
Some(("info", _)) => commands::info::execute(),
Some(("shell", sub_matches)) => commands::shell::execute(sub_matches),
Some(("upgrade", _)) => commands::upgrade::execute(),
Some(("init", sub_matches)) => commands::init::execute(sub_matches),
Some(("start-server", sub_matches)) => commands::start_server::execute(sub_matches),
Some(("stop-server", sub_matches)) => commands::stop_server::execute(sub_matches),
- Some(("plan", _)) => {
- commands::plan::execute();
- if needs_server {
- stop_server(default_port).ok();
- }
- }
_ => {
- print_error("Unknown command. Use --help for usage.");
+ print_error!("Unknown command. Use --help for usage.");
process::exit(1);
}
}
diff --git a/src/resource/exports.rs b/src/resource/exports.rs
new file mode 100644
index 0000000..b410c09
--- /dev/null
+++ b/src/resource/exports.rs
@@ -0,0 +1,290 @@
+// resource/exports.rs
+
+//! # Resource Exports Module
+//!
+//! Handles exporting variables from resources.
+//! Exports are used to share data between resources, such as IDs or attributes
+//! that are needed for dependent resources.
+//!
+//! This module provides functionality for processing exports, including
+//! masking protected values and updating the context with exported values.
+
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt;
+
+use colored::*;
+
+use crate::resource::manifest::Resource;
+use crate::template::context::Context;
+
/// Errors that can occur during export operations.
#[derive(Debug)]
pub enum ExportError {
    /// A requested export was not present in the query result
    MissingExport(String),

    /// The query result did not have the expected shape
    InvalidFormat(String),

    /// Export processing failed for another reason
    ProcessingFailed(String),
}

impl fmt::Display for ExportError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExportError::MissingExport(name) => write!(f, "Missing required export: {}", name),
            ExportError::InvalidFormat(msg) => write!(f, "Invalid export format: {}", msg),
            ExportError::ProcessingFailed(msg) => write!(f, "Export processing failed: {}", msg),
        }
    }
}

impl Error for ExportError {}

/// Type alias for export operation results.
/// Generic so call sites can use both `ExportResult<ExportOutput>` and
/// `ExportResult<()>` (as `collect_all_exports` does).
pub type ExportResult<T> = Result<T, ExportError>;
+
/// Represents the result of processing exports.
#[derive(Debug, Clone)]
pub struct ExportOutput {
    /// Exported variable names mapped to their string values
    pub values: HashMap<String, String>,

    /// Names (keys only) of exported values that are protected and must be
    /// masked when printed
    pub protected: Vec<String>,
}
+
+/// Processes exports from a query result.
+///
+/// # Arguments
+/// * `resource` - The resource being processed
+/// * `row` - Row of data from query result
+/// * `columns` - Column definitions from query result
+/// * `dry_run` - Whether this is a dry run
+///
+/// # Returns
+/// A map of export names to values.
+pub fn process_raw_exports(
+ resource: &Resource,
+ row: Option<&Vec>,
+ columns: &[String],
+ dry_run: bool,
+) -> ExportResult {
+ let mut exported = HashMap::new();
+ let protected = resource.protected.clone();
+
+ if dry_run {
+ // For dry run, just use placeholder values
+ for export_name in &resource.exports {
+ exported.insert(export_name.clone(), "".to_string());
+ }
+ } else if let Some(row_values) = row {
+ // Check if we have values to export
+ if row_values.len() != columns.len() {
+ return Err(ExportError::InvalidFormat(
+ "Column count mismatch in export query result".to_string(),
+ ));
+ }
+
+ // Extract values for each requested export
+ for export_name in &resource.exports {
+ // Find the column index for this export
+ if let Some(idx) = columns.iter().position(|c| c == export_name) {
+ if idx < row_values.len() {
+ let value = row_values[idx].clone();
+ exported.insert(export_name.clone(), value);
+ } else {
+ return Err(ExportError::MissingExport(format!(
+ "Export '{}' column index out of bounds",
+ export_name
+ )));
+ }
+ } else {
+ return Err(ExportError::MissingExport(format!(
+ "Export '{}' not found in query result",
+ export_name
+ )));
+ }
+ }
+ } else {
+ // No row data
+ return Err(ExportError::ProcessingFailed(
+ "No row data for exports".to_string(),
+ ));
+ }
+
+ Ok(ExportOutput {
+ values: exported,
+ protected,
+ })
+}
+
+/// Updates a context with exported values.
+///
+/// # Arguments
+/// * `context` - The context to update
+/// * `exports` - The export output to apply
+/// * `show_values` - Whether to print the values being exported
+///
+/// # Returns
+/// Nothing, but updates the context in place.
+pub fn apply_exports_to_context(context: &mut Context, exports: &ExportOutput, show_values: bool) {
+ for (name, value) in &exports.values {
+ if exports.protected.contains(name) {
+ // Mask protected values in output
+ if show_values {
+ let mask = "*".repeat(value.len());
+ println!(
+ " š Set protected variable [{}] to [{}] in exports",
+ name, mask
+ );
+ }
+ } else {
+ // Show regular exports
+ if show_values {
+ println!(" š¤ Set [{}] to [{}] in exports", name, value);
+ }
+ }
+
+ // Add to context
+ context.add_variable(name.clone(), value.clone());
+ }
+}
+
+/// Processes exports for all resources in a stack.
+///
+/// Useful for commands like teardown that need to process all exports
+/// before starting operations.
+///
+/// # Arguments
+/// * `resources` - Resources to process
+/// * `context` - Context to update with exports
+/// * `client` - Database client
+/// * `dry_run` - Whether this is a dry run
+///
+/// # Returns
+/// Success or error
+pub fn collect_all_exports(
+ resources: &Vec,
+ context: &mut Context,
+ client: &mut postgres::Client,
+ dry_run: bool,
+) -> ExportResult<()> {
+ let _ = client;
+ let _ = dry_run;
+
+ println!("Collecting exports for all resources...");
+
+ for resource in resources {
+ // Skip if not a resource type or has no exports
+ let resource_type = resource["type"].as_str().unwrap_or("resource");
+ if resource_type == "script" || resource_type == "command" {
+ continue;
+ }
+
+ if !resource["exports"].is_sequence()
+ || resource["exports"].as_sequence().unwrap().is_empty()
+ {
+ continue;
+ }
+
+ // Get resource name
+ let resource_name = match resource["name"].as_str() {
+ Some(name) => name,
+ None => {
+ eprintln!("Error: Missing 'name' for resource");
+ continue;
+ }
+ };
+
+ println!(
+ " {} Collecting exports for {}",
+ "š¦".bright_magenta(),
+ resource_name
+ );
+
+ // This part would require refactoring or additional methods to properly handle
+ // resource loading and processing exports. In a full implementation, we would have:
+ //
+ // 1. Load the resource from the manifest
+ // 2. Load its queries
+ // 3. Render and execute the exports query
+ // 4. Process the results and update the context
+
+ // For now, we'll simulate a simplified version
+ // In a real implementation, this would use the proper loading functions
+ let fake_export_values = HashMap::new(); // Would be actual values in real implementation
+ let fake_protected = Vec::new();
+
+ let fake_exports = ExportOutput {
+ values: fake_export_values,
+ protected: fake_protected,
+ };
+
+ apply_exports_to_context(context, &fake_exports, false);
+ }
+
+ Ok(())
+}
+
/// Unit tests for export functionality.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::resource::manifest::Resource;

    /// Covers both the real-row extraction path and the dry-run path of
    /// `process_raw_exports`.
    #[test]
    fn test_process_raw_exports() {
        // Create a test resource with exports; "id" is additionally protected.
        let resource = Resource {
            name: "test-resource".to_string(),
            r#type: "resource".to_string(),
            file: None,
            props: Vec::new(),
            exports: vec!["id".to_string(), "name".to_string()],
            protected: vec!["id".to_string()],
            description: "".to_string(),
            r#if: None,
        };

        // Test with a row of data whose columns match the requested exports.
        let columns = vec!["id".to_string(), "name".to_string()];
        let row = vec!["123".to_string(), "test".to_string()];

        let result = process_raw_exports(&resource, Some(&row), &columns, false).unwrap();

        assert_eq!(result.values.len(), 2);
        assert_eq!(result.values.get("id").unwrap(), "123");
        assert_eq!(result.values.get("name").unwrap(), "test");
        assert_eq!(result.protected.len(), 1);
        assert!(result.protected.contains(&"id".to_string()));

        // Test dry run: no row is needed and placeholder values are produced.
        let dry_result = process_raw_exports(&resource, None, &columns, true).unwrap();

        assert_eq!(dry_result.values.len(), 2);
        assert_eq!(dry_result.values.get("id").unwrap(), "");
        assert_eq!(dry_result.values.get("name").unwrap(), "");
    }

    /// Verifies exported values land in the context, protected or not.
    #[test]
    fn test_apply_exports_to_context() {
        let mut context = Context::new();

        let mut values = HashMap::new();
        values.insert("id".to_string(), "123".to_string());
        values.insert("name".to_string(), "test".to_string());

        let exports = ExportOutput {
            values,
            protected: vec!["id".to_string()],
        };

        apply_exports_to_context(&mut context, &exports, false);

        // Protected status affects printing only; stored values are unmasked.
        assert_eq!(context.get_variable("id").unwrap(), "123");
        assert_eq!(context.get_variable("name").unwrap(), "test");
    }
}
diff --git a/src/resource/manifest.rs b/src/resource/manifest.rs
index e69de29..85f7fea 100644
--- a/src/resource/manifest.rs
+++ b/src/resource/manifest.rs
@@ -0,0 +1,289 @@
+// resource/manifest.rs
+
+//! # Manifest Module
+//!
+//! Handles loading, parsing, and managing stack manifests.
+//! A manifest describes the resources that make up a stack and their configurations.
+//!
+//! The primary type is `Manifest`, which represents a parsed stackql_manifest.yml file.
+//! This module also provides types for resources, properties, and other manifest components.
+
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use std::{fs, process};
+
+use log::{debug, error};
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// Errors that can occur when working with manifests.
+#[derive(Error, Debug)]
+pub enum ManifestError {
+ #[error("Failed to read manifest file: {0}")]
+ FileReadError(#[from] std::io::Error),
+
+ #[error("Failed to parse manifest: {0}")]
+ ParseError(#[from] serde_yaml::Error),
+
+ #[error("Missing required field: {0}")]
+ MissingField(String),
+
+ #[error("Invalid field: {0}")]
+ InvalidField(String),
+}
+
+/// Type alias for ManifestResult
+pub type ManifestResult = Result;
+
+/// Represents a stack manifest file.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct Manifest {
+ /// Version of the manifest format
+ #[serde(default = "default_version")]
+ pub version: u32,
+
+ /// Name of the stack
+ pub name: String,
+
+ /// Description of the stack
+ #[serde(default)]
+ pub description: String,
+
+ /// List of providers used by the stack
+ pub providers: Vec,
+
+ /// Global variables for the stack
+ #[serde(default)]
+ pub globals: Vec,
+
+ /// Resources in the stack
+ #[serde(default)]
+ pub resources: Vec,
+}
+
+/// Default version for manifest when not specified
+fn default_version() -> u32 {
+ 1
+}
+
/// Represents a global variable in the manifest.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct GlobalVar {
    /// Name of the global variable
    pub name: String,

    /// Value of the global variable - can be a string or a complex structure
    /// (defaults to YAML null when omitted)
    #[serde(default)]
    pub value: serde_yaml::Value,

    /// Optional description
    #[serde(default)]
    pub description: String,
}
+
+/// Represents a resource in the manifest.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct Resource {
+ /// Name of the resource
+ pub name: String,
+
+ /// Type of the resource (defaults to "resource")
+ #[serde(default = "default_resource_type")]
+ pub r#type: String,
+
+ /// Custom file name for resource queries (if not derived from name)
+ #[serde(default)]
+ pub file: Option,
+
+ /// Properties for the resource
+ #[serde(default)]
+ pub props: Vec,
+
+ /// Exports from the resource
+ #[serde(default)]
+ pub exports: Vec,
+
+ /// Protected exports
+ #[serde(default)]
+ pub protected: Vec,
+
+ /// Description of the resource
+ #[serde(default)]
+ pub description: String,
+
+ /// Condition for resource processing
+ #[serde(default)]
+ pub r#if: Option,
+}
+
+/// Default resource type value
+fn default_resource_type() -> String {
+ "resource".to_string()
+}
+
+/// Represents a property of a resource.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct Property {
+ /// Name of the property
+ pub name: String,
+
+ /// Value of the property - can be a string or a complex structure
+ #[serde(default)]
+ pub value: Option,
+
+ /// Environment-specific values
+ #[serde(default)]
+ pub values: Option>,
+
+ /// Description of the property
+ #[serde(default)]
+ pub description: String,
+
+ /// Items to merge with the value
+ #[serde(default)]
+ pub merge: Option>,
+}
+
+/// Represents a value for a property in a specific environment.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct PropertyValue {
+ /// Value for the property in this environment - can be a string or complex structure
+ pub value: serde_yaml::Value,
+}
+
+impl Manifest {
+ /// Loads a manifest file from the specified path.
+ pub fn load_from_file(path: &Path) -> ManifestResult {
+ let content = fs::read_to_string(path)?;
+ let manifest: Manifest = serde_yaml::from_str(&content)?;
+
+ // Validate the manifest
+ manifest.validate()?;
+
+ Ok(manifest)
+ }
+
+ /// Loads a manifest file from the specified stack directory.
+ pub fn load_from_stack_dir(stack_dir: &Path) -> ManifestResult {
+ let manifest_path = stack_dir.join("stackql_manifest.yml");
+ Self::load_from_file(&manifest_path)
+ }
+
+ /// Validates the manifest for required fields and correctness.
+ fn validate(&self) -> ManifestResult<()> {
+ // Check required fields
+ if self.name.is_empty() {
+ return Err(ManifestError::MissingField("name".to_string()));
+ }
+
+ if self.providers.is_empty() {
+ return Err(ManifestError::MissingField("providers".to_string()));
+ }
+
+ // Validate each resource
+ for resource in &self.resources {
+ if resource.name.is_empty() {
+ return Err(ManifestError::MissingField("resource.name".to_string()));
+ }
+
+ // Validate properties
+ for prop in &resource.props {
+ if prop.name.is_empty() {
+ return Err(ManifestError::MissingField("property.name".to_string()));
+ }
+
+ // Each property must have either a value or values
+ if prop.value.is_none() && prop.values.is_none() {
+ return Err(ManifestError::MissingField(format!(
+ "Property '{}' in resource '{}' has no value or values",
+ prop.name, resource.name
+ )));
+ }
+ }
+
+ // Make sure exports are valid
+ for export in &resource.exports {
+ if export.is_empty() {
+ return Err(ManifestError::InvalidField(format!(
+ "Empty export in resource '{}'",
+ resource.name
+ )));
+ }
+ }
+
+ // Make sure protected exports are a subset of exports
+ for protected in &resource.protected {
+ if !resource.exports.contains(protected) {
+ return Err(ManifestError::InvalidField(format!(
+ "Protected export '{}' not found in exports for resource '{}'",
+ protected, resource.name
+ )));
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Gets the resource query file path for a resource.
+ pub fn get_resource_query_path(&self, stack_dir: &Path, resource: &Resource) -> PathBuf {
+ let file_name = match &resource.file {
+ Some(file) => file.clone(),
+ _none => format!("{}.iql", resource.name),
+ };
+
+ stack_dir.join("resources").join(file_name)
+ }
+
+ /// Gets the value of a property in a specific environment.
+ pub fn get_property_value<'a>(
+ property: &'a Property,
+ env: &str,
+ ) -> Option<&'a serde_yaml::Value> {
+ // Direct value takes precedence
+ if let Some(ref value) = property.value {
+ return Some(value);
+ }
+
+ // Fall back to environment-specific values
+ if let Some(ref values) = property.values {
+ if let Some(env_value) = values.get(env) {
+ return Some(&env_value.value);
+ }
+ }
+
+ None
+ }
+
+ /// Finds a resource by name.
+ pub fn find_resource(&self, name: &str) -> Option<&Resource> {
+ self.resources.iter().find(|r| r.name == name)
+ }
+
+ /// Gets global variables as a map of name to YAML value.
+ pub fn globals_as_map(&self) -> HashMap {
+ self.globals
+ .iter()
+ .map(|g| (g.name.clone(), g.value.clone()))
+ .collect()
+ }
+
+ /// Loads a manifest file from the specified stack directory or exits with an error message.
+ pub fn load_from_dir_or_exit(stack_dir: &str) -> Self {
+ debug!("Loading manifest file from stack directory: {}", stack_dir);
+
+ match Self::load_from_stack_dir(Path::new(stack_dir)) {
+ Ok(manifest) => {
+ debug!("Stack name: {}", manifest.name);
+ debug!("Stack description: {}", manifest.description);
+ debug!("Providers: {:?}", manifest.providers);
+ debug!("Resources count: {}", manifest.resources.len());
+ manifest
+ }
+ Err(err) => {
+ error!("Failed to load manifest: {}", err);
+ process::exit(1);
+ }
+ }
+ }
+}
diff --git a/src/resource/mod.rs b/src/resource/mod.rs
index e69de29..9707ca8 100644
--- a/src/resource/mod.rs
+++ b/src/resource/mod.rs
@@ -0,0 +1,40 @@
+// resource/mod.rs
+
+//! # Resource Module
+//!
+//! This module contains functionality for working with resources in a stack.
+//! It includes submodules for manifest handling, operations, queries, and exports.
+//!
+//! Resources are the fundamental building blocks of a stack, and this module
+//! provides the tools needed to load, manipulate, and process them.
+
+// pub mod exports;
+pub mod manifest;
+// pub mod operations;
+// pub mod queries;
+
+// /// Creates a combined error type for resource operations.
+// #[derive(thiserror::Error, Debug)]
+// pub enum ResourceError {
+// #[error("Manifest error: {0}")]
+// Manifest(#[from] manifest::ManifestError),
+
+// #[error("Operation error: {0}")]
+// Operation(#[from] operations::OperationError),
+
+// #[error("Query error: {0}")]
+// Query(#[from] queries::QueryError),
+
+// #[error("Export error: {0}")]
+// Export(#[from] exports::ExportError),
+
+// #[error("I/O error: {0}")]
+// Io(#[from] std::io::Error),
+
+// #[allow(dead_code)]
+// #[error("Other error: {0}")]
+// Other(String),
+// }
+
+// /// Type alias for resource operation results
+// pub type _Result = std::result::Result;
diff --git a/src/resource/operations.rs b/src/resource/operations.rs
new file mode 100644
index 0000000..469dd15
--- /dev/null
+++ b/src/resource/operations.rs
@@ -0,0 +1,561 @@
+// resource/operations.rs
+
+//! # Resource Operations Module
+//!
+//! Provides functionality for performing operations on resources.
+//! This includes creating, updating, and deleting resources, as well as
+//! checking their existence and state.
+//!
+//! Operations are performed by executing SQL queries against a StackQL server.
+
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt;
+
+use colored::*;
+use postgres::Client;
+
+use crate::resource::manifest::Resource;
+use crate::resource::queries::QueryType;
+use crate::template::context::Context;
+use crate::template::engine::TemplateEngine;
+use crate::utils::query::{execute_query, QueryResult};
+
/// Errors that can occur during resource operations.
#[derive(Debug)]
pub enum OperationError {
    /// Query execution failed
    QueryError(String),

    /// Resource validation failed
    ValidationError(String),

    /// A query required by the operation is not defined for the resource
    MissingQuery(String),

    /// Operation not supported for resource type
    UnsupportedOperation(String),

    /// State check failed after operation
    StateCheckFailed(String),
}

impl fmt::Display for OperationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            OperationError::QueryError(msg) => write!(f, "Query error: {}", msg),
            OperationError::ValidationError(msg) => write!(f, "Validation error: {}", msg),
            OperationError::MissingQuery(msg) => write!(f, "Missing query: {}", msg),
            OperationError::UnsupportedOperation(msg) => {
                write!(f, "Unsupported operation: {}", msg)
            }
            OperationError::StateCheckFailed(msg) => write!(f, "State check failed: {}", msg),
        }
    }
}

impl Error for OperationError {}

/// Type alias for operation results.
pub type OperationResult<T> = Result<T, OperationError>;
+
/// Result of a resource existence check.
///
/// Returned by `ResourceOperator::check_exists`; `Unknown` is produced when
/// no exists/preflight/statecheck query is configured for the resource.
#[derive(Debug, PartialEq)]
pub enum ExistenceStatus {
    /// Resource exists
    Exists,

    /// Resource does not exist
    NotExists,

    /// Could not determine if resource exists
    Unknown,
}

/// Result of a resource state check.
///
/// Returned by `ResourceOperator::check_state`; `Unknown` is produced when no
/// statecheck/postdeploy query is configured or the result is inconclusive.
#[derive(Debug, PartialEq)]
pub enum StateStatus {
    /// Resource is in the correct state
    Correct,

    /// Resource is not in the correct state
    Incorrect,

    /// Could not determine resource state
    Unknown,
}
+
/// Handles resource operations.
///
/// Bundles the database client, a template engine, and the run-mode flags so
/// each operation (exists/state checks, create/update/delete, exports)
/// renders and executes its query consistently.
pub struct ResourceOperator<'a> {
    /// Database client for query execution
    client: &'a mut Client,

    /// Template engine for rendering queries
    engine: TemplateEngine,

    /// Whether to run in dry-run mode (queries are rendered and printed but
    /// never executed)
    dry_run: bool,

    /// Whether to print rendered queries before execution
    show_queries: bool,
}
+
+impl<'a> ResourceOperator<'a> {
+ /// Creates a new ResourceOperator.
+ pub fn new(client: &'a mut Client, dry_run: bool, show_queries: bool) -> Self {
+ Self {
+ client,
+ engine: TemplateEngine::new(),
+ dry_run,
+ show_queries,
+ }
+ }
+
+ /// Checks if a resource exists.
+ pub fn check_exists(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &Context,
+ ) -> OperationResult {
+ // Try exists query first, then fall back to preflight (for backward compatibility), then statecheck
+ let exists_query = if let Some(query) = queries.get(&QueryType::Exists) {
+ query
+ } else if let Some(query) = queries.get(&QueryType::Preflight) {
+ query
+ } else if let Some(query) = queries.get(&QueryType::StateCheck) {
+ query
+ } else {
+ println!(
+ " {} No exists check configured for [{}]",
+ "ā¹ļø".bright_blue(),
+ resource.name
+ );
+ return Ok(ExistenceStatus::Unknown);
+ };
+
+ let rendered_query = self
+ .engine
+ .render(exists_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ if self.dry_run {
+ println!(
+ " {} Dry run exists check for [{}]:",
+ "š".bright_cyan(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+ return Ok(ExistenceStatus::NotExists); // Assume it doesn't exist in dry run
+ }
+
+ println!(
+ " {} Running exists check for [{}]",
+ "š".bright_cyan(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(result) => match result {
+ QueryResult::Data { columns, rows, .. } => {
+ if rows.is_empty() || columns.is_empty() {
+ return Ok(ExistenceStatus::NotExists);
+ }
+
+ // Check for "count" column with value 1
+ let count_col_idx = columns.iter().position(|c| c.name == "count");
+ if let Some(idx) = count_col_idx {
+ if let Some(row) = rows.first() {
+ if let Some(count) = row.values.get(idx) {
+ if count == "1" {
+ return Ok(ExistenceStatus::Exists);
+ } else {
+ return Ok(ExistenceStatus::NotExists);
+ }
+ }
+ }
+ }
+
+ Ok(ExistenceStatus::NotExists)
+ }
+ _ => Ok(ExistenceStatus::NotExists),
+ },
+ Err(e) => Err(OperationError::QueryError(format!(
+ "Exists check failed: {}",
+ e
+ ))),
+ }
+ }
+
+ /// Checks if a resource is in the correct state.
+ pub fn check_state(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &Context,
+ ) -> OperationResult {
+ let statecheck_query = if let Some(query) = queries.get(&QueryType::StateCheck) {
+ query
+ } else if let Some(query) = queries.get(&QueryType::PostDeploy) {
+ query
+ } else {
+ println!(
+ " {} State check not configured for [{}]",
+ "ā¹ļø".bright_blue(),
+ resource.name
+ );
+ return Ok(StateStatus::Unknown);
+ };
+
+ let rendered_query = self
+ .engine
+ .render(statecheck_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ if self.dry_run {
+ println!(
+ " {} Dry run state check for [{}]:",
+ "š".bright_cyan(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+ return Ok(StateStatus::Correct); // Assume correct state in dry run
+ }
+
+ println!(
+ " {} Running state check for [{}]",
+ "š".bright_cyan(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(result) => match result {
+ QueryResult::Data { columns, rows, .. } => {
+ if rows.is_empty() || columns.is_empty() {
+ return Ok(StateStatus::Incorrect);
+ }
+
+ // Check for "count" column with value 1
+ let count_col_idx = columns.iter().position(|c| c.name == "count");
+ if let Some(idx) = count_col_idx {
+ if let Some(row) = rows.first() {
+ if let Some(count) = row.values.get(idx) {
+ if count == "1" {
+ println!(
+ " {} [{}] is in the desired state",
+ "š".green(),
+ resource.name
+ );
+ return Ok(StateStatus::Correct);
+ } else {
+ println!(
+ " {} [{}] is not in the desired state",
+ "š".yellow(),
+ resource.name
+ );
+ return Ok(StateStatus::Incorrect);
+ }
+ }
+ }
+ }
+
+ println!(
+ " {} Could not determine state for [{}]",
+ "ā ļø".yellow(),
+ resource.name
+ );
+ Ok(StateStatus::Unknown)
+ }
+ _ => {
+ println!(
+ " {} Unexpected result type from state check",
+ "ā ļø".yellow()
+ );
+ Ok(StateStatus::Unknown)
+ }
+ },
+ Err(e) => Err(OperationError::QueryError(format!(
+ "State check failed: {}",
+ e
+ ))),
+ }
+ }
+
+ /// Creates a new resource.
+ pub fn create_resource(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &Context,
+ ) -> OperationResult {
+ // Try createorupdate query first, then fall back to create
+ let create_query = if let Some(query) = queries.get(&QueryType::CreateOrUpdate) {
+ query
+ } else if let Some(query) = queries.get(&QueryType::Create) {
+ query
+ } else {
+ return Err(OperationError::MissingQuery(format!(
+ "No create or createorupdate query for resource '{}'",
+ resource.name
+ )));
+ };
+
+ let rendered_query = self
+ .engine
+ .render(create_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ if self.dry_run {
+ println!(
+ " {} Dry run create for [{}]:",
+ "š§".yellow(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+ return Ok(true); // Pretend success in dry run
+ }
+
+ println!(
+ " {} [{}] does not exist, creating...",
+ "š§".yellow(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(_) => {
+ println!(" {} Resource created successfully", "ā".green());
+ Ok(true)
+ }
+ Err(e) => Err(OperationError::QueryError(format!(
+ "Create operation failed: {}",
+ e
+ ))),
+ }
+ }
+
+ /// Updates an existing resource.
+ pub fn update_resource(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &Context,
+ ) -> OperationResult {
+ let update_query = if let Some(query) = queries.get(&QueryType::Update) {
+ query
+ } else {
+ println!(
+ " {} Update query not configured for [{}], skipping update",
+ "ā¹ļø".bright_blue(),
+ resource.name
+ );
+ return Ok(false);
+ };
+
+ let rendered_query = self
+ .engine
+ .render(update_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ if self.dry_run {
+ println!(
+ " {} Dry run update for [{}]:",
+ "š§".yellow(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+ return Ok(true); // Pretend success in dry run
+ }
+
+ println!(" {} Updating [{}]...", "š§".yellow(), resource.name);
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(_) => {
+ println!(" {} Resource updated successfully", "ā".green());
+ Ok(true)
+ }
+ Err(e) => Err(OperationError::QueryError(format!(
+ "Update operation failed: {}",
+ e
+ ))),
+ }
+ }
+
+ /// Deletes a resource.
+ pub fn delete_resource(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &Context,
+ ) -> OperationResult {
+ let delete_query = if let Some(query) = queries.get(&QueryType::Delete) {
+ query
+ } else {
+ return Err(OperationError::MissingQuery(format!(
+ "No delete query for resource '{}'",
+ resource.name
+ )));
+ };
+
+ let rendered_query = self
+ .engine
+ .render(delete_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ if self.dry_run {
+ println!(
+ " {} Dry run delete for [{}]:",
+ "š§".yellow(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+ return Ok(true); // Pretend success in dry run
+ }
+
+ println!(" {} Deleting [{}]...", "š§".yellow(), resource.name);
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(_) => {
+ println!(" {} Resource deleted successfully", "ā".green());
+ Ok(true)
+ }
+ Err(e) => Err(OperationError::QueryError(format!(
+ "Delete operation failed: {}",
+ e
+ ))),
+ }
+ }
+
+ /// Processes exports from a resource.
+ pub fn process_exports(
+ &mut self,
+ resource: &Resource,
+ queries: &HashMap,
+ context: &mut Context,
+ ) -> OperationResult> {
+ let exports_query = if let Some(query) = queries.get(&QueryType::Exports) {
+ query
+ } else {
+ println!(
+ " {} No exports query for [{}]",
+ "ā¹ļø".bright_blue(),
+ resource.name
+ );
+ return Ok(HashMap::new());
+ };
+
+ let rendered_query = self
+ .engine
+ .render(exports_query, context.get_variables())
+ .map_err(|e| OperationError::QueryError(e.to_string()))?;
+
+ let mut exported_values = HashMap::new();
+
+ if self.dry_run {
+ println!(
+ " {} Dry run exports for [{}]:",
+ "š¦".bright_magenta(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ // Simulate exports in dry run
+ for export in &resource.exports {
+ let value = "".to_string();
+ context
+ .get_variables_mut()
+ .insert(export.clone(), value.clone());
+ exported_values.insert(export.clone(), value);
+ println!(" š¤ Set [{}] to [] in exports", export);
+ }
+
+ return Ok(exported_values);
+ }
+
+ println!(
+ " {} Exporting variables for [{}]",
+ "š¦".bright_magenta(),
+ resource.name
+ );
+ if self.show_queries {
+ println!("{}", rendered_query);
+ }
+
+ match execute_query(&rendered_query, self.client) {
+ Ok(result) => match result {
+ QueryResult::Data { columns, rows, .. } => {
+ if rows.is_empty() {
+ return Err(OperationError::QueryError(
+ "Exports query returned no rows".to_string(),
+ ));
+ }
+
+ let row = &rows[0]; // Typically exports query returns one row
+
+ for (i, col) in columns.iter().enumerate() {
+ if i < row.values.len() && resource.exports.contains(&col.name) {
+ let value = row.values[i].clone();
+
+ if resource.protected.contains(&col.name) {
+ let mask = "*".repeat(value.len());
+ println!(
+ " š Set protected variable [{}] to [{}] in exports",
+ col.name, mask
+ );
+ } else {
+ println!(" š¤ Set [{}] to [{}] in exports", col.name, value);
+ }
+
+ context
+ .get_variables_mut()
+ .insert(col.name.clone(), value.clone());
+ exported_values.insert(col.name.clone(), value);
+ }
+ }
+
+ Ok(exported_values)
+ }
+ _ => Err(OperationError::QueryError(
+ "Unexpected result from exports query".to_string(),
+ )),
+ },
+ Err(e) => Err(OperationError::QueryError(format!(
+ "Exports query failed: {}",
+ e
+ ))),
+ }
+ }
+}
+
/// Unit tests for resource operations.
#[cfg(test)]
mod tests {
    // These would be added in a real implementation to test the operations
    // with a mock database client.
    // TODO(review): `execute_query` is called directly, so testing will
    // require making the query executor injectable (trait or function param).
}
diff --git a/src/resource/queries.rs b/src/resource/queries.rs
new file mode 100644
index 0000000..0768bda
--- /dev/null
+++ b/src/resource/queries.rs
@@ -0,0 +1,339 @@
+// resource/queries.rs
+
+//! # Resource Queries Module
+//!
+//! Handles parsing and managing queries for resources.
+//! Queries are stored in .iql files and include various types like
+//! exists, create, update, delete, and statecheck.
+//!
+//! This module provides functionality for loading query files, parsing queries,
+//! and working with query options.
+
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+use std::str::FromStr;
+
+use thiserror::Error;
+
/// Errors that can occur when working with queries.
#[derive(Error, Debug)]
pub enum QueryError {
    /// Reading the `.iql` query file from disk failed (wraps `std::io::Error`).
    #[error("Failed to read query file: {0}")]
    FileReadError(#[from] std::io::Error),

    /// The file content did not match the expected anchor/query format.
    #[error("Invalid query format: {0}")]
    InvalidFormat(String),

    /// A required query was not present in the parsed set.
    #[error("Missing query: {0}")]
    MissingQuery(String),

    /// An anchor named a query type that is not recognized.
    #[error("Invalid query type: {0}")]
    InvalidType(String),
}
+
+/// Type alias for query results
+pub type QueryResult = Result;
+
/// Types of queries that can be defined in a resource file.
///
/// Derives `Eq` + `Hash` so values can be used as keys in the parsed-query map.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum QueryType {
    /// Check if a resource exists
    Exists,

    /// Preflight check (alias for Exists for backward compatibility)
    Preflight,

    /// Create a new resource
    Create,

    /// Update an existing resource
    Update,

    /// Create or update a resource (idempotent operation)
    CreateOrUpdate,

    /// Check if a resource is in the correct state
    StateCheck,

    /// Post-deployment check (alias for StateCheck for backward compatibility)
    PostDeploy,

    /// Export variables from a resource
    Exports,

    /// Delete a resource
    Delete,

    /// Execute a command
    Command,
}
+
+impl FromStr for QueryType {
+ type Err = QueryError;
+
+ fn from_str(s: &str) -> Result {
+ match s.trim().to_lowercase().as_str() {
+ "exists" => Ok(QueryType::Exists),
+ "preflight" => Ok(QueryType::Preflight),
+ "create" => Ok(QueryType::Create),
+ "update" => Ok(QueryType::Update),
+ "createorupdate" => Ok(QueryType::CreateOrUpdate),
+ "statecheck" => Ok(QueryType::StateCheck),
+ "postdeploy" => Ok(QueryType::PostDeploy),
+ "exports" => Ok(QueryType::Exports),
+ "delete" => Ok(QueryType::Delete),
+ "command" => Ok(QueryType::Command),
+ _ => Err(QueryError::InvalidType(format!(
+ "Unknown query type: {}",
+ s
+ ))),
+ }
+ }
+}
+
/// Per-query execution options parsed from a query anchor
/// (e.g. `/*+ create, retries=3, retry_delay=5 */`).
#[derive(Debug, Clone)]
pub struct QueryOptions {
    /// Number of times to retry the query
    pub retries: u32,

    /// Delay between retries in seconds
    pub retry_delay: u32,

    /// Number of times to retry after deletion
    pub postdelete_retries: u32,

    /// Delay between post-deletion retries in seconds
    pub postdelete_retry_delay: u32,
}

impl Default for QueryOptions {
    /// Defaults: a single attempt with no retry delay; deletions are
    /// polled up to ten times at five-second intervals.
    fn default() -> Self {
        QueryOptions {
            retries: 1,
            retry_delay: 0,
            postdelete_retries: 10,
            postdelete_retry_delay: 5,
        }
    }
}
+
/// Represents a query with its options.
///
/// Built by the parser from one anchor block of an `.iql` file; `sql` is the
/// trimmed text between this anchor and the next.
#[derive(Debug, Clone)]
pub struct Query {
    /// Type of query
    pub query_type: QueryType,

    /// SQL query text
    pub sql: String,

    /// Options for the query (parsed from the anchor, defaults otherwise)
    pub options: QueryOptions,
}
+
+/// Loads queries from a file.
+pub fn load_queries_from_file(path: &Path) -> QueryResult> {
+ let content = fs::read_to_string(path)?;
+ parse_queries_from_content(&content)
+}
+
+/// Parses queries from content.
+pub fn parse_queries_from_content(content: &str) -> QueryResult> {
+ let mut queries = HashMap::new();
+ let mut current_query_type: Option = None;
+ let mut current_options = QueryOptions::default();
+ let mut current_query = String::new();
+
+ let lines: Vec<&str> = content.lines().collect();
+ let mut i = 0;
+
+ while i < lines.len() {
+ let line = lines[i].trim();
+
+ // Check for query anchor
+ if line.starts_with("/*+") && line.contains("*/") {
+ // Store previous query if exists
+ if let Some(query_type) = current_query_type.take() {
+ if !current_query.is_empty() {
+ queries.insert(
+ query_type.clone(),
+ Query {
+ query_type,
+ sql: current_query.trim().to_string(),
+ options: current_options,
+ },
+ );
+ current_query = String::new();
+ current_options = QueryOptions::default();
+ }
+ }
+
+ // Extract new anchor
+ let start = line.find("/*+").unwrap() + 3;
+ let end = line.find("*/").unwrap();
+ let anchor_with_options = &line[start..end].trim();
+
+ // Handle options (like retries=5)
+ let parts: Vec<&str> = anchor_with_options.split(',').collect();
+ if let Ok(query_type) = QueryType::from_str(parts[0].trim()) {
+ current_query_type = Some(query_type);
+
+ // Parse options
+ for part in &parts[1..] {
+ let option_parts: Vec<&str> = part.split('=').collect();
+ if option_parts.len() == 2 {
+ let option_name = option_parts[0].trim();
+ let option_value = option_parts[1].trim();
+
+ if let Ok(value) = option_value.parse::() {
+ match option_name {
+ "retries" => current_options.retries = value,
+ "retry_delay" => current_options.retry_delay = value,
+ "postdelete_retries" => current_options.postdelete_retries = value,
+ "postdelete_retry_delay" => {
+ current_options.postdelete_retry_delay = value
+ }
+ _ => {} // Ignore unknown options
+ }
+ }
+ }
+ }
+ } else {
+ current_query_type = None;
+ }
+ } else if let Some(_) = current_query_type {
+ // Accumulate query content
+ current_query.push_str(line);
+ current_query.push('\n');
+ }
+
+ i += 1;
+ }
+
+ // Store last query if exists
+ if let Some(query_type) = current_query_type {
+ if !current_query.is_empty() {
+ queries.insert(
+ query_type.clone(),
+ Query {
+ query_type,
+ sql: current_query.trim().to_string(),
+ options: current_options,
+ },
+ );
+ }
+ }
+
+ Ok(queries)
+}
+
+/// Gets all queries as a simple map from query type to SQL string.
+pub fn get_queries_as_map(queries: &HashMap) -> HashMap {
+ queries
+ .iter()
+ .map(|(k, v)| (k.clone(), v.sql.clone()))
+ .collect()
+}
+
/// Unit tests for query functionality.
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;
    use tempfile::NamedTempFile;

    /// Builds a temporary `.iql` file with an `exists` query and a `create`
    /// query carrying retry options.
    fn create_test_query_file() -> NamedTempFile {
        let mut file = NamedTempFile::new().unwrap();

        writeln!(file, "/*+ exists */").unwrap();
        writeln!(file, "SELECT COUNT(*) as count FROM aws.ec2.vpc_tags").unwrap();
        writeln!(file, "WHERE region = '{{ region }}';").unwrap();
        writeln!(file).unwrap();
        writeln!(file, "/*+ create, retries=3, retry_delay=5 */").unwrap();
        writeln!(file, "INSERT INTO aws.ec2.vpcs (").unwrap();
        writeln!(file, "  CidrBlock,").unwrap();
        writeln!(file, "  region").unwrap();
        writeln!(file, ")").unwrap();
        writeln!(file, "SELECT ").unwrap();
        writeln!(file, "  '{{ vpc_cidr_block }}',").unwrap();
        writeln!(file, "  '{{ region }}';").unwrap();

        file
    }

    #[test]
    fn test_parse_queries() {
        let file = create_test_query_file();
        let content = fs::read_to_string(file.path()).unwrap();

        let queries = parse_queries_from_content(&content).unwrap();

        // Two anchors, two parsed queries
        assert_eq!(queries.len(), 2);
        assert!(queries.contains_key(&QueryType::Exists));
        assert!(queries.contains_key(&QueryType::Create));

        // Anchor options must be attached to the right query
        let create_query = queries.get(&QueryType::Create).unwrap();
        assert_eq!(create_query.options.retries, 3);
        assert_eq!(create_query.options.retry_delay, 5);
    }

    #[test]
    fn test_query_type_from_str() {
        assert_eq!(QueryType::from_str("exists").unwrap(), QueryType::Exists);
        assert_eq!(QueryType::from_str("create").unwrap(), QueryType::Create);
        assert_eq!(
            QueryType::from_str("createorupdate").unwrap(),
            QueryType::CreateOrUpdate
        );
        assert_eq!(
            QueryType::from_str("statecheck").unwrap(),
            QueryType::StateCheck
        );
        assert_eq!(QueryType::from_str("exports").unwrap(), QueryType::Exports);
        assert_eq!(QueryType::from_str("delete").unwrap(), QueryType::Delete);

        // Case insensitive
        assert_eq!(QueryType::from_str("EXISTS").unwrap(), QueryType::Exists);
        assert_eq!(QueryType::from_str("Create").unwrap(), QueryType::Create);

        // With spaces
        assert_eq!(QueryType::from_str(" exists ").unwrap(), QueryType::Exists);

        // Invalid
        assert!(QueryType::from_str("invalid").is_err());
    }

    #[test]
    fn test_get_queries_as_map() {
        let mut queries = HashMap::new();
        queries.insert(
            QueryType::Exists,
            Query {
                query_type: QueryType::Exists,
                sql: "SELECT COUNT(*) FROM table".to_string(),
                options: QueryOptions::default(),
            },
        );
        queries.insert(
            QueryType::Create,
            Query {
                query_type: QueryType::Create,
                sql: "INSERT INTO table VALUES (1)".to_string(),
                options: QueryOptions::default(),
            },
        );

        let map = get_queries_as_map(&queries);

        assert_eq!(map.len(), 2);
        assert_eq!(
            map.get(&QueryType::Exists).unwrap(),
            "SELECT COUNT(*) FROM table"
        );
        assert_eq!(
            map.get(&QueryType::Create).unwrap(),
            "INSERT INTO table VALUES (1)"
        );
    }
}
+}
diff --git a/src/template/context.rs b/src/template/context.rs
index e69de29..5ae700a 100644
--- a/src/template/context.rs
+++ b/src/template/context.rs
@@ -0,0 +1,229 @@
+// template/context.rs
+
+//! # Template Context Module
+//!
+//! Provides a type for managing template context variables.
+//! The context is used to store variables and their values for template rendering.
+//!
+//! This module also includes functionality for merging contexts, adding/updating
+//! variables, and other context-related operations.
+
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt;
+
/// Error types that can occur during context operations.
#[derive(Debug)]
pub enum ContextError {
    /// Merging contexts failed
    MergeError(String),

    /// Variable not found
    NotFound(String),
}

impl fmt::Display for ContextError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ContextError::MergeError(msg) => write!(f, "Context merge error: {}", msg),
            ContextError::NotFound(var) => write!(f, "Variable not found: {}", var),
        }
    }
}

impl Error for ContextError {}

/// Type alias for context operation results, parameterized over the success type.
pub type ContextResult<T> = Result<T, ContextError>;
+
+/// A context for template rendering.
+///
+/// This stores a mapping of variable names to their string values.
+#[derive(Default, Debug, Clone)]
+pub struct Context {
+ /// The variables in this context
+ variables: HashMap,
+}
+
+impl Context {
+ /// Creates a new empty context.
+ pub fn new() -> Self {
+ Self { variables: HashMap::new() }
+ }
+
+ /// Creates a new context with initial variables.
+ pub fn with_variables(variables: HashMap) -> Self {
+ Self { variables }
+ }
+
+ /// Adds a variable to the context.
+ ///
+ /// If the variable already exists, its value is updated.
+ pub fn add_variable(&mut self, name: String, value: String) {
+ self.variables.insert(name, value);
+ }
+
+ /// Removes a variable from the context.
+ pub fn remove_variable(&mut self, name: &str) -> Option {
+ self.variables.remove(name)
+ }
+
+ /// Gets a variable's value from the context.
+ pub fn get_variable(&self, name: &str) -> Option<&String> {
+ self.variables.get(name)
+ }
+
+ /// Checks if a variable exists in the context.
+ pub fn has_variable(&self, name: &str) -> bool {
+ self.variables.contains_key(name)
+ }
+
+ /// Returns all variables in the context.
+ pub fn get_variables(&self) -> &HashMap {
+ &self.variables
+ }
+
+ /// Creates a mutable reference to the variables.
+ pub fn get_variables_mut(&mut self) -> &mut HashMap {
+ &mut self.variables
+ }
+
+ /// Merges another context into this one.
+ ///
+ /// Variables from the other context will overwrite existing variables
+ /// with the same name in this context.
+ pub fn merge(&mut self, other: &Context) {
+ for (name, value) in &other.variables {
+ self.variables.insert(name.clone(), value.clone());
+ }
+ }
+
+ /// Creates a new context by merging with another context.
+ ///
+ /// This returns a new context without modifying either input context.
+ pub fn merged_with(&self, other: &Context) -> Self {
+ let mut result = self.clone();
+ result.merge(other);
+ result
+ }
+
+ /// Creates a child context that inherits values from this context.
+ ///
+ /// The child context can override values without affecting the parent.
+ pub fn create_child(&self) -> Self {
+ self.clone()
+ }
+
+ /// Adds built-in variables like date/time, unique IDs, etc.
+ ///
+ /// This can be extended in the future with more built-in variables.
+ pub fn add_built_ins(&mut self) {
+ // Add current date and time
+ let now = chrono::Local::now();
+ self.add_variable("current_date".to_string(), now.format("%Y-%m-%d").to_string());
+ self.add_variable("current_time".to_string(), now.format("%H:%M:%S").to_string());
+ self.add_variable("current_datetime".to_string(), now.format("%Y-%m-%d %H:%M:%S").to_string());
+
+ // Add a unique ID
+ let uuid = uuid::Uuid::new_v4().to_string();
+ self.add_variable("uuid".to_string(), uuid);
+ }
+}
+
/// Unit tests for context functionality.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add_and_get_variable() {
        let mut context = Context::new();
        context.add_variable("name".to_string(), "Value".to_string());

        assert_eq!(context.get_variable("name"), Some(&"Value".to_string()));
        assert_eq!(context.get_variable("nonexistent"), None);
    }

    #[test]
    fn test_has_variable() {
        let mut context = Context::new();
        context.add_variable("name".to_string(), "Value".to_string());

        assert!(context.has_variable("name"));
        assert!(!context.has_variable("nonexistent"));
    }

    #[test]
    fn test_remove_variable() {
        let mut context = Context::new();
        context.add_variable("name".to_string(), "Value".to_string());

        // Removal returns the old value and clears the entry
        let removed = context.remove_variable("name");
        assert_eq!(removed, Some("Value".to_string()));
        assert!(!context.has_variable("name"));

        let nonexistent = context.remove_variable("nonexistent");
        assert_eq!(nonexistent, None);
    }

    #[test]
    fn test_context_merge() {
        let mut context1 = Context::new();
        context1.add_variable("var1".to_string(), "Value1".to_string());
        context1.add_variable("common".to_string(), "OriginalValue".to_string());

        let mut context2 = Context::new();
        context2.add_variable("var2".to_string(), "Value2".to_string());
        context2.add_variable("common".to_string(), "NewValue".to_string());

        context1.merge(&context2);

        // The merged-in context wins on key collisions
        assert_eq!(context1.get_variable("var1"), Some(&"Value1".to_string()));
        assert_eq!(context1.get_variable("var2"), Some(&"Value2".to_string()));
        assert_eq!(context1.get_variable("common"), Some(&"NewValue".to_string()));
    }

    #[test]
    fn test_merged_with() {
        let mut context1 = Context::new();
        context1.add_variable("var1".to_string(), "Value1".to_string());

        let mut context2 = Context::new();
        context2.add_variable("var2".to_string(), "Value2".to_string());

        let merged = context1.merged_with(&context2);

        // Original contexts should be unchanged
        assert_eq!(context1.get_variable("var1"), Some(&"Value1".to_string()));
        assert_eq!(context1.get_variable("var2"), None);
        assert_eq!(context2.get_variable("var1"), None);
        assert_eq!(context2.get_variable("var2"), Some(&"Value2".to_string()));

        // Merged context should have both variables
        assert_eq!(merged.get_variable("var1"), Some(&"Value1".to_string()));
        assert_eq!(merged.get_variable("var2"), Some(&"Value2".to_string()));
    }

    #[test]
    fn test_with_initial_variables() {
        let mut variables = HashMap::new();
        variables.insert("var1".to_string(), "Value1".to_string());
        variables.insert("var2".to_string(), "Value2".to_string());

        let context = Context::with_variables(variables);

        assert_eq!(context.get_variable("var1"), Some(&"Value1".to_string()));
        assert_eq!(context.get_variable("var2"), Some(&"Value2".to_string()));
    }

    #[test]
    fn test_add_built_ins() {
        let mut context = Context::new();
        context.add_built_ins();

        // Values are time/uuid dependent, so only presence is asserted
        assert!(context.has_variable("current_date"));
        assert!(context.has_variable("current_time"));
        assert!(context.has_variable("current_datetime"));
        assert!(context.has_variable("uuid"));
    }
}
\ No newline at end of file
diff --git a/src/template/engine.rs b/src/template/engine.rs
index e69de29..fc3eea9 100644
--- a/src/template/engine.rs
+++ b/src/template/engine.rs
@@ -0,0 +1,222 @@
+// template/engine.rs
+
+//! # Template Engine Module
+//!
+//! Provides functionality for rendering templates with variable substitution.
+//! The engine is responsible for taking template strings and replacing variable
+//! placeholders with their corresponding values from a context.
+//!
+//! This implementation supports the Jinja-like syntax using `{{ variable_name }}`.
+
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt;
+
/// Error types that can occur during template rendering.
#[derive(Debug)]
pub enum TemplateError {
    /// Variable not found in context
    VariableNotFound(String),

    /// Syntax error in template
    SyntaxError(String),

    /// Invalid template structure
    InvalidTemplate(String),
}

impl fmt::Display for TemplateError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TemplateError::VariableNotFound(var) => write!(f, "Variable not found: {}", var),
            TemplateError::SyntaxError(msg) => write!(f, "Template syntax error: {}", msg),
            TemplateError::InvalidTemplate(msg) => write!(f, "Invalid template: {}", msg),
        }
    }
}

impl Error for TemplateError {}

/// Type alias for template rendering results, parameterized over the success type.
pub type TemplateResult<T> = Result<T, TemplateError>;

/// A structure that renders templates.
#[derive(Default, Debug)]
pub struct TemplateEngine {
    // Configuration options could be added here in the future
}

impl TemplateEngine {
    /// Creates a new template engine.
    pub fn new() -> Self {
        Self::default()
    }

    /// Renders a template string using the provided context.
    ///
    /// Replaces every `{{ variable_name }}` placeholder with the matching
    /// value from `context`. A lone `{` or `}` is copied through as literal
    /// text, so brace-containing values (e.g. JSON) round-trip untouched.
    ///
    /// # Arguments
    /// * `template` - The template string to render
    /// * `context` - The context containing variable values
    ///
    /// # Errors
    /// * [`TemplateError::VariableNotFound`] if a referenced variable is absent
    /// * [`TemplateError::SyntaxError`] if an opening `{{` is never closed
    pub fn render(
        &self,
        template: &str,
        context: &HashMap<String, String>,
    ) -> TemplateResult<String> {
        let mut result = String::with_capacity(template.len());
        let mut chars = template.chars().peekable();

        while let Some(&c) = chars.peek() {
            if c == '{' {
                // Consume the '{'
                chars.next();

                if chars.peek() == Some(&'{') {
                    // `{{` opens a variable reference; consume the second '{'
                    chars.next();

                    let var_name = self.extract_variable_name(&mut chars)?;

                    match context.get(&var_name) {
                        Some(value) => result.push_str(value),
                        None => return Err(TemplateError::VariableNotFound(var_name)),
                    }
                } else {
                    // Just a regular '{' character
                    result.push('{');
                }
            } else {
                // Regular character, just copy it
                result.push(c);
                chars.next();
            }
        }

        Ok(result)
    }

    /// Extracts a variable name from a character iterator.
    ///
    /// Assumes the opening `{{` has already been consumed; reads until the
    /// closing `}}`. A single `}` is treated as part of the name. The name is
    /// trimmed, so `{{ name }}` and `{{name}}` are equivalent.
    fn extract_variable_name<I>(
        &self,
        chars: &mut std::iter::Peekable<I>,
    ) -> TemplateResult<String>
    where
        I: Iterator<Item = char>,
    {
        let mut var_name = String::new();
        let mut found_closing = false;

        while let Some(c) = chars.next() {
            if c == '}' && chars.peek() == Some(&'}') {
                // Consume the second '}'
                chars.next();
                found_closing = true;
                break;
            }
            var_name.push(c);
        }

        if !found_closing {
            return Err(TemplateError::SyntaxError("Unclosed variable".to_string()));
        }

        Ok(var_name.trim().to_string())
    }

    /// Renders a template with support for conditionals and loops.
    ///
    /// Placeholder for a future implementation; currently always errors.
    #[allow(dead_code)]
    pub fn render_advanced(
        &self,
        _template: &str,
        _context: &HashMap<String, String>,
    ) -> TemplateResult<String> {
        Err(TemplateError::InvalidTemplate(
            "Advanced rendering not implemented yet".to_string(),
        ))
    }
}
+
/// Unit tests for template engine functionality.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_simple_variable_substitution() {
        let engine = TemplateEngine::new();
        let mut context = HashMap::new();
        context.insert("name".to_string(), "World".to_string());

        let result = engine.render("Hello {{ name }}!", &context).unwrap();
        assert_eq!(result, "Hello World!");
    }

    #[test]
    fn test_multiple_variables() {
        let engine = TemplateEngine::new();
        let mut context = HashMap::new();
        context.insert("first".to_string(), "Hello".to_string());
        context.insert("second".to_string(), "World".to_string());

        let result = engine.render("{{ first }} {{ second }}!", &context).unwrap();
        assert_eq!(result, "Hello World!");
    }

    #[test]
    fn test_variable_not_found() {
        let engine = TemplateEngine::new();
        let context = HashMap::new();

        // The error carries the trimmed variable name
        let result = engine.render("Hello {{ name }}!", &context);
        assert!(result.is_err());
        match result {
            Err(TemplateError::VariableNotFound(var)) => assert_eq!(var, "name"),
            _ => panic!("Expected VariableNotFound error"),
        }
    }

    #[test]
    fn test_unclosed_variable() {
        let engine = TemplateEngine::new();
        let mut context = HashMap::new();
        context.insert("name".to_string(), "World".to_string());

        // `{{` without a matching `}}` is a syntax error
        let result = engine.render("Hello {{ name!", &context);
        assert!(result.is_err());
        match result {
            Err(TemplateError::SyntaxError(_)) => {},
            _ => panic!("Expected SyntaxError"),
        }
    }

    #[test]
    fn test_nested_braces() {
        let engine = TemplateEngine::new();
        let mut context = HashMap::new();
        context.insert("json".to_string(), r#"{"key": "value"}"#.to_string());

        // Braces inside an inserted value must round-trip untouched
        let result = engine.render("JSON: {{ json }}", &context).unwrap();
        assert_eq!(result, r#"JSON: {"key": "value"}"#);
    }
}
\ No newline at end of file
diff --git a/src/template/mod.rs b/src/template/mod.rs
index e69de29..8be2d4f 100644
--- a/src/template/mod.rs
+++ b/src/template/mod.rs
@@ -0,0 +1,41 @@
+// template/mod.rs
+
+//! # Template Module
+//!
+//! This module provides functionality for template rendering and context management.
+//! Templates are used throughout the application to render queries and other text
+//! with variable substitution.
+//!
+//! The module includes an engine for rendering templates and a context for managing
+//! variables used in templates.
+
+pub mod engine;
+pub mod context;
+
+// Re-export commonly used types, avoid naming conflicts by using aliases
+pub use engine::TemplateError as EngineTemplateError;
+pub use context::ContextError;
+
+/// Creates a combined error type for template operations.
+#[derive(thiserror::Error, Debug)]
+pub enum TemplateError {
+ #[error("Engine error: {0}")]
+ Engine(#[from] EngineTemplateError),
+
+ #[error("Context error: {0}")]
+ Context(#[from] ContextError),
+
+ #[error("Other error: {0}")]
+ Other(String), // Keep this if you intend to handle generic errors
+}
+
+// Type alias for template operation results
+pub type _TemplateResult = std::result::Result;
+
+// If you don't plan to use `Other`, you can suppress the warning like this:
+#[allow(dead_code)]
+impl TemplateError {
+ pub fn other(msg: &str) -> Self {
+ TemplateError::Other(msg.to_string())
+ }
+}
diff --git a/src/utils/binary.rs b/src/utils/binary.rs
index 34a0fa8..993b62d 100644
--- a/src/utils/binary.rs
+++ b/src/utils/binary.rs
@@ -1,3 +1,26 @@
+// utils/binary.rs
+
+//! # Binary Utility Module
+//!
+//! This module provides utility functions for locating and verifying the `stackql` binary.
+//! It supports checking the binary's presence in the system `PATH` or the current directory
+//! and retrieving the full path to the binary if it exists.
+//!
+//! ## Features
+//! - Checks if the `stackql` binary is available in the system's `PATH`.
+//! - Retrieves the full path of the `stackql` binary from the current directory or `PATH`.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::binary::{binary_exists_in_path, get_binary_path};
+//!
+//! if binary_exists_in_path() {
+//! if let Some(path) = get_binary_path() {
+//! println!("Found stackql binary at: {:?}", path);
+//! }
+//! }
+//! ```
+
use std::env;
use std::path::PathBuf;
use std::process::Command;
diff --git a/src/utils/connection.rs b/src/utils/connection.rs
new file mode 100644
index 0000000..d3ddeee
--- /dev/null
+++ b/src/utils/connection.rs
@@ -0,0 +1,44 @@
+// utils/connection.rs
+
+//! # Connection Utility Module
+//!
+//! This module provides functions for creating a PgwireLite client connection
+//! to the StackQL server. It utilizes global configuration for host and port
+//! and supports error handling during connection attempts.
+//!
+//! ## Features
+//! - Establishes a connection to the StackQL server using `pgwire_lite::PgwireLite`.
+//! - Uses global host and port settings for consistency across the application.
+//! - Handles connection errors and exits the program if unsuccessful.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::connection::create_client;
+//!
+//! let client = create_client();
+//! ```
+
+use std::process;
+
+use colored::*;
+use pgwire_lite::PgwireLite;
+
+use crate::globals::{server_host, server_port};
+
+/// Creates a new PgwireLite client connection
+pub fn create_client() -> PgwireLite {
+ let host = server_host();
+ let port = server_port();
+
+ // Create a new PgwireLite client with the server's host and port
+ // Default to no TLS and default verbosity
+ let client = PgwireLite::new(host, port, false, "default").unwrap_or_else(|e| {
+ eprintln!("{}", format!("Failed to connect to server: {}", e).red());
+ process::exit(1); // Exit the program if connection fails
+ });
+
+ println!("Connected to stackql server at {}:{}", host, port);
+ println!("Using libpq version: {}", client.libpq_version());
+
+ client
+}
diff --git a/src/utils/display.rs b/src/utils/display.rs
index 4e32bd3..e8cf0ae 100644
--- a/src/utils/display.rs
+++ b/src/utils/display.rs
@@ -1,6 +1,31 @@
-use colored::*;
+// utils/display.rs
+
+//! # Display Utility Module
+//!
+//! This module provides utility functions for rendering messages with various styles
+//! including Unicode-styled message boxes and color-coded output for errors, success messages, and informational messages.
+//! It leverages the `colored` crate for styling and `unicode_width` crate for handling Unicode text width.
+//!
+//! ## Features
+//! - Unicode-styled message boxes with proper alignment for emojis and wide characters.
+//! - Color-coded messages for errors, successes, and informational outputs.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::display::print_unicode_box;
+//!
+//! print_unicode_box("š Initializing application...");
+//! print_error!("Failed to connect to the server.");
+//! print_success!("Operation completed successfully.");
+//! print_info!("Fetching data...");
+//! ```
+
+use log::debug;
use unicode_width::UnicodeWidthStr;
+use crate::commands::common_args::CommonCommandArgs;
+use clap::ArgMatches;
+
/// Utility function to print a Unicode-styled message box
/// that correctly handles the width of emojis and other wide characters
pub fn print_unicode_box(message: &str) {
@@ -38,18 +63,46 @@ pub fn print_unicode_box(message: &str) {
println!("{}", bottom_border);
}
-/// Print an error message in red
-pub fn print_error(message: &str) {
- eprintln!("{}", message.red());
/// Prints a formatted info message to stdout in blue.
/// Accepts the same arguments as `println!`.
#[macro_export]
macro_rules! print_info {
    ($($arg:tt)*) => {{
        use colored::Colorize;
        println!("{}", format!($($arg)*).blue())
    }};
}
+
/// Prints a formatted error message to stderr in red.
/// Accepts the same arguments as `eprintln!`.
#[macro_export]
macro_rules! print_error {
    ($($arg:tt)*) => {{
        use colored::Colorize;
        eprintln!("{}", format!($($arg)*).red())
    }};
}
-/// Print a success message in green
-#[allow(dead_code)]
-pub fn print_success(message: &str) {
- println!("{}", message.green());
/// Prints a formatted success message to stdout in green.
/// Accepts the same arguments as `println!`.
#[macro_export]
macro_rules! print_success {
    ($($arg:tt)*) => {{
        use colored::Colorize;
        println!("{}", format!($($arg)*).green())
    }};
}
-/// Print an info message in blue
-pub fn print_info(message: &str) {
- println!("{}", message.blue());
+/// Log common command arguments at debug level
+pub fn log_common_command_args(args: &CommonCommandArgs, matches: &ArgMatches) {
+ debug!("Stack Directory: {}", args.stack_dir);
+ debug!("Stack Environment: {}", args.stack_env);
+ debug!("Log Level: {}", args.log_level);
+ debug!("Environment File: {}", args.env_file);
+
+ // Log environment variables if present
+ if let Some(vars) = matches.get_many::("env") {
+ debug!("Environment Variables:");
+ for var in vars {
+ debug!(" - {}", var);
+ }
+ }
+
+ debug!("Dry Run: {}", args.dry_run);
+ debug!("Show Queries: {}", args.show_queries);
+ debug!("On Failure: {:?}", args.on_failure);
}
diff --git a/src/utils/download.rs b/src/utils/download.rs
index fba0401..80dbd61 100644
--- a/src/utils/download.rs
+++ b/src/utils/download.rs
@@ -1,23 +1,50 @@
-use crate::error::AppError;
-use crate::utils::display::print_info;
-use crate::utils::platform::{get_platform, Platform};
-use indicatif::{ProgressBar, ProgressStyle};
-use reqwest::blocking::Client;
+// utils/download.rs
+
+//! # Download Utility Module
+//!
+//! This module provides functions for downloading, extracting, and setting up the StackQL binary.
+//! It supports various platforms including Linux, Windows, and macOS, handling differences in
+//! extraction methods and permissions.
+//!
+//! ## Features
+//! - Downloads the StackQL binary from a predefined URL.
+//! - Supports progress tracking during download.
+//! - Extracts the binary on various platforms (Windows, Linux, macOS).
+//! - Sets executable permissions on Unix-like systems.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::download::download_binary;
+//!
+//! match download_binary() {
+//! Ok(path) => println!("Binary downloaded to: {}", path.display()),
+//! Err(e) => eprintln!("Failed to download binary: {}", e),
+//! }
+//! ```
+
use std::fs::{self, File};
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
+
+use indicatif::{ProgressBar, ProgressStyle};
+use log::debug;
+use reqwest::blocking::Client;
use zip::ZipArchive;
+use crate::app::STACKQL_DOWNLOAD_URL;
+use crate::error::AppError;
+use crate::utils::platform::{get_platform, Platform};
+
+/// Retrieves the URL for downloading the StackQL binary.
pub fn get_download_url() -> Result {
- match get_platform() {
- Platform::Linux => Ok("https://releases.stackql.io/stackql/latest/stackql_linux_amd64.zip".to_string()),
- Platform::Windows => Ok("https://releases.stackql.io/stackql/latest/stackql_windows_amd64.zip".to_string()),
- Platform::MacOS => Ok("https://storage.googleapis.com/stackql-public-releases/latest/stackql_darwin_multiarch.pkg".to_string()),
- Platform::Unknown => Err(AppError::CommandFailed("Unsupported OS".to_string())),
- }
+ Ok(STACKQL_DOWNLOAD_URL.to_string())
}
+/// Downloads the StackQL binary and extracts it to the current directory.
+///
+/// This function downloads the StackQL binary from a URL and unzips it if necessary.
+/// It also sets executable permissions on Unix-like systems.
pub fn download_binary() -> Result {
let download_url = get_download_url()?;
let current_dir = std::env::current_dir().map_err(AppError::IoError)?;
@@ -30,7 +57,7 @@ pub fn download_binary() -> Result {
let archive_path = current_dir.join(&archive_name);
// Download the file with progress bar
- print_info(&format!("Downloading from {}", download_url));
+ debug!("Downloading from {}", download_url);
let client = Client::new();
let response = client
.get(&download_url)
@@ -55,7 +82,7 @@ pub fn download_binary() -> Result {
progress_bar.finish_with_message("Download complete");
// Extract the file based on platform
- print_info("Extracting the binary...");
+ debug!("Extracting the binary...");
    let binary_path = extract_binary(&archive_path, &current_dir, &binary_name)?;
// Clean up the archive
@@ -72,13 +99,14 @@ pub fn download_binary() -> Result {
})?;
}
- print_info(&format!(
+ debug!(
"StackQL executable successfully installed at: {}",
binary_path.display()
- ));
+ );
Ok(binary_path)
}
+/// Extracts the StackQL binary from an archive.
fn extract_binary(
archive_path: &Path,
dest_dir: &Path,
@@ -102,11 +130,6 @@ fn extract_binary(
.output()
.map_err(|e| AppError::CommandFailed(format!("Failed to extract pkg: {}", e)))?;
- // Find and copy the binary
- // This might need adjustment based on the actual structure of the pkg
- // Typically you'd need to look for the binary in the expanded package
-
- // Example (adjust paths as needed):
let extracted_binary = unpacked_dir
.join("payload")
.join("usr")
@@ -132,7 +155,7 @@ fn extract_binary(
let outpath = match file.enclosed_name() {
Some(path) => dest_dir.join(path),
- None => continue,
+ _none => continue,
};
if file.name().ends_with('/') {
diff --git a/src/utils/logging.rs b/src/utils/logging.rs
new file mode 100644
index 0000000..b2a5674
--- /dev/null
+++ b/src/utils/logging.rs
@@ -0,0 +1,96 @@
+// utils/logging.rs
+
+use chrono::Local;
+use env_logger::{Builder, Env};
+use log::LevelFilter;
+use std::io::Write;
+use std::path::Path;
+
+/// Colors for different log levels when printing to the terminal
+struct LevelColors;
+
+impl LevelColors {
+ // ANSI color codes
+ const RED: &'static str = "\x1B[31m";
+ const YELLOW: &'static str = "\x1B[33m";
+ const GREEN: &'static str = "\x1B[32m";
+ const CYAN: &'static str = "\x1B[36m";
+ const MAGENTA: &'static str = "\x1B[35m";
+ const RESET: &'static str = "\x1B[0m";
+
+ /// Get the color code for a given log level
+ fn get_color(level: log::Level) -> &'static str {
+ match level {
+ log::Level::Error => Self::RED,
+ log::Level::Warn => Self::YELLOW,
+ log::Level::Info => Self::GREEN,
+ log::Level::Debug => Self::CYAN,
+ log::Level::Trace => Self::MAGENTA,
+ }
+ }
+}
+
+/// Initializes the logger with a specified log level.
+///
+/// Formats logs as follows:
+/// - Standard: [timestamp LEVEL stackql_deploy] message
+/// - Debug/Trace: [timestamp LEVEL file_name (line_num)] message
+///
+/// Log levels are color-coded in the terminal output.
+pub fn initialize_logger(log_level: &str) {
+ let level = match log_level.to_lowercase().as_str() {
+ "error" => LevelFilter::Error,
+ "warn" => LevelFilter::Warn,
+ "info" => LevelFilter::Info,
+ "debug" => LevelFilter::Debug,
+ "trace" => LevelFilter::Trace,
+ _ => LevelFilter::Info,
+ };
+
+ let mut builder = Builder::from_env(Env::default());
+
+ builder.format(|buf, record| {
+ let timestamp = Local::now().format("%Y-%m-%dT%H:%M:%SZ");
+ let level_str = record.level();
+ let color = LevelColors::get_color(level_str);
+ let reset = LevelColors::RESET;
+
+ if record.level() <= log::Level::Info {
+ // For info, warn, error: [timestamp LEVEL stackql_deploy] message
+ writeln!(
+ buf,
+ "[{} {}{}{} stackql_deploy] {}",
+ timestamp,
+ color,
+ level_str,
+ reset,
+ record.args()
+ )
+ } else {
+ // For debug, trace: [timestamp LEVEL file_name (line_num)] message
+ let file = record.file().unwrap_or("");
+ let file_name = Path::new(file)
+ .file_name()
+ .and_then(|f| f.to_str())
+ .unwrap_or(file);
+
+ writeln!(
+ buf,
+ "[{} {}{}{} {} ({})] {}",
+ timestamp,
+ color,
+ level_str,
+ reset,
+ file_name,
+ record.line().unwrap_or(0),
+ record.args()
+ )
+ }
+ });
+
+ // Set the default log level
+ builder.filter_level(level);
+
+ // Initialize the logger
+ builder.init();
+}
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index f9e0251..7af634d 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -1,6 +1,8 @@
pub mod binary;
+pub mod connection;
pub mod display;
pub mod download;
+pub mod logging;
pub mod platform;
pub mod query;
pub mod server;
diff --git a/src/utils/platform.rs b/src/utils/platform.rs
index ce93c3e..3faaba4 100644
--- a/src/utils/platform.rs
+++ b/src/utils/platform.rs
@@ -1,3 +1,28 @@
+// utils/platform.rs
+
+//! # Platform Utility Module
+//!
+//! This module provides utilities for detecting the operating system platform
+//! and retrieving the appropriate binary name for the `stackql` application.
+//!
+//! ## Features
+//! - Detects the current operating system (Windows, macOS, Linux).
+//! - Returns the platform-specific `stackql` binary name.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::platform::{get_platform, get_binary_name, Platform};
+//!
+//! let platform = get_platform();
+//! let binary_name = get_binary_name();
+//!
+//! println!("Platform: {:?}", platform);
+//! println!("Binary Name: {}", binary_name);
+//! ```
+
+use crate::app::STACKQL_BINARY_NAME;
+
+/// Enum representing supported platforms.
#[derive(Debug, PartialEq)]
pub enum Platform {
Windows,
@@ -21,8 +46,5 @@ pub fn get_platform() -> Platform {
/// Get the appropriate binary name based on platform
pub fn get_binary_name() -> String {
- match get_platform() {
- Platform::Windows => "stackql.exe".to_string(),
- _ => "stackql".to_string(),
- }
+ STACKQL_BINARY_NAME.to_string()
}
diff --git a/src/utils/query.rs b/src/utils/query.rs
index 6bed00f..5159d53 100644
--- a/src/utils/query.rs
+++ b/src/utils/query.rs
@@ -1,80 +1,134 @@
-use crate::utils::server::{is_server_running, start_server, ServerOptions};
-use postgres::{Client, NoTls};
+// utils/query.rs
+//! # Query Utility Module
+//!
+//! This module provides functions and data structures for executing SQL queries
+//! against a PgwireLite client. It supports processing query results and
+//! formatting them into various representations (rows, columns, notices).
+//!
+//! ## Features
+//! - Executes SQL queries using `pgwire_lite::PgwireLite`.
+//! - Formats query results into structured data (columns, rows, notices).
+//! - Supports different query result types: Data, Command, and Empty.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::query::{execute_query, QueryResult};
+//! use pgwire_lite::PgwireLite;
+//!
+//! let mut client = PgwireLite::new("localhost", 5432, false, "default").unwrap();
+//! let result = execute_query("SELECT * FROM my_table;", &mut client).unwrap();
+//!
+//! match result {
+//! QueryResult::Data { columns, rows, .. } => println!("Received data with {} rows.", rows.len()),
+//! QueryResult::Command(cmd) => println!("Command executed: {}", cmd),
+//! QueryResult::Empty => println!("Query executed successfully with no result."),
+//! }
+//! ```
+
+use pgwire_lite::{PgwireLite, Value};
+
+/// Represents a column in a query result.
pub struct QueryResultColumn {
pub name: String,
}
+/// Represents a row in a query result.
pub struct QueryResultRow {
    pub values: Vec<String>,
}
+/// Enum representing the possible results of a query execution.
pub enum QueryResult {
Data {
        columns: Vec<QueryResultColumn>,
        rows: Vec<QueryResultRow>,
- #[allow(dead_code)]
        notices: Vec<String>,
},
Command(String),
Empty,
}
-pub fn execute_query(query: &str, port: u16) -> Result<QueryResult, String> {
- if !is_server_running(port) {
- let options = ServerOptions {
- port,
- ..Default::default()
- };
- start_server(&options).map_err(|e| format!("Failed to start server: {}", e))?;
- }
-
- let connection_string = format!(
- "host=localhost port={} user=postgres dbname=stackql application_name=stackql",
- port
- );
- let mut client = Client::connect(&connection_string, NoTls)
- .map_err(|e| format!("Failed to connect to server: {}", e))?;
-
- match client.simple_query(query) {
- Ok(results) => {
- let mut columns = Vec::new();
- let mut rows = Vec::new();
- let mut command_message = String::new();
+/// Executes an SQL query and returns the result in a structured format.
+pub fn execute_query(query: &str, client: &mut PgwireLite) -> Result<QueryResult, String> {
+ match client.query(query) {
+ Ok(result) => {
+ // Convert column names to QueryResultColumn structs
+        let columns: Vec<QueryResultColumn> = result
+ .column_names
+ .iter()
+ .map(|name| QueryResultColumn { name: name.clone() })
+ .collect();
- for result in results {
- match result {
- postgres::SimpleQueryMessage::Row(row) => {
- if columns.is_empty() {
- for i in 0..row.len() {
- columns.push(QueryResultColumn {
- name: row.columns()[i].name().to_string(),
- });
+ // Convert rows to QueryResultRow structs
+        let rows: Vec<QueryResultRow> = result
+ .rows
+ .iter()
+ .map(|row_map| {
+                let values: Vec<String> = columns
+ .iter()
+ .map(|col| {
+ match row_map.get(&col.name) {
+ Some(Value::String(s)) => s.clone(),
+ Some(Value::Null) => "NULL".to_string(),
+ Some(Value::Bool(b)) => b.to_string(),
+ Some(Value::Integer(i)) => i.to_string(),
+ Some(Value::Float(f)) => f.to_string(),
+ Some(_) => "UNKNOWN_TYPE".to_string(), // For any future value types
+ None => "NULL".to_string(),
}
- }
+ })
+ .collect();
- let row_values = (0..row.len())
- .map(|i| row.get(i).unwrap_or("NULL").to_string())
- .collect();
+ QueryResultRow { values }
+ })
+ .collect();
- rows.push(QueryResultRow { values: row_values });
+ // Convert notices to strings
+        let notices: Vec<String> = result
+ .notices
+ .iter()
+ .map(|notice| {
+ // Get the basic message
+ let mut notice_text = notice
+ .fields
+ .get("message")
+ .cloned()
+ .unwrap_or_else(|| "Unknown notice".to_string());
+
+ // Add detail if available
+ if let Some(detail) = notice.fields.get("detail") {
+ notice_text.push_str("\nDETAIL: ");
+ notice_text.push_str(detail);
}
- postgres::SimpleQueryMessage::CommandComplete(cmd) => {
- command_message = cmd.to_string();
+
+ // Add hint if available
+ if let Some(hint) = notice.fields.get("hint") {
+ notice_text.push_str("\nHINT: ");
+ notice_text.push_str(hint);
}
- _ => {}
- }
- }
- if !columns.is_empty() {
+ notice_text
+ })
+ .collect();
+
+ // Determine the type of result based on rows, notices, and data
+ if !rows.is_empty() || !notices.is_empty() {
+ // If we have rows OR notices, it's a data result
Ok(QueryResult::Data {
columns,
rows,
- notices: vec![],
+ notices,
})
- } else if !command_message.is_empty() {
+ } else if result.row_count > 0 {
+ // If row_count > 0 but no rows, it was a command that affected rows
+ let command_message = format!(
+ "Command completed successfully (affected {} rows)",
+ result.row_count
+ );
Ok(QueryResult::Command(command_message))
} else {
+ // Otherwise it's an empty result
Ok(QueryResult::Empty)
}
}
diff --git a/src/utils/server.rs b/src/utils/server.rs
index 8eb00d7..97c50f2 100644
--- a/src/utils/server.rs
+++ b/src/utils/server.rs
@@ -1,201 +1,255 @@
-use crate::utils::binary::get_binary_path;
-use colored::*;
+// utils/server.rs
+
+//! # Server Utility Module
+//!
+//! This module provides utilities for starting, stopping, and managing StackQL server instances.
+//! It supports detecting running servers, extracting process information, and managing server lifecycles
+//! with functionalities to start, stop, and check server status across multiple platforms (Windows, Linux, macOS).
+//!
+//! ## Features
+//! - Start a StackQL server on a specified host and port.
+//! - Check if a server is running.
+//! - Retrieve running servers by scanning processes.
+//! - Stop a server by process ID (PID).
+//! - Automatically detect and manage servers running on local or remote hosts.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::server::{check_and_start_server, start_server, stop_server, StartServerOptions};
+//!
+//! let options = StartServerOptions {
+//! host: "localhost".to_string(),
+//! port: 5444,
+//! ..Default::default()
+//! };
+//!
+//! match start_server(&options) {
+//! Ok(pid) => println!("Server started with PID: {}", pid),
+//! Err(e) => eprintln!("Failed to start server: {}", e),
+//! }
+//! ```
+
use std::fs::OpenOptions;
use std::path::Path;
+use std::process;
use std::process::{Command as ProcessCommand, Stdio};
use std::thread;
use std::time::Duration;
-pub struct ServerOptions {
+// use clap::error;
+use log::{error, info, warn};
+
+// use colored::*;
+
+use crate::app::{DEFAULT_LOG_FILE, LOCAL_SERVER_ADDRESSES};
+use crate::globals::{server_host, server_port};
+use crate::utils::binary::get_binary_path;
+
+/// Options for starting a StackQL server
+pub struct StartServerOptions {
+ pub host: String,
pub port: u16,
    pub registry: Option<String>,
- pub additional_args: Vec,
+    pub mtls_config: Option<String>,
+    pub custom_auth_config: Option<String>,
+    pub log_level: Option<String>,
}
-impl Default for ServerOptions {
+impl Default for StartServerOptions {
fn default() -> Self {
Self {
- port: 5444,
+ host: "localhost".to_string(),
+ port: crate::app::DEFAULT_SERVER_PORT,
registry: None,
- additional_args: Vec::new(),
+ mtls_config: None,
+ custom_auth_config: None,
+ log_level: None,
}
}
}
-/// Check if the stackql server is running
+/// Represents a running StackQL server process
+pub struct RunningServer {
+ pub pid: u32,
+ pub port: u16,
+}
+
+/// Check if the stackql server is running on a specific port
pub fn is_server_running(port: u16) -> bool {
- // Check using process name and port
+ find_all_running_servers()
+ .iter()
+ .any(|server| server.port == port)
+}
+
+/// Find all stackql servers that are running and their ports
+pub fn find_all_running_servers() -> Vec {
+ let mut running_servers = Vec::new();
+
if cfg!(target_os = "windows") {
let output = ProcessCommand::new("tasklist")
.output()
.unwrap_or_else(|_| panic!("Failed to execute tasklist"));
let output_str = String::from_utf8_lossy(&output.stdout);
- output_str.contains("stackql") && output_str.contains(&port.to_string())
- } else {
- // Try multiple pattern variations to be more robust
- let patterns = [
- format!("stackql.*--pgsrv.port {}", port),
- format!("stackql.*--pgsrv.port={}", port),
- format!("stackql.*pgsrv.port {}", port),
- format!("stackql.*pgsrv.port={}", port),
- ];
-
- for pattern in patterns {
- let output = ProcessCommand::new("pgrep")
- .arg("-f")
- .arg(&pattern)
- .output();
-
- if let Ok(output) = output {
- if !output.stdout.is_empty() {
- return true;
+
+ for line in output_str.lines() {
+ if line.contains("stackql") {
+ if let Some(port) = extract_port_from_windows_tasklist(line) {
+ if let Some(pid) = extract_pid_from_windows_tasklist(line) {
+ running_servers.push(RunningServer { pid, port });
+ }
}
}
}
-
- // Fallback: Just check for any stackql process
+ } else {
let output = ProcessCommand::new("pgrep")
.arg("-f")
.arg("stackql")
- .output();
-
- if let Ok(output) = output {
- if !output.stdout.is_empty() {
- // Further check if this is likely our server by examining the process details
- let stdout_content = String::from_utf8_lossy(&output.stdout);
- let pid = stdout_content.trim();
-
- let ps_output = ProcessCommand::new("ps")
- .arg("-p")
- .arg(pid)
- .arg("-o")
- .arg("args")
- .output();
-
- if let Ok(ps_output) = ps_output {
- let ps_str = String::from_utf8_lossy(&ps_output.stdout);
- return ps_str.contains(&port.to_string()) && ps_str.contains("srv");
+ .output()
+ .unwrap_or_else(|_| panic!("Failed to execute pgrep"));
+
+ if !output.stdout.is_empty() {
+ let pids_str = String::from_utf8_lossy(&output.stdout).to_string();
+            let pids = pids_str.trim().split('\n').collect::<Vec<&str>>();
+
+ for pid_str in pids {
+                if let Ok(pid) = pid_str.trim().parse::<u32>() {
+ if let Some(port) = extract_port_from_ps(pid_str) {
+ running_servers.push(RunningServer { pid, port });
+ }
}
}
}
+ }
+
+ running_servers
+}
- false
+/// Extract port from process information on Unix-like systems using `ps`
+fn extract_port_from_ps(pid: &str) -> Option<u16> {
+ let ps_output = ProcessCommand::new("ps")
+ .arg("-p")
+ .arg(pid)
+ .arg("-o")
+ .arg("args")
+ .output()
+ .ok()?;
+
+ let ps_str = String::from_utf8_lossy(&ps_output.stdout);
+
+ let patterns = [
+ "--pgsrv.port=",
+ "--pgsrv.port ",
+ "pgsrv.port=",
+ "pgsrv.port ",
+ ];
+ for pattern in patterns.iter() {
+ if let Some(start_index) = ps_str.find(pattern) {
+ let port_start = start_index + pattern.len();
+ let port_end = ps_str[port_start..]
+ .split_whitespace()
+ .next()
+ .unwrap_or("")
+ .trim();
+
+            if let Ok(port) = port_end.parse::<u16>() {
+ return Some(port);
+ }
+ }
}
+
+ None
}
-/// Get the PID of the running stackql server
-pub fn get_server_pid(port: u16) -> Option<u32> {
- if cfg!(target_os = "windows") {
- let output = ProcessCommand::new("wmic")
- .arg("process")
- .arg("where")
- .arg(format!(
- "CommandLine like '%stackql%--pgsrv.port={}%'",
- port
- ))
- .arg("get")
- .arg("ProcessId")
- .output()
- .ok()?;
+/// Extract PID from process information on Windows
+fn extract_pid_from_windows_tasklist(line: &str) -> Option<u32> {
+ line.split_whitespace()
+        .filter_map(|s| s.parse::<u32>().ok())
+ .next()
+}
- let output_str = String::from_utf8_lossy(&output.stdout);
- let lines: Vec<&str> = output_str.lines().collect();
- if lines.len() >= 2 {
-            lines[1].trim().parse::<u32>().ok()
- } else {
- None
- }
+/// Extract port from process information on Windows
+fn extract_port_from_windows_tasklist(line: &str) -> Option<u16> {
+    if let Some(port_str) = line.split_whitespace().find(|&s| s.parse::<u16>().is_ok()) {
+ port_str.parse().ok()
} else {
- // For Linux/macOS, let's try multiple pattern variations
- let patterns = [
- format!("stackql.*--pgsrv.port {}", port),
- format!("stackql.*--pgsrv.port={}", port),
- format!("stackql.*pgsrv.port {}", port),
- format!("stackql.*pgsrv.port={}", port),
- ];
-
- for pattern in patterns {
- let output = ProcessCommand::new("pgrep")
- .arg("-f")
- .arg(&pattern)
- .output()
- .ok()?;
-
- if !output.stdout.is_empty() {
- let stdout_content = String::from_utf8_lossy(&output.stdout);
- let pid_str = stdout_content.trim();
-                if let Ok(pid) = pid_str.parse::<u32>() {
- return Some(pid);
- }
- }
- }
+ None
+ }
+}
- // Try a more general approach to find the stackql server
+/// Get the PID of the running stackql server on a specific port
+pub fn get_server_pid(port: u16) -> Option<u32> {
+ let patterns = [
+ format!("stackql.*--pgsrv.port={}", port),
+ format!("stackql.*--pgsrv.port {}", port),
+ format!("stackql.*pgsrv.port={}", port),
+ format!("stackql.*pgsrv.port {}", port),
+ ];
+
+ for pattern in &patterns {
let output = ProcessCommand::new("pgrep")
.arg("-f")
- .arg("stackql.*srv")
+ .arg(pattern)
.output()
.ok()?;
if !output.stdout.is_empty() {
let stdout_content = String::from_utf8_lossy(&output.stdout);
let pid_str = stdout_content.trim();
-            pid_str.parse::<u32>().ok()
- } else {
- None
+            if let Ok(pid) = pid_str.parse::<u32>() {
+ return Some(pid);
+ }
}
}
+
+ None
}
/// Start the stackql server with the given options
-pub fn start_server(options: &ServerOptions) -> Result<u32, String> {
+pub fn start_server(options: &StartServerOptions) -> Result<u32, String> {
let binary_path = match get_binary_path() {
Some(path) => path,
- _none => return Err("StackQL binary not found".to_string()),
+ _none => return Err("stackql binary not found".to_string()),
};
- // Check if server is already running
if is_server_running(options.port) {
- println!(
- "{}",
- format!("Server is already running on port {}", options.port).yellow()
- );
+ info!("Server is already running on port {}", options.port);
return Ok(get_server_pid(options.port).unwrap_or(0));
}
- // Prepare command with all options
let mut cmd = ProcessCommand::new(&binary_path);
+ cmd.arg("srv");
+ cmd.arg("--pgsrv.address").arg(&options.host);
cmd.arg("--pgsrv.port").arg(options.port.to_string());
+ cmd.arg("--pgsrv.debug.enable=true");
+ cmd.arg("--pgsrv.loglevel=DEBUG");
+
if let Some(registry) = &options.registry {
cmd.arg("--registry").arg(registry);
}
- for arg in &options.additional_args {
- if arg.contains("=") {
- let parts: Vec<&str> = arg.split('=').collect();
- if parts.len() == 2 {
- cmd.arg(parts[0]).arg(parts[1]);
- } else {
- cmd.arg(arg);
- }
- } else {
- cmd.arg(arg);
- }
+ if let Some(mtls_config) = &options.mtls_config {
+ cmd.arg("--mtls-config").arg(mtls_config);
}
- cmd.arg("srv");
+ if let Some(custom_auth) = &options.custom_auth_config {
+ cmd.arg("--custom-auth-config").arg(custom_auth);
+ }
- // Setup logging
- let log_path = Path::new("stackql.log");
+ if let Some(log_level) = &options.log_level {
+ cmd.arg("--log-level").arg(log_level);
+ }
+
+ let log_path = Path::new(DEFAULT_LOG_FILE);
let log_file = OpenOptions::new()
.create(true)
- .append(true)
+ .write(true)
+ .truncate(true)
+ // .append(true)
.open(log_path)
.map_err(|e| format!("Failed to open log file: {}", e))?;
- // Start the server
let child = cmd
.stdout(Stdio::from(log_file.try_clone().unwrap()))
.stderr(Stdio::from(log_file))
@@ -203,16 +257,11 @@ pub fn start_server(options: &ServerOptions) -> Result {
.map_err(|e| format!("Failed to start server: {}", e))?;
let pid = child.id();
-
- // Wait a bit for the server to start
- println!(
- "{}",
- format!("Starting stackql server with PID: {}", pid).green()
- );
+ info!("Starting stackql server with PID: {}", pid);
thread::sleep(Duration::from_secs(5));
if is_server_running(options.port) {
- println!("{}", "Server started successfully".green());
+ info!("Server started successfully on port {}", options.port);
Ok(pid)
} else {
Err("Server failed to start properly".to_string())
@@ -222,6 +271,7 @@ pub fn start_server(options: &ServerOptions) -> Result {
/// Stop the stackql server
pub fn stop_server(port: u16) -> Result<(), String> {
if !is_server_running(port) {
+ warn!("No server running on port {}", port);
return Ok(());
}
@@ -230,10 +280,7 @@ pub fn stop_server(port: u16) -> Result<(), String> {
_none => return Err("Could not determine server PID".to_string()),
};
- println!(
- "{}",
- format!("Stopping stackql server with PID: {}", pid).yellow()
- );
+ info!("Stopping stackql server with PID: {}", pid);
if cfg!(target_os = "windows") {
ProcessCommand::new("taskkill")
@@ -249,13 +296,46 @@ pub fn stop_server(port: u16) -> Result<(), String> {
.map_err(|e| format!("Failed to stop server: {}", e))?;
}
- // Wait a bit to verify it's stopped
- thread::sleep(Duration::from_secs(1));
+ Ok(())
+}
- if !is_server_running(port) {
- println!("{}", "Server stopped successfully".green());
- Ok(())
+/// Checks if the server is running and starts it if necessary.
+///
+/// This function checks if the server is local and needs to be started. If the server is not running,
+/// it attempts to start it with the specified host and port.
+///
+/// # Arguments
+///
+/// * `host` - A reference to the server host address.
+/// * `port` - The port number to check.
+///
+/// # Behavior
+///
+/// * If the server is already running locally, it will display a message indicating this.
+/// * If a remote server is specified, it will display a message indicating the remote connection.
+/// * If the server needs to be started, it will attempt to do so and indicate success or failure.
+pub fn check_and_start_server() {
+ let host = server_host();
+ let port = server_port();
+
+ if LOCAL_SERVER_ADDRESSES.contains(&host) {
+ if is_server_running(port) {
+ info!("Local server is already running on port {}.", port);
+ } else {
+ info!("Server not running. Starting server...");
+
+ let options = StartServerOptions {
+ host: host.to_string(),
+ port,
+ ..Default::default()
+ };
+
+ if let Err(e) = start_server(&options) {
+ error!("Failed to start server: {}", e);
+ process::exit(1);
+ }
+ }
} else {
- Err("Server is still running after stop attempt".to_string())
+ info!("Using remote server {}:{}", host, port);
}
}
diff --git a/src/utils/stackql.rs b/src/utils/stackql.rs
index 5dc70b3..4e9dd5e 100644
--- a/src/utils/stackql.rs
+++ b/src/utils/stackql.rs
@@ -1,17 +1,53 @@
-use crate::utils::binary::get_binary_path;
+// utils/stackql.rs
+
+//! # StackQL Utility Module
+//!
+//! This module provides functionalities for interacting with the `stackql` binary,
+//! such as retrieving version information, installed providers, and the binary path.
+//! It serves as a bridge between your Rust application and the StackQL CLI tool.
+//!
+//! ## Features
+//! - Retrieve `stackql` binary version and SHA information.
+//! - List installed StackQL providers.
+//! - Get the path to the `stackql` binary.
+//!
+//! ## Example Usage
+//! ```rust
+//! use crate::utils::stackql::{get_version, get_installed_providers, get_stackql_path};
+//!
+//! if let Ok(version_info) = get_version() {
+//! println!("StackQL Version: {}, SHA: {}", version_info.version, version_info.sha);
+//! }
+//!
+//! if let Ok(providers) = get_installed_providers() {
+//! for provider in providers {
+//! println!("Provider: {}, Version: {}", provider.name, provider.version);
+//! }
+//! }
+//!
+//! if let Some(path) = get_stackql_path() {
+//! println!("StackQL Binary Path: {:?}", path);
+//! }
+//! ```
+
use std::path::PathBuf;
use std::process::Command as ProcessCommand;
+use crate::utils::binary::get_binary_path;
+
+/// Holds version information retrieved from the `stackql` binary.
pub struct VersionInfo {
pub version: String,
pub sha: String,
}
+/// Represents a provider installed in the `stackql` environment.
pub struct Provider {
pub name: String,
pub version: String,
}
+/// Retrieves the version and SHA information of the `stackql` binary.
pub fn get_version() -> Result<VersionInfo, String> {
let binary_path = match get_binary_path() {
Some(path) => path,
@@ -44,6 +80,7 @@ pub fn get_version() -> Result {
Ok(VersionInfo { version, sha })
}
+/// Retrieves a list of installed StackQL providers.
pub fn get_installed_providers() -> Result<Vec<Provider>, String> {
let binary_path = match get_binary_path() {
Some(path) => path,
@@ -84,6 +121,7 @@ pub fn get_installed_providers() -> Result, String> {
Ok(providers)
}
+/// Retrieves the path to the `stackql` binary.
pub fn get_stackql_path() -> Option<PathBuf> {
get_binary_path()
}