diff --git a/Cargo.lock b/Cargo.lock index 479f7267e37..3ab68aac250 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,22 +1,16 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - [[package]] name = "adler2" version = "2.0.0" @@ -25,12 +19,12 @@ checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.12", + "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", @@ -38,18 +32,18 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "ammonia" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e6d1c7838db705c9b756557ee27c384ce695a1c51a6fe528784cb1c6840170" +checksum = "1ab99eae5ee58501ab236beb6f20f6ca39be615267b014899c89b2f0bc18a459" dependencies = [ "html5ever", "maplit", @@ -81,9 +75,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -96,43 +90,44 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.4" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "once_cell", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "appendlist" @@ -155,6 +150,25 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "ariadne" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31beedec3ce83ae6da3a79592b3d8d7afd146a5b15bb9bb940279aced60faa89" +dependencies = [ + "unicode-width 0.1.14", + "yansi", +] + +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + [[package]] name = "askama" version = "0.12.1" @@ -170,18 +184,18 @@ dependencies = [ [[package]] name = "askama_derive" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ccf09143e56923c12e027b83a9553210a3c58322ed8419a53461b14a4dccd85" +checksum = "19fe8d6cb13c4714962c072ea496f3392015f0989b1a2847bb4b2d9effd71d83" dependencies = [ "askama_parser", "basic-toml", "mime", "mime_guess", - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "serde", - "syn 2.0.85", + "syn 2.0.98", ] [[package]] @@ -192,9 +206,9 @@ checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" [[package]] name = "askama_parser" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "262eb9cf7be51269c5f2951eeda9ccd14d6934e437457f47b4f066bf55a6770d" +checksum = "acb1161c6b64d1c3d83108213c2a2533a342ac225aabd0bda218278c2ddb00c0" dependencies = [ "nom", ] @@ -211,13 +225,14 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.0.12" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6" +checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" dependencies = [ "anstyle", "bstr", "doc-comment", + "libc", "predicates", "predicates-core", "predicates-tree", @@ -226,23 +241,23 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.1", + "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -253,9 +268,9 @@ checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64" 
-version = "0.21.6" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79fed4cdb43e993fcdadc7e58a09fd0e3e649c4436fa11da71c9f1f3ee7feb9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" @@ -265,20 +280,49 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "basic-toml" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2db21524cad41c5591204d22d75e1970a2d1f71060214ca931dc7d5afe2c14e5" +checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8" dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" +dependencies = [ + "bitflags 2.8.0", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "log 0.4.26", + "prettyplease", + "proc-macro2 1.0.93", + "quote 1.0.38", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.98", +] + [[package]] name = "bit-set" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "bit-vec", + "bit-vec 0.6.3", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec 0.8.0", ] [[package]] @@ -287,6 +331,12 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitflags" version = "1.3.2" @@ -295,9 +345,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "block-buffer" @@ -305,18 +355,18 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] name = "boon" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9672cb0edeadf721484e298c0ed4dd70b0eaa3acaed5b4fd0bd73ca32e51d814" +checksum = "baa187da765010b70370368c49f08244b1ae5cae1d5d33072f76c8cb7112fe3e" dependencies = [ "ahash", "appendlist", - "base64 0.21.6", + "base64 0.22.1", "fluent-uri", "idna", "once_cell", @@ -328,22 +378,40 @@ dependencies = [ "url", ] +[[package]] +name = "borrow-or-share" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eeab4423108c5d7c744f4d234de88d18d636100093ae04caf4825134b9c3a32" + [[package]] name = "bstr" -version = "1.9.0" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "regex-automata", "serde", ] +[[package]] +name = "buffered-reader" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabd1c5e55587a8e8526172d63ad2ba665fa18c8acb39ec9a77af1708c982b9b" +dependencies = [ + "bzip2", + "flate2", + "lazy_static", + "libc", +] + [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "byteorder" @@ -353,9 +421,38 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" + +[[package]] +name = "bytesize" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2c12f985c78475a6b8d629afd0c360260ef34cfef52efccdcfd31972f81c2e" +dependencies = [ + "serde", +] + +[[package]] +name = "bzip2" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b89e7c29231c673a61a46e722602bcd138298f6b9e81e71119693534585f5c" +dependencies = [ + "bzip2-sys", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.12+1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "72ebc2f1a417f01e1da30ef264ee86ae31d2dcd2d603ea283d3c244a883ca2a9" +dependencies = [ + "cc", + "libc", + "pkg-config", +] [[package]] name = "cast" @@ -365,39 +462,55 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.1" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "shlex", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", + "pure-rust-locales", "serde", "wasm-bindgen", - "windows-targets 0.52.6", + "windows-link", ] [[package]] name = "ciborium" -version = "0.2.1" +version = "0.2.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" dependencies = [ "ciborium-io", "ciborium-ll", @@ -406,25 +519,36 @@ dependencies = [ [[package]] name = "ciborium-io" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" [[package]] name = "ciborium-ll" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", "half", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" -version = "4.5.11" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35723e6a11662c2afb578bcf0b88bf6ea8e21282a953428f240574fcc3a2b5b3" +checksum = "027bb0d98429ae334a8698531da7077bdf906419543a35a55c2cb1b66437d767" dependencies = [ "clap_builder", "clap_derive", @@ -432,67 +556,76 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.11" +version = "4.5.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49eb96cbfa7cfa35017b7cd548c75b14c3118c98b423041d70562665e07fb0fa" +checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.0", - "terminal_size", + "strsim", + "terminal_size 0.4.1", ] [[package]] name = "clap_complete" -version = "4.5.10" +version = "4.5.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbeb5ae3a578b7f639911ed31e98a48e16ef6f44e62d275184bb755a093fea55" +checksum = "f5c5508ea23c5366f77e53f5a0070e5a84e51687ec3ef9e0464c86dc8d13ce98" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.11" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d029b67f89d30bbb547c89fd5161293c0aec155fc691d7924b64550662db93e" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "cli-table" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adfbb116d9e2c4be7011360d0c0bee565712c11e969c9609b25b619366dc379d" +checksum = "b53f9241f288a7b12c56565f04aaeaeeab6b8923d42d99255d4ca428b4d97f89" dependencies = [ "cli-table-derive", "csv", "termcolor", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] name = "cli-table-derive" -version = "0.4.5" +version = "0.4.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af3bfb9da627b0a6c467624fb7963921433774ed435493b5c08a3053e829ad4" +checksum = "3e83a93253aaae7c74eb7428ce4faa6e219ba94886908048888701819f82fb94" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "syn 1.0.109", ] +[[package]] +name = "clipboard-win" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15efe7a882b08f34e38556b14f2fb3daa98769d06c7f0c1b076dfd0d983bc892" +dependencies = [ + "error-code", +] + [[package]] name = "codespan-reporting" version = "0.11.1" @@ -500,23 +633,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" dependencies = [ "termcolor", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "colored" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" +dependencies = [ + "windows-sys 0.59.0", ] [[package]] @@ -531,24 +673,24 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc" -version = "3.0.1" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" dependencies = [ "crc-catalog", ] @@ -615,9 +757,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -634,9 +776,15 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-common" @@ -644,15 +792,15 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array", + "generic-array 0.14.7", "typenum", ] [[package]] name = "csv" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" dependencies = [ "csv-core", "itoa", @@ -662,62 +810,77 @@ dependencies = [ [[package]] name = "csv-core" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" dependencies = [ "memchr", ] [[package]] name = "cxx" -version = "1.0.130" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c042a0ba58aaff55299632834d1ea53ceff73d62373f62c9ae60890ad1b942" +checksum = "8bc580dceb395cae0efdde0a88f034cfd8a276897e40c693a7b87bed17971d33" dependencies = [ "cc", + "cxxbridge-cmd", "cxxbridge-flags", "cxxbridge-macro", + "foldhash", "link-cplusplus", ] [[package]] name = "cxx-build" -version = "1.0.130" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45dc1c88d0fdac57518a9b1f6c4f4fb2aca8f3c30c0d03d7d8518b47ca0bcea6" +checksum = "49d8c1baedad72a7efda12ad8d7ad687b3e7221dfb304a12443fd69e9de8bb30" dependencies = [ "cc", "codespan-reporting", - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "scratch", - "syn 2.0.85", + "syn 2.0.98", +] + +[[package]] +name = "cxxbridge-cmd" +version = "1.0.141" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e43afb0e3b2ef293492a31ecd796af902112460d53e5f923f7804f348a769f9c" +dependencies = [ + "clap", + "codespan-reporting", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "cxxbridge-flags" -version = "1.0.130" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa7ed7d30b289e2592cc55bc2ccd89803a63c913e008e6eb59f06cddf45bb52f" +checksum = "0257ad2096a2474fe877e9e055ab69603851c3d6b394efcc7e0443899c2492ce" [[package]] name = "cxxbridge-macro" -version = "1.0.130" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c465d22de46b851c04630a5fc749a26005b263632ed2e0d9cc81518ead78d" +checksum = "b46cbd7358a46b760609f1cb5093683328e58ca50e594a308716f5403fdc03e5" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "rustversion", - "syn 2.0.85", + "syn 2.0.98", ] [[package]] name = "darling" -version = "0.20.8" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -725,27 +888,38 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.8" +version = "0.20.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.89", - "quote 1.0.35", - "strsim 0.10.0", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "strsim", + "syn 2.0.98", ] [[package]] name = "darling_macro" -version = "0.20.8" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", - "quote 1.0.35", - "syn 2.0.85", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "dbus" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bb21987b9fb1613058ba3843121dd18b163b254d8a6e797e144cbac14d96d1b" +dependencies = [ + "libc", + "libdbus-sys", + "winapi", ] [[package]] @@ -755,6 +929,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] [[package]] @@ -763,18 +938,49 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn 2.0.98", ] [[package]] name = "diesel" -version = "2.2.4" +version = "2.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "158fe8e2e68695bd615d7e4f3227c0727b151330d3e253b525086c348d055d5e" +checksum = "04001f23ba8843dc315804fa324000376084dfb1c30794ff68dd279e6e5696d5" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "byteorder", "chrono", "diesel_derives", @@ -791,9 +997,9 @@ checksum = "e7f2c3de51e2ba6bf2a648285696137aaf0f5f487bcbea93972fe8a364e131a4" dependencies = [ "diesel_table_macro_syntax", "dsl_auto_type", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -802,7 +1008,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25" dependencies = [ - "syn 2.0.85", + "syn 2.0.98", ] [[package]] @@ -836,15 +1042,36 @@ dependencies = [ "walkdir", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -855,23 +1082,29 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dsl_auto_type" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d9abe6314103864cc2d8901b7ae224e0ab1a103a0a416661b4097b0779b607" +checksum = "139ae9aca7527f85f26dd76483eb38533fd84bd571065da1739656ef71c5ff5b" dependencies = [ "darling", "either", "heck 0.5.0", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] +[[package]] +name = "dyn-clone" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feeef44e73baff3a26d371801df019877a9866a8c493d315ab00177843314f35" + [[package]] name = "either" -version = "1.9.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "elasticlunr-rs" @@ -885,44 +1118,81 @@ dependencies = [ "serde_json", ] +[[package]] +name = "ena" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +dependencies = [ + "log 0.4.26", +] + [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] +[[package]] +name = "endian-type" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" + +[[package]] +name = "env_filter" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +dependencies = [ + "log 0.4.26", + "regex", +] + +[[package]] +name = "env_home" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" + [[package]] name = "env_logger" -version = "0.10.1" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ + "anstream", + "anstyle", + "env_filter", "humantime", - "is-terminal", - "log 0.4.22", - "regex", - "termcolor", + "log 0.4.26", ] [[package]] name = "equivalent" 
-version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] +[[package]] +name = "error-code" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d9305ccc6942a704f4335694ecd3de2ea531b114ac2d51f5f843750787a92f" + [[package]] name = "fallible-iterator" version = "0.3.0" @@ -941,16 +1211,38 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ - "bit-set", + "bit-set 0.5.3", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "fancy-regex" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298" +dependencies = [ + "bit-set 0.8.0", "regex-automata", "regex-syntax", ] [[package]] name = "fastrand" -version = "2.0.1" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fd-lock" +version = "4.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "7e5768da2206272c81ef0b5e951a41862938a6070da63bcea197899942d3b947" +dependencies = [ + "cfg-if", + "rustix", + "windows-sys 0.52.0", +] [[package]] name = "file-owner" @@ -968,48 +1260,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d" dependencies = [ "libc", - "thiserror", + "thiserror 1.0.69", "winapi", ] [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", - "miniz_oxide 0.8.0", + "miniz_oxide", ] [[package]] name = "float-cmp" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" dependencies = [ 
"num-traits", ] [[package]] name = "fluent-uri" -version = "0.1.4" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d" +checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" dependencies = [ - "bitflags 1.3.2", + "borrow-or-share", + "ref-cast", ] [[package]] @@ -1018,6 +1317,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1047,7 +1352,7 @@ name = "format_serde_error" version = "0.3.1" source = "git+https://github.com/fennewald/format_serde_error/?rev=06ef275#06ef275ce1f2c56b91fa0a4785e28c3aa9c67b31" dependencies = [ - "colored", + "colored 2.2.0", "serde", "serde_json", "serde_yaml", @@ -1067,13 +1372,12 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", - "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1096,17 +1400,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" -[[package]] -name = "futures-executor" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - [[package]] name = "futures-io" version = "0.3.31" @@ -1119,9 +1412,9 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -1138,9 +1431,9 @@ checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-timer" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" @@ -1148,7 +1441,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ - "futures-channel", "futures-core", "futures-io", "futures-macro", @@ -1180,40 +1472,51 @@ dependencies = [ "version_check", ] +[[package]] +name = "generic-array" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c8444bc9d71b935156cc0ccab7f622180808af7867b1daae6547d773591703" +dependencies = [ + "typenum", +] + [[package]] name = "getrandom" -version = "0.2.12" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", - "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets 0.52.6", ] [[package]] name = "gimli" -version = "0.28.1" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "gumdrop" @@ -1230,8 +1533,8 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "729f9bd3449d77e7831a18abfb7ba2f99ee813dfd15b8c2167c9a54ba20aa99d" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "syn 1.0.109", ] @@ -1246,8 +1549,8 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.11", - "indexmap", + "http 0.2.12", + "indexmap 2.7.1", "slab", "tokio", "tokio-util", @@ -1256,46 +1559,52 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] [[package]] name = "handlebars" -version = "4.5.0" +version = "6.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" +checksum = "d752747ddabc4c1a70dd28e72f2e3c218a816773e0d7faf67433f1acfa6cba7c" dependencies = [ - "log 0.4.22", + "derive_builder", + "log 0.4.26", + "num-order", "pest", "pest_derive", "serde", "serde_json", - "thiserror", + "thiserror 2.0.11", ] [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" -dependencies = [ - "ahash", -] +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "foldhash", +] [[package]] name = "hashlink" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" 
dependencies = [ - "hashbrown 0.14.3", + "hashbrown 0.15.2", ] [[package]] @@ -1304,10 +1613,10 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", "bytes", "headers-core", - "http 0.2.11", + "http 0.2.12", "httpdate", "mime", "sha1", @@ -1319,7 +1628,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" dependencies = [ - "http 0.2.11", + "http 0.2.12", ] [[package]] @@ -1336,9 +1645,9 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "hex" @@ -1348,11 +1657,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1368,23 +1677,23 @@ dependencies = [ [[package]] name = "html5ever" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" +checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4" dependencies = [ - "log 0.4.22", + "log 0.4.26", "mac", "markup5ever", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 1.0.109", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -1393,9 +1702,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1409,7 +1718,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", + "http 0.2.12", "pin-project-lite", ] @@ -1420,7 +1729,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -1431,16 +1740,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -1465,16 +1774,16 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.28" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", "h2", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", @@ -1489,14 +1798,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "itoa", @@ -1513,7 +1822,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.28", + "hyper 0.14.32", "native-tls", "tokio", "tokio-native-tls", @@ -1527,7 +1836,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.5.0", + "hyper 1.6.0", "hyper-util", "native-tls", "tokio", @@ -1544,9 +1853,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", - "hyper 1.5.0", + "hyper 1.6.0", "pin-project-lite", "socket2", "tokio", @@ -1577,6 +1886,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = 
"icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -1585,50 +2012,73 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "icu_normalizer", + "icu_properties", ] [[package]] name = "include_dir" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" dependencies = [ "include_dir_macros", ] [[package]] name = "include_dir_macros" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", ] [[package]] name = "indexmap" -version = "2.6.0" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", - 
"hashbrown 0.15.1", + "hashbrown 0.15.2", + "serde", ] [[package]] name = "inotify" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.8.0", "futures-core", "inotify-sys", "libc", @@ -1645,38 +2095,36 @@ dependencies = [ ] [[package]] -name = "io-lifetimes" -version = "1.0.11" +name = "ipnet" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.48.0", -] +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] -name = "ipnet" -version = "2.9.0" +name = "iprange" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "37209be0ad225457e63814401415e748e2453a5297f9b637338f5fb8afa4ec00" +dependencies = [ + "ipnet", +] [[package]] name = "is-terminal" -version = "0.4.10" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455" +checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi", - "rustix 0.38.34", - "windows-sys 0.52.0", + "libc", + "windows-sys 0.59.0", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -1705,44 +2153,134 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" -version = "0.3.66" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] +[[package]] +name = "lalrpop" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" +dependencies = [ + "ascii-canvas", + "bit-set 0.5.3", + "ena", + "itertools 0.11.0", + "lalrpop-util", + "petgraph", + "regex", + "regex-syntax", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid 0.2.6", + "walkdir", +] + +[[package]] +name = "lalrpop-util" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" +dependencies = [ + "regex-automata", +] + [[package]] 
name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "lib" +version = "0.0.0-dev" +dependencies = [ + "anyhow", + "dyn-clone", + "env_logger", + "itertools 0.14.0", + "log 0.4.26", + "regex", + "rudder_commons", + "rudderc", + "serde", + "serde_json", + "serde_yaml", + "tempfile", + "test-log", +] [[package]] name = "libc" -version = "0.2.164" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" + +[[package]] +name = "libdbus-sys" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06085512b750d640299b79be4bad3d2fa90a9c00b1fd9e1b46364f66f0485c72" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "libloading" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +dependencies = [ + "cfg-if", + "windows-targets 0.52.6", +] [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.8.0", + "libc", + "redox_syscall", +] [[package]] name = "libsqlite3-sys" -version = "0.30.1" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +checksum = "ad8935b44e7c13394a179a438e0cebba0fe08fe01b54f152e29a93b5cf993fd4" dependencies = [ "cc", "pkg-config", @@ -1760,21 +2298,21 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.1.4" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] -name = "linux-raw-sys" -version = "0.4.12" +name = "litemap" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -1792,14 +2330,14 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" dependencies = [ - "log 0.4.22", + "log 0.4.26", ] [[package]] name = "log" -version = "0.4.22" +version = "0.4.26" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "lzma-rs" @@ -1825,11 +2363,11 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" +checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" dependencies = [ - "log 0.4.22", + "log 0.4.26", "phf", "phf_codegen", "string_cache", @@ -1848,9 +2386,9 @@ dependencies = [ [[package]] name = "mdbook" -version = "0.4.36" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80992cb0e05f22cc052c99f8e883f1593b891014b96a8b4637fd274d7030c85e" +checksum = "b07d36d96ffe1b5b16ddf2bc80b3b26bb7a498b2a6591061250bf0af8e8095ad" dependencies = [ "ammonia", "anyhow", @@ -1860,11 +2398,10 @@ dependencies = [ "elasticlunr-rs", "env_logger", "handlebars", - "log 0.4.22", + "log 0.4.26", "memchr", "once_cell", "opener", - "pathdiff", "pulldown-cmark", "regex", "serde", @@ -1890,6 +2427,12 @@ dependencies = [ "libc", ] +[[package]] +name = "memsec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c797b9d6bb23aab2fc369c65f871be49214f5c759af65bde26ffaaa2b646b492" + [[package]] name = "mime" version = "0.3.17" @@ -1898,37 +2441,21 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", ] -[[package]] -name = "minifusion" -version = "0.0.0-dev" -dependencies = [ - "anyhow", - "chrono", - "clap", - "hostname", - "pretty_assertions", - "quick-xml", - "regex", - "serde", - "sysinfo", - "uname-rs", -] - [[package]] name = "minijinja" -version = "2.5.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c37e1b517d1dcd0e51dc36c4567b9d5a29262b3ec8da6cb5d35e27a8fb529b5" +checksum = "cff7b8df5e85e30b87c2b0b3f58ba3a87b68e133738bf512a7713769326dbca9" dependencies = [ - "indexmap", + "indexmap 2.7.1", "percent-encoding", "serde", "serde_json", @@ -1942,18 +2469,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" -dependencies = [ - "adler", -] - -[[package]] -name = "miniz_oxide" -version = "0.8.0" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -1981,13 +2499,12 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = 
"87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ - "lazy_static", "libc", - "log 0.4.22", + "log 0.4.26", "openssl", "openssl-probe", "openssl-sys", @@ -1997,20 +2514,57 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nettle" +version = "7.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44e6ff4a94e5d34a1fd5abbd39418074646e2fa51b257198701330f22fcd6936" +dependencies = [ + "getrandom 0.2.15", + "libc", + "nettle-sys", + "thiserror 1.0.69", + "typenum", +] + +[[package]] +name = "nettle-sys" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a3f5406064d310d59b1a219d3c5c9a49caf4047b6496032e3f930876488c34" +dependencies = [ + "bindgen", + "cc", + "libc", + "pkg-config", + "tempfile", + "vcpkg", +] + [[package]] name = "new_debug_unreachable" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nibble_vec" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" +dependencies = [ + "smallvec", +] [[package]] name = "nix" -version = "0.27.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "cfg-if", + "cfg_aliases", "libc", ] @@ -2032,11 +2586,11 @@ checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "normpath" -version = "1.1.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec60c60a693226186f5d6edf073232bfb6464ed97eb22cf3b01c1e8198fd97f5" +checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -2063,46 +2617,61 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +[[package]] +name = "num-modular" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17bb261bf36fa7d83f4c294f834e91256769097b3cb505d44831e0a179ac647f" + +[[package]] +name = "num-order" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537b596b97c40fcf8056d153049eb22f481c17ebce72a513ec9286e4986d1bb6" +dependencies = [ + "num-modular", +] + [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", - "libm", ] [[package]] name = "object" -version = "0.32.2" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" 
+version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "oorandom" -version = "11.1.3" +version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "opener" -version = "0.6.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c62dcb6174f9cb326eac248f07e955d5d559c272730b6c03e396b443b562788" +checksum = "d0812e5e4df08da354c851a3376fead46db31c2214f849d3de356d774d057681" dependencies = [ "bstr", + "dbus", "normpath", - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -2111,7 +2680,7 @@ version = "0.10.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "cfg-if", "foreign-types", "libc", @@ -2126,16 +2695,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" @@ -2149,11 +2718,20 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -2161,15 +2739,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -2178,12 +2756,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "pathdiff" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" - [[package]] name = "percent-encoding" version = "2.3.1" @@ -2192,20 +2764,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.6" 
+version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f200d8d83c44a45b21764d1916299752ca035d15ecd46faca3e9a2a2bf6ad06" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.11", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.6" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcd6ab1236bbdb3a49027e920e693192ebfe8913f6d60e294de57463a493cfde" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -2213,42 +2785,52 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.6" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a31940305ffc96863a735bef7c7994a00b325a7138fdbc5bda0f1a0476d3275" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "pest_meta" -version = "2.7.6" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7ff62f5259e53b78d1af898941cdcdccfae7385cf7d793a6e55de5d05bb4b7d" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", "sha2", ] +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.7.1", +] + [[package]] name = "phf" -version = "0.10.1" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_shared", ] [[package]] name = "phf_codegen" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ "phf_generator", "phf_shared", @@ -2256,9 +2838,9 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared", "rand 0.8.5", @@ -2266,38 +2848,38 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version 
= "1.1.3" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2307,15 +2889,15 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d3587f8a9e599cc7ec2c00e331f71c4e69a5f9a4b8a6efd5b07466b9736f9a" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "plotters" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -2326,15 +2908,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.5" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] @@ -2347,16 +2929,20 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "pq-sys" -version = "0.4.8" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0052426df997c0cbd30789eb44ca097e3541717a7b8fa36b1c464ee7edebd" +checksum = "30b51d65ebe1cb1f40641b15abae017fed35ccdda46e3dab1ff8768f625a3222" dependencies = [ + "libc", "vcpkg", ] @@ -2368,14 +2954,13 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "predicates" -version = "3.0.4" +version = "3.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" dependencies = [ "anstyle", "difflib", "float-cmp", - "itertools 0.11.0", "normalize-line-endings", "predicates-core", "regex", @@ -2383,15 +2968,15 @@ dependencies = [ [[package]] name = "predicates-core" -version = "1.0.6" +version = "1.0.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" [[package]] name = "predicates-tree" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" dependencies = [ "predicates-core", "termtree", @@ -2399,21 +2984,31 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] +[[package]] +name = "prettyplease" +version = "0.2.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" +dependencies = [ + "proc-macro2 1.0.93", + "syn 2.0.98", +] + [[package]] name = "proc-macro-crate" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.22.22", + "toml_edit", ] [[package]] @@ -2422,36 +3017,46 @@ version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" dependencies = [ - "unicode-xid", + "unicode-xid 0.1.0", ] [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] [[package]] name = "procfs" -version = "0.14.2" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de8dacb0873f77e6aefc6d71e044761fcc68060290f5b1089fcdf84626bb69" +checksum = "731e0d9356b0c25f16f33b5be79b1c57b562f141ebfcdb0ad8ac2c13a24293b4" dependencies = [ - "bitflags 1.3.2", - "byteorder", + "bitflags 2.8.0", "hex", "lazy_static", - "rustix 0.36.17", + "procfs-core", + "rustix", +] + +[[package]] +name = "procfs-core" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" +dependencies = [ + "bitflags 2.8.0", + "hex", ] [[package]] name = "prometheus" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" dependencies = [ "cfg-if", "fnv", @@ -2460,18 +3065,18 @@ dependencies = [ "memchr", "parking_lot", "procfs", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "proptest" -version = "1.4.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" dependencies = [ - "bit-set", - "bit-vec", - "bitflags 2.6.0", + "bit-set 0.8.0", + 
"bit-vec 0.8.0", + "bitflags 2.8.0", "lazy_static", "num-traits", "rand 0.8.5", @@ -2485,15 +3090,28 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.3" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" +checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.8.0", "memchr", + "pulldown-cmark-escape", "unicase", ] +[[package]] +name = "pulldown-cmark-escape" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" + +[[package]] +name = "pure-rust-locales" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1190fd18ae6ce9e137184f207593877e70f39b015040156b1e05081cdfe3733a" + [[package]] name = "quick-error" version = "1.2.3" @@ -2502,9 +3120,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quick-xml" -version = "0.36.2" +version = "0.37.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" +checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003" dependencies = [ "memchr", "serde", @@ -2521,30 +3139,34 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.35" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.93", ] -[[package]] -name = "r-efi" -version = "5.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" - [[package]] name = "r2d2" version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ - "log 0.4.22", + "log 0.4.26", "parking_lot", "scheduled-thread-pool", ] +[[package]] +name = "radix_trie" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" +dependencies = [ + "endian-type", + "nibble_vec", +] + [[package]] name = "rand" version = "0.8.5" @@ -2592,7 +3214,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.15", ] [[package]] @@ -2601,7 +3223,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.1", ] [[package]] @@ -2613,11 +3235,32 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "raugeas" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e66a438618c49d60a6b794af3d7bc19527919849d395abf5e9ee607e8e302e0f" +dependencies = [ + "bitflags 2.8.0", + "libc", + "raugeas_sys", +] + +[[package]] +name = "raugeas_sys" +version = "1.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8e985ad2c819c519567fb5a19eae02bb7de4b00a3ad6dba0c510f7fd6396513" +dependencies = [ + "bindgen", + "pkg-config", +] + [[package]] name = "rayon" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -2625,9 +3268,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -2635,18 +3278,49 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.8.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.15", + "libredox", + "thiserror 1.0.69", +] + +[[package]] +name = "ref-cast" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -2656,9 +3330,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -2667,43 +3341,45 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "relative-path" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e898588f33fdd5b9420719948f9f2a32c922a246964576f71ba7f24f80610fbc" +checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" [[package]] name = "reqwest" -version = "0.11.23" +version = "0.11.27" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", "bytes", "encoding_rs", "futures-core", "futures-util", "h2", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.28", + "hyper 0.14.32", "hyper-tls 0.5.0", "ipnet", "js-sys", - "log 0.4.22", + "log 0.4.26", "mime", "native-tls", "once_cell", "percent-encoding", "pin-project-lite", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", @@ -2719,24 +3395,24 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.6.0", "hyper-tls 0.6.0", "hyper-util", "ipnet", "js-sys", - "log 0.4.22", + "log 0.4.26", "mime", "native-tls", "once_cell", @@ -2746,9 +3422,10 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", + "tower", "tower-service", "url", "wasm-bindgen", @@ -2765,34 +3442,62 @@ checksum = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" [[package]] name = "rstest" -version = "0.22.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b423f0e62bdd61734b67cd21ff50871dfaeb9cc74f869dcd6af974fbcb19936" +checksum = "03e905296805ab93e13c1ec3a03f4b6c4f35e9498a3d5fa96dc626d22c03cd89" dependencies = [ - "futures", "futures-timer", + "futures-util", "rstest_macros", "rustc_version", ] [[package]] name = "rstest_macros" -version = "0.22.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e1711e7d14f74b12a58411c542185ef7fb7f2e7f8ee6e2940a883628522b42" +checksum = "ef0053bbffce09062bee4bcc499b0fbe7a57b879f1efe088d6d8d4c7adcdef9b" dependencies = [ "cfg-if", "glob", "proc-macro-crate", - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "regex", "relative-path", "rustc_version", - "syn 2.0.85", + "syn 2.0.98", "unicode-ident", ] +[[package]] +name = "rudder-module-augeas" +version = "0.0.0-dev" +dependencies = [ + "anyhow", + "ariadne", + "bytesize", + "chrono", + "gumdrop", + "ipnet", + "iprange", + "pest", + "pest_derive", + "raugeas", + "regex", + "rudder_commons_test", + "rudder_module_type", + "rustyline", + "secrecy", + "serde", + "serde-inline-default", + "serde_json", + "serde_with", + "similar", + "tempfile", + "zxcvbn", +] + [[package]] name = "rudder-module-directory" version = "0.0.0-dev" @@ -2806,6 +3511,22 @@ dependencies = [ "tempfile", ] +[[package]] +name = "rudder-module-inventory" +version = "0.0.0-dev" +dependencies = [ + "anyhow", + "chrono", + "clap", + "hostname", + "pretty_assertions", + "quick-xml", + "regex", + "serde", + "sysinfo", + "uname-rs", +] + [[package]] name = "rudder-module-system-updates" version = "0.0.0-dev" @@ -2817,14 +3538,15 @@ dependencies = [ "gag", "gumdrop", "libc", - "log 0.4.22", + "log 0.4.26", "memfile", 
"pretty_assertions", "regex", "rudder_commons_test", "rudder_module_type", "rusqlite", - "rust-apt", + "rust-apt 0.8.0 (git+https://gitlab.com/amousset/rust-apt.git?branch=rudder)", + "rust-apt 0.8.0 (git+https://gitlab.com/amousset/rust-apt.git?branch=rudder-libapt07)", "serde", "serde_json", "tempfile", @@ -2859,15 +3581,16 @@ dependencies = [ "cli-table", "dir-diff", "flate2", - "itertools 0.13.0", + "itertools 0.14.0", "lzma-rs", "pretty_assertions", "quick-xml", "regex", - "reqwest 0.12.9", + "reqwest 0.12.12", "rstest", "rudder_cli", - "secrecy 0.10.3", + "secrecy", + "sequoia-openpgp", "serde", "serde_ini", "serde_json", @@ -2876,7 +3599,6 @@ dependencies = [ "tar", "tempfile", "tracing", - "tracing-subscriber", "which", ] @@ -2896,7 +3618,7 @@ dependencies = [ "futures", "hex", "humantime", - "hyper 0.14.28", + "hyper 0.14.32", "inotify", "lazy_static", "nom", @@ -2907,16 +3629,17 @@ dependencies = [ "proptest", "rand 0.9.1", "regex", - "reqwest 0.11.23", - "secrecy 0.8.0", + "reqwest 0.11.27", + "secrecy", "serde", + "serde-inline-default", "serde_json", "sha2", "tempfile", - "thiserror", + "thiserror 2.0.11", "tokio", "tokio-stream", - "toml 0.8.8", + "toml 0.8.20", "tracing", "tracing-subscriber", "walkdir", @@ -2940,7 +3663,7 @@ name = "rudder_cli" version = "0.0.0-dev" dependencies = [ "anyhow", - "colored", + "colored 3.0.0", "tracing", "tracing-appender", "tracing-subscriber", @@ -2951,9 +3674,9 @@ name = "rudder_commons" version = "0.0.0-dev" dependencies = [ "anyhow", - "colored", - "fancy-regex", - "log 0.4.22", + "colored 3.0.0", + "fancy-regex 0.14.0", + "log 0.4.26", "nom", "pretty_assertions", "regex", @@ -3009,7 +3732,7 @@ dependencies = [ "serde", "serde_json", "serde_yaml", - "strsim 0.11.0", + "strsim", "tempfile", "test-generator", "tracing", @@ -3021,11 +3744,11 @@ dependencies = [ [[package]] name = "rusqlite" -version = "0.32.1" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" +checksum = "1c6d5e5acb6f6129fe3f7ba0a7fc77bca1942cb568535e18e7bc40262baf3110" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -3041,49 +3764,52 @@ dependencies = [ "cxx", "cxx-build", "paste", - "terminal_size", + "terminal_size 0.3.0", +] + +[[package]] +name = "rust-apt" +version = "0.8.0" +source = "git+https://gitlab.com/amousset/rust-apt.git?branch=rudder-libapt07#6069ffaf7e537681e435dc35a56e32c9b3989938" +dependencies = [ + "cxx", + "cxx-build", + "paste", + "terminal_size 0.3.0", ] [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] -name = "rustc_version" -version = "0.4.0" +name = "rustc-hash" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] -name = "rustix" -version = "0.36.17" +name = "rustc_version" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305efbd14fde4139eb501df5f136994bb520b033fa9fbdce287507dc23b8c7ed" +checksum = 
"cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.1.4", - "windows-sys 0.45.0", + "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", "errno", "libc", - "linux-raw-sys 0.4.12", - "windows-sys 0.52.0", + "linux-raw-sys", + "windows-sys 0.59.0", ] [[package]] @@ -3092,7 +3818,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", ] [[package]] @@ -3106,15 +3832,15 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" [[package]] name = "rustversion" -version = "1.0.14" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "rusty-fork" @@ -3128,11 +3854,33 @@ dependencies = [ "wait-timeout", ] +[[package]] +name = "rustyline" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ee1e066dc922e513bda599c6ccb5f3bb2b0ea5870a579448f2622993f0a9a2f" +dependencies = [ + "bitflags 2.8.0", + "cfg-if", + "clipboard-win", + "fd-lock", + "home", + "libc", + "log 0.4.26", + "memchr", + "nix", + "radix_trie", + "unicode-segmentation", + "unicode-width 0.2.0", + "utf8parse", + "windows-sys 0.59.0", +] + [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "same-file" @@ -3145,11 +3893,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3179,16 +3927,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152" -[[package]] -name = "secrecy" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e" -dependencies = [ - "serde", - "zeroize", -] - [[package]] name = "secrecy" version = "0.10.3" @@ -3201,11 +3939,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.2" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.8.0", "core-foundation", "core-foundation-sys", "libc", @@ -3214,9 +3952,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -3224,39 +3962,90 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.21" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" + +[[package]] +name = "sequoia-openpgp" +version = "1.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e858e4e9e48ff079cede92e1b45c942a5466ce9a4e3cc0c2a7e66586a718ef59" +dependencies = [ + "anyhow", + "base64 0.22.1", + "buffered-reader", + "bzip2", + "chrono", + "dyn-clone", + "flate2", + "getrandom 0.2.15", + "idna", + "lalrpop", + "lalrpop-util", + "lazy_static", + "libc", + "memsec", + "nettle", + "once_cell", + "regex", + "regex-syntax", + "sha1collisiondetection", + "thiserror 1.0.69", + "xxhash-rust", +] [[package]] name = "serde" -version = "1.0.214" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde-aux" -version = "4.5.0" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d2e8bfba469d06512e11e3311d4d051a4a387a5b42d010404fecf3200321c95" +checksum = "5290c39c5f6992b9dddbda28541d965dba46468294e6018a408fa297e6c602de" dependencies = [ "chrono", "serde", + "serde-value", "serde_json", ] +[[package]] +name = "serde-inline-default" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59fb1bedd774187d304179493b0d3c41fbe97b04b14305363f68d2bdf5e47cb9" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -3272,9 +4061,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" 
dependencies = [ "itoa", "memchr", @@ -3284,9 +4073,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.5" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -3303,13 +4092,43 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.7.1", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +dependencies = [ + "darling", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap", + "indexmap 2.7.1", "itoa", "ryu", "serde", @@ -3327,6 +4146,16 @@ dependencies = [ "digest", ] +[[package]] +name = "sha1collisiondetection" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f606421e4a6012877e893c399822a4ed4b089164c5969424e1b9d1e66e6964b" +dependencies = [ + "digest", + "generic-array 1.2.0", +] + [[package]] name = "sha2" version = "0.10.8" @@ -3355,9 +4184,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] @@ -3368,11 +4197,17 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -3385,18 +4220,18 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ 
"libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3410,14 +4245,19 @@ dependencies = [ "strum", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "string_cache" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +checksum = "938d512196766101d333398efde81bc1f37b00cb42c2f8350e5df639f040bbbe" dependencies = [ "new_debug_unreachable", - "once_cell", "parking_lot", "phf_shared", "precomputed-hash", @@ -3426,27 +4266,21 @@ dependencies = [ [[package]] name = "string_cache_codegen" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ "phf_generator", "phf_shared", - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", ] [[package]] name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "strsim" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" @@ -3464,8 +4298,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "rustversion", "syn 1.0.109", ] @@ -3478,7 +4312,7 @@ checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" dependencies = [ "proc-macro2 0.4.30", "quote 0.6.13", - "unicode-xid", + "unicode-xid 0.1.0", ] [[package]] @@ -3487,36 +4321,53 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.85" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", + "proc-macro2 1.0.93", + "quote 1.0.38", "unicode-ident", ] [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.13.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + [[package]] name = "sysinfo" -version = "0.31.4" +version = "0.33.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be" +checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" dependencies = [ "core-foundation-sys", "libc", @@ -3549,9 +4400,9 @@ dependencies = [ [[package]] name = "tar" -version = "0.4.40" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", @@ -3560,15 +4411,16 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.9.0" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", - "rustix 0.38.34", - "windows-sys 0.52.0", + "getrandom 0.3.1", + "once_cell", + "rustix", + "windows-sys 0.59.0", ] [[package]] @@ -3582,6 +4434,17 @@ dependencies = [ "utf-8", ] +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -3597,15 +4460,25 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ - "rustix 0.38.34", + "rustix", "windows-sys 0.48.0", ] [[package]] -name = "termtree" +name = "terminal_size" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" +dependencies = [ + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "test-generator" @@ -3618,31 +4491,73 @@ dependencies = [ "syn 0.15.44", ] +[[package]] +name = "test-log" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f" +dependencies = [ + "env_logger", + "test-log-macros", + "tracing-subscriber", +] + +[[package]] +name = "test-log-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + [[package]] name = 
"thiserror" -version = "1.0.65" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "thiserror-impl" -version = "1.0.65" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -3650,9 +4565,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -3671,44 +4586,48 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", ] [[package]] -name = "tinytemplate" -version = "1.2.1" +name = "tiny-keccak" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" dependencies = [ - "serde", - "serde_json", + "crunchy", ] [[package]] -name = "tinyvec" -version = "1.6.0" +name = "tinystr" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ - "tinyvec_macros", + "displaydoc", + "zerovec", ] [[package]] -name = "tinyvec_macros" -version = "0.1.1" +name = "tinytemplate" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] [[package]] name = "tokio" -version = "1.42.1" +version = "1.44.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2209a14885b74764cce87ffa777ffa1b8ce81a3f3166c6f886b83337fe7e077f" +checksum = 
"e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" dependencies = [ "backtrace", "bytes", @@ -3723,13 +4642,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -3744,9 +4663,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -3755,16 +4674,15 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -3778,14 +4696,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.8" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.21.0", + "toml_edit", ] [[package]] @@ -3799,39 +4717,49 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ - "indexmap", + "indexmap 2.7.1", "serde", "serde_spanned", "toml_datetime", - "winnow 0.5.34", + "winnow", ] [[package]] -name = "toml_edit" -version = "0.22.22" +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "tower" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ - "indexmap", - "toml_datetime", - "winnow 0.6.20", + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", ] [[package]] -name = "topological-sort" -version = "0.2.2" +name = "tower-layer" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -3839,7 +4767,7 @@ version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ - "log 0.4.22", + "log 0.4.26", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3852,27 +4780,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" dependencies = [ "crossbeam-channel", - "thiserror", + "thiserror 1.0.69", "time", "tracing-subscriber", ] [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -3884,7 +4812,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "log 0.4.22", + "log 0.4.26", "once_cell", "tracing-core", ] @@ -3928,15 +4856,15 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "uname-rs" @@ -3952,45 +4880,33 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.14" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] -name = "unicode-normalization" -version = "0.1.22" +name = "unicode-segmentation" +version = "1.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" -dependencies = [ - "tinyvec", -] +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] -name = "unicode-segmentation" -version = "1.10.1" +name = "unicode-width" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "unicode-xid" @@ -3998,6 +4914,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -4006,9 +4928,9 @@ checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "url" -version = "2.5.0" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -4021,26 +4943,38 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.6.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" +checksum = "bd8dcafa1ca14750d8d7a05aa05988c17aab20886e1f3ae33a40223c58d92ef7" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.3.1", ] [[package]] name = "valuable" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" @@ -4050,9 +4984,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "void" @@ -4062,18 +4996,18 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "wait-timeout" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" dependencies = [ "libc", ] [[package]] name = "walkdir" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", @@ -4090,28 +5024,26 @@ dependencies = [ [[package]] name = "warp" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e92e22e03ff1230c03a1a8ee37d2f89cd489e2e541b7550d6afad96faed169" +checksum = "4378d202ff965b011c64817db11d5829506d3404edeadb61f190d111da3f231c" dependencies = [ "bytes", "futures-channel", "futures-util", "headers", - "http 0.2.11", - "hyper 0.14.28", - "log 0.4.22", + "http 0.2.12", + "hyper 0.14.32", + "log 0.4.26", "mime", "mime_guess", "percent-encoding", "pin-project", - "rustls-pemfile 1.0.4", "scoped-tls", "serde", "serde_json", "serde_urlencoded", "tokio", - "tokio-stream", "tokio-util", "tower-service", "tracing", @@ -4125,84 +5057,89 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.14.2+wasi-0.2.4" +version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" -version = "0.2.89" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.89" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", - "log 0.4.22", - "once_cell", - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "log 0.4.26", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.39" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.89" +version = "0.2.100" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ - "quote 1.0.35", + "quote 1.0.38", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.89" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.89" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "wasm-streams" -version = "0.3.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" dependencies = [ "futures-util", "js-sys", @@ -4213,9 +5150,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.66" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -4223,13 +5160,13 @@ dependencies = [ [[package]] name = "which" -version = "6.0.3" +version = "7.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" +checksum = "2774c861e1f072b3aadc02f8ba886c26ad6321567ecc294c935434cad06f1283" dependencies = [ "either", - "home", - "rustix 0.38.34", + "env_home", + "rustix", "winsafe", ] @@ -4251,11 +5188,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -4311,9 +5248,9 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] [[package]] @@ -4322,11 +5259,17 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", ] +[[package]] +name = "windows-link" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" + [[package]] name = "windows-registry" version = "0.2.0" @@ -4366,15 +5309,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -4394,18 +5328,12 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.42.2" +name = "windows-sys" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.6", ] [[package]] @@ -4439,12 +5367,6 @@ dependencies = [ "windows_x86_64_msvc 0.52.6", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -4457,12 +5379,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -4475,12 +5391,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -4499,12 +5409,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -4517,12 +5421,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -4535,12 +5433,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnullvm" -version = 
"0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -4553,12 +5445,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -4573,18 +5459,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.5.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" -dependencies = [ - "memchr", -] - -[[package]] -name = "winnow" -version = "0.6.20" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] @@ -4607,70 +5484,156 @@ checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" [[package]] name = "wit-bindgen-rt" -version = "0.39.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.8.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "xattr" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "914566e6413e7fa959cc394fb30e563ba80f3541fbd40816d4c05a0fc3f2a0f1" +checksum = "e105d177a3871454f754b33bb0ee637ecaaac997446375fd3e5d43a2ed00c909" dependencies = [ "libc", - "linux-raw-sys 0.4.12", - "rustix 0.38.34", + "linux-raw-sys", + "rustix", ] +[[package]] +name = "xxhash-rust" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" + [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" 
+dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", + "synstructure", +] [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ - "proc-macro2 1.0.89", - "quote 1.0.35", - "syn 2.0.85", + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", + "synstructure", ] [[package]] name = "zeroize" -version = "1.7.0" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.38", + "syn 2.0.98", +] [[package]] name = "zip" -version = "2.2.0" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494" +checksum = "b280484c454e74e5fff658bbf7df8fdbe7a07c6b2de4a53def232c15ef138f3a" dependencies = [ "arbitrary", "crc32fast", "crossbeam-utils", "displaydoc", "flate2", - "indexmap", + "indexmap 2.7.1", "memchr", - "thiserror", + "thiserror 2.0.11", "time", "zopfli", ] @@ -4684,7 +5647,24 @@ dependencies = [ "bumpalo", "crc32fast", "lockfree-object-pool", - "log 0.4.22", + "log 0.4.26", "once_cell", "simd-adler32", ] + +[[package]] +name = "zxcvbn" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad76e35b00ad53688d6b90c431cabe3cbf51f7a4a154739e04b63004ab1c736c" +dependencies = [ + "chrono", + "derive_builder", + "fancy-regex 0.13.0", + "itertools 0.13.0", + "lazy_static", + "regex", + "time", + "wasm-bindgen", + "web-sys", +] diff --git a/Cargo.toml b/Cargo.toml index da0e9a889d6..0fcaf6a1176 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,14 +6,14 @@ members = ["policies/rudderc", "policies/rudder-commons-test", "policies/rudder-report", "policies/module-types/*", - "policies/minifusion", + "policies/lib", "relay/sources/relayd", 
"relay/sources/rudder-package"] resolver = "2" [workspace.package] authors = ["Rudder developers "] -edition = "2021" +edition = "2024" homepage = "https://www.rudder.io" license = "GPL-3.0-or-later" repository = "https://github.com/Normation/rudder" diff --git a/Jenkinsfile b/Jenkinsfile index 4e0a87b413a..c3227cf97de 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,12 +1,11 @@ def failedBuild = false -def version = "8.2" +def version = "8.3" def slackResponse = null def changeUrl = env.CHANGE_URL def errors = [] -def running = [] pipeline { agent none @@ -25,6 +24,65 @@ pipeline { stages { stage('Tests') { parallel { + stage('policies-methods') { + agent { + dockerfile { + label "generic-docker" + filename 'ci/methods.Dockerfile' + args '-u 0:0' + } + } + steps { + dir("policies/lib") { + sh script: 'cargo test', label: 'methods tests' + } + } + post { + failure { + script { + failedBuild = true + errors.add("Tests - policies-methods") + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) + slackSend(channel: slackResponse.threadId, message: "Error during policies-methods test - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") + } + } + cleanup { + script { + cleanWs(deleteDirs: true, notFailBuild: true) + } + } + } + } + stage('python-lib') { + agent { + dockerfile { + label "generic-docker" + filename 'ci/python-avocado.Dockerfile' + additionalBuildArgs "--build-arg USER_ID=${env.JENKINS_UID}" + args '-u 0:0' + } + } + steps { + dir("policies/lib") { + sh script: 'avocado run --disable-sysinfo tests/quick', label: 'quick method tests' + } + } + post { + failure { + script { + failedBuild = true + errors.add("Tests - python-lib") + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) + slackSend(channel: slackResponse.threadId, message: "Error during python-lib test - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") + } + } + cleanup { + script { + cleanWs(deleteDirs: true, notFailBuild: true) + } + } + } + } stage('relayd-man') { agent { dockerfile { @@ -35,9 +93,6 @@ pipeline { } when { not { branch 'master' } } steps { - script { - running.add("Tests - relayd-man") - } dir('relay/sources') { sh script: 'make man-source', label: 'build man page' } @@ -47,13 +102,12 @@ pipeline { script { failedBuild = true errors.add("Tests - relayd-man") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during relayd man build - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - relayd-man") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -69,9 +123,6 @@ pipeline { } steps { - script { - running.add("Tests - shell") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh script: './qa-test --shell', label: 'shell scripts lint' } @@ -87,13 +138,12 @@ pipeline { script { failedBuild = true errors.add("Tests - shell") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during shell tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - shell") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -108,9 +158,6 @@ pipeline { } } steps { - script { - running.add("Tests - python") - } 
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh script: './qa-test --python', label: 'python scripts lint' } @@ -121,13 +168,12 @@ pipeline { script { failedBuild = true errors.add("Tests - python") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during python tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - python") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -143,18 +189,15 @@ pipeline { } steps { - script { - running.add("Tests - typo") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('policies') { - sh script: 'typos --exclude "*.log"', label: 'check policies typos' + sh script: 'typos --exclude lib/tree/20_cfe_basics/cfengine --exclude lib/tree/10_ncf_internals/modules/packages --exclude "*.log"', label: 'check policies typos' } dir('webapp/sources/api-doc') { sh script: 'typos', label: 'check webapp api doc typos' } dir('relay') { - sh script: 'typos --exclude "*.log" --exclude "*.license" --exclude "*.asc" --exclude "*.pem" --exclude "*.cert" --exclude "*.priv" --exclude "*.pub" --exclude "*.signed" --exclude "*.log" --exclude "*.json"', label: 'check relayd typos' + sh script: 'typos --exclude "*.log" --exclude "*.gpg" --exclude "*.license" --exclude "*.asc" --exclude "*.pem" --exclude "*.cert" --exclude "*.priv" --exclude "*.pub" --exclude "*.signed" --exclude "*.log" --exclude "*.json"', label: 'check relayd typos' } } } @@ -163,13 +206,12 @@ pipeline { script { failedBuild = true errors.add("Tests - typo") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while checking typos - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - typo") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -185,9 +227,6 @@ pipeline { } steps { - script { - running.add("Tests - api-doc") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('api-doc') { sh script: 'make', label: 'build API docs' @@ -199,13 +238,12 @@ pipeline { script { failedBuild = true errors.add("Tests - api-doc") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while buiding api doc - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - api-doc") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -223,10 +261,6 @@ pipeline { } } steps { - - script { - running.add("Tests - webapp") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh script: 'webapp/sources/rudder/rudder-core/src/test/resources/hooks.d/test-hooks.sh', label: "hooks tests" dir('webapp/sources') { @@ -244,13 +278,12 @@ pipeline { script { failedBuild = true errors.add("Tests - webapp") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during webapp tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - 
running.remove("Tests - webapp") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -265,9 +298,6 @@ pipeline { POSTGRES_USER = 'rudderreports' } steps { - script { - running.add("Tests - relayd") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { script { docker.image('postgres:11-bullseye').withRun('-u 0:0 -e POSTGRES_USER=${POSTGRES_USER} -e POSTGRES_PASSWORD=${POSTGRES_PASSWORD} -e POSTGRES_DB=${POSTGRES_DB}', '-c listen_addresses="*"') { c -> @@ -292,13 +322,12 @@ pipeline { script { failedBuild = true errors.add("Tests - relayd") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during relayd tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - relayd") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -316,8 +345,7 @@ pipeline { steps { script { - running.add("Tests - rudder-package") - updateSlack(errors, running, slackResponse, version, changeUrl) + updateSlack(errors, slackResponse, version, changeUrl, false) } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('relay/sources/rudder-package') { @@ -341,8 +369,7 @@ pipeline { } cleanup { script { - running.remove("Tests - rudder-package") - updateSlack(errors, running, slackResponse, version, changeUrl) + updateSlack(errors, slackResponse, version, changeUrl, false) cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -354,19 +381,15 @@ pipeline { label 'generic-docker' filename 'policies/Dockerfile' additionalBuildArgs "--build-arg RUDDER_VER=${env.RUDDER_VERSION}-nightly --build-arg PSANALYZER_VER=1.20.0" - // mount cache args '-u 0:0 -v /srv/cache/cargo:/usr/local/cargo/registry -v /srv/cache/sccache:/root/.cache/sccache -v /srv/cache/cargo-vet:/root/.cache/cargo-vet' } } steps { - script { - running.add("Tests - policies") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('policies/rudderc') { dir('target/repos') { - dir('ncf') { - git url: 'https://github.com/normation/ncf.git' + dir('rudder') { + git url: 'https://github.com/normation/rudder.git' } dir('dsc') { git url: 'https://github.com/normation/rudder-agent-windows.git', @@ -395,13 +418,12 @@ pipeline { script { failedBuild = true errors.add("Tests - policies") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during policies tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Tests - policies") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -441,9 +463,6 @@ pipeline { } } steps { - script { - running.add("Tests - compatibility JDK ${JDK_VERSION}") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('webapp/sources') { sh script: 'mvn clean test --batch-mode', label: "webapp tests" @@ -459,13 +478,12 @@ pipeline { script { failedBuild = true errors.add("Tests - compatibility JDK ${JDK_VERSION}") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error during compatibility JDK ${JDK_VERSION} tests - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - 
running.remove("Tests - compatibility JDK ${JDK_VERSION}") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -487,9 +505,6 @@ pipeline { } when { not { branch 'master' } } steps { - script { - running.add("Publish - relayd-man") - } dir('relay/sources') { sh script: 'make man-source', label: 'build man page' withCredentials([sshUserPrivateKey(credentialsId: 'f15029d3-ef1d-4642-be7d-362bf7141e63', keyFileVariable: 'KEY_FILE', passphraseVariable: '', usernameVariable: 'KEY_USER')]) { @@ -502,13 +517,12 @@ pipeline { script { failedBuild = true errors.add("Publish - relayd-man") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while publishing relayd man pages - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Publish - relayd-man") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -524,9 +538,6 @@ pipeline { } steps { - script { - running.add("Publish - api-doc") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('api-doc') { sh script: 'make', label: 'build API docs' @@ -545,13 +556,12 @@ pipeline { script { failedBuild = true errors.add("Publish - api-doc") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while publishing api docs - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Publish - api-doc") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -567,9 +577,6 @@ pipeline { } when { branch 'master' } steps { - script { - running.add("Publish - api-doc-redirect") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { withCredentials([sshUserPrivateKey(credentialsId: 'f15029d3-ef1d-4642-be7d-362bf7141e63', keyFileVariable: 'KEY_FILE', passphraseVariable: '', usernameVariable: 'KEY_USER')]) { writeFile file: 'htaccess', text: redirectApi() @@ -582,13 +589,12 @@ pipeline { script { failedBuild = true errors.add("Publish - api-doc-redirect") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while building api doc redirect - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Publish - api-doc-redirect") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -606,9 +612,6 @@ pipeline { } } steps { - script { - running.add("Publish - webapp") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('webapp/sources') { withMaven(globalMavenSettingsConfig: "1bfa2e1a-afda-4cb4-8568-236c44b94dbf", @@ -630,13 +633,12 @@ pipeline { script { failedBuild = true errors.add("Publish - webapp") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while publishing webapp - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Publish - webapp") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -653,14 +655,11 @@ pipeline { } } steps { - script { - running.add("Publish - policies") - } catchError(buildResult: 
'SUCCESS', stageResult: 'FAILURE') { dir('policies/rudderc') { dir('target/repos') { - dir('ncf') { - git url: 'https://github.com/normation/ncf.git' + dir('rudder') { + git url: 'https://github.com/normation/rudder.git' } dir('dsc') { git url: 'https://github.com/normation/rudder-agent-windows.git', @@ -689,13 +688,12 @@ pipeline { script { failedBuild = true errors.add("Publish - policies") - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Error while publishing policies - <${currentBuild.absoluteUrl}|Link>", color: "#CC3421") } } cleanup { script { - running.remove("Publish - policies") cleanWs(deleteDirs: true, notFailBuild: true) } } @@ -706,7 +704,7 @@ pipeline { stage('End') { steps { script { - updateSlack(errors, running, slackResponse, version, changeUrl) + updateSlack(errors, slackResponse, version, changeUrl, true) if (failedBuild) { error 'End of build' } else { @@ -760,13 +758,13 @@ def redirectApi() { -def updateSlack(errors, running, slackResponse, version, changeUrl) { +def updateSlack(errors, slackResponse, version, changeUrl, isEnded) { def msg ="*${version} - rudder repo* - <"+currentBuild.absoluteUrl+"|Link>" if (changeUrl == null) { def fixed = currentBuild.resultIsBetterOrEqualTo("SUCCESS") && currentBuild.previousBuild.resultIsWorseOrEqualTo("UNSTABLE") - if (errors.isEmpty() && running.isEmpty() && fixed) { + if (errors.isEmpty() && isEnded && fixed) { msg += " => Build fixed! :white_check_mark:" def color = "good" slackSend(channel: "ci", message: msg, color: color) diff --git a/Jenkinsfile-security b/Jenkinsfile-security index dc342986837..d0618c0dda2 100644 --- a/Jenkinsfile-security +++ b/Jenkinsfile-security @@ -1,10 +1,9 @@ -def version = "8.2" +def version = "8.3" def slackResponse = null def errors = [] -def running = [] def changeUrl = env.CHANGE_URL @@ -32,9 +31,6 @@ pipeline { NVD_API_KEY = credentials('NVD_API_KEY') } steps { - script { - running.add("webapp") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('webapp/sources') { sh script: 'mvn --batch-mode -DnvdApiKey=$NVD_API_KEY -DfailBuildOnCVSS=7 -DcveValidForHours=48 -DsuppressionFiles=dependency-check-suppression.xml -DossindexAnalyzerEnabled=false org.owasp:dependency-check-maven:aggregate', label: "check webapp dependencies" @@ -52,15 +48,10 @@ pipeline { script { errors.add("webapp") failedBuild = true - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Dependency check error on webapp - <${currentBuild.absoluteUrl}console|Console>", color: "#CC3421") } } - cleanup { - script { - running.remove("webapp") - } - } } } @@ -76,9 +67,6 @@ pipeline { } } steps { - script { - running.add("npm") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { dir('webapp/sources/rudder/rudder-web/src/main/') { sh script: 'npm_config_loglevel=error npm ci --no-audit', label: "install dependencies" @@ -91,15 +79,10 @@ pipeline { script { errors.add("npm") failedBuild = true - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Dependency check error on npm - <${currentBuild.absoluteUrl}console|Console>", 
color: "#CC3421") } } - cleanup { - script { - running.remove("npm") - } - } } } stage('deps-rust') { @@ -113,9 +96,6 @@ pipeline { } } steps { - script { - running.add("relay") - } catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh script: 'mkdir -p target', label: 'create target directory' sh script: 'cargo deny check', label: 'check relayd dependencies' @@ -127,16 +107,10 @@ pipeline { script { errors.add("rust") failedBuild = true - slackResponse = updateSlack(errors, running, slackResponse, version, changeUrl) + slackResponse = updateSlack(errors, slackResponse, version, changeUrl, false) slackSend(channel: slackResponse.threadId, message: "Dependency check error on rust projects - <${currentBuild.absoluteUrl}console|Console>", color: "#CC3421") } } - - cleanup { - script { - running.remove("rust") - } - } always { archiveArtifacts artifacts: 'target/*-dependencies.txt' } @@ -145,7 +119,7 @@ pipeline { stage('End') { steps { script { - updateSlack(errors, running, slackResponse, version, changeUrl) + updateSlack(errors, slackResponse, version, changeUrl, true) if (failedBuild) { error 'End of build' } else { @@ -157,13 +131,13 @@ pipeline { } } -def updateSlack(errors, running, slackResponse, version, changeUrl) { +def updateSlack(errors, slackResponse, version, changeUrl, isEnded) { def msg ="*${version} - rudder dependency check* - <"+currentBuild.absoluteUrl+"|Link>" if (changeUrl == null) { def fixed = currentBuild.resultIsBetterOrEqualTo("SUCCESS") && currentBuild.previousBuild.resultIsWorseOrEqualTo("UNSTABLE") - if (errors.isEmpty() && running.isEmpty() && fixed) { + if (errors.isEmpty() && isEnded && fixed) { msg += " => No security issues found! :white_check_mark:" def color = "good" slackSend(channel: "ci-security", message: msg, color: color) diff --git a/README.md b/README.md index 9f3e571da4f..bb228cdd4d5 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@

- [Rudder logo image markup (three removed lines; tag contents not preserved)] + [Rudder logo image markup (three added lines; tag contents not preserved)]
diff --git a/api-doc/Makefile b/api-doc/Makefile index 9c3171c0598..f3e7a86286c 100644 --- a/api-doc/Makefile +++ b/api-doc/Makefile @@ -10,7 +10,7 @@ $(API): clean mkdir -p build target/$@ cp -r ../$@/sources/api-doc build/$@ mkdir -p build/$@/assets - cp -r assets/. build/$@/assets + cp ../logo/icons/svg/rudder-logo-rect-*.svg build/$@/assets/ cp node_modules/rapidoc/dist/rapidoc-min.js build/$@/assets/ cp -r rapidoc.html build/$@/ ./build.py $@ build target diff --git a/api-doc/assets/rudder-white.svg b/api-doc/assets/rudder-white.svg deleted file mode 100644 index 587b8314d37..00000000000 --- a/api-doc/assets/rudder-white.svg +++ /dev/null @@ -1,50 +0,0 @@ - -image/svg+xml \ No newline at end of file diff --git a/api-doc/assets/rudder.svg b/api-doc/assets/rudder.svg deleted file mode 100644 index 7eb08a77a34..00000000000 --- a/api-doc/assets/rudder.svg +++ /dev/null @@ -1,55 +0,0 @@ - - - -image/svg+xml \ No newline at end of file diff --git a/api-doc/package-lock.json b/api-doc/package-lock.json index 08f2fa5b6a9..fa947649c82 100644 --- a/api-doc/package-lock.json +++ b/api-doc/package-lock.json @@ -42,18 +42,18 @@ } }, "node_modules/@babel/runtime": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.3.tgz", - "integrity": "sha512-7EYtGezsdiDMyY80+65EzwiGmcJqpmcZCojSXaRgdrBaGtWTgDZKq69cPIVped6MkIM78cTQ2GOiEYjwOlG4xw==", + "version": "7.27.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.4.tgz", + "integrity": "sha512-t3yaEOuGu9NlIZ+hIeGbBjFtZT7j2cb2tg0fuaJKeGotchRjjLfrBA9Kwf8quhpP1EUuxModQg04q/mBwyg8uA==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.27.3.tgz", - "integrity": "sha512-ZYcgrwb+dkWNcDlsTe4fH1CMdqMDSJ5lWFd1by8Si2pI54XcQjte/+ViIPqAk7EAWisaUxvQ89grv+bNX2x8zg==", + "version": "7.27.4", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.27.4.tgz", + "integrity": "sha512-H7QhL0ucCGOObsUETNbB2PuzF4gAvN8p32P6r91bX7M/hk4bx+3yz2hTwHL9d/Efzwu1upeb4/cd7oSxCzup3w==", "license": "MIT", "dependencies": { "core-js-pure": "^3.30.2" @@ -1223,9 +1223,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.15.27", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.27.tgz", - "integrity": "sha512-5fF+eu5mwihV2BeVtX5vijhdaZOfkQTATrePEaXTcKqI16LhJ7gi2/Vhd9OZM0UojcdmiOCVg5rrax+i1MdoQQ==", + "version": "22.15.28", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.28.tgz", + "integrity": "sha512-I0okKVDmyKR281I0UIFV7EWAWRnR0gkuSKob5wVcByyyhr7Px/slhkQapcYX4u00ekzNWaS1gznKZnuzxwo4pw==", "license": "MIT", "dependencies": { "undici-types": "~6.21.0" diff --git a/api-doc/rapidoc.html b/api-doc/rapidoc.html index fae4175155a..55148129c36 100644 --- a/api-doc/rapidoc.html +++ b/api-doc/rapidoc.html @@ -18,8 +18,8 @@ sans-serif' mono-font='SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace' nav-bg-color="#041922" theme="light" nav-text-color="#ffffffaa" nav-accent-color="#13BEB7" primary-color="#13BEB7" allow-spec-file-download="true" info-description-headings-in-navbar="true"> - + - \ No newline at end of file + diff --git a/ci/methods.Dockerfile b/ci/methods.Dockerfile new file mode 100644 index 00000000000..1aeb9c09df4 --- /dev/null +++ b/ci/methods.Dockerfile @@ -0,0 +1,13 @@ +FROM rust:bullseye +LABEL ci=ncf/ci/methods.Dockerfile + +# Accept all OSes +ENV 
UNSUPPORTED=y +RUN < black - - + + white - - + + monochrome black - - + + monochrome white - - + + + + + no text + + + +### Build + +The build the full set of icons (optimized SVG and PNG), use the `convert.sh` script. diff --git a/logo/convert.sh b/logo/convert.sh new file mode 100755 index 00000000000..9d2f0ee7c8d --- /dev/null +++ b/logo/convert.sh @@ -0,0 +1,68 @@ +#!/bin/bash + +# This script requires: +# +# * inkscape (best for SVG -> PNG) +# * svgo (for SVG optimization) +# * zopfli (best png opimizer: https://iter.ca/post/zopfli/) +# +# NOTE: This script aims at best size result over time, and should be used for one-shot jobs. + +set -e + +target="icons" +archive="rudder-logos" +rm -rf "${target}" "${archive}.*" + +sources="../webapp/sources/rudder/rudder-web/src/main/svg/logo/*.svg" +png_sizes=(512 1024) + +################################################# +# Favicon + +out_dir="${target}/favicons" +mkdir -p "${out_dir}" +cd ../webapp/sources/rudder/rudder-web/src/main/ && ./favicon.sh && cd - >/dev/null +cp ../webapp/sources/rudder/rudder-web/src/main/webapp/images/rudder-favicon.ico "${target}/favicons/" + +################################################# +### Optimized SVG + +out_dir="${target}/svg" +mkdir -p "${out_dir}" +for file in ${sources} +do + basename=$(basename "${file}") + out="${out_dir}/${basename}" + zout="${out_dir}/${basename%.svg}.svgz" + echo "${file}" + echo " optimizing (svgo) -> ${out}" + svgo --quiet --multipass --input "${file}" --output "${out}" + #echo " compressing (gzip) -> ${zout}" + #gzip --best --stdout "${out}" > "${zout}" +done + +################################################# +# Optimized PNG + +for file in ${sources} +do + echo "${file}" + for size in "${png_sizes[@]}" + do + out_dir="${target}/png/${size}px" + mkdir -p "${out_dir}" + basename=$(basename "${file}") + out="${out_dir}/${basename%.svg}.png" + echo " generating (inkscape, ${size}px) -> ${out}" + inkscape --export-filename="${out}" -w "${size}" "${file}" + echo " optimizing (zopflipng)" + zopflipng -y -m "${out}" "${out}" >/dev/null + done +done + +################################################# +# Archive + +zip -q -r "${archive}" icons +tar -czf "${archive}.tar.gz" icons diff --git a/logo/icons/svg/rudder-logo-notext.svg b/logo/icons/svg/rudder-logo-notext.svg new file mode 100644 index 00000000000..c6f38e36c8d --- /dev/null +++ b/logo/icons/svg/rudder-logo-notext.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-rect-black.svg b/logo/icons/svg/rudder-logo-rect-black.svg new file mode 100644 index 00000000000..997d4ac965b --- /dev/null +++ b/logo/icons/svg/rudder-logo-rect-black.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-rect-monochrome-black.svg b/logo/icons/svg/rudder-logo-rect-monochrome-black.svg new file mode 100644 index 00000000000..cc7b5e17c30 --- /dev/null +++ b/logo/icons/svg/rudder-logo-rect-monochrome-black.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-rect-monochrome-white.svg b/logo/icons/svg/rudder-logo-rect-monochrome-white.svg new file mode 100644 index 00000000000..728b3591dd0 --- /dev/null +++ b/logo/icons/svg/rudder-logo-rect-monochrome-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-rect-white.svg b/logo/icons/svg/rudder-logo-rect-white.svg new file mode 100644 index 00000000000..cc40e56ac3b --- /dev/null +++ b/logo/icons/svg/rudder-logo-rect-white.svg @@ -0,0 +1 @@ + \ No newline at 
end of file diff --git a/logo/icons/svg/rudder-logo-square-black.svg b/logo/icons/svg/rudder-logo-square-black.svg new file mode 100644 index 00000000000..a1d6dbc099f --- /dev/null +++ b/logo/icons/svg/rudder-logo-square-black.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-square-monochrome-black.svg b/logo/icons/svg/rudder-logo-square-monochrome-black.svg new file mode 100644 index 00000000000..5dd5a3258bf --- /dev/null +++ b/logo/icons/svg/rudder-logo-square-monochrome-black.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-square-monochrome-white.svg b/logo/icons/svg/rudder-logo-square-monochrome-white.svg new file mode 100644 index 00000000000..68d8c78b3f6 --- /dev/null +++ b/logo/icons/svg/rudder-logo-square-monochrome-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/icons/svg/rudder-logo-square-white.svg b/logo/icons/svg/rudder-logo-square-white.svg new file mode 100644 index 00000000000..fa567437e8e --- /dev/null +++ b/logo/icons/svg/rudder-logo-square-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/policies/Dockerfile b/policies/Dockerfile index 6c627e29e08..bb8caad340b 100644 --- a/policies/Dockerfile +++ b/policies/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.81.0-bookworm +FROM rust:1.85.0-bookworm LABEL ci=rudder/policies/Dockerfile ARG USER_ID=1000 @@ -19,8 +19,13 @@ RUN <. +# +##################################################################################### + + +DESTDIR = /usr +INSTALL := $(shell type ginstall >/dev/null 2>&1 && echo ginstall || echo install) +CP_A := cp -rp + +all: install + +# no dependency +depend: localdepends +localdepends: + +# Install ncf in DESTDIR +install: + mkdir -p $(DESTDIR)/share/ncf + $(CP_A) tree $(DESTDIR)/share/ncf/ + $(INSTALL) -m 755 ncf $(DESTDIR)/share/ncf/ + +test: + cd tests/acceptance/ && ./testall --printlog --info + +test-unsafe: + cd tests/acceptance/ && ./testall --printlog --info --unsafe + +clean: + rm -rf tests/style/.succeeded + rm -f tests/style/summary.log + rm -f tests/style/test.log + rm -f tests/style/test.xml + rm -f tests/style/xml.tmp + rm -rf tests/style/workdir/ + rm -rf tests/unit/test.log + rm -rf tests/acceptance/.succeeded + rm -f tests/acceptance/summary.log + rm -f tests/acceptance/test.log + rm -f tests/acceptance/test.xml + rm -f tests/acceptance/xml.tmp + rm -rf tests/acceptance/workdir/ + find $(CURDIR) -name "*.[pP][yY][cC]" -exec rm "{}" \; + +distclean: clean + +.PHONY: all test clean distclean depend localdepend install diff --git a/policies/lib/README.md b/policies/lib/README.md new file mode 100644 index 00000000000..ccefb090ae8 --- /dev/null +++ b/policies/lib/README.md @@ -0,0 +1,44 @@ +# Policies library / ncf + +## Usage + +This project is part of Rudder, and as such is bundled with every Rudder server. + +## Synopsis + +### A powerful and structured configuration management framework + +ncf is designed with the following concepts throughout: + + - __DRY__: You should never have to duplicate promises, or even promise patterns. This is the best way to make unmaintainable code. + - __KISS__: Keep everything Simple and Sweet :) This extends to having one bundle do one thing, but do it well. Avoid complexity. + - __Minimal effort__: Reduce typing and syntax effort for everyday use as much as possible. Make the framework do the heavy lifting - code once, benefit forever! 
(aka "Lazy" :) ) + - __Intuitive__: Reading and writing configuration management rules with ncf should be self-evident. Clearly named bundles and conventions make this easy. + - __Extensible__: You should be able to extend anything, add methods or support for new tools easily, without breaking anything. + - __Open source__: We believe in sharing so that the world can build on each other's work, and continually improve. ncf is [distributed under the GPLv3 license on GitHub](https://github.com/normation/ncf/). + +### Decoupled layers + +There are several layers in this framework, from 10 to 60, where each layer is a foundation for higher levels. The higher the lever, the higher the abstraction level. + + - __10_ncf_internals__: This directory contains the mechanics and glue to make the framework work. This should be very rarely modified, and is generic. + - __20_cfe_basics__: This directory contains libraries that can be reused; most notably the CFEngine Standard Library. + - __30_generic_methods__: This directory contains reusable bundles, that perform unit tasks, and are completely generic (for example "file_create_symlink"). All generic methods are documented on the [reference page](https://docs.rudder.io/reference/current/reference/generic_methods.html). + +Each level uses items from lower levels (lower numbers) or, in some cases, from its own level. + +## Tests + +### Quick tests + +Quick tests are run using the avocado framework, which can be installed using: + + pip3 install --user avocado-framework + +To add a test, simply add an executable file to the `tests/quick` folder. A lib folder is available in `tests/testlib` but is not +automatically imported, you will to import it manually in your new test if needed. + +To run the tests: + + avocado run tests/quick + avocado run tests/quick/test_ncf_api.py diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/services/servers/ServerSummaryService.scala b/policies/lib/src/lib.rs similarity index 100% rename from webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/services/servers/ServerSummaryService.scala rename to policies/lib/src/lib.rs diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/Rudder-tests-key b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/Rudder-tests-key new file mode 100644 index 00000000000..9bf49490fd0 --- /dev/null +++ b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/Rudder-tests-key @@ -0,0 +1,30 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBF1Stw0BCAC48EtwN8inD0VP0f92AANqeNMQwDHeyoiurxB7mRe2TZuSnjgS +N92PbXXHHrF+fX2OyYAL7VKX9YQtL6GwQOLDTlN+2+SHydjajJqQIhaQtx5sOYGs +zCO8sV1sIgGl4Ym/RxC4Gt5hFYxdZvzzYzM6ZDRpZy6LrKx+B2KAIzEeIom4Lcgm +ikokswDhKmNH2GExjyfd00rSfbOrHtx4Kr9Cgt7zl8YYEsIhABlDCKgIPvA5QS57 +Wc+OgBquEQKiHSUMKDP7PyyyBAn4k953ifSSpukRPE2/fjl6TZw5YJoLWYTp7jqW +je4lmfEwO0uIlPbzMsSpoBhsy/Yabm4s9g3zABEBAAG0DFJ1ZGRlci10ZXN0c4kB +VAQTAQgAPhYhBO6XrpxBGgnwGaf5fXyL1mE0bkQNBQJdUrcNAhsDBQkDwmcABQsJ +CAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJEHyL1mE0bkQN36kH/jw/Eu5w2uOwlPsJ +P6Z1DJouE7e9yh8rJLO4TGaSHGI/Lh3dpfdgjwYFvx9fk+zlBt53B9osKfSGwGfQ +hI+ofJtM+3P9mpJHx1NFOlXY8McHXOkL6q1UpjgZft9cl6rO5aXDQWi5Jrz6pcPD +ACBgWPBNxEMsPRkv9ezB7wMDRau/ftochwQMnODua9O2qEbwURE8zggSliW3wEJ3 +LehMBqAvfYxkgeG/jlYViyeMHytvcm+camYiHaHSPpmYaIfCNuq2O6eeohRun7j2 +CRMAHAtIxpd/7Ix1w7xWsJ1yPYVVs/OIn1PNjW00PlohhdY+U2XxsKpcZ+g2SEf4 +NackJ9S5AQ0EXVK3DQEIANVzDHeOYeYtBlLwlghQc3jzCjeybXM+AhJDoJ/ZzcF2 +CwbhAczc8aEChfT83NK/y8/zeusFdOEZsvxdPJuAPMNXus+Y5TE+3Xe0ph7KJYy5 
+Do1h5ix9haiS9WO+BiZxaRS1hRWbOZ0ju24jrQsjEIoY/P9Pqdxfpve1JuYZZT+x +AdFVEhKk5iD57XVHs4C4GLqV4DjhZYhbML0QIDA31fNqs6HhocI7DeXW/vMGEuCC +GfYkHy2hDfGHIi7H3Cma0sIkbSritYMjCd7C9K5FEPcpbPXbMVKOdfwNOhXRg4MT +PUQiI+ukVtIaNl8a9jnatCW0QTQ22CuuwOtINuBeOY0AEQEAAYkBPAQYAQgAJhYh +BO6XrpxBGgnwGaf5fXyL1mE0bkQNBQJdUrcNAhsMBQkDwmcAAAoJEHyL1mE0bkQN +8fMH/2M88sODx6vfildEMYPRHteM2w8T3OT1N+p1GfzLxK4/bBeUwhgz983pGKOP +Kywpo/yocxnYSiccoctUo6dvUbZB82/7SD3z/YaJZOGtTt85CuyD9htJmw/OJUlH +EwezlKMtkW6Q6xIxXfPXO+rb+m0IqCtt42Fpp+GsDf+3y4cpqnruDZgJtY2Iis51 +15OSsBHp4uofsPWbCLfG78uNuu/9Bo9SXJVC2vLL1cosxHJ8P1llndLh+K94kcVq ++z0LbxxnRPcxfHRhiaEM9l0eRB5Uoo9aZy/YfeguzteHIu0fMkgKWSTau3tX/M9z +yHUYeBH0v/BbKeNEljG2AgoFlP4= +=86dD +-----END PGP PUBLIC KEY BLOCK----- diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-1-0.noarch.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-1-0.noarch.rpm new file mode 100755 index 00000000000..430acdbe096 Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-1-0.noarch.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-2-0.noarch.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-2-0.noarch.rpm new file mode 100755 index 00000000000..348bf07d29f Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/dummy-2-0.noarch.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-1-0.noarch.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-1-0.noarch.rpm new file mode 100755 index 00000000000..430acdbe096 Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-1-0.noarch.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-2-0.noarch.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-2-0.noarch.rpm new file mode 100755 index 00000000000..348bf07d29f Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy-2-0.noarch.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy_to_not_remove-1-0.noarch.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy_to_not_remove-1-0.noarch.rpm new file mode 100644 index 00000000000..b674530cf37 Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/dummy_to_not_remove-1-0.noarch.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_test_remove-1-64.3.x86_64.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_test_remove-1-64.3.x86_64.rpm new file mode 100644 index 00000000000..514a3c10fe6 Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_test_remove-1-64.3.x86_64.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-1-64.3.x86_64.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-1-64.3.x86_64.rpm new file mode 100644 index 00000000000..8056ce2edf4 Binary files /dev/null and 
b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-1-64.3.x86_64.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-3-64.3.x86_64.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-3-64.3.x86_64.rpm new file mode 100644 index 00000000000..d708bb6f1b5 Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/repo_rudder_tests/rudder_tests_repo-3-64.3.x86_64.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/rudder-tests-1-64.3.x86_64.rpm b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/rudder-tests-1-64.3.x86_64.rpm new file mode 100644 index 00000000000..757f446dfce Binary files /dev/null and b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/rudder-tests-1-64.3.x86_64.rpm differ diff --git a/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/zypper_pattern_module.cf b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/zypper_pattern_module.cf new file mode 100644 index 00000000000..a8d694d526e --- /dev/null +++ b/policies/lib/tests/acceptance/10_ncf_internals/staging/modules/zypper_pattern_module.cf @@ -0,0 +1,395 @@ +##################################################################################### +# Copyright 2018 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + + +bundle agent init +{ + # Build packages + a local repo + remove packages used in test + vars: + "pwd" string => dirname("${this.promise_filename}"); + "repo_path" string => "${pwd}/repo_rudder_tests"; + "modules_path" string => "${ncf_inputs.ncf_tree}/10_ncf_internals/modules"; + "packages_list" slist => {"zip", "dummy", "rudder_test_repo", "rudder-tests"}; + commands: + # Since newer zypper versions do not support installing local unsigned rpms by default, + # we have to import a one-time-use key that signed the pattern rpm + "${paths.rpm} --import ${pwd}/Rudder-tests-key"; + "/usr/bin/createrepo ${repo_path}"; + "/usr/bin/zypper rr local"; + "/usr/bin/zypper ar --no-gpgcheck ${repo_path} local"; + "/usr/bin/zypper ref local"; + + "/usr/bin/zypper --non-interactive in -t pattern rudder_test_remove"; + + methods: + "package_absent" usebundle => ncf_package("${packages_list}", "", "", "zypper", "absent", ""); +} + +############################################################################### +# SCENARIO +#________ Basic Tests ________ +# -Test get-package-data on a dummy rpm on the repo => repo-based return +# -Test get-package-data on a dummy rpm without a pattern => error return +# -Test get-package-data on an rpm containing a pattern => ok and returns the pattern name +# -Test list-installed method by looking for its output in rpm -qa output +# -Test file-install with rudder-tests-1-64.3.x86_64.rpm which should provide +# xxxx +# -Test repo-install with rudder_test_repo in a specific version +# -Test list-updates with rudder_test_repo, should output the 3-64.3 version +# -Test repo-install with rudder_test_repo without a specific version +# It should install rudder_test_repo, and update/install the dependencies +# -Test remove with the pattern rudder_test_remove, which provides +# the package dummy_to_not_remove which should not be removed after pattern +# removal.
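+# Note: the helper bundles below drive the module through the CFEngine package-module line protocol, +# i.e. newline-separated Key=Value pairs exchanged on stdin/stdout. Assuming the zypper_pattern script +# follows that protocol, the first get-package-data test listed above roughly amounts to sending +# "File=dummy" to the module and expecting "PackageType=repo" followed by "Name=dummy" in return.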
+#________ Other Tests ________ +############################################################################### +bundle agent test +{ + vars: + pass1:: + "rudder_test_remove_installed" string => execresult("/usr/bin/zypper patterns --installed-only | grep -c rudder_test_remove", "useshell"); + + classes: + "pass1" expression => "any"; + "rudder_test_remove_installed" expression => strcmp("${rudder_test_remove_installed}", "1"); + + methods: + SUSE.pass1:: + # Get-Package-Data + "any" usebundle => test_package_module_get_package_data("zypper_pattern", "dummy", "PackageType=repo +Name=dummy", "1"); + "any" usebundle => test_package_module_get_package_data("zypper_pattern", "${init.pwd}/dummy-1-0.noarch.rpm", "PackageType=file +File=${init.pwd}/dummy-1-0.noarch.rpm +ErrorMessage: Package pattern name not found", "2"); + "any" usebundle => test_package_module_get_package_data("zypper_pattern", "${init.pwd}/rudder-tests-1-64.3.x86_64.rpm", "PackageType=file +Name=rudder-tests +Version=1-64.3 +Architecture=x86_64", "3"); + + "any" usebundle => test_package_module_list_installed("zypper_pattern"); + + # We assume that packages listed in the pattern will be installed and are available via repo + # install a pattern from file + "any" usebundle => test_package_module_file_install("zypper_pattern","File=${init.pwd}/rudder-tests-1-64.3.x86_64.rpm", "1", "rudder-tests"); + "package" usebundle => test_package_module_package_installed("zypper", "Name=zip", "zip_installed"); + + # Install a pattern from repo, version specific + "any" usebundle => test_package_module_repo_install("zypper_pattern","Name=rudder_test_repo +Version=1-64.3", "1"); + "package" usebundle => test_package_module_package_installed("zypper", "Name=dummy +Version=1-0", "dummy_1_0_installed"); + + # Check for updates + "any" usebundle => test_package_module_list_updates("zypper_pattern","Name=rudder_test_repo +Version=3-64.3 +Architecture=x86_64", "1"); + + # Install the latest pattern version from repo + "any" usebundle => test_package_module_repo_install("zypper_pattern","Name=rudder_test_repo", "2"); + "package" usebundle => test_package_module_package_installed("zypper","Name=dummy +Version=2-0", "dummy_2_0_installed"); + + # Remove the pattern + pass1.rudder_test_remove_installed:: + "any" usebundle => test_package_module_remove("zypper_pattern", "Name=rudder_test_remove", "1"); + "package" usebundle => test_package_module_package_installed("zypper","Name=dummy_to_not_remove", "dummy_to_not_remove_installed"); + +} + +bundle agent check { + vars: + "check_list" slist => {"package_data_ok_1", "package_data_ok_2", "package_data_ok_3", "list_installed_ok", "file_install_ok", "repo_install_ok", "list_updates_ok", "repo_install_latest_ok", "remove_ok"}; + + classes: + SUSE:: + "package_data_ok_1" expression => "get_package_data_zypper_pattern_1_ok"; + "package_data_ok_2" expression => "get_package_data_zypper_pattern_2_ok"; + "package_data_ok_3" expression => "get_package_data_zypper_pattern_3_ok"; + + "list_installed_ok" expression => "test_package_module_list_installed_zypper_pattern_ok"; + + "file_install_ok" expression => "test_package_module_file_install_zypper_pattern_1_ok.zip_installed"; + + "repo_install_ok" expression => "test_package_module_repo_install_zypper_pattern_1_ok.dummy_1_0_installed"; + + "list_updates_ok" expression => "test_package_module_list_updates_zypper_pattern_1_ok"; + + "repo_install_latest_ok" expression => "test_package_module_repo_install_zypper_pattern_2_ok.dummy_2_0_installed"; + + "remove_ok" 
expression => "test_package_module_remove_zypper_pattern_1_ok.dummy_to_not_remove_installed"; + + "ok" expression => "package_data_ok_1.package_data_ok_2.package_data_ok_3.list_installed_ok.file_install_ok.repo_install_ok.list_updates_ok.repo_install_latest_ok.remove_ok"; + !SUSE:: + "ok" expression => "any"; + + reports: + "${check_list} OK" + ifvarclass => "${check_list}"; + "${check_list} X" + ifvarclass => "!${check_list}"; + + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} + + +# Compare the output of the cfengine package module ${module_name} when using the method "package_data" and the input ${input} +# with ${expected_output}. It is raw string matching. +bundle agent test_package_module_get_package_data(module_name, input, expected_output, test_id) +{ + vars: + "command" string => "${init.modules_path}/packages/${module_name} get-package-data < execresult("${command}","useshell"); + + classes: + "get_package_data_${module_name}_${test_id}_ok" expression => strcmp("${output}", "${expected_output}"), + scope => "namespace"; + "print" expression => "get_package_data_${module_name}_${test_id}_ok"; + reports: + "${output}" + ifvarclass => "!get_package_data_${module_name}_${test_id}_ok"; +} + + +# Check if the output of the method "list_installed" of the cfengine module ${module_name} is correct. +# It compares the output with a custom one, here it creates a one liner regex per package which should match the output of rpm -qa. +bundle agent test_package_module_list_installed(module_name) +{ + # TODO make non rpm based tests + + vars: + # Parse the output of the module to get 3 array, use them to build a unique regex per pattern to match the raw output of + # the packet manager + + "command" string => "${init.modules_path}/packages/${module_name} list-installed < /dev/null #test list-installed"; + "module_output" string => execresult("${command}", "useshell"); + + # Ugly way but most likely the only way to "easily" build 3 arrays from the module output, by parsing 3 long strings + "package_name_raw" string => execresult("/usr/bin/echo \"${module_output}\" | /usr/bin/sed -n '1~3p' | /usr/bin/sed s/Name=//g", "useshell"); + "package_version_raw" string => execresult("/usr/bin/echo \"${module_output}\" | /usr/bin/sed -n '2~3p' | /usr/bin/sed s/Version=//g", "useshell"); + "package_architecture_raw" string => execresult("/usr/bin/echo \"${module_output}\" | /usr/bin/sed -n '3~3p' | /usr/bin/sed s/Architecture=//g", "useshell"); + + # Get a list containing the name of each package + "packages_name" slist => splitstring("${package_name_raw}","\n","99999999999"); + + "packages_number" int => length("packages_name"); + "raw_indices" string => execresult("/usr/bin/seq ${packages_number}", "noshell"); + "indices" slist => splitstring("${raw_indices}", "\n", "99999999999"); + + # Build the arrays from the previous string + "packages_names_${indices}" string => execresult("/usr/bin/echo \"${package_name_raw}\" | /usr/bin/sed -n '${indices}p' | /usr/bin/sed s/Name=//g", "useshell"); + "packages_version_${indices}" string => execresult("/usr/bin/echo \"${package_version_raw}\" | /usr/bin/sed -n '${indices}p' | /usr/bin/sed s/Version=//g", "useshell"); + "packages_architecture_${indices}" string => execresult("/usr/bin/echo \"${package_architecture_raw}\" | /usr/bin/sed -n '${indices}p' | /usr/bin/sed s/Architecture=//g", "useshell"); + + + # Get the packages list from the package manager directly, rpm distro only + "packages" string => execresult("${paths.rpm} 
-qa #${module_name}", "useshell"); + "package_regex_${indices}" string => ".*${packages_names_${indices}}-${packages_version_${indices}}.*${packages_architecture_${indices}}"; + "packages_list" slist => splitstring("${packages}", "${const.n}", "99999999999"); + + classes: + # Compare rpm and module outputs + "packages_match_${indices}" expression => regcmp("${package_regex_${indices}}", "${packages_list}"); + "invalid" not => "packages_match_${indices}"; + "test_package_module_list_installed_${module_name}_ok" not => "invalid", + scope => "namespace"; + + reports: + "Expected line pattern ${package_regex_${indices}} could not be found in the output of \"rpm -qa\"" + ifvarclass => "!packages_match_${indices}"; +} + + +bundle agent test_package_module_list_updates(module_name, expected_output, test_id) +{ + vars: + "command" string => "${init.modules_path}/packages/${module_name} list-updates < /dev/null"; + "output" string => execresult("${command}", "useshell"); + + classes: + "test_package_module_list_updates_${module_name}_${test_id}_ok" expression => strcmp("${expected_output}", "${output}"), + scope => "namespace"; + +} + + +# TODO, unused for the moment +bundle agent test_package_module_list_updates_local(module_name, input, expected_output, test_id) +{ + vars: + "command" string => "${init.modules_path}/packages/${module_name} list-updates-local < /dev/null"; + "output" string => execresult("${command}", "useshell"); + + classes: + "test_package_module_list_updates_local_${module_name}_${test_id}_ok" expression => strcmp("${expected_output}", "${output}"), + scope => "namespace"; +} + +bundle agent test_package_module_repo_install(module_name, input, test_id) +{ + methods: + "any" usebundle => test_package_module_install("${module_name}", "repo", "${input}", "${test_id}", ""); +} + +bundle agent test_package_module_file_install(module_name, input, test_id, expected_name) +{ + methods: + "any" usebundle => test_package_module_install("${module_name}", "file", "${input}", "${test_id}", "${expected_name}"); +} + +# The command should never be called with attributes that are not present in the package. +# It is already checked by the get-package-data method for the file source install. +# The package name can be trickier to get since rpm and zypper may differs in the naming +# of patterns for instances. +# +# Uninstall the rpm providing the given pattern via zypper cfengine module. +# Then installs it via the ${module_name} and use custom command (rpm based) to test if the install/version/arch are correct. +# +# It can only handle one packet given in input at a time, with or without arch/version specified. +# This is to limit the complexity of the test. Moreover, Rudder should not use multiple package install. +# +# /!\ It uses other cfengine package module to test the presence of the package. 
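+# The ${input} parameter is the newline-separated attribute block handed to the module, e.g. +# "Name=rudder_test_repo" plus "Version=1-64.3" on the next line for a repo install, or "File=<path to the rpm>" +# for a file install, as used by the calls in the test bundle above.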
+# ${source} should be "file" or "repo" +bundle agent test_package_module_install(module_name, source, input, test_id, expected_name) +{ + vars: + "command" string => "${init.modules_path}/packages/${module_name} ${source}-install < regex_replace("${input}", "(^(?!File=).*|\n|File=)", "", "gms"); + "package_name" string => execresult("${paths.rpm} --qf \"%{NAME}\\n\" -qp ${package_path}", "useshell"); + pass1.repo:: + "package_path" string => regex_replace("${input}", "(^(?!Name=).*|\n|Name=)", "", "gms"); + "package_name" string => "${package_path}"; + any:: + "package_name_canonify" string => canonify("${package_name}"); + + "package_arch" string => regex_replace("${input}", "(^(?!Architecture=).*|\n|Architecture=)", "", "gm"); + "package_version" string => regex_replace("${input}", "(^(?!Version=).*|\n|Version=)", "", "gm"); + + zypper_pattern:: + "zypper_name" string => "${expected_name}", + ifvarclass => "file"; + "zypper_name" string => "${package_path}", + ifvarclass => "repo"; + + pass1:: + "outnull" string => execresult("${command}","useshell"), + ifvarclass => "ncf_package_absent_${package_name_canonify}_ok"; + + # Use zypper to check_${package_name_canonify} if the installation is correct + "install_check_${package_name_canonify}" string => execresult("/usr/bin/zypper info -t pattern ${zypper_name} | grep Installed | sed 's%Installed\s*:\s*%%g'","useshell"), + ifvarclass => "ncf_package_absent_${package_name_canonify}_ok"; + # Zypper info provides only the latest version available and not the current installed one.... + "version_check_${package_name_canonify}" string => execresult("/usr/bin/zypper patterns --installed-only | grep \"${zypper_name}\" | awk '{print $5}'", "useshell"), + ifvarclass => "ncf_package_absent_${package_name_canonify}_ok"; + "arch_check_${package_name_canonify}" string => execresult("/usr/bin/zypper info -t pattern ${zypper_name} | grep Arch | sed 's%Arch\s*:\s*%%g'","useshell"), + ifvarclass => "ncf_package_absent_${package_name_canonify}_ok"; + + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "repo" expression => strcmp("${source}", "repo"); + "file" expression => strcmp("${source}", "file"); + + "zypper_pattern" expression => strcmp("${module_name}", "zypper_pattern"); + + pass3:: + "is_installed" expression => regcmp("Yes.*", "${install_check_${package_name_canonify}}"); + "is_version_correct" expression => or(strcmp("${version_check_${package_name_canonify}}", "${package_version}"), strcmp("${package_version}", "")); + "is_arch_correct" expression => or(strcmp("${arch_check_${package_name_canonify}}", "${package_arch}"), strcmp("${package_arch}", "")); + + "test_package_module_${source}_install_${module_name}_${test_id}_ok" expression => "is_installed.is_version_correct.is_arch_correct.ncf_package_absent_${package_name_canonify}_ok", + scope => "namespace"; + + + methods: + zypper_pattern.pass1.!pass2:: + "package_absent" usebundle => ncf_package("${package_name}", "${package_version}", "${package_arch}", "zypper", "absent", ""); + + reports: + pass3.!is_installed:: + "Package ${package_name} not found installed, found \"${install_check_${package_name_canonify}}\" instead of \"Yes\""; + pass3.!is_version_correct.is_installed:: + "Package ${package_name} version found incorrect: ${version_check_${package_name_canonify}}"; + pas3.!is_arch_correct.is_installed:: + "Package ${package_name} arch found incorrect: ${arch_check_${package_name_canonify}}"; +} + +# Call the "${module_name}" 
method "remove" with the input "${input}" and define a class if the input is not found in the installed packages. +# Can only handle one input at a time for now ie one package to remove +# Comments in the command are there to avoid caching the result when calling them multiple times. +bundle agent test_package_module_remove(module_name, input, test_id) +{ + vars: + "command" string => "${init.modules_path}/packages/${module_name} remove < execresult("${command}", "useshell"); + "package_name" string => regex_replace("${input}", "(^(?!Name=).*|\n|Name=)", "", "gms"); + + pass1.zypper_pattern:: + "is_installed" string => execresult("/usr/bin/zypper patterns --installed-only | grep -c ${package_name} #${test_id}", "useshell"); + + classes: + "pass1" expression => "any"; + "zypper_pattern" expression => strcmp("zypper_pattern", "${module_name}"); + "is_not_installed" expression => strcmp("${is_installed}", "0"); + "test_package_module_remove_${module_name}_${test_id}_ok" expression => "is_not_installed", + scope => "namespace"; + +} + + +# Create the class "${class_name}" if the ${input} is contained in the output of the "list_installed" method of the module +bundle agent test_package_module_package_installed(module_name, input, class_name) +{ + vars: + # We need a custom comment to avoid hitting the execresult cache... + "command" string => "${init.modules_path}/packages/${module_name} list-installed < /dev/null #${class_name}"; + "module_output" string => execresult("${command}", "useshell"); + + classes: + "${class_name}" expression => regcmp("[\s\S]*${input}[\s\S]*", "${module_output}"), + scope => "namespace"; +} + diff --git a/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_global_class.cf b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_global_class.cf new file mode 100644 index 00000000000..5716e801c57 --- /dev/null +++ b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_global_class.cf @@ -0,0 +1,65 @@ +####################################################### +# +# Test ncf dry-run using the global_dry_run class +# +####################################################### + +# @agent_requirements "capabilities": [ "bodydefault" ] || "agent_version" >= 3.9 + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init {} + +####################################################### + +bundle agent test_action(name) +{ + files: + "$(G.testdir)/${name}" + create => "true", + classes => classes_generic("create_${name}"); +} + +####################################################### + +bundle agent test +{ + + classes: + "global_dry_run" expression => "create_default_reached", scope => "namespace"; + + methods: + "default" + usebundle => test_action("default"); + global_dry_run:: + "dryrun" + usebundle => test_action("dryrun"); +} + +bundle agent check +{ + classes: + "default_created" expression => fileexists("$(G.testdir)/default"); + "dryrun_created" expression => fileexists("$(G.testdir)/dryrun"); + "ok" expression => "create_dryrun_reached.!dryrun_created.default_created"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git 
a/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_method.cf b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_method.cf new file mode 100644 index 00000000000..1533cd8eef2 --- /dev/null +++ b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_method.cf @@ -0,0 +1,69 @@ +####################################################### +# +# Test ncf dry-run using the set_dry_run_mode call +# +####################################################### + +# @agent_requirements "capabilities": [ "bodydefault" ] || "agent_version" >= 3.9 + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init {} + +####################################################### + +bundle agent test_action(name) +{ + files: + "$(G.testdir)/${name}" + create => "true", + classes => classes_generic("create_${name}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "default" + usebundle => test_action("default"); + "enable dry-run" + usebundle => set_dry_run_mode("true"); + "dryrun" + usebundle => test_action("dryrun"); + "disable dry-run" + usebundle => set_dry_run_mode("false"); + "not dryrun" + usebundle => test_action("notdryrun"); + +} + +bundle agent check +{ + classes: + "default_created" expression => fileexists("$(G.testdir)/default"); + "dryrun_created" expression => fileexists("$(G.testdir)/dryrun"); + "notdryrun_created" expression => fileexists("$(G.testdir)/notdryrun"); + "ok" expression => "create_dryrun_reached.!dryrun_created.default_created.notdryrun_created"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_push_pop.cf b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_push_pop.cf new file mode 100644 index 00000000000..805f8d1c3b9 --- /dev/null +++ b/policies/lib/tests/acceptance/20_cfe_basics/dry_run_lib_push_pop.cf @@ -0,0 +1,84 @@ +####################################################### +# +# Test ncf dry-run using the push/pop_dry_run_mode call +# +####################################################### + +# @agent_requirements "capabilities": [ "bodydefault" ] || "agent_version" >= 3.9 + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init {} + +####################################################### + +bundle agent test_action(name) +{ + files: + "$(G.testdir)/${name}" + create => "true", + classes => classes_generic("create_${name}"); +} + +####################################################### + +bundle agent test +{ + +# "enable dry-run" usebundle => push_dry_run_mode("true"); +# "dry-run part" usebundle => package_present("htop"); +# "restore dry-run" usebundle => pop_dry_run_mode(); + + + methods: + "default" + usebundle => test_action("default"); + "enable dry-run" + usebundle => 
push_dry_run_mode("true"); + "dryrun" + usebundle => test_action("dryrun"); + "disable dry-run" + usebundle => push_dry_run_mode("false"); + "not dryrun" + usebundle => test_action("notdryrun"); + "pop dry-run" + usebundle => pop_dry_run_mode; + "dryrun2" + usebundle => test_action("dryrun2"); + "pop dry-run2" + usebundle => pop_dry_run_mode; + "not dryrun2" + usebundle => test_action("notdryrun2"); + +} + +bundle agent check +{ + classes: + "default_created" expression => fileexists("$(G.testdir)/default"); + "dryrun_created" expression => fileexists("$(G.testdir)/dryrun"); + "dryrun2_created" expression => fileexists("$(G.testdir)/dryrun2"); + "notdryrun_created" expression => fileexists("$(G.testdir)/notdryrun"); + "notdryrun2_created" expression => fileexists("$(G.testdir)/notdryrun2"); + "ok" expression => "create_dryrun_reached.!dryrun_created.default_created.notdryrun_created.!dryrun2_created.notdryrun2_created"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/20_cfe_basics/flag_lib.cf b/policies/lib/tests/acceptance/20_cfe_basics/flag_lib.cf new file mode 100644 index 00000000000..92b4e59e807 --- /dev/null +++ b/policies/lib/tests/acceptance/20_cfe_basics/flag_lib.cf @@ -0,0 +1,98 @@ +####################################################### +# +# Test flag_lib +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init { + vars: + "existing" string => '{ "already": { "created": "2018-10-15T16:52:20+0200" } }'; + + methods: + "clean" usebundle => file_absent("${configuration.flag_file}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "read before" + usebundle => get_flag("test"), + useresult => "get1"; + "reserve" + usebundle => reserve_flag("test"), + useresult => "reserve2"; + "read after" + usebundle => get_flag("test"), + useresult => "get3"; + "reserve twice" + usebundle => reserve_flag("test"), + useresult => "reserve4"; + "set flag" + usebundle => set_flag("test"), + useresult => "set5"; + "read after set" + usebundle => get_flag("test"), + useresult => "get6"; + "reserve after set" + usebundle => reserve_flag("test"), + useresult => "reserve6"; + "set another flag without reservation" + usebundle => set_flag("test2"), + useresult => "set7"; + + "clean" usebundle => file_content("${configuration.flag_file}", "${init.existing}", "true"); + "read an existing condition" + usebundle => get_flag("already"), + useresult => "get8"; + + "reserve" + usebundle => reserve_flag("test3"), + useresult => "reserve9"; + "cancel" usebundle => cancel_flag("test3"); + "read after" + usebundle => get_flag("test3"), + useresult => "get10"; +} + +bundle agent check +{ + classes: + "ok_1" expression => strcmp("${test.get1[1]}", "free"); + "ok_2" expression => strcmp("${test.reserve2[1]}", "ok"); + "ok_3" expression => strcmp("${test.get3[1]}", "reserved"); + "ok_4" expression => strcmp("${test.reserve4[1]}", "failed_reserved"); + "ok_5" expression => strcmp("${test.set5[1]}", "ok"); + "ok_6" expression => 
strcmp("${test.get6[1]}", "set"); + "ok_6b" expression => strcmp("${test.reserve6[1]}", "failed_set"); + "ok_7" expression => strcmp("${test.set7[1]}", "notreserved"); + "ok_8" expression => strcmp("${test.get8[1]}", "set"); + "ok_9" expression => strcmp("${test.reserve9[1]}", "ok"); + "ok_10" expression => strcmp("${test.get10[1]}", "free"); + + "ok" expression => "ok_1.ok_2.ok_3.ok_4.ok_5.ok_6.ok_6b.ok_7.ok_8.ok_9.ok_10"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/20_cfe_basics/generic_method_dry_run.cf b/policies/lib/tests/acceptance/20_cfe_basics/generic_method_dry_run.cf new file mode 100644 index 00000000000..f56ceade44c --- /dev/null +++ b/policies/lib/tests/acceptance/20_cfe_basics/generic_method_dry_run.cf @@ -0,0 +1,68 @@ +####################################################### +# +# Test ncf dry-run with a generic method +# +####################################################### + +# @agent_requirements "capabilities": [ "bodydefault" ] || "agent_version" >= 3.9 + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + + vars: + "file" slist => { "default", "dryrun", "notdryrun" }; + + "path[${file}]" string => "$(G.testdir)/${file}"; + "canon[${file}]" string => canonify("${path[${file}]}"); + +} + +####################################################### + +bundle agent test +{ + + methods: + "default" + usebundle => file_create("${init.path[default]}"); + "enable dry-run" + usebundle => set_dry_run_mode("true"); + "dryrun" + usebundle => file_create("${init.path[dryrun]}"); + "disable dry-run" + usebundle => set_dry_run_mode("false"); + "not dryrun" + usebundle => file_create("${init.path[notdryrun]}"); + +} + +bundle agent check +{ + classes: + "default_created" expression => fileexists("${init.path[default]}"); + "dryrun_created" expression => fileexists("${init.path[dryrun]}"); + "notdryrun_created" expression => fileexists("${init.path[notdryrun]}"); + "ok" expression => "file_create_${init.canon[dryrun]}_reached.!dryrun_created.default_created.notdryrun_created"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/abort.x.cf b/policies/lib/tests/acceptance/30_generic_methods/abort.x.cf new file mode 100644 index 00000000000..a8d04bcd51c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/abort.x.cf @@ -0,0 +1,52 @@ +####################################################### +# +# Sub cf file that simply generates abort +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +body agent control { + abortclasses => { "abort_agent_run" }; +} + +####################################################### + +bundle agent init +{ + vars: + "dummy" 
string => "dummy"; +} + +####################################################### + +bundle agent test +{ + methods: + "class" usebundle => _abort("reason", "message"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "any"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/abort_default.cf b/policies/lib/tests/acceptance/30_generic_methods/abort_default.cf new file mode 100644 index 00000000000..0e91095e540 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/abort_default.cf @@ -0,0 +1,60 @@ +####################################################### +# +# Test checking if reporting is correctly done with rudder_logger +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "expected_report" string => "R: [FATAL] Aborting agent run because of reason: message"; +} + +####################################################### + +bundle agent test +{ + vars: + # execute the agent, and get its output in 'output' variable + "output_cmd" string => "${sys.workdir}/bin/cf-agent -Kf \"${this.promise_dirname}/abort_default_output.cf.sub\""; + "output" string => execresult("${output_cmd}", "noshell"); + "output_as_list" slist => splitstring("${output}", "\n", "10"); + +} + +####################################################### + +bundle agent check +{ + classes: + pass1:: + # Check if output contains the correct value + "contain_expected_reports" + expression => some("R: \\[FATAL\\] Aborting agent run because of reason: message", "@{test.output_as_list}"); + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + reports: + pass2.contain_expected_reports:: + "$(this.promise_filename) Pass"; + pass2.!contain_expected_reports:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/abort_default_output.cf.sub b/policies/lib/tests/acceptance/30_generic_methods/abort_default_output.cf.sub new file mode 100644 index 00000000000..ca909d6e53c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/abort_default_output.cf.sub @@ -0,0 +1,29 @@ +####################################################### +# +# Sub cf file that simply generates abort +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, test }; + version => "1.0"; +} + +####################################################### + +bundle agent test +{ + methods: + "class" usebundle => _abort("reason", "message"); +} + +####################################################### + diff --git a/policies/lib/tests/acceptance/30_generic_methods/class_failure.cf b/policies/lib/tests/acceptance/30_generic_methods/class_failure.cf new file 
mode 100644 index 00000000000..7f6249f244e --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/class_failure.cf @@ -0,0 +1,54 @@ +####################################################### +# +# Test if only failure classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => _classes_failure("my_class"); + "ph2" usebundle => _classes_failure(""); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => "!(promise_kept_my_class|my_class_kept|my_class_ok)"; + "ok_repaired" expression => "!(promise_repaired_my_class|my_class_repaired)"; + "ok_error" expression => "(repair_failed_my_class.my_class_failed.my_class_not_ok.my_class_not_kept.my_class_not_repaired.my_class_reached).!(repair_denied_class_my_class|repair_timeout_my_class|my_class_timeout|my_class_denied)"; + "ok_no_empty" expression => "!_not_kept.!_not_ok.!_not_repaired.!_failed.!_reached.!_error.!repair_failed_"; + "ok" and => { "ok_success", "ok_repaired", "ok_error", "ok_no_empty" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/class_noop.cf b/policies/lib/tests/acceptance/30_generic_methods/class_noop.cf new file mode 100644 index 00000000000..15f9a1395d4 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/class_noop.cf @@ -0,0 +1,55 @@ +####################################################### +# +# Test if only noop classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => _classes_noop("my_class"); + "ph1" usebundle => _classes_noop(""); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_noop" and => { "my_class_noop" }; + "ok_success" expression => "!(promise_kept_my_class|my_class_kept|my_class_not_repaired|my_class_ok|my_class_reached)"; + "ok_repaired" expression => "!(promise_repaired_my_class|my_class_repaired)"; + "ok_error" expression => "!(repair_failed_my_class|repair_denied_class_my_class|repair_timeout_my_class|my_class_failed|my_class_not_ok|my_class_not_kept|my_class_denied|my_class_timeout)"; + "ok_no_empty" expression => "!_noop"; + "ok" and => { "ok_noop", "ok_success", "ok_repaired", "ok_error", "ok_no_empty" }; + + reports: + ok:: + 
"$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/class_success.cf b/policies/lib/tests/acceptance/30_generic_methods/class_success.cf new file mode 100644 index 00000000000..285a9148d80 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/class_success.cf @@ -0,0 +1,54 @@ +####################################################### +# +# Test if only success classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => _classes_success("my_class"); + "ph2" usebundle => _classes_success(""); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" and => { "promise_kept_my_class", "my_class_kept", "my_class_not_repaired", "my_class_ok", "my_class_reached" }; + "ok_repaired" expression => "!(promise_repaired_my_class|my_class_repaired)"; + "ok_error" expression => "!(repair_failed_my_class|repair_denied_class_my_class|repair_timeout_my_class|my_class_failed|my_class_not_ok|my_class_not_kept|my_class_denied|my_class_timeout)"; + "ok_no_empty" expression => "!_ok.!_reached.!_kept.!_not_repaired.!promise_kept_"; + "ok" and => { "ok_success", "ok_repaired", "ok_error", "ok_no_empty" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_cancel.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_cancel.cf new file mode 100644 index 00000000000..db03b04ea6b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_cancel.cf @@ -0,0 +1,53 @@ +####################################################### +# +# This will check that result classes are canceled +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => file_ensure_lines_present("${init.file}", "sample two"); # should get repaired + "ph2" usebundle => _classes_cancel("file_ensure_lines_present_${init.file_canon}"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "!file_ensure_lines_present_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) 
FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_combine_ifthenelse.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_combine_ifthenelse.cf new file mode 100644 index 00000000000..af588249fa2 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_combine_ifthenelse.cf @@ -0,0 +1,71 @@ +####################################################### +# +# This will check that result classes are copied +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); +} + +####################################################### + +bundle agent test +{ + classes: + "global_true_true" expression => "any", + scope => "namespace"; + + "global_false_false" expression => "any", + scope => "namespace"; + + methods: + + "ok" usebundle => _classes_success("global_success"); + "ko" usebundle => _classes_failure("global_failure"); + + "test1" usebundle => _classes_combine_ifthenelse("global_true", "success", "failure", "global_classA"); # success + "test1" usebundle => _classes_combine_ifthenelse("global_true", "failure", "success", "global_classB"); # failure + "test1" usebundle => _classes_combine_ifthenelse("global_false", "success", "failure", "global_classC"); # failure + "test1" usebundle => _classes_combine_ifthenelse("global_false", "failure", "success", "global_classD"); # success + + "test1" usebundle => _classes_combine_ifthenelse("global_true", "global_success", "global_failure", "global_classE"); # success + "test1" usebundle => _classes_combine_ifthenelse("global_true", "global_failure", "global_success", "global_classF"); # failure + "test1" usebundle => _classes_combine_ifthenelse("global_false", "global_success", "global_failure", "global_classG"); # failure + "test1" usebundle => _classes_combine_ifthenelse("global_false", "global_failure", "global_success", "global_classH"); # success + + "test1" usebundle => _classes_combine_ifthenelse("global_failure", "invalid", "invalid", "global_classI"); # failure +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => "global_success_kept.global_classA_kept.global_classD_kept.global_classE_kept.global_classH_kept"; + "ok_failure" expression => "global_failure_failed.global_classB_failed.global_classC_failed.global_classF_failed.global_classG_failed.global_classI_failed"; + "ok" expression => "ok_success.ok_failure"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_combine_two.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_combine_two.cf new file mode 100644 index 00000000000..3e664e79e43 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_combine_two.cf @@ -0,0 +1,60 @@ +####################################################### +# +# This will check that result classes are copied +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => 
getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => file_ensure_lines_present("${init.file}", "sample two"); # should get repaired + "ph2" usebundle => file_ensure_lines_absent("${init.file}", "sample one"); # should get success + "ph3" usebundle => file_ensure_lines_absent("/dev/zero", "sample three"); # should get failed + + "test1" usebundle => _classes_combine_two("file_ensure_lines_absent_${init.file_canon}", "file_ensure_lines_absent_${init.file_canon}", "global_classA"); # success + "test2" usebundle => _classes_combine_two("file_ensure_lines_absent_${init.file_canon}", "file_ensure_lines_present_${init.file_canon}", "global_classB"); # repaired + "test3" usebundle => _classes_combine_two("file_ensure_lines_absent_${init.file_canon}", "file_ensure_lines_absent__dev_zero", "global_classC"); # error +} + +####################################################### + +bundle agent check +{ + classes: + "ok_repaired" expression => "(global_classA_ok.global_classA_reached.global_classA_kept.global_classA_not_repaired.promise_kept_global_classA).!(global_classA_repaired|promise_repaired_global_classA)"; + "ok_success" expression => "(global_classB_ok.global_classB_reached.global_classB_repaired.promise_repaired_global_classB).!(global_classB_kept|global_classB_not_repaired|promise_kept_global_classB)"; + "ok_error" expression => "(global_classC_failed.repair_failed_global_classC.global_classC_not_ok.global_classC_not_kept.global_classC_not_repaired.global_classC_reached).!(global_classC_kept|promise_kept_global_classC|global_classC_repaired|promise_repaired_global_classC)"; + "ok" expression => "ok_success.ok_repaired.ok_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_copy.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_copy.cf new file mode 100644 index 00000000000..1ebde4ae5d0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_copy.cf @@ -0,0 +1,63 @@ +####################################################### +# +# This will check that result classes are copied +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => file_ensure_lines_present("${init.file}", "sample two"); # should get repaired + "ph2" usebundle => 
file_ensure_lines_absent("${init.file}", "sample one"); # should get success + "ph3" usebundle => file_ensure_lines_absent("/dev/zero", "sample three"); # should get failed + + "ph2" usebundle => _classes_copy("file_ensure_lines_present_${init.file_canon}","global_classB"); + "ph1" usebundle => _classes_copy("file_ensure_lines_absent_${init.file_canon}","global_classA"); + "ph3" usebundle => _classes_copy("file_ensure_lines_absent__dev_zero","global_classC"); + "ph4" usebundle => _classes_copy("file_ensure_lines_absent_${init.file_canon}",""); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_repaired" expression => "(global_classA_ok.global_classA_reached.global_classA_kept.global_classA_not_repaired.promise_kept_global_classA).!(global_classA_repaired|promise_repaired_global_classA)"; + "ok_success" expression => "(global_classB_ok.global_classB_reached.global_classB_repaired.promise_repaired_global_classB).!(global_classB_kept|global_classB_not_repaired|promise_kept_global_classB)"; + "ok_error" expression => "(global_classC_failed.repair_failed_global_classC.global_classC_not_ok.global_classC_not_kept.global_classC_not_repaired.global_classC_reached).!(global_classC_kept|promise_kept_global_classC|global_classC_repaired|promise_repaired_global_classC)"; + "ok_no_empty" expression => "(!_ok.!_reached.!_kept.!_not_repaired.!promise_kept_)"; + "ok" expression => "ok_success.ok_repaired.ok_error.ok_no_empty"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_copy_invert_kept_repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_copy_invert_kept_repaired.cf new file mode 100644 index 00000000000..10c538895f3 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_copy_invert_kept_repaired.cf @@ -0,0 +1,63 @@ +####################################################### +# +# This will check that repaired classes are converted +# into kept classes and vice versa and that error +# classes are still error classes +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => file_ensure_lines_present("${init.file}", "sample two"); # should get repaired + "ph2" usebundle => file_ensure_lines_absent("${init.file}", "sample one"); # should get success + "ph3" usebundle => file_ensure_lines_absent("/dev/zero", "sample three"); # should get failed + + "ph1" usebundle => _classes_copy_invert_kept_repaired("file_ensure_lines_absent_${init.file_canon}","global_classA"); + "ph2" usebundle => _classes_copy_invert_kept_repaired("file_ensure_lines_present_${init.file_canon}","global_classB"); + "ph3" usebundle => _classes_copy_invert_kept_repaired("file_ensure_lines_absent__dev_zero","global_classC"); + +} + 
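+# The check bundle below expects the copied outcomes to be inverted: the kept result of ph1 should show up +# as repaired classes on global_classA, the repaired result of ph2 as kept classes on global_classB, while +# the error result of ph3 stays an error on global_classC.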
+####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => "(global_classA_ok.global_classA_reached.global_classA_repaired.promise_repaired_global_classA).!(global_classA_kept|global_classA_not_repaired|promise_kept_global_classA)"; + "ok_repaired" expression => "(global_classB_ok.global_classB_reached.global_classB_kept.global_classB_not_repaired.promise_kept_global_classB).!(global_classB_repaired|promise_repaired_global_classB)"; + "ok_error" expression => "(global_classC_failed.repair_failed_global_classC.global_classC_not_ok.global_classC_not_kept.global_classC_not_repaired.global_classC_reached).!(global_classC_kept|promise_kept_global_classC|global_classC_repaired|promise_repaired_global_classC)"; + "ok" expression => "ok_success.ok_repaired.ok_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/classes_sanitize.cf b/policies/lib/tests/acceptance/30_generic_methods/classes_sanitize.cf new file mode 100644 index 00000000000..8befee81b55 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/classes_sanitize.cf @@ -0,0 +1,83 @@ +####################################################### +# +# Test the _classes_sanitize generic method +# +# Define success classes on prefix1, success and repaired classes +# on prefix2, and success, repaired and error classes on prefix3 +# Sanitize each prefix and check that the reported outcomes are +# success, repaired and error respectively, as defined in +# expected_outcome +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "prefix[1]" string => "prefix1"; + "prefix[2]" string => "prefix2"; + "prefix[3]" string => "prefix3"; + + "expected_outcome[1]" string => "success"; + "expected_outcome[2]" string => "repaired"; + "expected_outcome[3]" string => "error"; + + methods: + # Init base classes + "success1" usebundle => _classes_success("${prefix[1]}"); + + "success2" usebundle => _classes_success("${prefix[2]}"); + "repaired2" usebundle => _classes_repaired("${prefix[2]}"); + + "success3" usebundle => _classes_success("${prefix[3]}"); + "repaired3" usebundle => _classes_repaired("${prefix[3]}"); + "error3" usebundle => _classes_failure("${prefix[3]}"); + + # Init expected classes + "expected_classes" usebundle => define_expected_classes("${prefix[1]}", "${expected_outcome[1]}", "ph1"); + "expected_classes" usebundle => define_expected_classes("${prefix[2]}", "${expected_outcome[2]}", "ph2"); + "expected_classes" usebundle => define_expected_classes("${prefix[3]}", "${expected_outcome[3]}", "ph3"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => _classes_sanitize("${init.prefix[1]}"); + "ph2" usebundle => _classes_sanitize("${init.prefix[2]}"); + "ph3" usebundle => _classes_sanitize("${init.prefix[3]}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ph1_ok" expression =>
"${define_expected_classes.report_string_ph1}"; + "ph2_ok" expression => "${define_expected_classes.report_string_ph2}"; + "ph3_ok" expression => "${define_expected_classes.report_string_ph3}"; + + "ok" expression => "ph1_ok.ph2_ok.ph3_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/command_execution.cf b/policies/lib/tests/acceptance/30_generic_methods/command_execution.cf new file mode 100644 index 00000000000..4977bbdfe93 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/command_execution.cf @@ -0,0 +1,89 @@ +####################################################### +# +# Launch the /bin/date command +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "command[1]" string => "/bin/date | ${paths.path[grep]} -E '[0-9]'"; + "result[1]" string => "repaired"; + + "command[2]" string => "/bin/false"; + "result[2]" string => "error"; + + "command[3]" string => "unknown command"; + "result[3]" string => "error"; + + "command[4]" string => "cd ${tmp} && /bin/touch ${const.dollar}(pwd)/1"; + "result[4]" string => "repaired"; + + # Audit mode + "command[5]" string => "unknown audit command"; + "result[5]" string => "na"; + + "command[6]" string => "/bin/touch ${tmp}/my_file"; + "result[6]" string => "na"; + + + "indices" slist => getindices("command"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.command[${init.indices}]}" }; + + methods: + # Enforce + "ph1" usebundle => apply_gm_v4("command_execution", @{args1}, "${init.result[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm_v4("command_execution", @{args2}, "${init.result[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm_v4("command_execution", @{args3}, "${init.result[3]}", "ph3", "enforce" ); + "ph4" usebundle => apply_gm_v4("command_execution", @{args4}, "${init.result[4]}", "ph4", "enforce" ); + + # Audit + "ph5" usebundle => apply_gm_v4("command_execution", @{args5}, "${init.result[5]}", "ph5", "audit" ); + "ph6" usebundle => apply_gm_v4("command_execution", @{args6}, "${init.result[6]}", "ph6", "audit" ); +} + +####################################################### + +bundle agent check +{ + classes: + "file_1" expression => fileexists("${init.tmp}/1"); + "file_2" not => fileexists("${init.tmp}/my_file"); + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok"; + + "ok" expression => "classes_ok.file_1.file_2"; + + reports: + !file_1:: + "The file ${init.tmp}/1 does not exist"; + !file_2:: + "The file ${init.tmp}/my_file exists"; + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/command_execution_once.cf b/policies/lib/tests/acceptance/30_generic_methods/command_execution_once.cf new file mode 100644 index 00000000000..b1d7ac3b138 --- /dev/null +++
b/policies/lib/tests/acceptance/30_generic_methods/command_execution_once.cf @@ -0,0 +1,164 @@ +####################################################### +# +# Test the exit code of shell commands +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + + "ok_codes" string => "0, 2,4, 6"; + + "command[1]" string => "exit 0"; + "result[1]" string => "repaired"; + "until[1]" string => "any"; + "testName[1]" string => "testA"; + "mode[1]" string => "enforce"; + + "command[2]" string => "exit 0 "; + "result[2]" string => "success"; + "until[2]" string => "any"; + "testName[2]" string => "testA"; + "mode[2]" string => "enforce"; + + "command[3]" string => "exit 0 "; + "result[3]" string => "success"; + "until[3]" string => "any"; + "testName[3]" string => "testA"; + "mode[3]" string => "audit"; + + "command[4]" string => "exit 1"; + "result[4]" string => "error"; + "until[4]" string => "any"; + "testName[4]" string => "testB"; + "mode[4]" string => "enforce"; + + "command[5]" string => "exit 1 "; + "result[5]" string => "success"; + "until[5]" string => "any"; + "testName[5]" string => "testB"; + "mode[5]" string => "enforce"; + + "command[6]" string => "exit 2"; + "result[6]" string => "repaired"; + "until[6]" string => "ok"; + "testName[6]" string => "testC"; + "mode[6]" string => "enforce"; + + "command[7]" string => "exit 2 "; + "result[7]" string => "success"; + "until[7]" string => "ok"; + "testName[7]" string => "testC"; + "mode[7]" string => "enforce"; + + "command[8]" string => "exit 3"; + "result[8]" string => "error"; + "until[8]" string => "ok"; + "testName[8]" string => "testD"; + "mode[8]" string => "enforce"; + + "command[9]" string => "exit 3 "; + "result[9]" string => "error"; + "until[9]" string => "ok"; + "testName[9]" string => "testD"; + "mode[9]" string => "enforce"; + + "command[10]" string => "exit 4"; + "result[10]" string => "repaired"; + "until[10]" string => "ok"; + "testName[10]" string => "testD"; + "mode[10]" string => "enforce"; + + "command[11]" string => "exit 4 "; + "result[11]" string => "success"; + "until[11]" string => "ok"; + "testName[11]" string => "testD"; + "mode[11]" string => "enforce"; + + "command[12]" string => "exit 5"; + "result[12]" string => "error"; + "until[12]" string => "ok"; + "testName[12]" string => "testE"; + "mode[12]" string => "audit"; + + "command[13]" string => "exit 5 "; + "result[13]" string => "error"; + "until[13]" string => "ok"; + "testName[13]" string => "testE"; + "mode[13]" string => "audit"; + + "command[14]" string => "exit 6"; + "result[14]" string => "error"; + "until[14]" string => "ok"; + "testName[14]" string => "testE"; + "mode[14]" string => "audit"; + + "indices" slist => getindices("command"); + + methods: + "clean" usebundle => file_absent("${configuration.flag_file}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.command[${init.indices}]}", "${init.ok_codes}", "${init.until[${init.indices}]}", "${init.testName[${init.indices}]}" }; + "ok_codes" string => "0, 2,4, 6"; + + 
methods: + + + "phA" usebundle => apply_gm_v4("command_execution_once", @{args1}, "${init.result[1]}", "phA" , "${init.mode[1]}" ); + "phAA" usebundle => apply_gm_v4("command_execution_once", @{args2}, "${init.result[2]}", "phAA" , "${init.mode[2]}" ); + "phAAA" usebundle => apply_gm_v4("command_execution_once", @{args3}, "${init.result[3]}", "phAAA", "${init.mode[3]}" ); + + "phB" usebundle => apply_gm_v4("command_execution_once", @{args4}, "${init.result[4]}", "phB" , "${init.mode[4]}" ); + "phBB" usebundle => apply_gm_v4("command_execution_once", @{args5}, "${init.result[5]}", "phBB" , "${init.mode[5]}" ); + + "phC" usebundle => apply_gm_v4("command_execution_once", @{args6}, "${init.result[6]}", "phC" , "${init.mode[6]}" ); + "phCC" usebundle => apply_gm_v4("command_execution_once", @{args7}, "${init.result[7]}", "phCC" , "${init.mode[7]}" ); + + + "phD" usebundle => apply_gm_v4("command_execution_once", @{args8}, "${init.result[8]}", "phD" , "${init.mode[8]}" ); + "phDD" usebundle => apply_gm_v4("command_execution_once", @{args9}, "${init.result[9]}", "phDD" , "${init.mode[9]}" ); + "phDDD" usebundle => apply_gm_v4("command_execution_once", @{args10}, "${init.result[10]}", "phDDD" , "${init.mode[10]}" ); + "phDDE" usebundle => apply_gm_v4("command_execution_once", @{args11}, "${init.result[11]}", "phDDE" , "${init.mode[11]}" ); + + + "phE" usebundle => apply_gm_v4("command_execution_once", @{args12}, "${init.result[12]}", "phE" , "${init.mode[12]}" ); + "phEE" usebundle => apply_gm_v4("command_execution_once", @{args13}, "${init.result[13]}", "phEE" , "${init.mode[13]}" ); + "phEEE" usebundle => apply_gm_v4("command_execution_once", @{args14}, "${init.result[14]}", "phEEE" , "${init.mode[14]}" ); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "phA_ok.phAA_ok.phAAA_ok.phB_ok.phBB_ok.phC_ok.phCC_ok.phD_ok.phDD_ok.phDDD_ok.phDDE_ok.phE_ok.phEE_ok.phEEE_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/command_execution_result.cf b/policies/lib/tests/acceptance/30_generic_methods/command_execution_result.cf new file mode 100644 index 00000000000..ea347cbfe81 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/command_execution_result.cf @@ -0,0 +1,123 @@ +####################################################### +# +# Test the exit code of shell commands +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "kept_codes" string => "0,1, 2"; + "repaired_codes" string => "3, 4"; + + "command[0]" string => "exit 0"; + "result[0]" string => "success"; + + "command[1]" string => "exit 1"; + "result[1]" string => "success"; + + "command[2]" string => "exit 2"; + "result[2]" string => "success"; + + "command[3]" string => "exit 3"; + "result[3]" string => "repaired"; + + "command[4]" string => "exit 4"; + "result[4]" string => "repaired"; + + "command[5]" string => "exit 5"; + "result[5]" string => "error"; + + # Audit +
"command[6]" string => "/bin/touch ${tmp}/6"; + "result[6]" string => "na"; + + "command[7]" string => "/bin/touch ${tmp}/7"; + "result[7]" string => "na"; + + "command[8]" string => "/bin/touch ${tmp}/8"; + "result[8]" string => "na"; + + "command[9]" string => "/bin/touch ${tmp}/9"; + "result[9]" string => "na"; + + "command[10]" string => "/bin/touch ${tmp}/10"; + "result[10]" string => "na"; + + "command[11]" string => "/bin/touch ${tmp}/11"; + "result[11]" string => "na"; + + "indices" slist => getindices("command"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.command[${init.indices}]}", "${init.kept_codes}", "${init.repaired_codes}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm_v4("command_execution_result", @{args0}, "${init.result[0]}", "ph0", "enforce" ); + "ph1" usebundle => apply_gm_v4("command_execution_result", @{args1}, "${init.result[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm_v4("command_execution_result", @{args2}, "${init.result[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm_v4("command_execution_result", @{args3}, "${init.result[3]}", "ph3", "enforce" ); + "ph4" usebundle => apply_gm_v4("command_execution_result", @{args4}, "${init.result[4]}", "ph4", "enforce" ); + "ph5" usebundle => apply_gm_v4("command_execution_result", @{args5}, "${init.result[5]}", "ph5", "enforce" ); + + # Audit + "enable" usebundle => set_dry_run_mode("true"); + "ph6" usebundle => apply_gm_v4("command_execution_result", @{args6}, "${init.result[6]}", "ph6", "audit" ); + "ph7" usebundle => apply_gm_v4("command_execution_result", @{args7}, "${init.result[7]}", "ph7", "audit" ); + "ph8" usebundle => apply_gm_v4("command_execution_result", @{args8}, "${init.result[8]}", "ph8", "audit" ); + "ph9" usebundle => apply_gm_v4("command_execution_result", @{args9}, "${init.result[9]}", "ph9", "audit" ); + "ph10" usebundle => apply_gm_v4("command_execution_result", @{args10}, "${init.result[10]}", "ph10", "audit" ); + "ph11" usebundle => apply_gm_v4("command_execution_result", @{args11}, "${init.result[11]}", "ph11", "audit" ); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + "audit_report" string => "command_execution_result_${init.canonified_audit_command_name}_exit"; + + classes: + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph9_ok.ph10_ok.ph11_ok"; + + "files_exist" or => { + fileexists("${init.tmp}/6"), + fileexists("${init.tmp}/7"), + fileexists("${init.tmp}/8"), + fileexists("${init.tmp}/9"), + fileexists("${init.tmp}/10"), + fileexists("${init.tmp}/11") + }; + + "ok" expression => "classes_ok.!files_exist"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/command_execution_test_invalid_command.cf b/policies/lib/tests/acceptance/30_generic_methods/command_execution_test_invalid_command.cf new file mode 100644 index 00000000000..a8146460acb --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/command_execution_test_invalid_command.cf @@ -0,0 +1,52 @@ +####################################################### +# +# Try to launch an invalid command +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body
common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "command_name" string => "/invalid/path"; + "canonified_command_name" string => canonify("${command_name}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => command_execution("${init.command_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "command_execution_${init.canonified_command_name}_ok.!command_execution_${init.canonified_command_name}_error"; + + reports: + DEBUG:: + "This test should not pass"; + + !ok:: + "$(this.promise_filename) Pass"; + ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/condition_from_command.cf b/policies/lib/tests/acceptance/30_generic_methods/condition_from_command.cf new file mode 100644 index 00000000000..50795d2ffc5 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/condition_from_command.cf @@ -0,0 +1,214 @@ +####################################################### +# +# Exit with a specific code +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "very_long_command" string => "exit 1; #Comment to reach 4K string 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + + "true_codes" string => "0,1, 2"; + "false_codes" string => "3, 4"; + + # Enforce + "prefix[0]" string => "class0"; + "command[0]" string => "exit 0"; + "status[0]" string => "success"; + "expected[0]" string => "${prefix[0]}_true"; + "unexpected[0]" string => "${prefix[0]}_false"; + "mode[0]" string => "enforce"; + + "prefix[1]" string => "class1"; + "command[1]" string => "exit 1"; + "status[1]" string => "success"; + "expected[1]" string => "${prefix[1]}_true"; + "unexpected[1]" string => "${prefix[1]}_false"; + "mode[1]" string => "enforce"; + + "prefix[2]" string => "class2"; + "command[2]" string => "exit 2"; + "status[2]" string => "success"; + "expected[2]" string => "${prefix[2]}_true"; + "unexpected[2]" string => "${prefix[2]}_false"; + "mode[2]" string => "enforce"; + + "prefix[3]" string => "class3"; + "command[3]" string => "exit 3"; + "status[3]" string => "success"; + "expected[3]" string => "${prefix[3]}_false"; + "unexpected[3]" string => "${prefix[3]}_true"; + "mode[3]" string => "enforce"; + + "prefix[4]" string => "class4"; + "command[4]" string => "exit 4"; + "status[4]" string => "success"; + "expected[4]" string => "${prefix[4]}_false"; + "unexpected[4]" string => "${prefix[4]}_true"; + "mode[4]" string => "enforce"; + + "prefix[5]" string => "class5"; + "command[5]" string => "exit 5"; + "status[5]" string => "error"; + "expected[5]" string => "any"; + "unexpected[5]" string => "${prefix[5]}_false|${prefix[5]}_true"; + "mode[5]" string => "enforce"; + + "prefix[6]" string => "class6"; + "command[6]" string => "unknown command"; + "status[6]" string => "error"; + "expected[6]" string => "any"; + "unexpected[6]" string => "${prefix[6]}_false|${prefix[6]}_true"; + "mode[6]" string => "enforce"; + + "prefix[7]" string => "class7"; + "command[7]" string => "${very_long_command}"; + "status[7]" string => "success"; + "expected[7]" string => "${prefix[7]}_true"; + "unexpected[7]" string => "${prefix[7]}_false"; + "mode[7]" string => "enforce"; + + "prefix[8]" string => "class8"; + "command[8]" string => "/bin/touch ${tmp}/enforce"; + "status[8]" string => "success"; + "expected[8]" string => "${prefix[8]}_true"; + "unexpected[8]" string => "${prefix[8]}_false"; + "mode[8]" string => "enforce"; + + # Audit + "prefix[9]" string => "class9"; + "command[9]" string => "exit 0"; + "status[9]" string => "success"; + "expected[9]" string => "${prefix[9]}_true"; + "unexpected[9]" string => "${prefix[9]}_false"; + "mode[9]" string => "audit"; + + "prefix[10]" string => "class10"; + "command[10]" string => "exit 1"; + "status[10]" string => "success"; + "expected[10]" string => "${prefix[10]}_true"; + "unexpected[10]" string => "${prefix[10]}_false"; + "mode[10]" string => "audit"; + + "prefix[11]" string => "class11"; + "command[11]" string => "exit 2"; + "status[11]" string => "success"; + "expected[11]" string => "${prefix[11]}_true"; + "unexpected[11]" string 
=> "${prefix[11]}_false"; + "mode[11]" string => "audit"; + + "prefix[12]" string => "class12"; + "command[12]" string => "exit 3"; + "status[12]" string => "success"; + "expected[12]" string => "${prefix[12]}_false"; + "unexpected[12]" string => "${prefix[12]}_true"; + "mode[12]" string => "audit"; + + "prefix[13]" string => "class13"; + "command[13]" string => "exit 4"; + "status[13]" string => "success"; + "expected[13]" string => "${prefix[13]}_false"; + "unexpected[13]" string => "${prefix[13]}_true"; + "mode[13]" string => "audit"; + + "prefix[14]" string => "class14"; + "command[14]" string => "exit 5"; + "status[14]" string => "error"; + "expected[14]" string => "any"; + "unexpected[14]" string => "${prefix[14]}_true|${prefix[14]}_false"; + "mode[14]" string => "audit"; + + "prefix[15]" string => "class15"; + "command[15]" string => "unknown command"; + "status[15]" string => "error"; + "expected[15]" string => "any"; + "unexpected[15]" string => "${prefix[15]}_true|${prefix[15]}_false"; + "mode[15]" string => "audit"; + + "prefix[16]" string => "class16"; + "command[16]" string => "/bin/touch ${tmp}/audit"; + "status[16]" string => "success"; + "expected[16]" string => "${prefix[16]}_true"; + "unexpected[16]" string => "${prefix[16]}_false"; + "mode[16]" string => "audit"; + + "indices" slist => getindices("prefix"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.prefix[${init.indices}]}", "${init.command[${init.indices}]}", "${init.true_codes}", "${init.false_codes}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm_v4("condition_from_command", @{args0}, "${init.status[0]}", "ph0", "${init.mode[0]}"); + "ph1" usebundle => apply_gm_v4("condition_from_command", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm_v4("condition_from_command", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); + "ph3" usebundle => apply_gm_v4("condition_from_command", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}"); + "ph4" usebundle => apply_gm_v4("condition_from_command", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}"); + "ph5" usebundle => apply_gm_v4("condition_from_command", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}"); + "ph6" usebundle => apply_gm_v4("condition_from_command", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}"); + "ph7" usebundle => apply_gm_v4("condition_from_command", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}"); + "ph8" usebundle => apply_gm_v4("condition_from_command", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}"); + + # Audit + "ph9" usebundle => apply_gm_v4("condition_from_command", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}"); + "ph10" usebundle => apply_gm_v4("condition_from_command", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}"); + "ph11" usebundle => apply_gm_v4("condition_from_command", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}"); + "ph12" usebundle => apply_gm_v4("condition_from_command", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}"); + "ph13" usebundle => apply_gm_v4("condition_from_command", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}"); + "ph14" usebundle => apply_gm_v4("condition_from_command", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}"); + "ph15" usebundle => apply_gm_v4("condition_from_command", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}"); + "ph16" usebundle 
=> apply_gm_v4("condition_from_command", @{args16}, "${init.status[16]}", "ph16", "${init.mode[16]}" ); + +} + +####################################################### + +bundle agent check +{ + classes: + "enforce_created" expression => fileexists("${init.tmp}/enforce"); + "audit_created" expression => fileexists("${init.tmp}/audit"); + + "expected_not_ok" expression => "!${init.expected[${init.indices}]}"; + "unexpected_not_ok" expression => "${init.unexpected[${init.indices}]}"; + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph8_ok.ph9_ok.ph10_ok.ph11_ok.ph12_ok.ph13_ok.ph14_ok.ph15_ok.ph16_ok"; + + "ok" expression => "classes_ok.!expected_not_ok.!unexpected_not_ok.enforce_created.audit_created"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "Expected ${init.tmp}/enforce not found" + ifvarclass => "!enforce_created"; + "Expected ${init.tmp}/audit not found" + ifvarclass => "!audit_created"; + "${init.unexpected[${init.indices}]} not expected" + ifvarclass => "${init.unexpected[${init.indices}]}"; + "${init.expected[${init.indices}]} not found" + ifvarclass => "!${init.expected[${init.indices}]}"; + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/condition_from_expression.cf b/policies/lib/tests/acceptance/30_generic_methods/condition_from_expression.cf new file mode 100644 index 00000000000..ece638b69f8 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/condition_from_expression.cf @@ -0,0 +1,118 @@ +####################################################### +# +# Test if only success classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + # Enforce + "prefix[0]" string => "class0"; + "expression[0]" string => "any"; + "expected[0]" string => "${prefix[0]}_true"; + "unexpected[0]" string => "${prefix[0]}_false"; + "status[0]" string => "success"; + + "prefix[1]" string => "class1"; + "expression[1]" string => "!any"; + "expected[1]" string => "${prefix[1]}_false"; + "unexpected[1]" string => "${prefix[1]}_true"; + "status[1]" string => "success"; + + "prefix[2]" string => "class2"; + "expression[2]" string => "any|!any.something_with_car.(another_one)"; + "expected[2]" string => "${prefix[2]}_true"; + "unexpected[2]" string => "${prefix[2]}_false"; + "status[2]" string => "success"; + + "prefix[3]" string => "class3"; + "expression[3]" string => "forbidden-char"; + "expected[3]" string => "any"; + "unexpected[3]" string => "${prefix[3]}_true|${prefix[3]}_false"; + "status[3]" string => "success"; + + # Audit + "prefix[4]" string => "class4"; + "expression[4]" string => "any"; + "expected[4]" string => "${prefix[4]}_true"; + "unexpected[4]" string => "${prefix[4]}_false"; + "status[4]" string => "success"; + + "prefix[5]" string => "class5"; + "expression[5]" string => "!any"; + "expected[5]" string => "${prefix[5]}_false"; + "unexpected[5]" string => "${prefix[5]}_true"; + "status[5]" string => "success"; + + "prefix[6]" string => "class6"; + "expression[6]" string => "any|!any.something_with_car.(another_one)";
+ "expected[6]" string => "${prefix[6]}_true"; + "unexpected[6]" string => "${prefix[6]}_false"; + "status[6]" string => "success"; + + "prefix[7]" string => "class7"; + "expression[7]" string => "forbidden-char"; + "expected[7]" string => "any"; + "unexpected[7]" string => "${prefix[7]}_true|${prefix[7]}_false"; + "status[7]" string => "success"; + + "indices" slist => getindices("prefix"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.prefix[${init.indices}]}", "${init.expression[${init.indices}]}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm("condition_from_expression", @{args0}, "${init.status[0]}", "ph0", "enforce" ); + "ph1" usebundle => apply_gm("condition_from_expression", @{args1}, "${init.status[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("condition_from_expression", @{args2}, "${init.status[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("condition_from_expression", @{args3}, "${init.status[3]}", "ph3", "enforce" ); + + # Audit + "ph4" usebundle => apply_gm("condition_from_expression", @{args4}, "${init.status[4]}", "ph4", "audit" ); + "ph5" usebundle => apply_gm("condition_from_expression", @{args5}, "${init.status[5]}", "ph5", "audit" ); + "ph6" usebundle => apply_gm("condition_from_expression", @{args6}, "${init.status[6]}", "ph6", "audit" ); + "ph7" usebundle => apply_gm("condition_from_expression", @{args7}, "${init.status[7]}", "ph7", "audit" ); + + +} + +####################################################### + +bundle agent check +{ + classes: + "expected_not_ok" expression => "!${init.expected[${init.indices}]}"; + "unexpected_not_ok" expression => "${init.unexpected[${init.indices}]}"; + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + + "ok" expression => "classes_ok.!expected_not_ok.!unexpected_not_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_existence.cf b/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_existence.cf new file mode 100644 index 00000000000..789b593a4a4 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_existence.cf @@ -0,0 +1,123 @@ +####################################################### +# +# Test if only success classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + # Enforce + "prefix[0]" string => "class0"; + "var_name[0]" string => "my_prefix.my_variable"; + "expected[0]" string => "${prefix[0]}_true"; + "unexpected[0]" string => "${prefix[0]}_false"; + "status[0]" string => "success"; + + "prefix[1]" string => "class1"; + "var_name[1]" string => "my_prefix.my_empty_variable"; + "expected[1]" string => "${prefix[1]}_true"; + "unexpected[1]" string => "${prefix[1]}_false"; + "status[1]" string => "success"; + + "prefix[2]" string => "class2"; + "var_name[2]" string => "my_prefix.non_defined"; + "expected[2]" string => 
"${prefix[2]}_false"; + "unexpected[2]" string => "${prefix[2]}_true"; + "status[2]" string => "success"; + + "prefix[3]" string => "class3"; + "var_name[3]" string => "my_prefix.forbidden-name"; + "expected[3]" string => "${prefix[3]}_false"; + "unexpected[3]" string => "${prefix[3]}_true"; + "status[3]" string => "success"; + + # Audit + "prefix[4]" string => "class4"; + "var_name[4]" string => "my_prefix.my_variable"; + "expected[4]" string => "${prefix[4]}_true"; + "unexpected[4]" string => "${prefix[4]}_false"; + "status[4]" string => "success"; + + "prefix[5]" string => "class5"; + "var_name[5]" string => "my_prefix.my_empty_variable"; + "expected[5]" string => "${prefix[5]}_true"; + "unexpected[5]" string => "${prefix[5]}_false"; + "status[5]" string => "success"; + + "prefix[6]" string => "class6"; + "var_name[6]" string => "my_prefix.non_defined"; + "expected[6]" string => "${prefix[6]}_false"; + "unexpected[6]" string => "${prefix[6]}_true"; + "status[6]" string => "success"; + + "prefix[7]" string => "class7"; + "var_name[7]" string => "my_prefix.forbidden-name"; + "expected[7]" string => "${prefix[7]}_false"; + "unexpected[7]" string => "${prefix[7]}_true"; + "status[7]" string => "success"; + + "indices" slist => getindices("prefix"); + + methods: + "method_call" usebundle => variable_string("my_prefix", "my_variable", "Some text"); + "method_call" usebundle => variable_string("my_prefix", "my_empty_variable", ""); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.prefix[${init.indices}]}", "${init.var_name[${init.indices}]}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm("condition_from_variable_existence", @{args0}, "${init.status[0]}", "ph0", "enforce" ); + "ph1" usebundle => apply_gm("condition_from_variable_existence", @{args1}, "${init.status[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("condition_from_variable_existence", @{args2}, "${init.status[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("condition_from_variable_existence", @{args3}, "${init.status[3]}", "ph3", "enforce" ); + + # Audit + "ph4" usebundle => apply_gm("condition_from_variable_existence", @{args4}, "${init.status[4]}", "ph4", "audit" ); + "ph5" usebundle => apply_gm("condition_from_variable_existence", @{args5}, "${init.status[5]}", "ph5", "audit" ); + "ph6" usebundle => apply_gm("condition_from_variable_existence", @{args6}, "${init.status[6]}", "ph6", "audit" ); + "ph7" usebundle => apply_gm("condition_from_variable_existence", @{args7}, "${init.status[7]}", "ph7", "audit" ); +} + +####################################################### + +bundle agent check +{ + classes: + "expected_not_ok" expression => "!${init.expected[${init.indices}]}"; + "unexpected_not_ok" expression => "${init.unexpected[${init.indices}]}"; + + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + "ok" expression => "classes_ok.!expected_not_ok.!unexpected_not_ok"; + + reports: + "${init.unexpected[${init.indices}]} not expected" + ifvarclass => "${init.unexpected[${init.indices}]}"; + "${init.expected[${init.indices}]} not found" + ifvarclass => "!${init.expected[${init.indices}]}"; + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_match.cf b/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_match.cf new file mode 100644 index 
00000000000..e431a7b76e6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/condition_from_variable_match.cf @@ -0,0 +1,134 @@ +####################################################### +# +# Test if only success classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "class[1]" string => "class1"; + "variable[1]" string => "my_prefix.my_variable"; + "match[1]" string => "Some.*"; + + "class[2]" string => "class2"; + "variable[2]" string => "my_prefix.my_empty_variable"; + "match[2]" string => ".*"; + + "class[3]" string => "class3"; + "variable[3]" string => "my_prefix.my_multiline_variable"; + "match[3]" string => ".*on\s*multi.*"; + + "class[4]" string => "class4"; + "variable[4]" string => "my_prefix.my_variable"; + "match[4]" string => "nonmatching.*regex"; + + "class[5]" string => "class5"; + "variable[5]" string => "my_prefix.my_undefined_variable"; + "match[5]" string => "nonmatching.*regex"; + + "class[6]" string => "class6"; + "variable[6]" string => "my_audit_prefix.my_variable"; + "match[6]" string => "Some.*"; + + "class[7]" string => "class7"; + "variable[7]" string => "my_audit_prefix.my_empty_variable"; + "match[7]" string => ".*"; + + "class[8]" string => "class8"; + "variable[8]" string => "my_audit_prefix.my_multiline_variable"; + "match[8]" string => ".*on\s*multi.*"; + + "class[9]" string => "class9"; + "variable[9]" string => "my_audit_prefix.my_variable"; + "match[9]" string => "nonmatching.*regex"; + + "class[10]" string => "class10"; + "variable[10]" string => "my_audit_prefix.my_undefined_variable"; + "match[10]" string => "nonmatching.*regex"; + + "indices" slist => getindices("class"); + + methods: + "method_call" usebundle => variable_string("my_prefix", "my_variable", "Some text"); + "method_call" usebundle => variable_string("my_prefix", "my_empty_variable", ""); + "method_call" usebundle => variable_string("my_prefix", "my_multiline_variable", "some + text + on + multiline"); + + "method_call" usebundle => variable_string("my_audit_prefix", "my_variable", "Some text"); + "method_call" usebundle => variable_string("my_audit_prefix", "my_empty_variable", ""); + "method_call" usebundle => variable_string("my_audit_prefix", "my_multiline_variable", "some + text + on + multiline"); + + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.class[${init.indices}]}", "${init.variable[${init.indices}]}", "${init.match[${init.indices}]}"}; + + methods: + "ph1" usebundle => apply_gm("condition_from_variable_match", @{args1}, "success", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("condition_from_variable_match", @{args2}, "success", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("condition_from_variable_match", @{args3}, "success", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("condition_from_variable_match", @{args4}, "success", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("condition_from_variable_match", @{args5}, "error", "ph5", "enforce" ); + + "ph6" usebundle => 
apply_gm("condition_from_variable_match", @{args6}, "success", "ph6", "audit" ); + "ph7" usebundle => apply_gm("condition_from_variable_match", @{args7}, "success", "ph7", "audit" ); + "ph8" usebundle => apply_gm("condition_from_variable_match", @{args8}, "success", "ph8", "audit" ); + "ph9" usebundle => apply_gm("condition_from_variable_match", @{args9}, "success", "ph9", "audit" ); + "ph10" usebundle => apply_gm("condition_from_variable_match", @{args10}, "error", "ph10", "audit" ); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_class1" expression => "ph1_ok.class1_true"; + "ok_class2" expression => "ph2_ok.class2_true"; + "ok_class3" expression => "ph3_ok.class3_true"; + "ok_class4" expression => "ph4_ok.class4_false"; + "ok_class5" expression => "ph5_ok.class5_false"; + + "ok_class6" expression => "ph6_ok.class6_true"; + "ok_class7" expression => "ph7_ok.class7_true"; + "ok_class8" expression => "ph8_ok.class8_true"; + "ok_class9" expression => "ph9_ok.class9_false"; + "ok_class10" expression => "ph10_ok.class10_false"; + + "enforce_ok" expression => "ok_class1.ok_class2.ok_class3.ok_class4.ok_class5"; + "audit_ok" expression => "ok_class6.ok_class7.ok_class8.ok_class9.ok_class10"; + "ok" expression => "enforce_ok.audit_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/condition_once.cf b/policies/lib/tests/acceptance/30_generic_methods/condition_once.cf new file mode 100644 index 00000000000..5a9c38f20a6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/condition_once.cf @@ -0,0 +1,63 @@ +####################################################### +# +# Test if only success classes are created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + methods: + "clean" usebundle => file_absent("${configuration.flag_file}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => condition_once("first"); + "enable dry-run" usebundle => set_dry_run_mode("true"); + "ph2" usebundle => condition_once("second"); + "disable dry-run" usebundle => set_dry_run_mode("false"); + "reserve" usebundle => reserve_flag("condition_once_third"); + "ph3" usebundle => condition_once("third"); + "reserve" usebundle => reserve_flag("condition_once_fourth"); + "re-enable" usebundle => set_dry_run_mode("true"); + "ph4" usebundle => condition_once("fourth"); + "re-disable" usebundle => set_dry_run_mode("true"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_class1" expression => "first.condition_once_first_repaired"; + "ok_class2" expression => "second.condition_once_second_repaired"; + "ok_class3" expression => "!third.condition_once_third_kept.!condition_once_third_repaired"; + "ok_class4" expression => "!fourth.condition_once_fourth_kept.!condition_once_fourth_repaired"; + + "ok" expression => "ok_class1.ok_class2.ok_class3.ok_class4"; + + reports: + ok:: + 
"$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/directory_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/directory_absent.cf new file mode 100644 index 00000000000..dd8a075d26c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/directory_absent.cf @@ -0,0 +1,129 @@ +####################################################### +# +# Test checking that directory_absent removes a directory +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon_0" string => canonify("${directory}/subdir0"); + "directory_canon_1" string => canonify("${directory}/subdir1/"); + "directory_canon_2" string => canonify("${directory}/subdir2"); + "directory_canon_3" string => canonify("${directory}/subdir3"); + "directory_canon_4" string => canonify("${directory}/subdir4"); + "directory_canon_5" string => canonify("${directory}/subdir5"); + "directory_canon_6" string => canonify("${directory}/file"); + "directory_canon_7" string => canonify("${directory}/file"); + + files: + "${directory}/." + create => "true"; + + "${directory}/file" + create => "true"; + + #---- + + "${directory}/subdir0/." + create => "true"; + + #---- + + "${directory}/subdir1/." + create => "true"; + + "${directory}/subdir1/subfile1" + create => "true"; + + #---- + + "${directory}/subdir2/." + create => "true"; + + "${directory}/subdir2/subfile2" + create => "true"; + + "${directory}/subdir2/subsubdir2/." + create => "true"; + + "${directory}/subdir2/subsubdir2/subsubfile2" + create => "true"; + + #---- + + "${directory}/subdir3/." + create => "true"; + + "${directory}/subdir3/subfile3" + create => "true"; + + "${directory}/subdir3/subsubdir3/." 
+ create => "true"; + + "${directory}/subdir3/subsubdir3/subsubfile3" + create => "true"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph0" usebundle => directory_absent("${init.directory}/subdir0", "true"); + "ph1" usebundle => directory_absent("${init.directory}/subdir1/", "true"); + "ph2" usebundle => directory_absent("${init.directory}/subdir2", "true"); + "ph3" usebundle => directory_absent("${init.directory}/subdir3", "false"); + "ph4" usebundle => directory_absent("${init.directory}/subdir4", "true"); + "ph5" usebundle => directory_absent("${init.directory}/subdir5", "false"); + "ph6" usebundle => directory_absent("${init.directory}/file", "true"); + "ph7" usebundle => directory_absent("${init.directory}/file", "false"); +} + +####################################################### + +bundle agent check +{ + classes: + "file_exists_0" expression => fileexists("${init.directory}/subdir0"); + "file_exists_1" expression => fileexists("${init.directory}/subdir1"); + "file_exists_2" expression => fileexists("${init.directory}/subdir2"); + "file_exists_3" expression => fileexists("${init.directory}/subdir3"); + "file_exists_4" expression => fileexists("${init.directory}/subdir4"); + "file_exists_5" expression => fileexists("${init.directory}/subdir5"); + "file_exists_6" expression => fileexists("${init.directory}/file"); + "file_exists_7" expression => fileexists("${init.directory}/file"); + "ok_0" expression => "!file_exists_0.directory_absent_${init.directory_canon_0}_repaired.!directory_absent_${init.directory_canon_0}_error"; + "ok_1" expression => "!file_exists_1.directory_absent_${init.directory_canon_1}_repaired.!directory_absent_${init.directory_canon_1}_error"; + "ok_2" expression => "!file_exists_2.directory_absent_${init.directory_canon_2}_repaired.!directory_absent_${init.directory_canon_2}_error"; + "ok_3" expression => "file_exists_3.!directory_absent_${init.directory_canon_3}_repaired.directory_absent_${init.directory_canon_3}_error"; + "ok_4" expression => "!file_exists_4.directory_absent_${init.directory_canon_4}_kept.!directory_absent_${init.directory_canon_4}_error"; + "ok_5" expression => "!file_exists_5.directory_absent_${init.directory_canon_5}_kept.!directory_absent_${init.directory_canon_5}_error"; + "ok_6" expression => "file_exists_6.directory_absent_${init.directory_canon_6}_kept.!directory_absent_${init.directory_canon_6}_error"; + "ok_7" expression => "file_exists_7.directory_absent_${init.directory_canon_7}_kept.!directory_absent_${init.directory_canon_7}_error"; + "ok" and => { "ok_0", "ok_1", "ok_2", "ok_3", "ok_4", "ok_5", "ok_6", "ok_7" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/directory_check_exists.cf b/policies/lib/tests/acceptance/30_generic_methods/directory_check_exists.cf new file mode 100644 index 00000000000..af5df32e066 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/directory_check_exists.cf @@ -0,0 +1,82 @@ +######################################### +# +# Test checking if a directory exists or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, 
default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory1" string => "${tmp}/test1"; + "directory_canon" string => canonify("${directory}"); + "directory1_canon" string => canonify("${directory1}"); + "directory_list" slist => { "${directory}", "${directory1}" }; + + + files: + "${directory}/." + create => "true"; +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => directory_check_exists("${init.directory_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # dir should exist + "directory_check_exists_test" string => "/bin/ls \"${init.directory}\""; + # dir shouldn't exist + "directory_check_exists_test1" string => "/bin/ls \"${init.directory1}\""; + + classes: + "directory_check_exists_test_ok" + expression => returnszero("${directory_check_exists_test}", "useshell"), + ifvarclass => canonify("directory_check_exists_${init.directory}_reached"); + "directory_check_exists_test1_ok" + expression => returnszero("${directory_check_exists_test1}", "useshell"), + ifvarclass => canonify("directory_check_exists_${init.directory1}_reached"); + + "ok" expression => "directory_check_exists_test_ok.directory_check_exists_${init.directory_canon}_ok.!directory_check_exists_test1_ok.!directory_check_exists_${init.directory1_canon}_ok.directory_check_exists_${init.directory_canon}_reached.directory_check_exists_${init.directory1_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !directory_check_exists_test_ok:: + "${init.directory} is not present as expected"; + directory_check_exists_test1_ok:: + "${init.directory1} is not absent as expected"; + + cfengine:: + "Check of ${init.directory} is not reached" + ifvarclass => "!directory_check_exists_${init.directory_canon}_reached"; + "Check of ${init.directory1} is not reached" + ifvarclass => "!directory_check_exists_${init.directory1_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/file_absent.cf new file mode 100644 index 00000000000..b88ceb28c35 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_absent.cf @@ -0,0 +1,84 @@ +####################################################### +# +# Test checking that file_absent removes a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "destination_file" string => "${tmp}/remove_test"; + "destination_file_canon" string => canonify("${destination_file}"); + "not_a_file" string => "${tmp}/nonexistentfilenamefromncftests"; + "not_a_file_canon" string => canonify("${not_a_file}"); + + "destination_folder" string => "${tmp}/no_remove_folder"; + "destination_folder_canon" string => canonify("${destination_folder}"); + + files: + "${destination_file}" + 
create => "true"; + + "${destination_folder}/." + create => "true"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_absent("${init.destination_file}"); + "ph2" usebundle => file_absent("${init.not_a_file}"); + "ph3" usebundle => file_absent("${init.destination_folder}"); +} + +####################################################### + +bundle agent check +{ + classes: + "file_exists" expression => fileexists("${init.destination_file}"); + "ok_1" expression => "!file_exists.file_absent_${init.destination_file_canon}_repaired.!file_absent_${init.destination_file_canon}_error"; + "ok_2" expression => "file_absent_${init.not_a_file_canon}_kept.!file_absent_${init.not_a_file_canon}_repaired.!file_absent_${init.not_a_file_canon}_error"; + "ok_3" expression => "file_absent_${init.destination_folder_canon}_error.!file_absent_${init.destination_folder_canon}_repaired.!file_absent_${init.destination_folder_canon}_kept"; + "ok" and => { "ok_1", "ok_2", "ok_3" }; + + + reports: + file_exists:: + "First test: File ${init.destination_file} exists even though it should have been removed!"; + ok_1:: "First test OK (removing existing file, ${init.destination_file})"; + !ok_1:: "First test failed (removing existing file, ${init.destination_file})"; + ok_2:: "Second test OK (removing a non existent file, ${init.not_a_file_canon})"; + !ok_2:: "Second test failed (removing a non existent file, ${init.not_a_file_canon})"; + !ok_3:: "Third test failed (removing a folder should fail, ${init.destination_folder_canon})"; + + !ok_2:: #debug + "Second test debug: class file_absent_${init.not_a_file_canon}_reached defined" ifvarclass => "file_absent_${init.not_a_file_canon}_reached"; + "Second test debug: class file_absent_${init.not_a_file_canon}_kept defined" ifvarclass => "file_absent_${init.not_a_file_canon}_kept"; + "Second test debug: class file_absent_${init.not_a_file_canon}_repaired defined" ifvarclass => "file_absent_${init.not_a_file_canon}_repaired"; + "Second test debug: class file_absent_${init.not_a_file_canon}_error defined" ifvarclass => "file_absent_${init.not_a_file_canon}_error"; + + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_and_folder_copy_from_local_source.cf b/policies/lib/tests/acceptance/30_generic_methods/file_and_folder_copy_from_local_source.cf new file mode 100644 index 00000000000..a27e128c303 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_and_folder_copy_from_local_source.cf @@ -0,0 +1,82 @@ +####################################################### +# +# Test checking if a file and folder can be copied +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file_1" string => "${tmp}/source_file_1"; + "source_folder" string => "${tmp}/source_folder"; + "source_file_2" string => "${source_folder}/file_2"; + "destination_file_1" string => "${tmp}/destination_file_1"; + "destination_folder" string => 
"${tmp}/destination_folder"; + "destination_file_2" string => "${destination_folder}/file_2"; + "destination_file_canon_1" string => canonify("${destination_file_1}"); + "destination_folder_canon" string => canonify("${destination_folder}"); + "destination_file_canon_2" string => canonify("${destination_file_2}"); + "folder_recursion" string => "inf"; + + "mode" string => "644"; + "owner" string => "root"; + "group" string => "root"; + + files: + "${source_file_1}" + create => "true", + perms => mog("${mode}", "${owner}", "${group}"); + + "${source_file_2}" + create => "true", + perms => mog("${mode}", "${owner}", "${group}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "copy_file" usebundle => file_copy_from_local_source("${init.source_file_1}", "${init.destination_file_1}"); + "copy_file" usebundle => file_copy_from_local_source_recursion("${init.source_folder}", "${init.destination_folder}", "${init.folder_recursion}"); +} + +####################################################### + +bundle agent check +{ + + classes: + "file_1_exists_ok" expression => fileexists("${init.destination_file_1}"); + "folder_exists_ok" expression => fileexists("${init.destination_folder}"); + "file_2_exists_ok" expression => fileexists("${init.destination_file_2}"); + "file_isdir_ok" not => isdir("${init.source_file_1}"); + "folder_isdir_ok" expression => isdir("${init.source_folder}"); + "ok_file" and => { "file_1_exists_ok", "file_isdir_ok" }; + "ok_folder" and => { "folder_exists_ok", "folder_isdir_ok", "file_2_exists_ok" }; + + "ok" and => { "ok_file", "ok_folder" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_augeas_commands.cf b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_commands.cf new file mode 100644 index 00000000000..65a691d84f6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_commands.cf @@ -0,0 +1,65 @@ + ####################################################### +# +# Read a file into a string +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "commands_1" string => "print /augeas/load/"; + "commands_2" string => "get /files/etc/hosts/1/ipaddr"; + "noautoload_1" string => "false"; + "noautoload_2" string => "true"; +} +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_augeas_commands("prefix", "var1", "${init.commands_1}", "${init.noautoload_1}"); + "ph2" usebundle => file_augeas_commands("prefix", "var2", "${init.commands_2}", "${init.noautoload_2}"); + "ph3" usebundle => file_augeas_commands("prefix", "var3", "${init.commands_2}", ""); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "file_augeas_commands_var1_kept.!file_augeas_commands_var1_repaired.!file_augeas_commands_var1_error"; + "ok_2" expression => 
"file_augeas_commands_var2_kept.!file_augeas_commands_var2_repaired.!file_augeas_commands_var2_error"; + "ok_3" expression => "file_augeas_commands_var3_kept.!file_augeas_commands_var3_repaired.!file_augeas_commands_var3_error"; + "ok_var1" expression => isvariable("prefix.var1"); + "ok_var2" expression => isvariable("prefix.var2"); + "ok_var3" expression => isvariable("prefix.var3"); + "ok_var1_cont" expression => strcmp("${prefix.var1}","/augeas/load"); + "ok_var2_cont" expression => strcmp("${prefix.var2}","/files/etc/hosts/1/ipaddr = 127.0.0.1"); + "ok_var3_cont" expression => strcmp("${prefix.var3}","/files/etc/hosts/1/ipaddr = 127.0.0.1"); + + "ok" expression => "ok_1.ok_var1.ok_var1_cont.ok_2.ok_var2.ok_var2_cont.ok_3.ok_var3.ok_var3_cont"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.cf b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.cf new file mode 100644 index 00000000000..0b792849a53 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.cf @@ -0,0 +1,82 @@ +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "file[0]" string => "${tmp}/etc/sshd_config0"; + "value[0]" string => "yes"; + "lens[0]" string => "Sshd"; + "path[0]" string => "${tmp}/etc/sshd_config0/PermitRootLogin"; + "status[0]" string => "success"; + "mode[0]" string => "enforce"; + + "file[1]" string => "${tmp}/etc/sshd_config1"; + "value[1]" string => "no"; + "lens[1]" string => "Sshd"; + "path[1]" string => "${tmp}/etc/sshd_config1/PermitRootLogin"; + "status[1]" string => "repaired"; + "mode[1]" string => "enforce"; + + "file[2]" string => "${tmp}/etc/sshd_config1"; + "value[2]" string => "yes"; + "lens[2]" string => "Sshd"; + "path[2]" string => "${tmp}/etc/sshd_config1/StrictModes"; + "status[2]" string => "success"; + "mode[2]" string => "enforce"; + + "indices" slist => getindices("file"); + + files: + "${tmp}/etc/sshd_config${indices}" + copy_from => local_cp("${this.promise_dirname}/file_augeas_set.sshd_config_yes"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.path[${init.indices}]}", "${init.value[${init.indices}]}", "${init.lens[${init.indices}]}", "${init.file[${init.indices}]}" }; + + methods: + "ph0" usebundle => apply_gm("file_augeas_set", @{args0}, "${init.status[0]}", "ph0", "${init.mode[0]}"); + "ph1" usebundle => apply_gm("file_augeas_set", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm("file_augeas_set", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); +} + +####################################################### + +bundle agent check +{ + classes: + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok"; + classes_ok:: + "content_0" expression => returnszero("diff ${init.file[0]} ${this.promise_dirname}/file_augeas_set.sshd_config_yes 2>/dev/null", "useshell"); + "content_1" expression => returnszero("diff ${init.file[1]} 
${this.promise_dirname}/file_augeas_set.sshd_config_no 2>/dev/null", "useshell"); + "content_ok" expression => "content_0.content_1"; + + "ok" expression => "classes_ok.content_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_no b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_no new file mode 100644 index 00000000000..eb59570e5b8 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_no @@ -0,0 +1,7 @@ +# Authentication: + +#LoginGraceTime 2m +PermitRootLogin no +StrictModes yes +#MaxAuthTries 6 +#MaxSessions 10 diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_yes b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_yes new file mode 100644 index 00000000000..d389889de3c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_augeas_set.sshd_config_yes @@ -0,0 +1,7 @@ +# Authentication: + +#LoginGraceTime 2m +PermitRootLogin yes +StrictModes yes +#MaxAuthTries 6 +#MaxSessions 10 diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_block_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_block_present.cf new file mode 100644 index 00000000000..533dc0bdcb3 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_block_present.cf @@ -0,0 +1,71 @@ +####################################################### +# +# Test checking if a text block is present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "reference_file" string => "${tmp}/reference"; + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "block_to_add" string => "This is a test block +with multiple lines."; + + commands: + "/bin/echo" + args => "\"${block_to_add}\" > \"${reference_file}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_block_present("${init.file}", "${init.block_to_add}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "/usr/bin/diff \"${init.reference_file}\" \"${init.file}\""; + + classes: + # By default, file_block_present should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.file}"); + "block_exists" + expression => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_block_present_${init.file}_reached"); + + "ok" expression => "file_exists.block_exists.file_block_present_${init.file_canon}_ok.!file_block_present_${init.file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !block_exists:: + "diff command doesn't return 0 for command: ${line_exists_test}"; +} diff --git 
a/policies/lib/tests/acceptance/30_generic_methods/file_block_present_in_section.cf b/policies/lib/tests/acceptance/30_generic_methods/file_block_present_in_section.cf new file mode 100644 index 00000000000..4bafebe4a8f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_block_present_in_section.cf @@ -0,0 +1,78 @@ +####################################################### +# +# Test checking if a text block is present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "reference_file" string => "${tmp}/reference"; + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "section_start" string => ""; + "section_end" string => ""; + "block_to_add" string => "This is a test block +with multiple lines +to check content in section"; + + "reference_content" string => "${section_start} +${block_to_add} +${section_end}"; + + commands: + "/bin/echo" + args => "\"${reference_content}\" > \"${reference_file}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_block_present_in_section("${init.file}", "${init.section_start}", "${init.section_end}", "${init.block_to_add}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "/usr/bin/diff \"${init.reference_file}\" \"${init.file}\""; + + classes: + # By default, file_block_in_section should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.file}"); + "block_exists" + expression => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_block_present_in_section_${init.file}_reached"); + + "ok" expression => "file_exists.block_exists.file_block_present_in_section_${init.file_canon}_ok.!file_block_present_in_section_${init.file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !block_exists:: + "diff command doesn't return 0 for command: ${line_exists_test}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_check_exists.cf b/policies/lib/tests/acceptance/30_generic_methods/file_check_exists.cf new file mode 100644 index 00000000000..1ac231a0281 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_check_exists.cf @@ -0,0 +1,83 @@ +######################################### +# +# Test checking if a file exists or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + 
"file" string => "${tmp}/test"; + "file1" string => "${tmp}/test1"; + "file_canon" string => canonify("${file}"); + "file1_canon" string => canonify("${file1}"); + "file_list" slist => { "${file}", "${file1}" }; + + + files: + "${file}" + create => "true", + perms => mog("000", "root", "0"); +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_exists("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # file should exist + "file_check_exists_test" string => "/bin/ls \"${init.file}\""; + # File shouldn't exist + "file_check_exists_test1" string => "/bin/ls \"${init.file1}\""; + + classes: + "file_check_exists_test_ok" + expression => returnszero("${file_check_exists_test}", "useshell"), + ifvarclass => canonify("file_check_exists_${init.file}_reached"); + "file_check_exists_test1_ok" + expression => returnszero("${file_check_exists_test1}", "useshell"), + ifvarclass => canonify("file_check_exists_${init.file1}_reached"); + + "ok" expression => "file_check_exists_test_ok.file_check_exists_${init.file_canon}_ok.!file_check_exists_test1_ok.!file_check_exists_${init.file1_canon}_ok.file_check_exists_${init.file_canon}_reached.file_check_exists_${init.file1_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_check_exists_test_ok:: + "${file} is not present as expected"; + file_check_exists_test1_ok:: + "${file1} is not absent as expected"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_exists_${init.file_canon}_reached"; + "Check of ${init.file1} is not reached" + ifvarclass => "!file_check_exists_${init.file1_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_check_hardlink.cf b/policies/lib/tests/acceptance/30_generic_methods/file_check_hardlink.cf new file mode 100644 index 00000000000..5e5454d5c01 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_check_hardlink.cf @@ -0,0 +1,98 @@ +######################################### +# +# Test checking if a file is hardlinked to an other or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "file2" string => "${tmp}/test2"; + "file2_canon" string => canonify("${file2}"); + + "hardlink" string => "${tmp}/hardlink"; + "hardlink_canon" string => canonify("${hardlink}"); + + "file_list" slist => { "${file}", "${hardlink}" }; + + files: + "${file}" + create => "true"; + + "${file2}" + create => "true"; + + "${hardlink}" + link_from => linkfrom("${file}", "hardlink"); + +} + +####################################################### + +bundle agent test +{ + methods: + "positive" usebundle => file_check_hardlink("${init.file}", "${init.hardlink}"); + "negative" usebundle => file_check_hardlink("${init.file2}", "${init.hardlink}"); +} + +####################################################### + +bundle 
agent check +{ + vars: + # Check file type + "test_file_devino" string => "/usr/bin/stat -c %d:%i \"${init.file}\""; + "test_file2_devino" string => "/usr/bin/stat -c %d:%i \"${init.file2}\""; + + # Check hardlink type + "test_hardlink_devino" string => "/usr/bin/stat -c %d:%i \"${init.hardlink}\""; + + classes: + "test_hardlink_is_hardlink" + expression => strcmp( execresult("${test_file_devino}", "noshell"), execresult("${test_hardlink_devino}", "noshell") ), + ifvarclass => "file_check_hardlink_${init.file_canon}_reached"; + + "test_file_is_hardlink" + expression => strcmp( execresult("${test_file2_devino}", "noshell"), execresult("${test_hardlink_devino}", "noshell") ), + ifvarclass => "file_check_hardlink_${init.file2_canon}_reached"; + + "ok" expression => "test_hardlink_is_hardlink.file_check_hardlink_${init.file_canon}_ok.!test_file_is_hardlink.!file_check_hardlink_${init.file2_canon}_ok.file_check_hardlink_${init.file2_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !test_hardlink_is_hardlink:: + "${init.file} and ${init.hardlink} are not hardlinked but should be"; + test_file_is_hardlink:: + "${init.file2} and ${init.hardlink} are hardlinked but should not be"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_hardlink_${init.file_canon}_reached"; + "Check of ${init.hardlink} is not reached" + ifvarclass => "!file_check_hardlink_${init.hardlink_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_content.cf b/policies/lib/tests/acceptance/30_generic_methods/file_content.cf new file mode 100644 index 00000000000..74408379c94 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_content.cf @@ -0,0 +1,121 @@ +####################################################### +# +# Test checking if the content of a file is enforced +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "file_1" string => "${tmp}/test_1"; + "file_1_canon" string => canonify("${file_1}"); + "file_2" string => "${tmp}/test_2"; + "file_2_canon" string => canonify("${file_2}"); + "line_present" string => "Erase this line"; + "line_to_add" string => "This is a test line!"; + + + "file_3" string => "${tmp}/test_3"; + "file_3_canon" string => canonify("${file_3}"); + "ref_3" string => "${tmp}/ref_3"; + "lines_3" string => "Erase this line"; + "lines_3_ref" string => "one +two +three"; + "lines_3_to_add" slist => { "one", "two", "three" }; + + commands: + "/bin/echo" + args => "\"${line_present}\" > \"${file_1}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${line_present}\" > \"${file_2}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${lines_3}\" > \"${file_3}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${lines_3_ref}\" > \"${ref_3}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_content("${init.file_1}", "${init.line_to_add}","false"); + "ph2" usebundle => 
file_content("${init.file_2}", "${init.line_to_add}","true"); + "ph3" usebundle => file_content("${init.file_3}", "@{init.lines_3_to_add}","true"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_1_present_exists_test" string => "${paths.path[grep]} -E \"^${init.line_present}$\" \"${init.file_1}\""; + "line_1_added_exists_test" string => "${paths.path[grep]} -E \"^${init.line_to_add}$\" \"${init.file_1}\""; + "line_2_present_exists_test" string => "${paths.path[grep]} -E \"^${init.line_present}$\" \"${init.file_2}\""; + "line_2_added_exists_test" string => "${paths.path[grep]} -E \"^${init.line_to_add}$\" \"${init.file_2}\""; + "file_3_test" string => "/usr/bin/diff \"${init.ref_3}\" \"${init.file_3}\""; + + classes: + "file_1_exists" expression => fileexists("${init.file_1}"); + "file_2_exists" expression => fileexists("${init.file_2}"); + "file_3_exists" expression => fileexists("${init.file_3}"); + file_1_exists:: + "line_1_present_exists" expression => returnszero("${line_1_present_exists_test}", "noshell"); + "line_1_added_exists" expression => returnszero("${line_1_added_exists_test}", "noshell"); + file_2_exists:: + "line_2_present_exists" expression => returnszero("${line_2_present_exists_test}", "noshell"); + "line_2_added_exists" expression => returnszero("${line_2_added_exists_test}", "noshell"); + file_3_exists:: + "file_3_valid" expression => returnszero("${file_3_test}", "noshell"); + + file_1_exists.file_2_exists.file_3_exists:: + "ok_file_1" expression => "(line_1_present_exists.line_1_added_exists).(file_lines_present_${init.file_1_canon}_ok.!file_lines_present_${init.file_1_canon}_error)"; + "ok_file_2" expression => "(!line_2_present_exists.line_2_added_exists).(file_lines_present_${init.file_2_canon}_ok.!file_lines_present_${init.file_2_canon}_error)"; + "ok_file_3" expression => "file_3_valid.(file_lines_present_${init.file_3_canon}_ok.!file_lines_present_${init.file_3_canon}_error)"; + "ok" and => { "ok_file_1", "ok_file_2", "ok_file_3" }; + + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_1_exists:: + "fileexists returns false for file ${init.file_1}"; + !file_2_exists:: + "fileexists returns false for file ${init.file_2}"; + !file_3_exists:: + "fileexists returns false for file ${init.file_3}"; + !(line_1_added_exists):: + "grep command doesn't return 0 for command: ${line_1_added_exists_test}"; + !(line_2_added_exists):: + "grep command doesn't return 0 for command: ${line_2_added_exists_test}"; + !(file_3_valid):: + "diff command doesn't return 0 for command: ${file_3_test}"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source.cf b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source.cf new file mode 100644 index 00000000000..a06548ab7be --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source.cf @@ -0,0 +1,86 @@ +####################################################### +# +# Test checking if a file can be copied from a local source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + 
+####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "mode" string => "644"; + "owner" string => "root"; + "group" string => "0"; + + files: + "${source_file}" + create => "true", + perms => mog("${mode}", "${owner}", "${group}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_copy_from_local_source("${init.source_file}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(${test_utils.file_perms} ${init.destination_file}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.destination_file}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.destination_file}) = \"${init.group}\""; + + classes: + # By default, file_copy_from_local_source_type_recursion should create the file if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.file_copy_from_local_source_${init.destination_file_canon}_ok.!file_copy_from_local_source_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_recursion.cf b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_recursion.cf new file mode 100644 index 00000000000..eabd19b89c1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_recursion.cf @@ -0,0 +1,87 @@ +####################################################### +# +# Test checking if a file can be copied from a local source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent 
init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + "recursion" string => "0"; + + "mode" string => "644"; + "owner" string => "root"; + "group" string => "0"; + + files: + "${source_file}" + create => "true", + perms => mog("${mode}", "${owner}", "${group}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_copy_from_local_source_recursion("${init.source_file}", "${init.destination_file}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(${test_utils.file_perms} ${init.destination_file}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.destination_file}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.destination_file}) = \"${init.group}\""; + + classes: + # By default, file_copy_from_local_source_type_recursion should create the file if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.file_copy_from_local_source_${init.destination_file_canon}_ok.!file_copy_from_local_source_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_test_replace.cf b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_test_replace.cf new file mode 100644 index 00000000000..fa510d04367 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_copy_from_local_source_test_replace.cf @@ -0,0 +1,80 @@ +####################################################### +# +# Test checking if a file can be copied from a local source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ 
+ vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "reference_content" string => "This is a source +test file +that contains some dummy content +¿÷¿↓÷öŀüỗœ’üŀï©»’"; + + "destination_content" string => "This is a destination dummy content"; + + + commands: + "/bin/echo" + args => "\"${destination_content}\" > \"${destination_file}\"", + contain => in_shell; + + # we need to sleep there, because the time comparison granularity is not precise enough + "/bin/sleep" + args => "1"; + + "/bin/echo" + args => "\"${reference_content}\" > \"${source_file}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_copy_from_local_source("${init.source_file}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + vars: + "content_comparison" string => "/usr/bin/diff \"${init.destination_file}\" \"${init.source_file}\""; + + classes: + "ok" + expression => returnszero("${content_comparison}", "noshell"), + ifvarclass => canonify("file_copy_from_local_source_${init.destination_file}_reached"); + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !ok:: + "Source and destination file are not identical"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_local_source_with_check.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_local_source_with_check.cf new file mode 100644 index 00000000000..1a937879887 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_local_source_with_check.cf @@ -0,0 +1,114 @@ +####################################################### +# +# Test checking if a file can be copied from a local source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + + "dst[1]" string => "${tmp}/destination_test1"; + "command[1]" string => "/bin/true"; + "codes[1]" string => "0"; + "assert_exist[1]" string => "true"; + "result[1]" string => "repaired"; + + "dst[2]" string => "${tmp}/destination_test2"; + "command[2]" string => "/bin/false"; + "codes[2]" string => "0"; + "assert_exist[2]" string => "false"; + "result[2]" string => "error"; + + "dst[3]" string => "${tmp}/destination_test3"; + "command[3]" string => "/bin/false"; + "codes[3]" string => "0"; + "assert_exist[3]" string => "true"; + "result[3]" string => "success"; + + "dst[4]" string => "${tmp}/destination_test4"; + "command[4]" string => "exit 1"; + "codes[4]" string => "1"; + "assert_exist[4]" string => "true"; + "result[4]" string => "repaired"; + + "indices" slist => getindices("dst"); + + files: + "${source_file}" + edit_line => insert_lines("test file"), + create => "true"; + + "${dst[3]}" + edit_line => insert_lines("test file"), + create => "true"; +} + 
+####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { + "${init.source_file}", + "${init.dst[${init.indices}]}", + "${init.command[${init.indices}]}", + "${init.codes[${init.indices}]}" + }; + methods: + "ph1" usebundle => apply_gm_v4("file_from_local_source_with_check", @{args1}, "${init.result[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm_v4("file_from_local_source_with_check", @{args2}, "${init.result[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm_v4("file_from_local_source_with_check", @{args3}, "${init.result[3]}", "ph3", "enforce" ); + "ph4" usebundle => apply_gm_v4("file_from_local_source_with_check", @{args4}, "${init.result[4]}", "ph4", "enforce" ); +} + +####################################################### + +bundle agent check +{ + classes: + "expected_file_${init.indices}" expression => strcmp("${init.assert_exist[${init.indices}]}", "true"); + "file_exist_${init.indices}" expression => fileexists("${init.dst[${init.indices}]}"); + "files_not_ok" expression => and("file_exist_${init.indices}", + "!expected_file_${init.indices}" + ); + "files_not_ok" expression => and("!file_exist_${init.indices}", + "expected_file_${init.indices}" + ); + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok"; + "ok" expression => "classes_ok.!files_not_ok"; + + reports: + any:: + "ERROR ph${init.indices} failed" + if => "!ph${init.indices}_ok"; + "OK ph${init.indices} succeeded" + if => "ph${init.indices}_ok"; + "Found destination file for test ${init.indices}" + if => "file_exist_${init.indices}"; + "Expected file ${init.indices}" + if => "expected_file_${init.indices}"; + + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_missing_template.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_missing_template.cf new file mode 100644 index 00000000000..37277f92976 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_missing_template.cf @@ -0,0 +1,58 @@ +####################################################### +# +# Test checking if an empty file is generated from a missing local template +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template" string => "${tmp}/source_template"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + + classes: + + "file_not_present" not => fileexists("${init.destination_file}"); + + "ok" expression => "file_not_present.file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + 
!file_not_present:: + "File ${init.destination_file} was created from a non-existent template"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_string_mustache.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_string_mustache.cf new file mode 100644 index 00000000000..c737d710da7 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_string_mustache.cf @@ -0,0 +1,75 @@ +####################################################### +# +# Test checking if a file can be generated from a local template +# +####################################################### +# @agent_version >=3.7 + + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template_1" string => "sys.host is: {{{vars.sys.host}}}"; + "destination_file_1" string => "${tmp}/destination_test_1"; + "destination_file_canon_1" string => canonify("${destination_file_1}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_string_mustache("${init.source_template_1}", "${init.destination_file_1}"); +} + +####################################################### + +bundle agent check +{ + vars: + "file_length_check_1" string => "${paths.path[test]} `${paths.path[cat]} ${init.destination_file_1} | wc -l` = '1'"; + "file_content_check_1" string => "${paths.path[cat]} ${init.destination_file_1} | ${ncf_paths.path[head]} -n1 | ${paths.path[grep]} '^sys.host is: ${sys.host}$'"; + + classes: + # By default, should create the file if it doesn't exist + "file_exists_1" expression => fileexists("${init.destination_file_1}"); + "file_length_ok_1" + expression => returnszero("${file_length_check_1}", "useshell"), + ifvarclass => canonify("file_from_string_mustache_${init.destination_file_1}_reached"); + "file_content_ok_1" + expression => returnszero("${file_content_check_1}", "useshell"), + ifvarclass => canonify("file_from_string_mustache_${init.destination_file_1}_reached"); + + "ok_1" expression => "file_exists_1.file_length_ok_1.file_content_ok_1.file_from_string_mustache_${init.destination_file_canon_1}_ok.!file_from_string_mustache_${init.destination_file_canon_1}_error"; + + "ok" expression => "ok_1"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists_1:: + "File ${init.destination_file_1} didn't appear to exist"; + !file_length_ok_1:: + "There was not exactly one line in ${init.destination_file_1}"; + !file_content_ok_1:: + "The file content in ${init.destination_file_1} was not exactly \"sys.host is: ${sys.host}\""; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template.cf new file mode 100644 index 00000000000..bf85aa6329b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template.cf @@ -0,0 +1,76 @@ +####################################################### +# +# Test checking if a file can be generated from a local template +# +####################################################### + +bundle 
common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template" string => "${tmp}/source_template"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + files: + "${source_template}" + create => "true", + edit_line => insert_lines("sys.host is: ${sys.host}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + vars: + "file_length_check" string => "${paths.path[test]} `${paths.path[cat]} ${init.destination_file} | wc -l` = '1'"; + "file_content_check" string => "${paths.path[cat]} ${init.destination_file} | ${ncf_paths.path[head]} -n1 | ${paths.path[grep]} '^sys.host is: ${sys.host}$'"; + + classes: + # By default, should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.destination_file}"); + "file_length_ok" + expression => returnszero("${file_length_check}", "useshell"), + ifvarclass => canonify("file_from_template_${init.destination_file}_reached"); + "file_content_ok" + expression => returnszero("${file_content_check}", "useshell"), + ifvarclass => canonify("file_from_template_${init.destination_file}_reached"); + + "ok" expression => "file_exists.file_length_ok.file_content_ok.file_from_template_${init.destination_file_canon}_ok.!file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "File ${init.destination_file} didn't appear to exist"; + !file_length_ok:: + "There was not exactly one line in ${init.destination_file}"; + !file_content_ok:: + "The file content in ${init.destination_file} was not exactly \"sys.host is: ${sys.host}\""; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf new file mode 100644 index 00000000000..9abe029c0fb --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf @@ -0,0 +1,100 @@ +#################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +# agent_version >= 3.9 + +####################################################### +# +# Test checking if a file can be generated from a local template +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template_1" string => "${this.promise_filename}_1.j2"; + "source_template_2" string => "${this.promise_filename}_2.j2"; + "expected_file_1" string => "${this.promise_filename}_1.expected"; + "expected_file_2" string => "${this.promise_filename}_2.expected"; + + "destination_file_1" string => "${tmp}/destination_test_1"; + "destination_file_2" string => "${tmp}/destination_test_2"; + "destination_file_canon_1" string => canonify("${destination_file_1}"); + "destination_file_canon_2" string => canonify("${destination_file_2}"); + + "test_variable" string => "test_value"; + "my_list" slist => { "test1", "test2" }; + + classes: + "my_global_class" expression => "any", scope => "namespace"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template_jinja2("${init.source_template_1}", "${init.destination_file_1}"); + "ph2" usebundle => file_from_template_jinja2("${init.source_template_2}", "${init.destination_file_2}"); +} + +####################################################### + +bundle agent check +{ + + vars: + "file_diff_test_1" string => "/usr/bin/diff \"${init.destination_file_1}\" \"${init.expected_file_1}\""; + "file_diff_test_2" string => "/usr/bin/diff \"${init.destination_file_2}\" \"${init.expected_file_2}\""; + + classes: + "file_ok_1" expression => returnszero("${file_diff_test_1}", "noshell"), + ifvarclass => "file_from_template_${init.destination_file_canon_1}_reached"; + "file_ok_2" expression => returnszero("${file_diff_test_2}", "noshell"), + ifvarclass => "file_from_template_${init.destination_file_canon_2}_reached"; + + "ok_1" expression => "file_ok_1.file_from_template_${init.destination_file_canon_1}_ok.!file_from_template_${init.destination_file_canon_1}_error"; + "ok_2" expression => "file_ok_2.file_from_template_${init.destination_file_canon_2}_ok.!file_from_template_${init.destination_file_canon_2}_error"; + "ok" expression => "ok_1.ok_2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok_1:: + "$(this.promise_filename) FAIL: diff command doesn't returns 0 (command: ${file_diff_test_1} )"; + !ok_2:: + "$(this.promise_filename) FAIL: diff command doesn't returns 0 (command: ${file_diff_test_2} )"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.expected b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.expected new file mode 100644 index 00000000000..73df97840ae --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.expected @@ -0,0 +1,13 @@ +my var is test_value + +I can iterate over: + + +display test1 + +display test2 + + + +display this + diff --git 
a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.j2 b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.j2 new file mode 100644 index 00000000000..bb85816e764 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_1.j2 @@ -0,0 +1,14 @@ +my var is {{ vars.init.test_variable }} + +I can iterate over: + +{% for item in vars.init.my_list %} +display {{ item }} +{% endfor %} + +{% if classes.my_global_class is defined %} +display this +{% endif %} +{% if classes.does_not_exists is defined %} +do not display this +{% endif %} \ No newline at end of file diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.expected b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.expected new file mode 100644 index 00000000000..eee1615c6a4 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.expected @@ -0,0 +1,13 @@ +my other var is test_value + +I can iterate over: + + +display test1 + +display test2 + + + +display this + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.j2 b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.j2 new file mode 100644 index 00000000000..f114848da22 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.cf_2.j2 @@ -0,0 +1,14 @@ +my other var is {{ vars.init.test_variable }} + +I can iterate over: + +{% for item in vars.init.my_list %} +display {{ item }} +{% endfor %} + +{% if classes.my_global_class is defined %} +display this +{% endif %} +{% if classes.does_not_exists is defined %} +do not display this +{% endif -%} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf new file mode 100644 index 00000000000..9d466fd362b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf @@ -0,0 +1,95 @@ +#################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +# @agent_requirements "capabilities": [ "dumpdatastate" ] + +####################################################### +# +# Test checking if a file can be generated from a local template using custom filters and tests +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template" string => "${this.promise_filename}.j2"; + "expected_file" string => "${this.promise_filename}.expected"; + "custom_jinja" string => "${this.promise_filename}.py"; + + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "test_variable" string => "test_value"; + "test_number" string => "42"; + "test_other_number" string => "43"; + "my_list" slist => { "test1", "test2" }; + + classes: + "my_global_class" expression => "any", scope => "namespace"; + + files: + "$(sys.workdir)/modules/extensions/jinja2_custom.py" + copy_from => local_cp("${custom_jinja}"), + perms => m("ugo+x"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template_jinja2("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + + vars: + "file_diff_test" string => "/usr/bin/diff \"${init.destination_file}\" \"${init.expected_file}\""; + + classes: + "file_ok" expression => returnszero("${file_diff_test}", "noshell"), + ifvarclass => "file_from_template_${init.destination_file_canon}_reached"; + + "ok" expression => "file_ok.file_from_template_${init.destination_file_canon}_ok.!file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL: diff command doesn't returns 0 (command: ${file_diff_test})"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.expected b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.expected new file mode 100644 index 00000000000..f025be20087 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.expected @@ -0,0 +1,18 @@ +my var is TEST_VALUE + + +The answer + + + +I can iterate over: + + +display test1 + +display test2 + + + +display this + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.j2 b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.j2 new file mode 100644 index 00000000000..69d3df4fbb9 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.j2 @@ -0,0 +1,21 @@ +my var is {{ vars.init.test_variable | upperstring }} + +{% if vars.init.test_number is the_answer %} +The answer +{% endif %} +{% if vars.init.test_other_number is the_answer %} +Not the answer +{% endif %} + +I can iterate 
over: + +{% for item in vars.init.my_list %} +display {{ item }} +{% endfor %} + +{% if classes.my_global_class is defined %} +display this +{% endif %} +{% if classes.does_not_exists is defined %} +do not display this +{% endif %} \ No newline at end of file diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.py b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.py new file mode 100644 index 00000000000..9b0712ee8b1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.custom_filter.cf.py @@ -0,0 +1,7 @@ +def upperstring(input): + return input.upper() +def the_answer(value): + return value == "42" + +FILTERS = {'upperstring': upperstring} +TESTS = {'the_answer': the_answer} \ No newline at end of file diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.failure.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.failure.cf new file mode 100644 index 00000000000..6389aa75b14 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.failure.cf @@ -0,0 +1,80 @@ +################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +# @agent_requirements "capabilities": [ "dumpdatastate" ] + +####################################################### +# +# Test checking that the method reports an error when the template cannot be rendered +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template" string => "${this.promise_filename}.j2"; + + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "test_variable" string => "test_value"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template_jinja2("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + + + classes: + "file_present" expression => fileexists("${init.destination_file}"), ifvarclass => "file_from_template_${init.destination_file_canon}_reached"; + + "ok" expression => "!file_present.file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL: expected an error to be reported and ${init.destination_file} not to be created"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf new file mode 100644 index 00000000000..48d512acd8a --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf @@ -0,0 +1,82 @@ +################################################################################## +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see .
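The missing-variable test that follows renders a template referencing names (vars.init.my_list among them) that its init bundle deliberately leaves undefined, and its check bundle requires the method's _error outcome class. Whether an undefined reference aborts rendering depends on the undefined-handling mode of the Jinja2 environment; the mode used by the templating module is not shown in this diff. A short, generic-Jinja2 sketch of the two behaviours involved, for illustration only:

# Illustration only: standard Jinja2 undefined handling, not the ncf module.
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError

template = "value: {{ missing_variable }}"

# Lenient default: an undefined name simply renders as an empty string.
print(Environment().from_string(template).render())   # -> "value: "

# Strict mode: the same reference raises at render time, which is the kind
# of failure that would surface as an _error outcome class in the test below.
try:
    Environment(undefined=StrictUndefined).from_string(template).render()
except UndefinedError as exc:
    print("render failed:", exc)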
+# +##################################################################################### + +# @agent_requirements "capabilities": [ "dumpdatastate" ] + +####################################################### +# +# Test checking that the method reports an error when the template references an undefined variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_template" string => "${this.promise_filename}.j2"; + + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "test_variable" string => "test_value"; + + classes: + "my_global_class" expression => "any", scope => "namespace"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template_jinja2("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + + classes: + "file_present" expression => fileexists("${init.destination_file}"), ifvarclass => "file_from_template_${init.destination_file_canon}_reached"; + + "ok" expression => "!file_present.file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL: expected an error to be reported and ${init.destination_file} not to be created"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf.j2 b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf.j2 new file mode 100644 index 00000000000..bb85816e764 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_jinja2.missing-variable.cf.j2 @@ -0,0 +1,14 @@ +my var is {{ vars.init.test_variable }} + +I can iterate over: + +{% for item in vars.init.my_list %} +display {{ item }} +{% endfor %} + +{% if classes.my_global_class is defined %} +display this +{% endif %} +{% if classes.does_not_exists is defined %} +do not display this +{% endif %} \ No newline at end of file diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_from_template_mustache.cf b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_mustache.cf new file mode 100644 index 00000000000..04b423b6a1f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_from_template_mustache.cf @@ -0,0 +1,76 @@ +####################################################### +# +# Test checking if a file can be generated from a local template +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string =>
getenv("TEMP", 1024); + "source_template" string => "${tmp}/source_template"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + files: + "${source_template}" + create => "true", + edit_line => insert_lines("sys.host is: {{{vars.sys.host}}}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_from_template_mustache("${init.source_template}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + vars: + "file_length_check" string => "${paths.path[test]} `${paths.path[cat]} ${init.destination_file} | wc -l` = '1'"; + "file_content_check" string => "${paths.path[cat]} ${init.destination_file} | ${ncf_paths.path[head]} -n1 | ${paths.path[grep]} '^sys.host is: ${sys.host}$'"; + + classes: + # By default, should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.destination_file}"); + "file_length_ok" + expression => returnszero("${file_length_check}", "useshell"), + ifvarclass => canonify("file_from_template_${init.destination_file}_reached"); + "file_content_ok" + expression => returnszero("${file_content_check}", "useshell"), + ifvarclass => canonify("file_from_template_${init.destination_file}_reached"); + + "ok" expression => "file_exists.file_length_ok.file_content_ok.file_from_template_${init.destination_file_canon}_ok.!file_from_template_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "File ${init.destination_file} didn't appear to exist"; + !file_length_ok:: + "There was not exactly one line in ${init.destination_file}"; + !file_content_ok:: + "The file content in ${init.destination_file} was not exactly \"sys.host is: ${sys.host}\""; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_option.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_option.cf new file mode 100644 index 00000000000..06cbdd6fa45 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_option.cf @@ -0,0 +1,178 @@ +####################################################### +# +# Test checking if a key-value pair are present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + +# First test: ensure that a value is correctly modified + "base_text[1]" string => "# File with lhs=rhs text +JAVA_OPTS =\"-Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC\""; + "expected_text[1]" string => "# File with lhs=rhs text +JAVA_OPTS =\"-Djava.awt.headless=true -Xmx1024m\""; + + "key[1]" string => "JAVA_OPTS"; + "value[1]" string => "\"-Djava.awt.headless=true -Xmx1024m\""; + "separator[1]" string => "="; + "option[1]" string => "lax"; + "status[1]" string => "repaired"; + + +# Second test: ensure that a key-value is correctly added in a file + "base_text[2]" string => "# File with lhs=rhs text +Key=Value"; + 
"expected_text[2]" string=> "# File with lhs=rhs text +Key=Value +AddedKey=AddedValue"; + + "key[2]" string => "AddedKey"; + "value[2]" string => "AddedValue"; + "separator[2]" string => "="; + "option[2]" string => "lax"; + "status[2]" string => "repaired"; + + +# Third test: ensure that if key-value is already correct (with extra spaces), nothing is done + "base_text[3]" string => "# File with lhs rhs text +Key Value"; + "expected_text[3]" string => "# File with lhs rhs text +Key Value"; + + "key[3]" string => "Key"; + "value[3]" string => "Value"; + "separator[3]" string => " "; + "option[3]" string => "lax"; + "status[3]" string => "success"; + +# Fourth test: ensure that if key-value is already correct (with extra spaces), nothing is done + "base_text[4]" string => "# File with lhs rhs text +Key = Value"; + "expected_text[4]" string => "# File with lhs rhs text +Key = Value"; + + "key[4]" string => "Key"; + "value[4]" string => "Value"; + "separator[4]" string => "="; + "option[4]" string => "lax"; + "status[4]" string => "success"; + + +# Fifth test: ensure that if key-value is already there with extra spaces, extra space are purged + "base_text[5]" string => "# File with lhs rhs text +Key = Value"; + "expected_text[5]" string => "# File with lhs rhs text +Key=Value"; + + "key[5]" string => "Key"; + "value[5]" string => "Value"; + "separator[5]" string => "="; + "option[5]" string => "strict"; + "status[5]" string => "repaired"; + +# Sixth test: ensure that a key-value is correctly added in a file + "base_text[6]" string => "# File with lhs=rhs text +Foo=bar +Key=Value"; + "expected_text[6]" string=> "# File with lhs=rhs text +Foo=bar +Key=Value +AddedKey=AddedValue"; + + "key[6]" string => "AddedKey"; + "value[6]" string => "AddedValue"; + "separator[6]" string => "="; + "option[6]" string => "strict"; + "status[6]" string => "repaired"; + + +# Seventh test: ensure that a value is correctly modified + "base_text[7]" string => "# File with lhs=rhs text +JAVA_OPTS =\"-Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC\""; + "expected_text[7]" string=> "# File with lhs=rhs text +JAVA_OPTS=\"-Djava.awt.headless=true -Xmx1024m\""; + + "key[7]" string => "JAVA_OPTS"; + "value[7]" string => "\"-Djava.awt.headless=true -Xmx1024m\""; + "separator[7]" string => "="; + "option[7]" string => "strict"; + "status[7]" string => "repaired"; + + "indices" slist => getindices("base_text"); + + files: + "${tmp}/src_${indices}" + create => "true", + edit_line => insert_lines("${base_text[${indices}]}"), + edit_defaults => empty; + + "${tmp}/ref_${indices}" + create => "true", + edit_line => insert_lines("${expected_text[${indices}]}"), + edit_defaults => empty; +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/src_${init.indices}", "${init.key[${init.indices}]}", "${init.value[${init.indices}]}", "${init.separator[${init.indices}]}", "${init.option[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("file_key_value_present_option", @{args1}, "${init.status[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("file_key_value_present_option", @{args2}, "${init.status[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("file_key_value_present_option", @{args3}, "${init.status[3]}", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("file_key_value_present_option", @{args4}, "${init.status[4]}", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("file_key_value_present_option", @{args5}, 
"${init.status[5]}", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("file_key_value_present_option", @{args6}, "${init.status[6]}", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("file_key_value_present_option", @{args7}, "${init.status[7]}", "ph7", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + "file_diff_${init.indices}" string => "/usr/bin/diff \"${init.tmp}/ref_${init.indices}\" \"${init.tmp}/src_${init.indices}\""; + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "file_${init.indices}_ok" expression => returnszero("${file_diff_${init.indices}}", "useshell"), + if => "pass3"; + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + "diff_not_ok" expression => "!file_${init.indices}_ok.pass3"; + "ok" expression => "classes_ok.!diff_not_ok.pass3"; + + + reports: + ok.pass3:: + "$(this.promise_filename) Pass"; + !ok.pass3:: + "$(this.promise_filename) FAIL"; + pass3:: + "DIFF incorrect for test ${init.indices}${const.n}${file_diff_${init.indices}}" + if => "!file_${init.indices}_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_absent_in_list.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_absent_in_list.cf new file mode 100644 index 00000000000..f065741b492 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_absent_in_list.cf @@ -0,0 +1,244 @@ +####################################################### +# +# Test checking if parameter is not present in the list of key-values in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + + "tmp" string => getenv("TEMP", 1024); + + "file[1]" string => "${tmp}/test1"; + "ref_file[1]" string => "${tmp}/ref1"; + "file_canon[1]" string => canonify("${file[1]}"); + + "file[2]" string => "${tmp}/test2"; + "ref_file[2]" string => "${tmp}/ref2"; + "file_canon[2]" string => canonify("${file[2]}"); + + "file[3]" string => "${tmp}/test3"; + "ref_file[3]" string => "${tmp}/ref3"; + "file_canon[3]" string => canonify("${file[3]}"); + + "file[4]" string => "${tmp}/test4"; + "ref_file[4]" string => "${tmp}/ref4"; + "file_canon[4]" string => canonify("${file[4]}"); + + "file[5]" string => "${tmp}/test5"; + "ref_file[5]" string => "${tmp}/ref5"; + "file_canon[5]" string => canonify("${file[5]}"); + + "file[6]" string => "${tmp}/test6"; + "ref_file[6]" string => "${tmp}/ref6"; + "file_canon[6]" string => canonify("${file[6]}"); + + "file[7]" string => "${tmp}/test7"; + "ref_file[7]" string => "${tmp}/ref7"; + "file_canon[7]" string => canonify("${file[7]}"); + + "file[8]" string => "${tmp}/test8"; + "ref_file[8]" string => "${tmp}/ref8"; + "file_canon[8]" string => canonify("${file[8]}"); + + + "tests_list" slist => getindices("file"); +# First test: ensure that a value is correctly removed at the end + "base_text1" string => "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -XX:+UseConcMarkSweepGC -Xmx1024m\\\""; + + "ref_text1" 
string=> "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -XX:+UseConcMarkSweepGC\\\""; + + "key1" string => "JAVA_OPTS"; + "parameter1" string => "-Xmx1024m"; + "key_value_separator1" string => "="; + "parameter_separator1" string => " "; + +# Second test: ensure that a key-value is correctly remove at the beginning of the line + "base_text2" string => "# +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "ref_text2" string=> "# +GRUB_CMDLINE_XEN=\\\"dom0_mem=16G\\\""; + + "key2" string => "GRUB_CMDLINE_XEN"; + "parameter2" string => "dom0_max_vcpus=32"; + "key_value_separator2" string => "="; + "parameter_separator2" string => " "; + +# Third test: ensure that if key-value is already correct, nothing is done + "base_text3" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "ref_text3" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "key3" string => "GRUB_CMDLINE_XEN"; + "parameter3" string => "dom_min_vcpus=5"; + "key_value_separator3" string => "="; + "parameter_separator3" string => " "; + +# Fourth test: key not present, nothing is done + "base_text4" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "ref_text4" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "key4" string => "GRUB_CMDLINE"; + "parameter4" string => "dom0_max_vcpus=32"; + "key_value_separator4" string => "="; + "parameter_separator4" string => " "; + +# Fifth test: key present, value to be removed no quote separator + "base_text5" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=dom0_max_vcpus=32 dom0_mem=16G"; + + "ref_text5" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=dom0_mem=16G"; + + "key5" string => "GRUB_CMDLINE_XEN"; + "parameter5" string => "dom0_max_vcpus=32"; + "key_value_separator5" string => "="; + "parameter_separator5" string => " "; + +# sixth test: key present, start and stop separator different + "base_text6" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4']"; + + "ref_text6" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3']"; + + "key6" string => "some.js.config.file"; + "parameter6" string => "'value4'"; + "key_value_separator6" string => "="; + "parameter_separator6" string => ","; + "leading_char6" string => "["; + "closing_char6" string => "]"; + +# seventh test: key and value present, start and stop separator different + "base_text7" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "ref_text7" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value4' ]"; + + "key7" string => "some.js.config.file"; + "parameter7" string => "'value3'"; + "key_value_separator7" string => "="; + "parameter_separator7" string => ", "; + "leading_char7" string => "["; + "closing_char7" string => "]"; + +# eighth test: key not present and value present, start and stop separator different + "base_text8" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "ref_text8" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "key8" string => "js.config.file"; + "parameter8" string => "'value4'"; + "key_value_separator8" string => "="; + "parameter_separator8" string => ", "; + "leading_char8" string => "["; + "closing_char8" string => "]"; + + + commands: +# 
Initialize test and reference files + "/bin/echo" + args => "\"${ref_text${tests_list}}\" > \"${ref_file[${tests_list}]}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text${tests_list}}\" > \"${file[${tests_list}]}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "removed key1" usebundle => file_key_value_parameter_absent_in_list("${init.file[1]}", "${init.key1}", "${init.key_value_separator1}", "${init.parameter1}", "${init.parameter_separator1}", "\"", "\""); + "removed key2" usebundle => file_key_value_parameter_absent_in_list("${init.file[2]}", "${init.key2}", "${init.key_value_separator2}", "${init.parameter2}", "${init.parameter_separator2}", "\"", "\""); + "success key" usebundle => file_key_value_parameter_absent_in_list("${init.file[3]}", "${init.key3}", "${init.key_value_separator3}", "${init.parameter3}", "${init.parameter_separator3}", "\"", "\""); + "success key2" usebundle => file_key_value_parameter_absent_in_list("${init.file[4]}", "${init.key4}", "${init.key_value_separator4}", "${init.parameter4}", "${init.parameter_separator4}", "", ""); + "removed_key3" usebundle => file_key_value_parameter_absent_in_list("${init.file[5]}", "${init.key5}", "${init.key_value_separator5}", "${init.parameter5}", "${init.parameter_separator5}", "", ""); + + "removed key4" usebundle => file_key_value_parameter_absent_in_list("${init.file[6]}", "${init.key6}", "${init.key_value_separator6}", "${init.parameter6}", "${init.parameter_separator6}", "${init.leading_char6}", "${init.closing_char6}"); + "removed key5" usebundle => file_key_value_parameter_absent_in_list("${init.file[7]}", "${init.key7}", "${init.key_value_separator7}", "${init.parameter7}", "${init.parameter_separator7}", "${init.leading_char7}", "${init.closing_char7}"); + "success key3" usebundle => file_key_value_parameter_absent_in_list("${init.file[8]}", "${init.key8}", "${init.key_value_separator8}", "${init.parameter8}", "${init.parameter_separator8}", "${init.leading_char8}", "${init.closing_char8}"); + +} + +####################################################### + +bundle agent check +{ + vars: + "file_diff_test[${init.tests_list}]" string => "/usr/bin/diff \"${init.ref_file[${init.tests_list}]}\" \"${init.file[${init.tests_list}]}\""; + + classes: + "file${init.tests_list}_correct" + expression => returnszero("${file_diff_test[${init.tests_list}]}", "noshell"), + ifvarclass => canonify("file_key_value_parameter_absent_in_list_${init.file_canon[${init.tests_list}]}_reached"); + + "ok_test1" expression => "file1_correct.!file_key_value_parameter_absent_in_list_${init.file_canon[1]}_kept.file_key_value_parameter_absent_in_list_${init.file_canon[1]}_repaired.!file_key_value_parameter_absent_in_list_${init.file_canon[1]}_not_ok"; + "ok_test2" expression => "file2_correct.!file_key_value_parameter_absent_in_list_${init.file_canon[2]}_kept.file_key_value_parameter_absent_in_list_${init.file_canon[2]}_repaired.!file_key_value_parameter_absent_in_list_${init.file_canon[2]}_not_ok"; + "ok_test3" expression => "file3_correct.file_key_value_parameter_absent_in_list_${init.file_canon[3]}_kept.!file_key_value_parameter_absent_in_list_${init.file_canon[3]}_not_ok.!file_key_value_parameter_absent_in_list_${init.file_canon[3]}_repaired"; + "ok_test4" expression => 
"file4_correct.file_key_value_parameter_absent_in_list_${init.file_canon[4]}_kept.!file_key_value_parameter_absent_in_list_${init.file_canon[4]}_not_ok.!file_key_value_parameter_absent_in_list_${init.file_canon[4]}_repaired"; + "ok_test5" expression => "file5_correct.!file_key_value_parameter_absent_in_list_${init.file_canon[5]}_kept.!file_key_value_parameter_absent_in_list_${init.file_canon[5]}_not_ok.file_key_value_parameter_absent_in_list_${init.file_canon[5]}_repaired"; + + "ok_test6" expression => "file6_correct.!file_key_value_parameter_absent_in_list_${init.file_canon[6]}_kept.file_key_value_parameter_absent_in_list_${init.file_canon[6]}_repaired.!file_key_value_parameter_absent_in_list_${init.file_canon[6]}_not_ok"; + "ok_test7" expression => "file7_correct.!file_key_value_parameter_absent_in_list_${init.file_canon[7]}_kept.file_key_value_parameter_absent_in_list_${init.file_canon[7]}_repaired.!file_key_value_parameter_absent_in_list_${init.file_canon[7]}_not_ok"; + "ok_test8" expression => "file8_correct.file_key_value_parameter_absent_in_list_${init.file_canon[8]}_kept.!file_key_value_parameter_absent_in_list_${init.file_canon[8]}_not_ok.!file_key_value_parameter_absent_in_list_${init.file_canon[8]}_repaired"; + + "ok" and => {"ok_test1","ok_test2","ok_test3", "ok_test4", "ok_test5", "ok_test6", "ok_test7", "ok_test8" }; + + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + cfengine:: + "diff command doesn't returns 0 for file ${init.tests_list} (command ${file_diff_test[${init.tests_list}]})" + ifvarclass => "!file${init.tests_list}_correct"; + + file1_correct.!ok_test1:: + "Generic method return is invalid for first test (expected repaired)"; + file2_correct.!ok_test3:: + "Generic method return is invalid for second test (expected repaired)"; + file3_correct.!ok_test3:: + "Generic method return is invalid for third test (expected kept)"; + file4_correct.!ok_test4:: + "Generic method return is invalid for fourth test (expected kept)"; + file5_correct.!ok_test5:: + "Generic method return is invalid for fifth test (expected repaired)"; + file6_correct.!ok_test6:: + "Generic method return is invalid for sixth test (expected repaired)"; + file7_correct.!ok_test7:: + "Generic method return is invalid for seventh test (expected repaired)"; + file8_correct.!ok_test8:: + "Generic method return is invalid for eighth test (expected kept)"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_present_in_list.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_present_in_list.cf new file mode 100644 index 00000000000..ef5c69fd56e --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_parameter_present_in_list.cf @@ -0,0 +1,287 @@ +####################################################### +# +# Test checking if parameter is present in the list of key-values in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + + "tmp" string => getenv("TEMP", 1024); + + "file[1]" string => "${tmp}/test1"; + "ref_file[1]" 
string => "${tmp}/ref1"; + "file_canon[1]" string => canonify("${file[1]}"); + + "file[2]" string => "${tmp}/test2"; + "ref_file[2]" string => "${tmp}/ref2"; + "file_canon[2]" string => canonify("${file[2]}"); + + "file[3]" string => "${tmp}/test3"; + "ref_file[3]" string => "${tmp}/ref3"; + "file_canon[3]" string => canonify("${file[3]}"); + + "file[4]" string => "${tmp}/test4"; + "ref_file[4]" string => "${tmp}/ref4"; + "file_canon[4]" string => canonify("${file[4]}"); + + "file[5]" string => "${tmp}/test5"; + "ref_file[5]" string => "${tmp}/ref5"; + "file_canon[5]" string => canonify("${file[5]}"); + + "file[6]" string => "${tmp}/test6"; + "ref_file[6]" string => "${tmp}/ref6"; + "file_canon[6]" string => canonify("${file[6]}"); + + "file[7]" string => "${tmp}/test7"; + "ref_file[7]" string => "${tmp}/ref7"; + "file_canon[7]" string => canonify("${file[7]}"); + + "file[8]" string => "${tmp}/test8"; + "ref_file[8]" string => "${tmp}/ref8"; + "file_canon[8]" string => canonify("${file[8]}"); + + "file[9]" string => "${tmp}/test9"; + "ref_file[9]" string => "${tmp}/ref9"; + "file_canon[9]" string => canonify("${file[9]}"); + + "file[10]" string => "${tmp}/test10"; + "ref_file[10]" string => "${tmp}/ref10"; + "file_canon[10]" string => canonify("${file[10]}"); + + "tests_list" slist => getindices("file"); +# First test: ensure that a value is correctly added + "base_text1" string => "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -XX:+UseConcMarkSweepGC\\\""; + + "ref_text1" string=> "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -XX:+UseConcMarkSweepGC -Xmx1024m\\\""; + + "key1" string => "JAVA_OPTS"; + "parameter1" string => "-Xmx1024m"; + "key_value_separator1" string => "="; + "parameter_separator1" string => " "; + +# Second test: ensure that a key-value is correctly added in a file + "base_text2" string => "# +GRUB_CMDLINE_XEN=\\\"dom0_mem=16G\\\""; + + "ref_text2" string=> "# +GRUB_CMDLINE_XEN=\\\"dom0_mem=16G dom0_max_vcpus=32\\\""; + + "key2" string => "GRUB_CMDLINE_XEN"; + "parameter2" string => "dom0_max_vcpus=32"; + "key_value_separator2" string => "="; + "parameter_separator2" string => " "; + +# Third test: ensure that if key-value is already correct, nothing is done + "base_text3" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "ref_text3" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "key3" string => "GRUB_CMDLINE_XEN"; + "parameter3" string => "dom0_max_vcpus=32"; + "key_value_separator3" string => "="; + "parameter_separator3" string => " "; + +# Fourth test: key not present, it is added + "base_text4" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\""; + + "ref_text4" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=\\\"dom0_max_vcpus=32 dom0_mem=16G\\\" +GRUB_CMDLINE=dom0_max_vcpus=32"; + + "key4" string => "GRUB_CMDLINE"; + "parameter4" string => "dom0_max_vcpus=32"; + "key_value_separator4" string => "="; + "parameter_separator4" string => " "; + +# Fifth test: key present, no quote separator + "base_text5" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=dom0_max_vcpus=32 dom0_mem=16G"; + + "ref_text5" string => "# File with lhs rhs text +GRUB_CMDLINE_XEN=dom0_max_vcpus=32 dom0_mem=16G"; + + "key5" string => "GRUB_CMDLINE_XEN"; + "parameter5" string => "dom0_max_vcpus=32"; + "key_value_separator5" string => "="; + "parameter_separator5" string => " "; + +# sixth test: key 
present, start and stop separator different + "base_text6" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3']"; + + "ref_text6" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4']"; + + "key6" string => "some.js.config.file"; + "parameter6" string => "'value4'"; + "key_value_separator6" string => "="; + "parameter_separator6" string => ", "; + "leading_char6" string => "["; + "closing_char6" string => "]"; + +# seventh test: key and value present, start and stop separator different + "base_text7" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "ref_text7" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "key7" string => "some.js.config.file"; + "parameter7" string => "'value3'"; + "key_value_separator7" string => "="; + "parameter_separator7" string => ", "; + "leading_char7" string => "["; + "closing_char7" string => "]"; + +# eighth test: key not present and value present, start and stop separator different + "base_text8" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ]"; + + "ref_text8" string => "# JS conf file +some.js.config.file = [ 'value1', 'value2', 'value3', 'value4' ] +js.config.file=['value4']"; + + "key8" string => "js.config.file"; + "parameter8" string => "'value4'"; + "key_value_separator8" string => "="; + "parameter_separator8" string => ", "; + "leading_char8" string => "["; + "closing_char8" string => "]"; + +# ninth test: key is there, but parameters are merged together without the separator - the method should not consider it already correct + "base_text9" string => "# SSH conf file +Ciphers aes128-ctraes196-ctr"; + + "ref_text9" string => "# SSH conf file +Ciphers aes128-ctraes196-ctr,aes128-ctr"; + + "key9" string => "Ciphers"; + "parameter9" string => "aes128-ctr"; + "key_value_separator9" string => " "; + "parameter_separator9" string => ","; + "leading_char9" string => ""; + "closing_char9" string => ""; + +# tenth test: key is there, but parameters are merged together without the separator - the method should not consider it already correct + "base_text10" string => "# SSH conf file +Ciphers 'aes128-ctraes196-ctr'"; + + "ref_text10" string => "# SSH conf file +Ciphers 'aes128-ctraes196-ctr,aes128-ctr'"; + + "key10" string => "Ciphers"; + "parameter10" string => "aes128-ctr"; + "key_value_separator10" string => " "; + "parameter_separator10" string => ","; + "leading_char10" string => "'"; + "closing_char10" string => "'"; + + commands: +# Initialize test and reference files + "/bin/echo" + args => "\"${ref_text${tests_list}}\" > \"${ref_file[${tests_list}]}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text${tests_list}}\" > \"${file[${tests_list}]}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "added key1" usebundle => file_key_value_parameter_present_in_list("${init.file[1]}", "${init.key1}", "${init.key_value_separator1}", "${init.parameter1}", "${init.parameter_separator1}", "\"", "\""); + "added key2" usebundle => file_key_value_parameter_present_in_list("${init.file[2]}", "${init.key2}", "${init.key_value_separator2}", "${init.parameter2}", "${init.parameter_separator2}", "\"", "\""); + "success key" usebundle => file_key_value_parameter_present_in_list("${init.file[3]}", "${init.key3}", "${init.key_value_separator3}", "${init.parameter3}", "${init.parameter_separator3}", "\"", "\""); + 
"repair key" usebundle => file_key_value_parameter_present_in_list("${init.file[4]}", "${init.key4}", "${init.key_value_separator4}", "${init.parameter4}", "${init.parameter_separator4}", "", ""); + "successkey2" usebundle => file_key_value_parameter_present_in_list("${init.file[5]}", "${init.key5}", "${init.key_value_separator5}", "${init.parameter5}", "${init.parameter_separator5}", "", ""); + + "added key4" usebundle => file_key_value_parameter_present_in_list("${init.file[6]}", "${init.key6}", "${init.key_value_separator6}", "${init.parameter6}", "${init.parameter_separator6}", "${init.leading_char6}", "${init.closing_char6}"); + "success key2" usebundle => file_key_value_parameter_present_in_list("${init.file[7]}", "${init.key7}", "${init.key_value_separator7}", "${init.parameter7}", "${init.parameter_separator7}", "${init.leading_char7}", "${init.closing_char7}"); + "repair key2" usebundle => file_key_value_parameter_present_in_list("${init.file[8]}", "${init.key8}", "${init.key_value_separator8}", "${init.parameter8}", "${init.parameter_separator8}", "${init.leading_char8}", "${init.closing_char8}"); + "repair key3" usebundle => file_key_value_parameter_present_in_list("${init.file[9]}", "${init.key9}", "${init.key_value_separator9}", "${init.parameter9}", "${init.parameter_separator9}", "${init.leading_char9}", "${init.closing_char9}"); + +} + +####################################################### + +bundle agent check +{ + vars: + "file_diff_test[${init.tests_list}]" string => "/usr/bin/diff \"${init.ref_file[${init.tests_list}]}\" \"${init.file[${init.tests_list}]}\""; + + classes: + "file${init.tests_list}_correct" + expression => returnszero("${file_diff_test[${init.tests_list}]}", "noshell"), + ifvarclass => canonify("file_key_value_parameter_present_in_list_${init.file_canon[${init.tests_list}]}_reached"); + + "ok_test1" expression => "file1_correct.file_key_value_parameter_present_in_list_${init.file_canon[1]}_repaired.!file_key_value_parameter_present_in_list_${init.file_canon[1]}_not_ok"; + "ok_test2" expression => "file2_correct.file_key_value_parameter_present_in_list_${init.file_canon[2]}_repaired.!file_key_value_parameter_present_in_list_${init.file_canon[2]}_not_ok"; + "ok_test3" expression => "file3_correct.file_key_value_parameter_present_in_list_${init.file_canon[3]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[3]}_not_ok.!file_key_value_parameter_present_in_list_${init.file_canon[3]}_repaired"; + "ok_test4" expression => "file4_correct.!file_key_value_parameter_present_in_list_${init.file_canon[4]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[4]}_not_ok.file_key_value_parameter_present_in_list_${init.file_canon[4]}_repaired"; + "ok_test5" expression => "file5_correct.file_key_value_parameter_present_in_list_${init.file_canon[5]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[5]}_not_ok.!file_key_value_parameter_present_in_list_${init.file_canon[5]}_repaired"; + + "ok_test6" expression => "file6_correct.file_key_value_parameter_present_in_list_${init.file_canon[6]}_repaired.!file_key_value_parameter_present_in_list_${init.file_canon[6]}_not_ok"; + "ok_test7" expression => "file7_correct.file_key_value_parameter_present_in_list_${init.file_canon[7]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[7]}_repaired.!file_key_value_parameter_present_in_list_${init.file_canon[7]}_not_ok"; + "ok_test8" expression => 
"file8_correct.!file_key_value_parameter_present_in_list_${init.file_canon[8]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[8]}_not_ok.file_key_value_parameter_present_in_list_${init.file_canon[8]}_repaired"; + "ok_test9" expression => "file9_correct.!file_key_value_parameter_present_in_list_${init.file_canon[9]}_kept.!file_key_value_parameter_present_in_list_${init.file_canon[9]}_not_ok.file_key_value_parameter_present_in_list_${init.file_canon[9]}_repaired"; + + + "ok" and => {"ok_test1","ok_test2","ok_test3", "ok_test4", "ok_test5", "ok_test6", "ok_test7", "ok_test8", "ok_test9" }; + +commands: +"/bin/echo ${init.file[9]}"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + cfengine:: + "diff command doesn't returns 0 for file ${init.tests_list} (command ${file_diff_test[${init.tests_list}]})" + ifvarclass => "!file${init.tests_list}_correct"; + + file1_correct.!ok_test1:: + "Generic method return is invalid for first test (expected repaired)"; + file2_correct.!ok_test3:: + "Generic method return is invalid for second test (expected repaired)"; + file3_correct.!ok_test3:: + "Generic method return is invalid for third test (expected kept)"; + file4_correct.!ok_test4:: + "Generic method return is invalid for fourth test (expected repaired)"; + file5_correct.!ok_test5:: + "Generic method return is invalid for fifth test (expected kept)"; + file6_correct.!ok_test6:: + "Generic method return is invalid for sixth test (expected repaired)"; + file7_correct.!ok_test7:: + "Generic method return is invalid for seventh test (expected kept)"; + file8_correct.!ok_test8:: + "Generic method return is invalid for eighth test (expected repaired)"; + file9_correct.!ok_test9:: + "Generic method return is invalid for ninth test (expected repaired)"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present.cf new file mode 100644 index 00000000000..22e853db238 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present.cf @@ -0,0 +1,154 @@ +####################################################### +# +# Test checking if a key-value pair are present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file1" string => "${tmp}/test1"; + "ref_file1" string => "${tmp}/ref1"; + "file_canon1" string => canonify("${file1}"); + + "file2" string => "${tmp}/test2"; + "ref_file2" string => "${tmp}/ref2"; + "file_canon2" string => canonify("${file2}"); + + "file3" string => "${tmp}/test3"; + "ref_file3" string => "${tmp}/ref3"; + "file_canon3" string => canonify("${file3}"); + + +# First test: ensure that a value is correctly modified + "base_text1" string => "# File with lhs=rhs text +JAVA_OPTS=\"-Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC\""; + + "ref_text1" string=> "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -Xmx1024m\\\""; + + "key1" string => "JAVA_OPTS"; + "value1" string => 
"\"-Djava.awt.headless=true -Xmx1024m\""; + "separator1" string => "="; + +# Second test: ensure that a key-value is correctly added in a file + "base_text2" string => "# File with lhs=rhs text +Key=Value"; + + "ref_text2" string=> "# File with lhs=rhs text +Key=Value +AddedKey=AddedValue"; + + "key2" string => "AddedKey"; + "value2" string => "AddedValue"; + "separator2" string => "="; + +# Third test: ensure that if key-value is already correct, nothing is done + "base_text3" string => "# File with lhs rhs text +Key Value"; + + "ref_text3" string => "# File with lhs rhs text +Key Value"; + + "key3" string => "Key"; + "value3" string => "Value"; + "separator3" string => " "; + + commands: +# Initialize test and reference files + "/bin/echo" + args => "\"${ref_text1}\" > \"${ref_file1}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text1}\" > \"${file1}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${ref_text2}\" > \"${ref_file2}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text2}\" > \"${file2}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${ref_text3}\" > \"${ref_file3}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text3}\" > \"${file3}\"", + contain => in_shell; + + +} + +####################################################### + +bundle agent test +{ + methods: + "modified key" usebundle => file_key_value_present("${init.file1}", "${init.key1}", "${init.value1}", "${init.separator1}"); + "added key" usebundle => file_key_value_present("${init.file2}", "${init.key2}", "${init.value2}", "${init.separator2}"); + "success key" usebundle => file_key_value_present("${init.file3}", "${init.key3}", "${init.value3}", "${init.separator3}"); + +} + +####################################################### + +bundle agent check +{ + vars: + "file_diff_test1" string => "/usr/bin/diff \"${init.ref_file1}\" \"${init.file1}\""; + "file_diff_test2" string => "/usr/bin/diff \"${init.ref_file2}\" \"${init.file2}\""; + "file_diff_test3" string => "/usr/bin/diff \"${init.ref_file3}\" \"${init.file3}\""; + + classes: + "file1_correct" + expression => returnszero("${file_diff_test1}", "noshell"), + ifvarclass => canonify("file_key_value_present_${init.file_canon1}_reached"); + "file2_correct" + expression => returnszero("${file_diff_test2}", "noshell"), + ifvarclass => canonify("file_key_value_present_${init.file_canon2}_reached"); + "file3_correct" + expression => returnszero("${file_diff_test3}", "noshell"), + ifvarclass => canonify("file_key_value_present_${init.file_canon3}_reached"); + + + "ok_test1" expression => "file1_correct.file_key_value_present_${init.file_canon1}_repaired.!file_key_value_present_${init.file_canon1}_not_ok"; + "ok_test2" expression => "file2_correct.file_key_value_present_${init.file_canon2}_repaired.!file_key_value_present_${init.file_canon2}_not_ok"; + "ok_test3" expression => "file3_correct.file_key_value_present_${init.file_canon3}_kept.!file_key_value_present_${init.file_canon3}_not_ok.!file_key_value_present_${init.file_canon3}_repaired"; + "ok" and => {"ok_test1","ok_test2","ok_test3"}; + + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file1_correct:: + "diff command doesn't returns 0 for file 1 (command: ${file_diff_test1})"; + !file2_correct:: + "diff command doesn't returns 0 for file 2 (command: ${file_diff_test2})"; + !file3_correct:: + "diff command doesn't returns 0 for file 3 (command: ${file_diff_test3})"; + file1_correct.!ok_test1:: + "Generic 
method return is invalid for first test (expected repaired)"; + file2_correct.!ok_test3:: + "Generic method return is invalid for second test (expected repaired)"; + file2_correct.!ok_test3:: + "Generic method return is invalid for third test (expected kept)"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section.cf new file mode 100644 index 00000000000..971ab866492 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section.cf @@ -0,0 +1,263 @@ +####################################################### +# +# Test checking if a key-value pair is present in a section file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + + + ## REPAIRED + # ensure that a line is really added into the right section + # with lines already present. + "file[0]" string => "${tmp}/test0.ini"; + "name[0]" string => "name1"; + "value[0]" string => "value1"; + "section[0]" string => "section_test1"; + "status[0]" string => "repaired"; + "initial[0]" string => "[section_test1] +[section_test2]"; + "expected[0]" string => "[section_test1] +${name[0]}=${value[0]} +[section_test2]"; + + # ensure that if the section does not exist, it will be created + "file[1]" string => "${tmp}/test1.ini"; + "name[1]" string => "name1"; + "value[1]" string => "value1"; + "section[1]" string => "section_test1"; + "status[1]" string => "repaired"; + "initial[1]" string => "[section_test2]"; + "expected[1]" string => "[section_test1] +${name[1]}=${value[1]} +[section_test2]"; + + + # ensure that if the value is modified if necessary + "file[2]" string => "${tmp}/test2.ini"; + "name[2]" string => "name2"; + "value[2]" string => "value2"; + "section[2]" string => "section_test2"; + "status[2]" string => "repaired"; + "initial[2]" string => "[section_test2] +name2=value"; + "expected[2]" string => "[section_test2] +${name[2]}=${value[2]}"; + + # ensure that if the value is modified if necessary + # but only in the selected region + "file[3]" string => "${tmp}/test3.ini"; + "name[3]" string => "name3"; + "value[3]" string => "key?value3"; + "section[3]" string => "section_test3"; + "status[3]" string => "repaired"; + "initial[3]" string => "[section_test3] +${name[3]}=key?value +[section_dummy] +${name[3]}=value"; + "expected[3]" string => "[section_test3] +${name[3]}=${value[3]} +[section_dummy] +${name[3]}=value"; + + # ensure that if the value is modified if necessary + # but only in the selected region + # with = sign in value + "file[4]" string => "${tmp}/test4.ini"; + "name[4]" string => "name4"; + "value[4]" string => "key=value4"; + "section[4]" string => "section_test4"; + "status[4]" string => "repaired"; + "initial[4]" string => "[section_test4] +${name[4]}=key=value +[section_dummy] +${name[4]}=key"; + "expected[4]" string => "[section_test4] +${name[4]}=${value[4]} +[section_dummy] +${name[4]}=key"; + + # KEPT + # ensure that if the value is NOT modified + # but only in the selected 
region + # with = sign in value + "file[5]" string => "${tmp}/test5.ini"; + "name[5]" string => "name5"; + "value[5]" string => "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=BaseOS&infra=$infra"; + "section[5]" string => "section_test5"; + "status[5]" string => "success"; + "initial[5]" string => "[section_test5] +${name[5]}=${value[5]} +[section_dummy] +${name[5]}=key"; + "expected[5]" string => "${initial[5]}"; + + # Repaired + # ensure that if the value is uncommented + # but only in the selected region + # with = sign in value + "file[6]" string => "${tmp}/test6.ini"; + "name[6]" string => "name6"; + "value[6]" string => "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=BaseOS&infra=$infra"; + "section[6]" string => "section_test6"; + "status[6]" string => "repaired"; + "initial[6]" string => "[section_test6] +#${name[6]}=${value[6]} +[section_dummy] +${name[6]}=key"; + "expected[6]" string => "[section_test6] +${name[6]}=${value[6]} +[section_dummy] +${name[6]}=key"; + + # ensure that if the key is uncommented + # and value corrected but only in the selected region + # with = sign in value + "file[7]" string => "${tmp}/test7.ini"; + "name[7]" string => "name7"; + "value[7]" string => "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=BaseOS&infra=$infra"; + "section[7]" string => "section_test7"; + "status[7]" string => "repaired"; + "initial[7]" string => "[section_test7] +#${name[7]}=bla=bli=blu +[section_dummy] +${name[7]}=key"; + "expected[7]" string => "[section_test7] +${name[7]}=${value[7]} +[section_dummy] +${name[7]}=key"; + + # audit + # file is compliant + "file[8]" string => "${tmp}/test8.ini"; + "name[8]" string => "name8"; + "value[8]" string => "http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=BaseOS&infra=$infra"; + "section[8]" string => "section_test8"; + "status[8]" string => "success"; + "initial[8]" string => "[section_test8] +${name[8]}=${value[8]} +[section_dummy] +${name[8]}=key"; + "expected[8]" string => "${initial[8]}"; + + # file is not compliant, value is not there + "file[9]" string => "${tmp}/test9.ini"; + "name[9]" string => "name9"; + "value[9]" string => "two"; + "section[9]" string => "section_test9"; + "status[9]" string => "error"; + "initial[9]" string => "[section_test9] +${name[9]}=one +[section_dummy] +${name[9]}=key"; + "expected[9]" string => "${initial[9]}"; + + # Repaired + # create a file if not existent. 
Unfortunately, it will have an empty line + "file[10]" string => "${tmp}/test10.ini"; + "name[10]" string => "name10"; + "value[10]" string => "value10"; + "section[10]" string => "section_test10"; + "status[10]" string => "repaired"; + "initial[10]" string => ""; + "expected[10]" string => "[section_test10] + +${name[10]}=${value[10]}"; + + "indices" slist => getindices("status"); + + files: + "${file[${indices}]}" + create => "true", + edit_line => insert_lines("${initial[${indices}]}"), + edit_defaults => empty, + unless => strcmp("${indices}", "10"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.file[${init.indices}]}", "${init.section[${init.indices}]}", "${init.name[${init.indices}]}", "${init.value[${init.indices}]}"}; + + methods: + # Enforce + "ph0" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args0}, "${init.status[0]}", "ph0", "enforce" ); + "ph1" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args1}, "${init.status[1]}", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args2}, "${init.status[2]}", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args3}, "${init.status[3]}", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args4}, "${init.status[4]}", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args5}, "${init.status[5]}", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args6}, "${init.status[6]}", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args7}, "${init.status[7]}", "ph7", "enforce" ); + "ph8" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args8}, "${init.status[8]}", "ph8", "audit" ); + "ph9" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args9}, "${init.status[9]}", "ph9", "audit" ); + "ph10" usebundle => apply_gm("file_key_value_present_in_ini_section", @{args10}, "${init.status[10]}", "ph10", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "indices" slist => { @{init.indices} }; + + # function readfile adds an extra trailing newline if there is no trailing newline, too inconsistent + "content[${indices}]" string => execresult("${paths.cat} ${init.file[${indices}]}", "noshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + # file 10 is non existent + "content_ok_${indices}" expression => and( + strcmp("${content[${indices}]}", "${init.expected[${indices}]}"), + fileexists("${init.file[${indices}]}") + ); + + + "content_not_ok" expression => "!content_ok_${indices}"; + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph8_ok.ph9_ok.ph10_ok"; + "ok" expression => "!content_not_ok.classes_ok"; + + + reports: + pass3:: + "########################### +ERROR test ${indices} in +${init.file[${indices}]} +EXPECTED: +${init.expected[${indices}]} +--------------------------- +FOUND: +${content[${indices}]} +###########################" + ifvarclass => "!content_ok_${indices}"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git 
a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section_twice_edit.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section_twice_edit.cf new file mode 100644 index 00000000000..fa78c174537 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_present_in_ini_section_twice_edit.cf @@ -0,0 +1,87 @@ +####################################################### +# +# Test checking if editing a key twice in a section does effectively edit twice +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file_1" string => "${tmp}/test1.ini"; + "reference_file_1" string => "${tmp}/ref1.ini"; + "file_1_canon" string => canonify("${file_1}"); + +# Ensure that double edition within a section succeeds + "section_1" string => "section_test1"; + "name_1" string => "name"; + "value_1" string => "value"; + "name_2" string => "key"; + "value_2" string => "value"; + + "base_text_up" string => "[section_test1]"; + "base_text_down" string => "[section_test2]"; + "reference_1" string => "${base_text_up} +${name_1}=${value_1} +${name_2}=${value_2} +${base_text_down}"; + + + commands: +# Initialize first test files + "/bin/echo" + args => "\"${reference_1}\" > \"${reference_file_1}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text_up}\" > \"${file_1}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text_down}\" >> \"${file_1}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_key_value_present_in_ini_section("${init.file_1}", "${init.section_1}", "${init.name_1}", "${init.value_1}"); + "ph2" usebundle => file_key_value_present_in_ini_section("${init.file_1}", "${init.section_1}", "${init.name_2}", "${init.value_2}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Commands to check that reference files (expected result) are the same + # as the files modified by the generic_method 'file_key_value_present_in_ini_section' + "file_correct_test" string => "/usr/bin/diff \"${init.reference_file_1}\" \"${init.file_1}\""; + + classes: + "file_correct" + expression => returnszero("${file_correct_test}", "noshell"), + ifvarclass => canonify("file_key_value_present_in_ini_section_${init.file_1}_reached"); + "ok" expression => "file_correct.file_key_value_present_in_ini_section_${init.file_1_canon}_ok.!file_key_value_present_in_ini_section_${init.file_1_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_key_value_twice_edit.cf b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_twice_edit.cf new file mode 100644 index 00000000000..e6963b05343 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_key_value_twice_edit.cf @@ -0,0 +1,79 @@ +####################################################### +# +# Test checking if editing a key twice in a section does
effectively edit twice +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file_1" string => "${tmp}/test1"; + "reference_file_1" string => "${tmp}/ref1"; + "file_1_canon" string => canonify("${file_1}"); + +# Ensure that double edition within a section succeed + "key_1" string => "name"; + "value_1" string => "value"; + "key_2" string => "key"; + "value_2" string => "value"; + "separator" string => "="; + + "reference_1" string => "file +${key_1}${separator}${value_1} +${key_2}${separator}${value_2}"; + + + commands: +# Initialize first test files + "/bin/echo" + args => "\"${reference_1}\" > \"${reference_file_1}\"", + contain => in_shell; + "/bin/echo" + args => "\"file\" > \"${file_1}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_key_value_present("${init.file_1}", "${init.key_1}", "${init.value_1}", "${init.separator}"); + "ph2" usebundle => file_key_value_present("${init.file_1}", "${init.key_2}", "${init.value_2}", "${init.separator}"); +} + +####################################################### + +bundle agent check +{ + vars: + "file_correct_test" string => "/usr/bin/diff \"${init.reference_file_1}\" \"${init.file_1}\""; + + classes: + "file_correct" + expression => returnszero("${file_correct_test}", "noshell"), + ifvarclass => canonify("file_key_value_present_${init.file_1}_reached"); + "ok" expression => "file_correct.file_key_value_present_${init.file_1_canon}_ok.!file_key_value_present_${init.file_1_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_keys_values_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_keys_values_present.cf new file mode 100644 index 00000000000..5c81986d4b7 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_keys_values_present.cf @@ -0,0 +1,79 @@ +####################################################### +# +# Test checking if keys values are present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "ref_file" string => "${tmp}/ref"; + "file_canon" string => canonify("${file}"); + "base_text" string => "# File with lhs=rhs text +JAVA_OPTS=\"-Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC\""; + + "ref_text" string=> "# File with lhs=rhs text +JAVA_OPTS=\\\"-Djava.awt.headless=true -Xmx1024m\\\""; + + "keys[JAVA_OPTS]" string => "\"-Djava.awt.headless=true -Xmx1024m\""; + "separator" string 
=> "="; + + commands: + "/bin/echo" + args => "\"${ref_text}\" > \"${ref_file}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${base_text}\" > \"${file}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_keys_values_present("${init.file}", "init.keys", "${init.separator}"); +} + +####################################################### + +bundle agent check +{ + vars: + "file_diff_test" string => "/usr/bin/diff \"${init.ref_file}\" \"${init.file}\""; + + + classes: + "file_correct" + expression => returnszero("${file_diff_test}", "noshell"), + ifvarclass => canonify("file_keys_values_present_${init.file}_reached"); + + "ok" expression => "file_correct.file_keys_values_present_${init.file_canon}_ok.!file_keys_values_present_${init.file_canon}_not_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_correct:: + "diff command doesn't returns 0 for command: ${file_diff_test}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_line_present_in_xml_tag.cf b/policies/lib/tests/acceptance/30_generic_methods/file_line_present_in_xml_tag.cf new file mode 100644 index 00000000000..e6206cf8791 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_line_present_in_xml_tag.cf @@ -0,0 +1,107 @@ +####################################################### +# +# Test checking if a line is present in a XML file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test.xml"; + "file_canon" string => canonify("${file}"); + "tag" string => "tag"; + + "invalid_tag" string => "NOTAG"; + "invalid_content" string => "NOCONTENT"; + "invalid_file" string => "${tmp}/invalid.xml"; + "invalid_file_canon" string => canonify("${invalid_file}"); + + "line" string => "content"; + + "reference_file" string => "${tmp}/ref.xml"; + "base_text" string => " +"; + "reference" string => " +${line} +"; + + commands: + "/bin/echo" + args => "\"${reference}\" > \"${reference_file}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${base_text}\" > \"${file}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${reference}\" > \"${invalid_file}\"", + contain => in_shell; + + + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_line_present_in_xml_tag("${init.file}", "${init.tag}", "${init.line}"); + "ph2" usebundle => file_line_present_in_xml_tag("${init.invalid_file}", "${init.invalid_tag}", "${init.invalid_content}"); + +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "/usr/bin/diff \"${init.reference_file}\" \"${init.file}\""; + "no_change_test" string => "/usr/bin/diff \"${init.reference_file}\" \"${init.invalid_file}\""; + + classes: + "line_exists" + expression => returnszero("${line_exists_test}", "noshell"), + ifvarclass => 
canonify("file_line_present_in_xml_tag_${init.file}_reached"); + + "no_change" + expression => returnszero("${no_change_test}", "noshell"), + ifvarclass => canonify("file_line_present_in_xml_tag_${init.invalid_file}_reached"); + + "valid_tag_change" expression => "file_line_present_in_xml_tag_${init.file_canon}_ok.!file_line_present_in_xml_tag_${init.file_canon}_not_ok"; + "invalid_tag_change" expression => "file_line_present_in_xml_tag_${init.invalid_file_canon}_not_ok"; + + "ok" expression => "line_exists.no_change.valid_tag_change.invalid_tag_change"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !line_exists:: + "diff command doesn't return 0 for command: ${line_exists_test}"; + + !no_change:: + "diff command doesn't return 0 for command: ${no_change_test}"; + !valid_tag_change:: + "The classes defined by edition of ${init.file} are invalid"; + !invalid_tag_change:: + "The classes defined by edition of ${init.invalid_file} are invalid"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_lines_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/file_lines_absent.cf new file mode 100644 index 00000000000..25a193d09fe --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_lines_absent.cf @@ -0,0 +1,69 @@ +####################################################### +# +# Test checking if a line is absent in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_test" string => "This is a test line!"; + + methods: + "ph1" usebundle => file_lines_present("${file}", "${line_to_test}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_lines_absent("${init.file}", "${init.line_to_test}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "${paths.path[grep]} -E \"^${init.line_to_test}$\" \"${init.file}\""; + + classes: + "file_exists" expression => fileexists("${init.file}"); + "line_absent" + not => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_lines_absent_${init.file}_reached"); + + "ok" expression => "file_exists.line_absent.file_lines_absent_${init.file_canon}_ok.!file_lines_absent_${init.file_canon}_error.file_lines_absent_${init.file_canon}_repaired"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !line_absent:: + "grep command did return 0 for command: ${line_exists_test}"; + cfengine:: + "The file should be repaired by the method, but the repaired class is not defined" + ifvarclass => "file_lines_absent_${init.file_canon}_repaired"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_lines_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present.cf new file mode 100644 index 
00000000000..7670b6730ca --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present.cf @@ -0,0 +1,64 @@ +####################################################### +# +# Test checking if a line is present in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_add" string => "This is a test line!"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_lines_present("${init.file}", "${init.line_to_add}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "${paths.path[grep]} -E \"^${init.line_to_add}$\" \"${init.file}\""; + + classes: + # By default, file_lines_present should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.file}"); + "line_exists" + expression => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_lines_present_${init.file}_reached"); + + "ok" expression => "file_exists.line_exists.file_lines_present_${init.file_canon}_ok.!file_lines_present_${init.file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !line_exists:: + "grep command doesn't return 0 for command: ${line_exists_test}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_in_ini_section.cf b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_in_ini_section.cf new file mode 100644 index 00000000000..ba4a631da9b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_in_ini_section.cf @@ -0,0 +1,112 @@ +####################################################### +# +# Test checking if a line is present in a section file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file_1" string => "${tmp}/test1.ini"; + "reference_file_1" string => "${tmp}/ref1.ini"; + "file_1_canon" string => canonify("${file_1}"); + "file_2" string => "${tmp}/test2.ini"; + "reference_file_2" string => "${tmp}/ref2.ini"; + "file_2_canon" string => canonify("${file_2}"); + +# First test: ensure that a line is really added into the right section +# with lines already present. 
+ "section_1" string => "section_test1"; + "line_1" string => "content"; + "base_text_up" string => "[section_test1] +This section as some irrelevant content"; + "base_text_down" string => "[section_test2]"; + "reference_1" string => "${base_text_up} +${line_1} +${base_text_down}"; + +# Second test: ensure that if the section does not exist, it will be created +# and the lines added + "section_2" string => "section_test2"; + "line_2" string => "another content"; + "reference_2" string => "${base_text_up} +${base_text_down} + +${line_2}"; + + commands: +# Initialize first test files + "/bin/echo" + args => "\"${reference_1}\" > \"${reference_file_1}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text_up}\" > \"${file_1}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text_down}\" >> \"${file_1}\"", + contain => in_shell; +# Initialize second test files + "/bin/echo" + args => "\"${reference_2}\" > \"${reference_file_2}\"", + contain => in_shell; + "/bin/echo" + args => "\"${base_text_up}\" > \"${file_2}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_line_present_in_ini_section("${init.file_1}", "${init.section_1}", "${init.line_1}"); + "ph2" usebundle => file_line_present_in_ini_section("${init.file_2}", "${init.section_2}", "${init.line_2}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Commands to check that reference files (expected result) are the same + # than the modified files by the generic_method 'file_line_present_in_ini_section' + "line_1_exists_test" string => "/usr/bin/diff \"${init.reference_file_1}\" \"${init.file_1}\""; + "line_2_exists_test" string => "/usr/bin/diff \"${init.reference_file_2}\" \"${init.file_2}\""; + + classes: + "line_1_exists" + expression => returnszero("${line_1_exists_test}", "noshell"), + ifvarclass => canonify("file_line_present_in_ini_section_${init.file_1}_reached"); + "line_2_exists" + expression => returnszero("${line_2_exists_test}", "noshell"), + ifvarclass => canonify("file_line_present_in_ini_section_${init.file_2}_reached"); + + "ok_test1" expression => "line_1_exists.file_line_present_in_ini_section_${init.file_1_canon}_ok.!file_1_line_present_in_ini_section_${init.file_1_canon}_error"; + "ok_test2" expression => "line_2_exists.file_line_present_in_ini_section_${init.file_2_canon}_ok.!file_2_line_present_in_ini_section_${init.file_2_canon}_error"; + "ok" and => {"ok_test1","ok_test2"}; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !line_1_exists:: + "diff command doesn't return 0 for command: ${line_1_exists_test}"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_test_already_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_test_already_present.cf new file mode 100644 index 00000000000..5ef4eab7755 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_lines_present_test_already_present.cf @@ -0,0 +1,77 @@ +####################################################### +# +# Test checking if checking is line is present in a file +# when it was already there don't duplicate +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", 
"${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_add" string => "This is a test line."; + + commands: + "/bin/echo" + args => "\"${init.line_to_add}\" > \"${init.file}\"", + contain => in_shell; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_lines_present("${init.file}", "${init.line_to_add}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "${paths.path[test]} `${paths.path[grep]} \"^${init.line_to_add}$\" \"${init.file}\" | wc -l` = '1'"; + + classes: + # By default, file_lines_present should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.file}"); + + "line_exists" expression => returnszero("${line_exists_test}", "useshell"), + ifvarclass => canonify("file_lines_present_${init.file}_reached"); + + "ok" expression => "file_exists.line_exists.file_lines_present_${init.file_canon}_ok.!file_lines_present_${init.file_canon}_error.file_lines_present_${init.file_canon}_kept.!file_lines_present_${init.file_canon}_repaired"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !line_exists:: + "grep command doesn't return 1 for command: ${line_exists_test}, meaning there is not exactly one line"; + cfengine:: + "the line was already there, but the kept class is not defined" + ifvarclass => "!file_lines_present_${init.file_canon}_kept"; + + "the line was already there, but the repaired class is defined" + ifvarclass => "file_lines_present_${init.file_canon}_repaired"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_lines_test_from_empty_file.cf b/policies/lib/tests/acceptance/30_generic_methods/file_lines_test_from_empty_file.cf new file mode 100644 index 00000000000..3d65bd33949 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_lines_test_from_empty_file.cf @@ -0,0 +1,73 @@ +####################################################### +# +# Test checking if a line is absent in a file, from an +# empty file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_test" string => "This is a test line!"; + + files: + "${file}" + create => "true"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_lines_absent("${init.file}", "${init.line_to_test}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => 
"${paths.path[grep]} -E \"^${init.line_to_test}$\" \"${init.file}\""; + + classes: + "file_exists" expression => fileexists("${init.file}"); + "line_absent" + not => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_lines_absent_${init.file}_reached"); + + "ok" expression => "file_exists.line_absent.file_lines_absent_${init.file_canon}_ok.!file_lines_absent_${init.file_canon}_error.!file_lines_absent_${init.file_canon}_repaired"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !line_absent:: + "grep command did return 0 for command: ${line_exists_test}"; + + cfengine:: + "The file add to be repaired, but it was created empty" + ifvarclass => "file_lines_absent_${init.file_canon}_repaired"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_present.cf b/policies/lib/tests/acceptance/30_generic_methods/file_present.cf new file mode 100644 index 00000000000..4ac8712a39f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_present.cf @@ -0,0 +1,59 @@ +####################################################### +# +# Test checking that file_create creates a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "destination_file" string => "${tmp}/present_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + files: + "${destination_file}" + delete => tidy; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_create("${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + classes: + "file_exists" expression => fileexists("${init.destination_file}"); + + "ok" expression => "file_exists.file_present_${init.destination_file_canon}_repaired.!file_present_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "File ${init.destination_file} doesn't exist"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_replace_lines.cf b/policies/lib/tests/acceptance/30_generic_methods/file_replace_lines.cf new file mode 100644 index 00000000000..929f4be771b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_replace_lines.cf @@ -0,0 +1,68 @@ +####################################################### +# +# Test checking if a line can be replaced in a file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => 
getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_add" string => "This is a test line!"; + "line_for_replacement" string => "This is a better test line!"; + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${line_to_add}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_replace_lines("${init.file}", "${init.line_to_add}", "${init.line_for_replacement}"); +} + +####################################################### + +bundle agent check +{ + vars: + "line_exists_test" string => "${paths.path[grep]} -E \"^${init.line_for_replacement}$\" \"${init.file}\""; + + classes: + # By default, file_replace_lines should create the file if it doesn't exist + "file_exists" expression => fileexists("${init.file}"); + "line_exists" + expression => returnszero("${line_exists_test}", "noshell"), + ifvarclass => canonify("file_replace_lines_${init.file}_reached"); + + "ok" expression => "file_exists.line_exists.file_replace_lines_${init.file_canon}_ok.!file_replace_lines_${init.file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !file_exists:: + "fileexists returns false for file ${init.file}"; + !line_exists:: + "grep command doesn't return 0 for command: ${line_exists_test}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_report_content.cf b/policies/lib/tests/acceptance/30_generic_methods/file_report_content.cf new file mode 100644 index 00000000000..1c70d37367d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_report_content.cf @@ -0,0 +1,128 @@ +####################################################### +# +# Test checking if a content of file is enforced +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "file_1" string => "${tmp}/test_1"; + "file_1_canon" string => canonify("${file_1}"); + "file_1_content" string => "this is +a multiline +1test +file"; + "file_1_expected" string => "this is +a multiline +1test +file +"; + + "file_2" string => "${tmp}/test_2"; + "file_2_canon" string => canonify("${file_2}"); + "file_2_content" string => "this is +a multiline +2test +file"; + "file_2_expected" string => "2test"; + + "file_3" string => "${tmp}/test_3"; + "file_3_canon" string => canonify("${file_3}"); + "file_3_content" string => "this is +a multiline +3test +file"; + "file_3_expected" string => "a multiline +3test +file"; + + "file_4" string => "${tmp}/test_4"; + "file_4_canon" string => canonify("${file_4}"); + "file_5" string => "${tmp}/test_5"; + "file_5_canon" string => canonify("${file_5}"); + + commands: + "/bin/echo" + args => "\"${file_1_content}\" > \"${file_1}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${file_2_content}\" > \"${file_2}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${file_3_content}\" > \"${file_3}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ 
+ classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + # To avoid re-evaluating results when content changes... + pass3:: + "ph1" usebundle => file_report_content("${init.file_1}", "", ""); + "stor1" usebundle => result("file_1", "${file_report_content.content}"); + "ph2" usebundle => file_report_content("${init.file_2}", "2test", ""); + "stor2" usebundle => result("file_2", "${file_report_content.content}"); + "ph3" usebundle => file_report_content("${init.file_3}", "3test", "1"); + "stor3" usebundle => result("file_3", "${file_report_content.content}"); + "ph4" usebundle => file_report_content("${init.file_4}", "", ""); + "ph5" usebundle => file_report_content("${init.file_5}", "test", ""); +} + +# This bundle is used to store the content of the result after each call +# allowing to test its content in the test bundle. +bundle agent result(id, result) +{ + vars: + "${id}" string => "${result}"; +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => strcmp("${init.file_1_expected}", "${result.file_1}"); + "ok_1_class" expression => "file_report_content_${init.file_1_canon}_kept.!file_report_content_${init.file_1_canon}_error"; + "ok_2" expression => strcmp("${init.file_2_expected}", "${result.file_2}"); + "ok_2_class" expression => "file_report_content_${init.file_2_canon}_kept.!file_report_content_${init.file_2_canon}_error"; + "ok_3" expression => strcmp("${init.file_3_expected}", "${result.file_3}"); + "ok_3_class" expression => "file_report_content_${init.file_3_canon}_kept.!file_report_content_${init.file_3_canon}_error"; + "ok_4_class" expression => "!file_report_content_${init.file_4_canon}_kept.file_report_content_${init.file_4_canon}_error"; + "ok_5_class" expression => "!file_report_content_${init.file_5_canon}_kept.file_report_content_${init.file_5_canon}_error"; + + "ok" expression => "ok_1.ok_2.ok_3.ok_1_class.ok_2_class.ok_3_class.ok_4_class.ok_5_class"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_report_content_head_tail.cf b/policies/lib/tests/acceptance/30_generic_methods/file_report_content_head_tail.cf new file mode 100644 index 00000000000..9f0088629ee --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_report_content_head_tail.cf @@ -0,0 +1,120 @@ +####################################################### +# +# Test checking if a content of file is enforced +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "file_1" string => "${tmp}/test_1"; + "file_1_canon" string => canonify("${file_1}"); + "file_1_content" string => "this is +a multiline +1test +file"; + "file_1_expected" string => "this is +a multiline +1test"; + + "file_2" string => "${tmp}/test_2"; + "file_2_canon" string => canonify("${file_2}"); + "file_2_content" string => "this is +a multiline +2test +file"; + "file_2_expected" string => "a multiline 
+2test +file"; + + "file_3" string => "${tmp}/test_3"; + "file_3_canon" string => canonify("${file_3}"); + + "file_4" string => "${tmp}/test_4"; + "file_4_canon" string => canonify("${file_4}"); + + commands: + "/bin/echo" + args => "\"${file_1_content}\" > \"${file_1}\"", + contain => in_shell; + + "/bin/echo" + args => "\"${file_2_content}\" > \"${file_2}\"", + contain => in_shell; +} + +####################################################### + +bundle agent test +{ + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + # To avoid re-evaluating results when content changes... + pass3:: + "ph1" usebundle => file_report_content_head("${init.file_1}", "3"); + "stor1" usebundle => result("file_1", "${file_report_content_head.content}"); + "ph2" usebundle => file_report_content_tail("${init.file_2}", "3"); + "stor2" usebundle => result("file_2", "${file_report_content_tail.content}"); + "ph3" usebundle => file_report_content_head("${init.file_3}", "3"); + "ph4" usebundle => file_report_content_tail("${init.file_4}", "3"); +} + +# This bundle is used to store the content of the result after each call +# allowing to test its content in the test bundle. +bundle agent result(id, result) +{ + vars: + "${id}" string => "${result}"; +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => strcmp("${init.file_1_expected}", "${result.file_1}"); + "ok_1_class" expression => "file_report_content_head_${init.file_1_canon}_kept.!file_report_content_head_${init.file_1_canon}_error"; + "ok_2" expression => strcmp("${init.file_2_expected}", "${result.file_2}"); + "ok_2_class" expression => "file_report_content_tail_${init.file_2_canon}_kept.!file_report_content_tail_${init.file_2_canon}_error"; + "ok_3_class" expression => "!file_report_content_head_${init.file_3_canon}_kept.file_report_content_head_${init.file_3_canon}_error"; + "ok_4_class" expression => "!file_report_content_tail_${init.file_4_canon}_kept.file_report_content_tail_${init.file_4_canon}_error"; + + "ok" expression => "ok_1.ok_2.ok_1_class.ok_2_class.ok_3_class.ok_4_class"; + + reports: + "'${result.file_1}'"; + !ok_1:: + "Issue in 1"; + !ok_2:: + "Issue in 2"; + !ok_3:: + "Issue in 3"; + !ok_4:: + "Issue in 4"; + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_invalid_wrong_enforce_arg.cf b/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_invalid_wrong_enforce_arg.cf new file mode 100644 index 00000000000..b6cde2432bb --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_invalid_wrong_enforce_arg.cf @@ -0,0 +1,75 @@ +####################################################### +# +# Test checking if a symlink is not created if not enforced +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => 
canonify("${file}"); + "line_to_add" string => "This is a test line!"; + "symlink_1" string => "${tmp}/test_symlink1"; + "symlink_1_canon" string => canonify("${symlink_1}"); + "symlink_2" string => "${tmp}/test_symlink2"; + "symlink_2_canon" string => canonify("${symlink_2}"); + "symlink_3" string => "${tmp}/test_symlink3"; + "symlink_3_canon" string => canonify("${symlink_3}"); + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${line_to_add}"); + # This file will test if the symlink is enforced or not + "ph2" usebundle => file_ensure_lines_present("${symlink_1}", "${line_to_add}"); + "ph3" usebundle => file_ensure_lines_present("${symlink_2}", "${line_to_add}"); + "ph4" usebundle => file_ensure_lines_present("${symlink_3}", "${line_to_add}"); +} + +####################################################### + +bundle agent test +{ + methods: + # A file already exist and the creation is not enforced, so this symlink should not be created + "ph1" usebundle => file_symlink_present_option("${init.file}", "${init.symlink_1}", "false"); + # The use of a wrong argument result in a default value 'false', so this symlink should not be created too + "ph2" usebundle => file_symlink_present_option("${init.file}", "${init.symlink_2}", "dummy"); + "ph3" usebundle => file_symlink_present("${init.file}", "${init.symlink_3}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_symlink_fs_1" not => islink("${init.symlink_1}"); + "ok_symlink_class_1" and => { "repair_failed_file_symlink_present_${init.symlink_1_canon}", "file_symlink_present_${init.symlink_1_canon}_failed", "file_symlink_present_${init.symlink_1_canon}_not_repaired", "file_symlink_present_${init.symlink_1_canon}_not_ok", "file_symlink_present_${init.symlink_1_canon}_not_kept","file_symlink_present_${init.symlink_1_canon}_reached" }; + "ok_symlink_fs_2" not => islink("${init.symlink_2}"); + "ok_symlink_class_2" and => { "repair_failed_file_symlink_present_${init.symlink_2_canon}", "file_symlink_present_${init.symlink_2_canon}_failed", "file_symlink_present_${init.symlink_2_canon}_not_repaired", "file_symlink_present_${init.symlink_2_canon}_not_ok", "file_symlink_present_${init.symlink_2_canon}_not_kept","file_symlink_present_${init.symlink_2_canon}_reached" }; + "ok_symlink_fs_3" not => islink("${init.symlink_3}"); + "ok_symlink_class_3" and => { "repair_failed_file_symlink_present_${init.symlink_3_canon}", "file_symlink_present_${init.symlink_3_canon}_failed", "file_symlink_present_${init.symlink_3_canon}_not_repaired", "file_symlink_present_${init.symlink_3_canon}_not_ok", "file_symlink_present_${init.symlink_3_canon}_not_kept","file_symlink_present_${init.symlink_3_canon}_reached" }; + "ok" and => { "ok_symlink_fs_1", "ok_symlink_class_1", "ok_symlink_fs_2", "ok_symlink_class_2", "ok_symlink_fs_3", "ok_symlink_class_3" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_valid.cf b/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_valid.cf new file mode 100644 index 00000000000..09f9471b210 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/file_symlink_present_valid.cf @@ -0,0 +1,82 @@ +####################################################### +# +# Test checking if a symlink is created +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string 
=> getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_add" string => "This is a test line!"; + "symlink_1" string => "${tmp}/test_symlink1"; + "symlink_1_canon" string => canonify("${symlink_1}"); + "symlink_2" string => "${tmp}/test_symlink2"; + "symlink_2_canon" string => canonify("${symlink_2}"); + "symlink_3" string => "${tmp}/test_symlink3"; + "symlink_3_canon" string => canonify("${symlink_3}"); + "symlink_4" string => "${tmp}/test_symlink4"; + "symlink_4_canon" string => canonify("${symlink_4}"); + "symlink_5" string => "${tmp}/test_symlink5"; + "symlink_5_canon" string => canonify("${symlink_5}"); + "symlink_6" string => "${tmp}/test_symlink6"; + "symlink_6_canon" string => canonify("${symlink_6}"); + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${line_to_add}"); + # Theses files will test if the symlink is enforced or not + "ph2" usebundle => file_ensure_lines_present("${symlink_3}", "${line_to_add}"); + "ph3" usebundle => file_ensure_lines_present("${symlink_6}", "${line_to_add}"); +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => file_symlink_present_option("${init.file}", "${init.symlink_1}", "false"); + "ph2" usebundle => file_symlink_present_option("${init.file}", "${init.symlink_2}", "true"); + "ph3" usebundle => file_symlink_present_option("${init.file}", "${init.symlink_3}", "true"); + "ph4" usebundle => file_symlink_present("${init.file}", "${init.symlink_4}"); + "ph5" usebundle => file_symlink_present_force("${init.file}", "${init.symlink_5}"); + "ph6" usebundle => file_symlink_present_force("${init.file}", "${init.symlink_6}"); +} + +#######################################################² + +bundle agent check +{ + classes: + "ok_symlink1" and => { islink("${init.symlink_1}"), "promise_repaired_file_symlink_present_${init.symlink_1_canon}", "file_symlink_present_${init.symlink_1_canon}_repaired", "file_symlink_present_${init.symlink_1_canon}_ok", "file_symlink_present_${init.symlink_1_canon}_reached" }; + "ok_symlink2" and => { islink("${init.symlink_2}"), "promise_repaired_file_symlink_present_${init.symlink_2_canon}", "file_symlink_present_${init.symlink_2_canon}_repaired", "file_symlink_present_${init.symlink_2_canon}_ok", "file_symlink_present_${init.symlink_2_canon}_reached" }; + "ok_symlink3" and => { islink("${init.symlink_3}"), "promise_repaired_file_symlink_present_${init.symlink_3_canon}", "file_symlink_present_${init.symlink_3_canon}_repaired", "file_symlink_present_${init.symlink_3_canon}_ok", "file_symlink_present_${init.symlink_3_canon}_reached" }; + "ok_symlink4" and => { islink("${init.symlink_4}"), "promise_repaired_file_symlink_present_${init.symlink_4_canon}", "file_symlink_present_${init.symlink_4_canon}_repaired", "file_symlink_present_${init.symlink_4_canon}_ok", "file_symlink_present_${init.symlink_4_canon}_reached" }; + "ok_symlink5" and => { islink("${init.symlink_5}"), "promise_repaired_file_symlink_present_${init.symlink_5_canon}", "file_symlink_present_${init.symlink_5_canon}_repaired", 
"file_symlink_present_${init.symlink_5_canon}_ok", "file_symlink_present_${init.symlink_5_canon}_reached" }; + "ok_symlink6" and => { islink("${init.symlink_6}"), "promise_repaired_file_symlink_present_${init.symlink_6_canon}", "file_symlink_present_${init.symlink_6_canon}_repaired", "file_symlink_present_${init.symlink_6_canon}_ok", "file_symlink_present_${init.symlink_6_canon}_reached" }; + "ok" and => { "ok_symlink1", "ok_symlink2", "ok_symlink3", "ok_symlink4", "ok_symlink5", "ok_symlink6" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/log_rudder.cf b/policies/lib/tests/acceptance/30_generic_methods/log_rudder.cf new file mode 100644 index 00000000000..027dd2bc8c6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/log_rudder.cf @@ -0,0 +1,92 @@ +######################################################################################### +# +# This will check reporting within log_rudder, by checking the defined classes +# +######################################################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "message" string => "Test of the reporting"; + "param[1]" string => "/some/param.txt"; + "old_class_prefix[1]" string => canonify("files_lines_present_${param[1]}"); + "args_1" slist => { "${param[1]}", "${param[1]}", "${param[1]}" }; + "report_param[1]" string => join("_", args_1); + "class_prefix[1]" string => canonify("${old_class_prefix[1]}_${report_param[1]}"); + + "param[2]" string => "/some/param.txt"; + "old_class_prefix[2]" string => canonify("files_lines_present_${param[2]}"); + "args_2" slist => { "${param[2]}", "bla", "bli" }; + "report_param[2]" string => join("_", args_2); + "class_prefix[2]" string => canonify("${old_class_prefix[2]}_${report_param[2]}"); + + "param[3]" string => "/other/param.txt"; + "old_class_prefix[3]" string => canonify("files_lines_present_${param[3]}"); + "args_3" slist => { "${param[3]}", "bla", "bli" }; + "report_param[3]" string => join("_", args_3); + "class_prefix[3]" string => "cf_null"; + + "param[4]" string => "/very/long/parameter/text/results.txt"; + "old_class_prefix[4]" string => canonify("files_lines_present_${param[4]}"); + "args_4" slist => { "${param[4]}", "veryyyyyyyyyyyyyyyyyyyyyyyyyyyyylonnnnnnnnnnnnnnnnnnnnnnnnnnnnnnngtexxxxxxxxxxxxxxxxxxxxxxxxxt", "veryyyyyyyyyyyyyyyyyyyyyyyyyyyyylonnnnnnnnnnnnnnnnnnnnnnnnnnnnnnngtexxxxxxxxxxxxxxxxxxxxxxxxxt", "suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr", "suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr", "suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr", "suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr", 
"suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr", "suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuupppppppppppppppppppppppppppppeeeeeeeeeeeeeeeeeeeerrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr" }; + "report_param[4]" string => join("_", args_4); + "class_prefix[4]" string => canonify("${old_class_prefix[4]}_${report_param[4]}"); + + + "tests_list" slist => getindices("param"); +} + +####################################################### + +bundle agent test +{ + # 1,2 are success, 3 is repaired, 4 is error + methods: + "ph1" usebundle => _classes_success("${init.old_class_prefix[1]}"); + "ph1" usebundle => _classes_success("${init.class_prefix[1]}"); + "ph2" usebundle => _classes_success("${init.old_class_prefix[2]}"); + "ph2" usebundle => _classes_success("${init.class_prefix[2]}"); + "ph3" usebundle => _classes_repaired("${init.old_class_prefix[3]}"); + "ph3" usebundle => _classes_repaired("${init.class_prefix[3]}"); + "ph4" usebundle => _classes_failure("${init.old_class_prefix[4]}"); + "ph4" usebundle => _classes_failure("${init.class_prefix[4]}"); + + # do the reporting part + # 3, 4 should use old_class_prefix, other the new class_prefix + "report1" usebundle => log_rudder("${init.message}", "", "${init.old_class_prefix[1]}", "${init.class_prefix[1]}", "@{init.args_1}"); + "report2" usebundle => log_rudder("${init.message}", "", "${init.old_class_prefix[2]}", "${init.class_prefix[2]}", "@{init.args_2}"); + "report3" usebundle => log_rudder("${init.message}", "", "${init.old_class_prefix[3]}", "${init.class_prefix[3]}", "@{init.args_3}"); + "report4" usebundle => log_rudder("${init.message}", "", "${init.old_class_prefix[4]}", "${init.class_prefix[4]}", "@{init.args_4}"); + +} + +####################################################### + +bundle agent check +{ + classes: + # 3, 4 should use old_class_prefix, other the new class_prefix + "ok" expression => "logger_rudder_${init.class_prefix[1]}_reached.logger_rudder_${init.class_prefix[2]}_reached.logger_rudder_${init.old_class_prefix[3]}_reached.logger_rudder_${init.old_class_prefix[4]}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/logger_rudder.cf b/policies/lib/tests/acceptance/30_generic_methods/logger_rudder.cf new file mode 100644 index 00000000000..e1d2952cb3c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/logger_rudder.cf @@ -0,0 +1,71 @@ +####################################################### +# +# Test checking if reporting is correctly done with rudder_logger +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "message" string => "is the expected report message"; + "class_prefix" string => "file_create__tmp_1"; + "class_condition" string => "${class_prefix}_repaired"; + + "current_technique_report_info.technique_name" string => "Test_logging_in_ncf"; + + "expected_reporting" string => "R: 
@@Test_logging_in_ncf@@result_repaired@@32377fd7-02fd-43d0-aab7-28460a91347b@@15645b8f-4606-4549-9f62-1b99a7594d54@@0@@File create@@/tmp/1@@12-11-54##123456@#the expected report message was repaired"; + "escaped_expected" string => escape("${expected_reporting}"); +} + +####################################################### + +bundle agent test +{ +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + # execute the agent, and get its output in 'output' variable + "reporting_output_cmd" string => "${sys.workdir}/bin/cf-agent -Kf \"${this.promise_dirname}/logger_rudder_output.cf.sub\" -D AUTO"; + + "output" string => execresult("${reporting_output_cmd}", "noshell"); + + classes: + pass1:: + # Check if output contains the correct value + "ok" + expression => regcmp(".*${init.escaped_expected}.*", "${output}"); + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + "$(this.promise_filename) Expected reports was not generated (got '${output}' but expected '${init.escaped_expected}' )"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/logger_rudder_output.cf.sub b/policies/lib/tests/acceptance/30_generic_methods/logger_rudder_output.cf.sub new file mode 100644 index 00000000000..89b3dea223b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/logger_rudder_output.cf.sub @@ -0,0 +1,59 @@ +####################################################### +# +# Sub cf file that simply generates reporting +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "message" string => "the expected report message"; + "class_prefix" string => "file_create__tmp_1"; + "class_condition" string => "${class_prefix}_repaired"; + "class_parameter" string => "/tmp/1"; + + "g.uuid" string => "123456"; + "g.execRun" string => "12-11-54"; +} + +####################################################### + +bundle agent test +{ + vars: + "empty_slist" slist => { cf_null }; + + methods: + "call_context" usebundle => rudder_reporting_context("15645b8f-4606-4549-9f62-1b99a7594d54", "32377fd7-02fd-43d0-aab7-28460a91347b", "Test_logging_in_ncf"); + "method_call_context" usebundle => _method_reporting_context("File create", "/tmp/1"); + "class" usebundle => _classes_repaired("${init.class_prefix}"); + "ph1" usebundle => log_rudder("${init.message}", "${init.class_parameter}", "${init.class_prefix}", "", @{empty_slist}); +} + +####################################################### + +bundle agent check +{ + reports: + logger_rudder_final_resfile_repaired:: + "$(this.promise_filename) Pass"; + !logger_rudder_final_resfile_repaired:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/file_from_http_server.cf b/policies/lib/tests/acceptance/30_generic_methods/network/file_from_http_server.cf new file mode 100644 index 00000000000..fb9646d4ddd --- /dev/null +++ 
b/policies/lib/tests/acceptance/30_generic_methods/network/file_from_http_server.cf @@ -0,0 +1,83 @@ +####################################################### +# +# Test checking if: +# * a file can be downloaded from an http server +# * a file that is already present will not be downloaded again +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + + "tmp" string => getenv("TEMP", 1024); + + "source_file" string => "http://repository.rudder.io/tools/ncf-setup"; + "destination_file" string => "${tmp}/test_file_from_http_server.txt"; + "destination_file_canon" string => canonify("${destination_file}"); + + "existing_file" string => "${tmp}/test_file_from_http_server2.txt"; + "existing_file_canon" string => canonify("${existing_file}"); + + + files: + "${existing_file}" + create => "true"; + +} + +####################################################### + +bundle agent test +{ + methods: + + # Should download the file + "ph1" usebundle => file_from_http_server("${init.source_file}", "${init.destination_file}"); + + # Should just be OK (no download) + "ph2" usebundle => file_from_http_server("${init.source_file}", "${init.existing_file}"); + +} + +####################################################### + +bundle agent check +{ + + classes: + + "ok" and => { + # File 1 should be downloaded, and no error should have happened + "file_from_http_server_${init.destination_file_canon}_ok", + "file_from_http_server_${init.destination_file_canon}_repaired", + "!file_from_http_server_${init.destination_file_canon}_error", + + # File 2 should be OK (already here), not repaired and not in error + "file_from_http_server_${init.existing_file_canon}_ok", + "!file_from_http_server_${init.existing_file_canon}_repaired", + "!file_from_http_server_${init.existing_file_canon}_error" + }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers.cf b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers.cf new file mode 100644 index 00000000000..8c85cdf73d1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers.cf @@ -0,0 +1,68 @@ +##################################################################################### +# +# Test that querying an existing URL returns 200, and that a non-existing one returns 404 +# +##################################################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "nonexistent_url" string => "https://repository.rudder.io/non_existent"; + "existent_url" string => "https://www.google.fr"; + 
"canon_nonexistent_url" string => canonify("${nonexistent_url}"); + "canon_existent_url" string => canonify("${existent_url}"); + "expected_nonexistent" string => "404"; + "expected_existent" string => "200"; + "headers" string => ""; + "method" string => "GET"; + +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => http_request_check_status_headers("${init.method}", "${init.nonexistent_url}", "${init.expected_nonexistent}", "${init.headers}"); + "ph" usebundle => http_request_check_status_headers("${init.method}", "${init.existent_url}", "${init.expected_existent}", "${init.headers}"); + + +} + +####################################################### + +bundle agent check +{ + classes: + "non_existent_returns_ok" expression => "http_request_check_status_headers_${init.canon_nonexistent_url}_reached.http_request_check_status_headers_${init.canon_nonexistent_url}_kept"; + "existent_returns_ok" expression => "http_request_check_status_headers_${init.canon_existent_url}_reached.http_request_check_status_headers_${init.canon_existent_url}_kept"; + "ok" expression => "non_existent_returns_ok.existent_returns_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + + !non_existent_returns_ok:: + "Wrong status for ${init.nonexistent_url}"; + + !existent_returns_ok:: + "Wrong status for ${init.existent_url}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers_invalid_expectation.cf b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers_invalid_expectation.cf new file mode 100644 index 00000000000..11666b91818 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_check_status_headers_invalid_expectation.cf @@ -0,0 +1,69 @@ +##################################################################################### +# +# Test that invalid expectation fails, when querying an existing URL and expecting 404, +# and that a non existing expecting 200 +# +##################################################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "nonexistent_url" string => "https://repository.rudder.io/non_existent"; + "existent_url" string => "http://www.google.fr"; + "canon_nonexistent_url" string => canonify("${nonexistent_url}"); + "canon_existent_url" string => canonify("${existent_url}"); + "expected_nonexistent" string => "200"; + "expected_existent" string => "404"; + "headers" string => ""; + "method" string => "GET"; + +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => http_request_check_status_headers("${init.method}", "${init.nonexistent_url}", "${init.expected_nonexistent}", "${init.headers}"); + "ph" usebundle => http_request_check_status_headers("${init.method}", "${init.existent_url}", "${init.expected_existent}", "${init.headers}"); + + +} + +####################################################### + +bundle agent check +{ + classes: + "non_existent_returns_ok" 
expression => "http_request_check_status_headers_${init.canon_nonexistent_url}_reached.http_request_check_status_headers_${init.canon_nonexistent_url}_not_ok"; + "existent_returns_ok" expression => "http_request_check_status_headers_${init.canon_existent_url}_reached.http_request_check_status_headers_${init.canon_existent_url}_not_ok"; + "ok" expression => "non_existent_returns_ok.existent_returns_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + + !non_existent_returns_ok:: + "Wrong status for ${init.nonexistent_url}"; + + !existent_returns_ok:: + "Wrong status for ${init.existent_url}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/http_request_content_headers.cf b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_content_headers.cf new file mode 100644 index 00000000000..dc887630bd8 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/network/http_request_content_headers.cf @@ -0,0 +1,71 @@ +##################################################################################### +# +# Send content to URL with specific method +# +##################################################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "existent_url" string => "https://postman-echo.com/post"; + "nonexistent_url" string => "https://postman-echo.com/something"; + "canon_nonexistent_url" string => canonify("${nonexistent_url}"); + "canon_existent_url" string => canonify("${existent_url}"); + "headers" string => "Content-type: application/xml"; + "method" string => "POST"; + "content" string => " + + + + +"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => http_request_content_headers("${init.method}", "${init.nonexistent_url}", "${init.content}", "${init.headers}"); + "ph" usebundle => http_request_content_headers("${init.method}", "${init.existent_url}", "${init.content}", "${init.headers}"); + + +} + +####################################################### + +bundle agent check +{ + classes: + "non_existent_returns_ok" expression => "http_request_content_headers_${init.canon_nonexistent_url}_reached.http_request_content_headers_${init.canon_nonexistent_url}_failed"; + "existent_returns_ok" expression => "http_request_content_headers_${init.canon_existent_url}_reached.http_request_content_headers_${init.canon_existent_url}_repaired"; + "ok" expression => "non_existent_returns_ok.existent_returns_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + + !non_existent_returns_ok:: + "Wrong status for ${init.nonexistent_url}"; + + !existent_returns_ok:: + "Wrong status for ${init.existent_url}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/package_install_nonexistent.cf b/policies/lib/tests/acceptance/30_generic_methods/network/package_install_nonexistent.cf new file mode 100644 index 00000000000..5558a5e440f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/network/package_install_nonexistent.cf @@ -0,0 +1,52 
@@ +####################################################### +# +# Test adding a package that doesn't exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "nonexistent" string => "nonexistentpackagename"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => package_install("${init.nonexistent}"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "package_install_${init.nonexistent}_not_ok.package_install_${init.nonexistent}_reached.!package_install_${init.nonexistent}_ok.!package_install_${init.nonexistent}_kept"; + redhat:: + "ok" expression => "any"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/network/verify_package_nonexistent.cf b/policies/lib/tests/acceptance/30_generic_methods/network/verify_package_nonexistent.cf new file mode 100644 index 00000000000..bd278f7c38d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/network/verify_package_nonexistent.cf @@ -0,0 +1,50 @@ +####################################################### +# +# Test verifying a package that doesn't exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "nonexistent" string => "nonexistentpackagename"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => package_verify("${init.nonexistent}"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "package_install_${init.nonexistent}_not_ok.package_install_${init.nonexistent}_reached.!package_install_${init.nonexistent}_ok.!package_install_${init.nonexistent}_kept"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/package.module b/policies/lib/tests/acceptance/30_generic_methods/package.module new file mode 100644 index 00000000000..52f7b832e15 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/package.module @@ -0,0 +1,70 @@ +#!/bin/sh + +set -e + +case "$1" in + supports-api-version) + echo 1 + ;; + get-package-data) + while read line; do + case "$line" in + File=*) + echo PackageType=repo + echo Name=${line#File=} + ;; + *) + true + ;; + esac + done + ;; + list-installed) + while read line; do + case "$line" in + options=*) + OUTPUT=${line#options=} + ;; + *) + exit 1 + ;; + esac + done + if [ -f "$OUTPUT" ]; then + cat "$OUTPUT" + fi + ;; + list-*) + # 
Drain input. + cat > /dev/null + ;; + repo-install) + while read line; do + case "$line" in + options=*) + OUTPUT=${line#options=} + ;; + Name=*) + NAME="${line#Name=}" + ;; + Version=*) + VERSION="${line#Version=}" + ;; + Architecture=*) + ARCHITECTURE="${line#Architecture=}" + ;; + *) + exit 1 + ;; + esac + done + echo "Name=$NAME" >> "$OUTPUT" + echo "Version=$VERSION" >> "$OUTPUT" + echo "Architecture=$ARCHITECTURE" >> "$OUTPUT" + ;; + *) + exit 1 + ;; +esac + +exit 0 diff --git a/policies/lib/tests/acceptance/30_generic_methods/package_present.cf b/policies/lib/tests/acceptance/30_generic_methods/package_present.cf new file mode 100644 index 00000000000..26787080f28 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/package_present.cf @@ -0,0 +1,77 @@ +##################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### + +# @agent_version >=3.7 + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + + +bundle agent init +{ + files: + "$(sys.workdir)/modules/packages/." 
+ create => "true"; + "$(sys.workdir)/modules/packages/test" + copy_from => local_cp("$(this.promise_dirname)/package.module"), + perms => m("ugo+x"); +} + + +bundle agent test +{ + + methods: + # version + "any" usebundle => package_present("package_1", "", "", "test"); + "any" usebundle => package_state("package_2", "3.3", "", "test", "present"); + "any" usebundle => package_state_options("package_3", "latest", "", "test", "present", ""); + "any" usebundle => package_present("package_4", "any", "", "test"); + # architecture + "any" usebundle => package_present("package_5", "", "x86_64", "test"); + "any" usebundle => package_present("package_6", "6.5", "i686", "test"); + "any" usebundle => package_absent("package_7", "6.5", "default", "test"); +} + +bundle agent check { + + vars: + "file_diff_test" string => "/usr/bin/diff \"${this.promise_filename}.expected\" \"${sys.workdir}/modules/packages/test_db\""; + + classes: + "ok" expression => returnszero("${file_diff_test}", "useshell"), + if => "package_present_package_1_ok.package_state_package_2_ok.package_state_options_package_3_ok.package_present_package_4_ok.package_present_package_5_ok.package_present_package_6_ok.package_absent_package_7_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/package_present.cf.expected b/policies/lib/tests/acceptance/30_generic_methods/package_present.cf.expected new file mode 100644 index 00000000000..a21f8ab39de --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/package_present.cf.expected @@ -0,0 +1,18 @@ +Name=package_1 +Version= +Architecture= +Name=package_2 +Version=3.3 +Architecture= +Name=package_3 +Version= +Architecture= +Name=package_4 +Version= +Architecture= +Name=package_5 +Version= +Architecture=x86_64 +Name=package_6 +Version=6.5 +Architecture=i686 diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions.cf new file mode 100644 index 00000000000..3adcea31ffa --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions.cf @@ -0,0 +1,85 @@ +####################################################### +# +# Test checking if a file / directory has the right mode +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "mode" string => "640"; + "owner" string => "bin"; + "group" string => "bin"; + + files: + "${file}" + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions("${init.file}", "${init.mode}", "${init.owner}", "${init.group}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "group_id" int => getgid("${init.group}"); + "permissions_test_mode" string => "/usr/bin/test 
${const.dollar}(${test_utils.file_perms} ${init.file}) = \"${init.mode}\"";
+    "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.file}) = \"${owner_id}\"";
+    "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.file}) = \"${group_id}\"";
+
+  classes:
+    # Only evaluate these checks once the permissions method has actually been applied (reached)
+    "permissions_test_mode_ok"
+      expression => returnszero("${permissions_test_mode}", "useshell"),
+      ifvarclass => canonify("permissions_${init.file}_reached");
+
+    "permissions_test_owner_ok"
+      expression => returnszero("${permissions_test_owner}", "useshell"),
+      ifvarclass => canonify("permissions_${init.file}_reached");
+
+    "permissions_test_group_ok"
+      expression => returnszero("${permissions_test_group}", "useshell"),
+      ifvarclass => canonify("permissions_${init.file}_reached");
+
+    "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.file_canon}_ok.!permissions_${init.file_canon}_error";
+
+  reports:
+    ok::
+      "$(this.promise_filename) Pass";
+    !ok::
+      "$(this.promise_filename) FAIL";
+    !permissions_test_mode_ok::
+      "test command doesn't return 0 for command: ${permissions_test_mode}";
+    !permissions_test_owner_ok::
+      "test command doesn't return 0 for command: ${permissions_test_owner}";
+    !permissions_test_group_ok::
+      "test command doesn't return 0 for command: ${permissions_test_group}";
+}
diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.error.cf
new file mode 100644
index 00000000000..3aed85519aa
--- /dev/null
+++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.error.cf
@@ -0,0 +1,161 @@
+#######################################################
+#
+# Test checking if ACLs are present or not
+#
+#######################################################
+
+bundle common acc_path
+{
+  vars:
+    "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024);
+}
+
+body common control
+{
+  inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" };
+  bundlesequence => { configuration, default("${this.promise_filename}") };
+  version => "1.0";
+}
+
+#######################################################
+
+bundle agent init
+{
+  vars:
+    "tmp" string => getenv("TEMP", 1024);
+    "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" };
+    "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" };
+    # CFEngine tends to minimize the mask when editing an ACL; do not force it here, otherwise it will be recalculated
+    # at execution when calling the GM below and we will never have error outcomes.
+ "files[1]" string => "file1"; + "user[1]" string => "*:"; + "group[1]" string => ""; + "other[1]" string => ""; + + "files[2]" string => "file2"; + "user[2]" string => ""; + "group[2]" string => "*:"; + "other[2]" string => ""; + + "files[3]" string => "file3"; + "user[3]" string => "bin:-rwx"; + "group[3]" string => "*:"; + "other[3]" string => ""; + + "files[4]" string => "file4"; + "user[4]" string => "bin:-r, bin:+x"; + "group[4]" string => "*:"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => "bin:-r, root:rx"; + "group[5]" string => "*:"; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "bin:-r, root:rx"; + "group[6]" string => "*:"; + "other[6]" string => "-r, =rx"; + + # To test the non recursivity + "files[7]" string => "file7/."; + "user[7]" string => "bin:=rx"; + "group[7]" string => ""; + "other[7]" string => ""; + + "files[8]" string => "file7/subfile1"; + "user[8]" string => ""; + "group[8]" string => ""; + "other[8]" string => ""; + + "files[9]" string => "file1"; + "user[9]" string => "*:rwxyz"; + "group[9]" string => ""; + "other[9]" string => "r"; + + "files[10]" string => "file10"; + "user[10]" string => "unknown_user:rx"; + "group[10]" string => ""; + "other[10]" string => "r"; + + "files[11]" string => "unknown_file"; + "user[11]" string => ""; + "group[11]" string => ""; + "other[11]" string => "r"; + + "files[12]" string => "file*"; + "user[12]" string => "*:yy"; + "group[12]" string => ""; + "other[12]" string => "r"; + + "indices" slist => getindices("files"); + # Do not create 11 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[${create_indices}]" slist => { @{expected_lines_files} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "false", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "error", "ph1", "audit" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "error", "ph2", "audit" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "error", "ph3", "audit" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "error", "ph4", "audit" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "error", "ph5", "audit" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "error", "ph6", "audit" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "error", "ph7", "audit" ); + # Do not apply anything on 8 + "ph9" usebundle => apply_gm("permissions_acl_entry", @{args9}, "error", "ph9", "audit" ); + "ph10" usebundle => apply_gm("permissions_acl_entry", @{args10}, "error", "ph10", "audit" ); + "ph11" usebundle => apply_gm("permissions_acl_entry", @{args11}, "error", "ph11", "audit" ); + "ph12" usebundle => apply_gm("permissions_acl_entry", @{args12}, "error", "ph12", "audit" ); + "disable" usebundle => set_dry_run_mode("false"); 
+} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph9_ok.ph10_ok.ph11_ok.ph12_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.success.cf new file mode 100644 index 00000000000..c1938f81f67 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.audit.success.cf @@ -0,0 +1,137 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "initial_acls_directories" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:rx", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + "expected_lines_directories" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r-x", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have success outcomes. 
+ + "files[1]" string => "file1"; + "user[1]" string => "*:+rwx"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "*:rwx"; + "group[2]" string => "*:+w, *:-w, bin:-wx"; + "other[2]" string => "r"; + + "files[3]" string => "file3"; + "user[3]" string => "*:+rx, bin:-wx"; + "group[3]" string => "*:+w, *:-w, bin:-wx"; + "other[3]" string => "=r"; + + "files[4]" string => "file4/."; + "user[4]" string => "*:+rx, bin:-wx"; + "group[4]" string => "*:+w, *:-w, bin:-wx"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => ""; + "group[5]" string => ""; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "*:+rw"; + "group[6]" string => ""; + "other[6]" string => ""; + + "files[7]" string => "file*"; + "user[7]" string => "*:+rw"; + "group[7]" string => ""; + "other[7]" string => ""; + + "indices" slist => getindices("files"); + # Do not create 7 + "create_indices" slist => { "1", "2", "3", "4", "5", "6" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[4]" slist => { @{expected_lines_directories} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + + "${tmp}/${files[4]}" + create => "true", + acl => access_generic("@{initial_acls_directories}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "false", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "success", "ph1", "audit" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "success", "ph2", "audit" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "success", "ph3", "audit" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "success", "ph4", "audit" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "success", "ph5", "audit" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "success", "ph6", "audit" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "success", "ph7", "audit" ); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + 
"####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.error.cf new file mode 100644 index 00000000000..d7ee9d711e6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.error.cf @@ -0,0 +1,117 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have error outcomes. 
+ + "files[1]" string => "file1"; + "user[1]" string => "*:rwxyz"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "unknown_user:rx"; + "group[2]" string => ""; + "other[2]" string => "r"; + + "files[3]" string => "unknown_file"; + "user[3]" string => ""; + "group[3]" string => ""; + "other[3]" string => "r"; + + "files[4]" string => "file*"; + "user[4]" string => "*:yz"; + "group[4]" string => ""; + "other[4]" string => "r"; + + "files[5]" string => "non_matching*regex"; + "user[5]" string => "*:"; + "group[5]" string => ""; + "other[5]" string => "r"; + + "indices" slist => getindices("files"); + # Do not create 3 and 4 + "create_indices" slist => { "1", "2" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[${create_indices}]" slist => { @{expected_lines_files} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "false", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "error", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "error", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "error", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "error", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "error", "ph5", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.repaired.cf new file mode 100644 index 00000000000..aa9a580926b --- /dev/null +++ 
b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.repaired.cf @@ -0,0 +1,152 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:", "mask:rwx" }; + # The tests uses "." instead in some mask pattern because it can be wrongly display in getfacl output + # if no aces are impacted by the mask + + "files[1]" string => "file1"; + "user[1]" string => "*:"; + "group[1]" string => ""; + "other[1]" string => ""; + "lines[1]" slist => { "user::---", "group::r-x", "user:bin:r--", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[2]" string => "file2"; + "user[2]" string => ""; + "group[2]" string => "*:"; + "other[2]" string => ""; + "lines[2]" slist => { "user::rwx", "group::---", "user:bin:r--", "group:bin:r--", "other::---", "mask::r.." }; + + "files[3]" string => "file3"; + "user[3]" string => "bin:-rwx"; + "group[3]" string => "*:"; + "other[3]" string => ""; + "lines[3]" slist => { "user::rwx", "user:bin:---", "group::---", "group:bin:r--", "other::---", "mask::r.." }; + + "files[4]" string => "file4"; + "user[4]" string => "bin:-r, bin:+x"; + "group[4]" string => "*:"; + "other[4]" string => ""; + "lines[4]" slist => { "user:bin:--x", "user::rwx", "group::---", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[5]" string => "file5"; + "user[5]" string => "bin:-r, root:rx"; + "group[5]" string => "*:"; + "other[5]" string => ""; + "lines[5]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[6]" string => "file6"; + "user[6]" string => "bin:-r, root:rx"; + "group[6]" string => "*:"; + "other[6]" string => "-r, =rx"; + "lines[6]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + # To test the non recursivity + "files[7]" string => "file7/."; + "user[7]" string => "bin:=rx"; + "group[7]" string => ""; + "other[7]" string => ""; + "lines[7]" slist => { "user:bin:r-x", "user::rwx", "group::r-x", "group:bin:r--", "mask::r.x", "other::---" }; + + "files[8]" string => "file7/subfile1"; + "user[8]" string => ""; + "group[8]" string => ""; + "other[8]" string => ""; + "lines[8]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::---", "mask::r.x" }; + + # To test regex + "files[9]" string => "rfile1"; + "lines[9]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + "files[10]" string => "rfile2"; + "lines[10]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + "files[11]" string => "rfile*"; + "user[11]" string => "bin:-r, root:rx"; + "group[11]" string => "*:"; + "other[11]" string => "-r, =rx"; + + "indices" slist => 
getindices("files"); + # Do not create 11 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" }; + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "false", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "repaired", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "repaired", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "repaired", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "repaired", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "repaired", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "repaired", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "repaired", "ph7", "enforce" ); + # Do not apply anything on 8-10 + "ph11" usebundle => apply_gm("permissions_acl_entry", @{args11}, "repaired", "ph11", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph11_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.success.cf new file mode 100644 index 00000000000..ecb3c6f27b0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.non_recursive.success.cf @@ -0,0 +1,135 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common 
control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "initial_acls_directories" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:rx", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + "expected_lines_directories" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r-x", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have success outcomes. + + "files[1]" string => "file1"; + "user[1]" string => "*:+rwx"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "*:rwx"; + "group[2]" string => "*:+w, *:-w, bin:-wx"; + "other[2]" string => "r"; + + "files[3]" string => "file3"; + "user[3]" string => "*:+rx, bin:-wx"; + "group[3]" string => "*:+w, *:-w, bin:-wx"; + "other[3]" string => "=r"; + + "files[4]" string => "file4/."; + "user[4]" string => "*:+rx, bin:-wx"; + "group[4]" string => "*:+w, *:-w, bin:-wx"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => ""; + "group[5]" string => ""; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "*:+rw"; + "group[6]" string => ""; + "other[6]" string => ""; + + "files[7]" string => "file*"; + "user[7]" string => "*:+rw"; + "group[7]" string => ""; + "other[7]" string => ""; + + "indices" slist => getindices("files"); + # Do not create 7 + "create_indices" slist => { "1", "2", "3", "4", "5", "6" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[4]" slist => { @{expected_lines_directories} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + + "${tmp}/${files[4]}" + create => "true", + acl => access_generic("@{initial_acls_directories}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "false", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "success", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "success", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "success", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "success", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "success", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "success", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "success", "ph7", 
"enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.error.cf new file mode 100644 index 00000000000..07b2e023534 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.error.cf @@ -0,0 +1,177 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "subfile_acls" slist => { "user:*:rwx", "group:*:rx", "user:bin:rx", "group:bin:r", "all:r", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + "expected_lines_subfiles" slist => { "user::rwx", "group::r-x", "user:bin:r-x", "group:bin:r--", "other::r--", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have error outcomes. 
+ "files[1]" string => "file1"; + "user[1]" string => "*:"; + "group[1]" string => ""; + "other[1]" string => ""; + + "files[2]" string => "file2"; + "user[2]" string => ""; + "group[2]" string => "*:"; + "other[2]" string => ""; + + "files[3]" string => "file3"; + "user[3]" string => "bin:-rwx"; + "group[3]" string => "*:"; + "other[3]" string => ""; + + "files[4]" string => "file4"; + "user[4]" string => "bin:-r, bin:+x"; + "group[4]" string => "*:"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => "bin:-r, root:rx"; + "group[5]" string => "*:"; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "bin:-r, root:rx"; + "group[6]" string => "*:"; + "other[6]" string => "-r, =rx"; + + # To test the recursivity + "files[7]" string => "file7/."; + "user[7]" string => "bin:=r"; + "group[7]" string => ""; + "other[7]" string => ""; + + # Has a user:bin:rx instead of user:bin:r + "files[8]" string => "file7/1/2/3/4/5/subfile1"; + "user[8]" string => ""; + "group[8]" string => ""; + "other[8]" string => ""; + + "files[9]" string => "file1"; + "user[9]" string => "*:rwxyz"; + "group[9]" string => ""; + "other[9]" string => "r"; + + "files[10]" string => "file10"; + "user[10]" string => "unknown_user:rx"; + "group[10]" string => ""; + "other[10]" string => "r"; + + "files[11]" string => "unknown_file"; + "user[11]" string => ""; + "group[11]" string => ""; + "other[11]" string => "r"; + + "files[12]" string => "file*"; + "user[12]" string => "*:yy"; + "group[12]" string => ""; + "other[12]" string => "r"; + + "files[13]" string => "non_matching*regex"; + "user[13]" string => "*:"; + "group[13]" string => ""; + "other[13]" string => "r"; + + "indices" slist => getindices("files"); + # Do not create 11 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" }; + "file_acls_indices" slist => { "1", "2", "3", "4", "5", "6", "7", "9", "10" }; + "subfile_acls_indices" slist => { "8" }; + + "lines[${file_acls_indices}]" slist => { @{expected_lines_files} }; + "lines[${subfile_acls_indices}]" slist => { @{expected_lines_subfiles} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${file_acls_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + + "${tmp}/${files[${subfile_acls_indices}]}" + create => "true", + acl => access_generic("@{subfile_acls}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "true", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "error", "ph1", "audit" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "error", "ph2", "audit" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "error", "ph3", "audit" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "error", "ph4", "audit" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "error", "ph5", "audit" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "error", "ph6", "audit" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "error", "ph7", "audit" ); + # Do not apply 
anything on 8 + "ph9" usebundle => apply_gm("permissions_acl_entry", @{args9}, "error", "ph9", "audit" ); + "ph10" usebundle => apply_gm("permissions_acl_entry", @{args10}, "error", "ph10", "audit" ); + "ph11" usebundle => apply_gm("permissions_acl_entry", @{args11}, "error", "ph11", "audit" ); + "ph12" usebundle => apply_gm("permissions_acl_entry", @{args12}, "error", "ph12", "audit" ); + "ph13" usebundle => apply_gm("permissions_acl_entry", @{args13}, "error", "ph13", "audit" ); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph9_ok.ph10_ok.ph11_ok.ph12_ok.ph13_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.success.cf new file mode 100644 index 00000000000..4c6998779c5 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.audit.success.cf @@ -0,0 +1,144 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "initial_acls_directories" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:rx", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + "expected_lines_directories" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r-x", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here 
otherwise it will be recalculated + # at execution when calling the GM below and we will never have success outcomes. + + "files[1]" string => "file1"; + "user[1]" string => "*:+rwx"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "*:rwx"; + "group[2]" string => "*:+w, *:-w, bin:-wx"; + "other[2]" string => "r"; + + "files[3]" string => "file3"; + "user[3]" string => "*:+rx, bin:-wx"; + "group[3]" string => "*:+w, *:-w, bin:-wx"; + "other[3]" string => "=r"; + + "files[4]" string => "file4/."; + "user[4]" string => "*:+rx, bin:-wx"; + "group[4]" string => "*:+w, *:-w, bin:-wx"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => ""; + "group[5]" string => ""; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "*:+rw"; + "group[6]" string => ""; + "other[6]" string => ""; + + "files[7]" string => "file*"; + "user[7]" string => "*:+rw"; + "group[7]" string => ""; + "other[7]" string => ""; + + "files[8]" string => "file4/1/2/3/4/5/subfile1"; + "user[8]" string => "*:+rw"; + "group[8]" string => ""; + "other[8]" string => ""; + + "indices" slist => getindices("files"); + # Do not create 7 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "8" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[4]" slist => { @{expected_lines_directories} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + + "${tmp}/${files[4]}" + create => "true", + depth_search => recurse_with_base("3"), + acl => access_generic("@{initial_acls_directories}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "true", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "success", "ph1", "audit" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "success", "ph2", "audit" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "success", "ph3", "audit" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "success", "ph4", "audit" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "success", "ph5", "audit" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "success", "ph6", "audit" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "success", "ph7", "audit" ); + "ph8" usebundle => apply_gm("permissions_acl_entry", @{args8}, "success", "ph8", "audit" ); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + 
"lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph8_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.error.cf new file mode 100644 index 00000000000..38505c98c98 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.error.cf @@ -0,0 +1,117 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have error outcomes. 
+ + "files[1]" string => "file1"; + "user[1]" string => "*:rwxyz"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "unknown_user:rx"; + "group[2]" string => ""; + "other[2]" string => "r"; + + "files[3]" string => "unknown_file"; + "user[3]" string => ""; + "group[3]" string => ""; + "other[3]" string => "r"; + + "files[4]" string => "file*"; + "user[4]" string => "*:yz"; + "group[4]" string => ""; + "other[4]" string => "r"; + + "files[5]" string => "non_matching*regex"; + "user[5]" string => "*:"; + "group[5]" string => ""; + "other[5]" string => "r"; + + "indices" slist => getindices("files"); + # Do not create 3 and 4 + "create_indices" slist => { "1", "2" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[${create_indices}]" slist => { @{expected_lines_files} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "true", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "error", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "error", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "error", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "error", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "error", "ph5", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.repaired.cf new file mode 100644 index 00000000000..65fbab63f16 --- /dev/null +++ 
b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.repaired.cf @@ -0,0 +1,160 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:", "mask:rwx" }; + "subfile_initial_acls" slist => { "user:*:rwx", "group:*:rx", "user:bin:rx", "group:bin:r", "all:", "mask:rwx" }; + # The tests use "." instead in some mask patterns because the mask can be wrongly displayed in the getfacl output + # if no ACEs are impacted by it + + "files[1]" string => "file1"; + "user[1]" string => "*:"; + "group[1]" string => ""; + "other[1]" string => ""; + "lines[1]" slist => { "user::---", "group::r-x", "user:bin:r--", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[2]" string => "file2"; + "user[2]" string => ""; + "group[2]" string => "*:"; + "other[2]" string => ""; + "lines[2]" slist => { "user::rwx", "group::---", "user:bin:r--", "group:bin:r--", "other::---", "mask::r.." }; + + "files[3]" string => "file3"; + "user[3]" string => "bin:-rwx"; + "group[3]" string => "*:"; + "other[3]" string => ""; + "lines[3]" slist => { "user::rwx", "user:bin:---", "group::---", "group:bin:r--", "other::---", "mask::r.." 
}; + + "files[4]" string => "file4"; + "user[4]" string => "bin:-r, bin:+x"; + "group[4]" string => "*:"; + "other[4]" string => ""; + "lines[4]" slist => { "user:bin:--x", "user::rwx", "group::---", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[5]" string => "file5"; + "user[5]" string => "bin:-r, root:rx"; + "group[5]" string => "*:"; + "other[5]" string => ""; + "lines[5]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "other::---", "mask::r.x" }; + + "files[6]" string => "file6"; + "user[6]" string => "bin:-r, root:rx"; + "group[6]" string => "*:"; + "other[6]" string => "-r, =rx"; + "lines[6]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + # To test the recursivity + "files[7]" string => "file7/."; + "user[7]" string => "bin:=rx"; + "group[7]" string => ""; + "other[7]" string => ""; + "lines[7]" slist => { "user:bin:r-x", "user::rwx", "group::r-x", "group:bin:r--", "mask::r.x", "other::---" }; + + "files[8]" string => "file7/1/2/3/4/5/subfile1"; + "user[8]" string => ""; + "group[8]" string => ""; + "other[8]" string => ""; + "lines[8]" slist => { "user::rwx", "group::r-x", "user:bin:r-x", "group:bin:r--", "other::---", "mask::r.x" }; + + # To test regex + "files[9]" string => "rfile1"; + "lines[9]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + "files[10]" string => "rfile2"; + "lines[10]" slist => { "user:bin:---", "user:root:r-x", "group::---", "user::rwx", "group::---", "group:bin:r--", "mask::r.x", "other::r-x" }; + + "files[11]" string => "rfile*"; + "user[11]" string => "bin:-r, root:rx"; + "group[11]" string => "*:"; + "other[11]" string => "-r, =rx"; + + "indices" slist => getindices("files"); + # Do not create 11 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" }; + "file_list" slist => { "1", "2", "3", "4", "5", "6", "8", "9", "10" }; + "subfile_list" slist => { "7" }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${file_list}]}" + create => "true", + acl => access_generic("@{initial_acls}"); + + "${tmp}/${files[${subfile_list}]}" + create => "true", + acl => access_generic("@{subfile_initial_acls}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "true", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "repaired", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "repaired", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "repaired", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "repaired", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "repaired", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "repaired", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "repaired", "ph7", "enforce" ); + # Do not apply anything on 8-10 + "ph11" usebundle => apply_gm("permissions_acl_entry", @{args11}, 
"repaired", "ph11", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => "lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph11_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.success.cf new file mode 100644 index 00000000000..71256c5128c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_acl_entry.recursive.success.cf @@ -0,0 +1,141 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "initial_acls_files" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r", "mask:rx" }; + "initial_acls_directories" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:rx", "mask:rx" }; + "expected_lines_files" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--", "mask::r-x" }; + "expected_lines_directories" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r-x", "mask::r-x" }; + # Cfengine tends to minimize the mask when editing an ACL, do not force it here otherwise it will be recalculated + # at execution when calling the GM below and we will never have success outcomes. 
+ + "files[1]" string => "file1"; + "user[1]" string => "*:+rwx"; + "group[1]" string => ""; + "other[1]" string => "r"; + + "files[2]" string => "file2"; + "user[2]" string => "*:rwx"; + "group[2]" string => "*:+w, *:-w, bin:-wx"; + "other[2]" string => "r"; + + "files[3]" string => "file3"; + "user[3]" string => "*:+rx, bin:-wx"; + "group[3]" string => "*:+w, *:-w, bin:-wx"; + "other[3]" string => "=r"; + + "files[4]" string => "file4/."; + "user[4]" string => "*:+rx, bin:-wx"; + "group[4]" string => "*:+w, *:-w, bin:-wx"; + "other[4]" string => ""; + + "files[5]" string => "file5"; + "user[5]" string => ""; + "group[5]" string => ""; + "other[5]" string => ""; + + "files[6]" string => "file6"; + "user[6]" string => "*:+rw"; + "group[6]" string => ""; + "other[6]" string => ""; + + "files[7]" string => "file*"; + "user[7]" string => "*:+rw"; + "group[7]" string => ""; + "other[7]" string => ""; + + "files[8]" string => "file4/1/2/3/4/5/subfile1"; + "user[8]" string => "*:+rx, bin:-wx"; + "group[8]" string => "*:+w, *:-w, bin:-wx"; + "other[8]" string => ""; + + "indices" slist => getindices("files"); + # Do not create 7 + "create_indices" slist => { "1", "2", "3", "4", "5", "6", "8" }; + "lines[${indices}]" slist => { @{expected_lines_files} }; + "lines[4]" slist => { @{expected_lines_directories} }; + + "printable_lines[${indices}]" string => join("${const.endl}", "lines[${indices}]"); + "canonified_lines[${indices}]" string => canonify("${lines[${indices}]}"); + + files: + "${tmp}/${files[${create_indices}]}" + create => "true", + acl => access_generic("@{initial_acls_files}"); + + "${tmp}/${files[4]}" + create => "true", + depth_search => recurse_with_base("3"), + acl => access_generic("@{initial_acls_directories}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "true", "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.other[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("permissions_acl_entry", @{args1}, "success", "ph1", "enforce" ); + "ph2" usebundle => apply_gm("permissions_acl_entry", @{args2}, "success", "ph2", "enforce" ); + "ph3" usebundle => apply_gm("permissions_acl_entry", @{args3}, "success", "ph3", "enforce" ); + "ph4" usebundle => apply_gm("permissions_acl_entry", @{args4}, "success", "ph4", "enforce" ); + "ph5" usebundle => apply_gm("permissions_acl_entry", @{args5}, "success", "ph5", "enforce" ); + "ph6" usebundle => apply_gm("permissions_acl_entry", @{args6}, "success", "ph6", "enforce" ); + "ph7" usebundle => apply_gm("permissions_acl_entry", @{args7}, "success", "ph7", "enforce" ); + "ph8" usebundle => apply_gm("permissions_acl_entry", @{args8}, "success", "ph8", "enforce" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass3:: + "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}" not => regcmp(".*${init.lines[${init.create_indices}]}.*","${getfacl_output[${init.create_indices}]}"); + "lines_${init.create_indices}_ok" expression => "lines_matches_${init.create_indices}_${init.canonified_lines[${init.create_indices}]}"; + "lines_not_ok" expression => 
"lines_${init.create_indices}_ok"; + + + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph8_ok"; + "ok" expression => "classes_ok.!lines_not_ok"; + + reports: + pass3:: + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_${init.indices}_${init.canonified_lines[${init.indices}]}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs.cf new file mode 100644 index 00000000000..ccb41ede395 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs.cf @@ -0,0 +1,85 @@ +####################################################### +# +# Test checking if a file / directory has the right mode +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon" string => canonify("${directory}"); + "mode" string => "750"; + "owner" string => "bin"; + "group" string => "bin"; + + files: + "${directory}/." 
+ create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_dirs("${init.directory}", "${init.mode}", "${init.owner}", "${init.group}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "group_id" int => getgid("${init.group}"); + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(${test_utils.file_perms} ${init.directory}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.directory}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.directory}) = \"${group_id}\""; + + classes: + # By default, permissions_type_recursion should create the directory if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.directory_canon}_ok.!permissions_${init.directory_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs_recursive.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs_recursive.cf new file mode 100644 index 00000000000..47555b133e7 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_dirs_recursive.cf @@ -0,0 +1,206 @@ +####################################################### +# +# Test checking if two file permissions in a directory can be changed recursively +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "directory[1]" string => "${tmp}/test1"; + "directory_canon[1]" string => canonify("${directory[1]}"); + "mode[1]" string => "750"; + "Emode[1]" string => "750"; + "owner[1]" string => "bin"; + "Eowner[1]" string => "bin"; + "group[1]" string => "bin"; + "Egroup[1]" string => "bin"; + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "audit[1]" string => "enforce"; + "execute[1]" string => "true"; # Tell if an execution is needed + + 
"directory[2]" string => "${tmp}/test2"; + "directory_canon[2]" string => canonify("${directory[2]}"); + "mode[2]" string => "750"; + "Emode[2]" string => "750"; # expected resulting mode + "owner[2]" string => ""; + "Eowner[2]" string => "root"; + "group[2]" string => ""; + "Egroup[2]" string => "0"; + "status[2]" string => "repaired"; # expected status, "repaired", "success" or "error" + "audit[2]" string => "enforce"; + "execute[2]" string => "true"; # Tell if an execution is needed + + "directory[3]" string => "${tmp}/test3"; + "directory_canon[3]" string => canonify("${directory[3]}"); + "mode[3]" string => ""; + "Emode[3]" string => "000"; + "owner[3]" string => "bin"; + "Eowner[3]" string => "bin"; + "group[3]" string => ""; + "Egroup[3]" string => "0"; + "status[3]" string => "repaired"; # expected status, "repaired", "success" or "error" + "audit[3]" string => "enforce"; + "execute[3]" string => "true"; # Tell if an execution is needed + + "directory[4]" string => "${tmp}/test4"; + "directory_canon[4]" string => canonify("${directory[4]}"); + "mode[4]" string => ""; + "Emode[4]" string => "000"; + "owner[4]" string => ""; + "Eowner[4]" string => "root"; + "group[4]" string => "bin"; + "Egroup[4]" string => "bin"; + "status[4]" string => "repaired"; # expected status, "repaired", "success" or "error" + "audit[4]" string => "enforce"; + "execute[4]" string => "true"; # Tell if an execution is needed + + "directory[5]" string => "${tmp}/test5"; + "directory_canon[5]" string => canonify("${directory[5]}"); + "mode[5]" string => ""; + "Emode[5]" string => "000"; + "owner[5]" string => "root"; + "Eowner[5]" string => "root"; + "group[5]" string => ""; + "Egroup[5]" string => "0"; + "status[5]" string => "success"; # expected status, "repaired", "success" or "error" + "audit[5]" string => "enforce"; + "execute[5]" string => "true"; # Tell if an execution is needed + + "directory[6]" string => "${tmp}/test6"; + "directory_canon[6]" string => canonify("${directory[6]}"); + "mode[6]" string => "000"; + "Emode[6]" string => "000"; + "owner[6]" string => "root"; + "Eowner[6]" string => "root"; + "group[6]" string => ""; + "Egroup[6]" string => "0"; + "status[6]" string => "success"; # expected status, "repaired", "success" or "error" + "audit[6]" string => "audit"; + "execute[6]" string => "false"; # Tell if an execution is needed + + + "directory[7]" string => "${tmp}/test7"; + "directory_canon[7]" string => canonify("${directory[7]}"); + "mode[7]" string => "700"; + "Emode[7]" string => "000"; + "owner[7]" string => "root"; + "Eowner[7]" string => "root"; + "group[7]" string => ""; + "Egroup[7]" string => "0"; + "status[7]" string => "error"; # expected status, "repaired", "success" or "error" + "audit[7]" string => "audit"; + "execute[7]" string => "false"; # Tell if an execution is needed + + "indices" slist => {1, 2, 3, 4, 5, 6, 7}; + + files: + "${directory[${indices}]}/." + create => "true", + perms => mog("000", "root", "0"); + + "${directory[${indices}]}/file1/." + create => "true", + perms => mog("000", "root", "0"); + + "${directory[${indices}]}/file2/." 
+ create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.directory[${init.indices}]}", "${init.mode[${init.indices}]}", "${init.owner[${init.indices}]}", "${init.group[${init.indices}]}" }; + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_dirs_recursive", @{args1}, "${init.status[1]}", "ph1", "${init.audit[1]}" ); + "ph2" usebundle => apply_gm("permissions_dirs_recursive", @{args2}, "${init.status[2]}", "ph2", "${init.audit[2]}" ); + "ph3" usebundle => apply_gm("permissions_dirs_recursive", @{args3}, "${init.status[3]}", "ph3", "${init.audit[3]}" ); + "ph4" usebundle => apply_gm("permissions_dirs_recursive", @{args4}, "${init.status[4]}", "ph4", "${init.audit[4]}" ); + # SUCCESS + "ph5" usebundle => apply_gm("permissions_dirs_recursive", @{args5}, "${init.status[5]}", "ph5", "${init.audit[5]}" ); + # audit success ERROR + "ph6" usebundle => apply_gm("permissions_dirs_recursive", @{args6}, "${init.status[6]}", "ph6", "${init.audit[6]}" ); + # AUDIT ERROR + "ph7" usebundle => apply_gm("permissions_dirs_recursive", @{args7}, "${init.status[7]}", "ph7", "${init.audit[7]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + "permissions_test_mode[${init.indices}]" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory[${init.indices}]} -perm ${init.Emode[${init.indices}]} | wc -l) = \"3\""; + "permissions_test_owner[${init.indices}]" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory[${init.indices}]} -user ${init.Eowner[${init.indices}]} | wc -l) = \"3\""; + "permissions_test_group[${init.indices}]" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory[${init.indices}]} -group ${init.Egroup[${init.indices}]} | wc -l) = \"3\""; + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass1:: + "permissions_${init.indices}_test_mode_ok" + expression => returnszero("${permissions_test_mode[${init.indices}]}", "useshell"); + "permissions_${init.indices}_test_owner_ok" + expression => returnszero("${permissions_test_owner[${init.indices}]}", "useshell"); + "permissions_${init.indices}_test_group_ok" + expression => returnszero("${permissions_test_group[${init.indices}]}", "useshell"); + + "result_nok" not => "permissions_${init.indices}_test_mode_ok.permissions_${init.indices}_test_owner_ok.permissions_${init.indices}_test_group_ok"; + + + # classes_ok is just a placeholder. 
What we really want is find the not ok + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"); + + "ok" expression => "!classes_not_ok.!result_nok"; + + + reports: + pass3:: + "Test for permissions_dirs_recursive nb ${init.indices} FAILED" + if => "!ph${init.indices}_ok"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; + pass3:: + "test command doesn't return 0 for command: ${permissions_test_mode[${init.indices}]}" + if => "!permissions_${init.indices}_test_mode_ok"; + + "test command doesn't return 0 for command: ${permissions_test_owner[${init.indices}]}" + if => "!permissions_${init.indices}_test_owner_ok"; + + "test command doesn't return 0 for command: ${permissions_test_group[${init.indices}]}" + if => "!permissions_${init.indices}_test_group_ok"; + + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_absent.cf new file mode 100644 index 00000000000..6e57fdc365b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_absent.cf @@ -0,0 +1,447 @@ +####################################################### +# +# Test checking if ACLs are absent or not. +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # REPAIRED + ## recursive + "files[1]" string => "file1"; # args of the GM + "recursive[1]" string => "true"; # \\ + "group[1]" string => "bin"; # \\ + "create[1]" string => "true"; # if "true", the file will be created at init + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[1]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[1]" slist => { "group:bin*" }; + # -> list of regex that {must|must not} match the output of getfacl on the file at the en of the test + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "files[2]" string => "file2/."; + "recursive[2]" string => "true"; + "group[2]" string => "bin"; + "create[2]" string => "true"; + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:rx" }; + "exp_lines[2]" slist => { "user::rwx", "group::r-x", "other::r-x" }; + "unexp_lines[2]" slist => { "group:bin*" }; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "files[3]" string => "file2/subfile1"; + "create[3]" string => "true"; + "initial[3]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[3]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[3]" slist => { "group:bin*" }; + + "files[4]" string => "file4/."; + "recursive[4]" string => "true"; + "group[4]" string => "bin"; + "create[4]" string => "true"; + "initial[4]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", 
"group:bin:r", "all:rx" }; + "exp_lines[4]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "other::r-x" }; + "unexp_lines[4]" slist => { "group:bin*" }; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + # non-recursive + "files[5]" string => "file5"; + "recursive[5]" string => "false"; + "group[5]" string => "bin"; + "create[5]" string => "true"; + "initial[5]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[5]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[5]" slist => { "group:bin*" }; + "mode[5]" string => "enforce"; + "status[5]" string => "repaired"; + "execute[5]" string => "true"; + + "files[6]" string => "file6/."; + "recursive[6]" string => "false"; + "group[6]" string => "bin"; + "create[6]" string => "true"; + "initial[6]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[6]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "other::r-x" }; + "unexp_lines[6]" slist => { "group:bin*" }; + "mode[6]" string => "enforce"; + "status[6]" string => "repaired"; + "execute[6]" string => "true"; + + "files[7]" string => "file6/subfile1"; + "create[7]" string => "true"; + "initial[7]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[7]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[8]" string => "file8/."; + "recursive[8]" string => "false"; + "group[8]" string => "bin"; + "create[8]" string => "true"; + "initial[8]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[8]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "other::r-x" }; + "unexp_lines[8]" slist => { "group:bin*" }; + "mode[8]" string => "enforce"; + "status[8]" string => "repaired"; + "execute[8]" string => "true"; + + # SUCCESS + "files[9]" string => "file9"; + "recursive[9]" string => "true"; + "group[9]" string => "bin"; + "create[9]" string => "true"; + "initial[9]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:r" }; + "exp_lines[9]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[9]" slist => { "group:bin*" }; + "mode[9]" string => "enforce"; + "status[9]" string => "success"; + "execute[9]" string => "true"; + + "files[10]" string => "file10/."; + "recursive[10]" string => "true"; + "group[10]" string => "bin"; + "create[10]" string => "true"; + "initial[10]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:r" }; + "exp_lines[10]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[10]" slist => { "group:bin*" }; + "mode[10]" string => "enforce"; + "status[10]" string => "success"; + "execute[10]" string => "true"; + + # non-recursive + "files[11]" string => "file11/."; + "recursive[11]" string => "false"; + "group[11]" string => "bin"; + "create[11]" string => "true"; + "initial[11]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:rx" }; + "exp_lines[11]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r-x" }; + "unexp_lines[11]" slist => { "group:bin*" }; + "mode[11]" string => "enforce"; + "status[11]" string => "success"; + "execute[11]" string => "true"; + + "files[12]" string => "file11/subfile1"; + "create[12]" string => "true"; + "initial[12]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[12]" slist => { "user::rwx", 
"group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[13]" string => "file13"; + "recursive[13]" string => "false"; + "group[13]" string => "aGroupThatDoesNotExist"; + "create[13]" string => "true"; + "initial[13]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[13]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "unexp_lines[13]" slist => { "group:aGroupThatDoesNotExist*" }; + "mode[13]" string => "enforce"; + "status[13]" string => "success"; + "execute[13]" string => "true"; + + # ERROR + "files[14]" string => "fileThatDoesNotExist"; + "recursive[14]" string => "false"; + "group[14]" string => "bin"; + "create[14]" string => "false"; + "mode[14]" string => "enforce"; + "status[14]" string => "error"; + "execute[14]" string => "true"; + + "files[15]" string => "fileThatDoesNotExist2"; + "recursive[15]" string => "true"; + "group[15]" string => "bin"; + "create[15]" string => "false"; + "mode[15]" string => "enforce"; + "status[15]" string => "error"; + "execute[15]" string => "true"; + + # AUDIT SUCCESS + # Copy of success enforce part + "files[16]" string => "file16"; + "recursive[16]" string => "true"; + "group[16]" string => "bin"; + "create[16]" string => "true"; + "initial[16]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:r" }; + "exp_lines[16]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[16]" slist => { "group:bin*" }; + "mode[16]" string => "audit"; + "status[16]" string => "success"; + "execute[16]" string => "true"; + + "files[17]" string => "file17/."; + "recursive[17]" string => "true"; + "group[17]" string => "bin"; + "create[17]" string => "true"; + "initial[17]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:r" }; + "exp_lines[17]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[17]" slist => { "group:bin*" }; + "mode[17]" string => "audit"; + "status[17]" string => "success"; + "execute[17]" string => "true"; + + # non-recursive + "files[18]" string => "file18/."; + "recursive[18]" string => "false"; + "group[18]" string => "bin"; + "create[18]" string => "true"; + "initial[18]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:rx" }; + "exp_lines[18]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r-x" }; + "unexp_lines[18]" slist => { "group:bin*" }; + "mode[18]" string => "audit"; + "status[18]" string => "success"; + "execute[18]" string => "true"; + + "files[19]" string => "file18/subfile1"; + "create[19]" string => "true"; + "initial[19]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[19]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[20]" string => "file20"; + "recursive[20]" string => "false"; + "group[20]" string => "aGroupThatDoesNotExist"; + "create[20]" string => "true"; + "initial[20]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[20]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "unexp_lines[20]" slist => { "user:aGroupThatDoesNotExist*" }; + "mode[20]" string => "audit"; + "status[20]" string => "success"; + "execute[20]" string => "true"; + + # AUDIT ERROR + # As in the repaired tests + "files[21]" string => "file21/."; + "recursive[21]" string => "true"; + "group[21]" string => "bin"; + "create[21]" string => "true"; + "initial[21]" slist 
=> { "user:*:rwx", "group:*:rx", "user:bin:r", "all:rx" }; + "exp_lines[21]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r-x" }; + "unexp_lines[21]" slist => { "group:bin*" }; + "mode[21]" string => "audit"; + "status[21]" string => "error"; + "execute[21]" string => "true"; + + "files[22]" string => "file21/subfile1"; + "create[22]" string => "true"; + "initial[22]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[22]" slist => { "user::rwx", "group::r-x", "user:bin:r", "group:bin:r--", "other::r--" }; + + "files[23]" string => "file23/."; + "recursive[23]" string => "true"; + "group[23]" string => "bin"; + "create[23]" string => "true"; + "initial[23]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[23]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[23]" string => "audit"; + "status[23]" string => "error"; + "execute[23]" string => "true"; + + # non-recursive + "files[24]" string => "file24"; + "recursive[24]" string => "false"; + "group[24]" string => "bin"; + "create[24]" string => "true"; + "initial[24]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[24]" slist => { "user::rwx", "group::r-x", "user:bin:r", "group:bin:r--", "other::r--" }; + "mode[24]" string => "audit"; + "status[24]" string => "error"; + "execute[24]" string => "true"; + + "files[25]" string => "file25/."; + "recursive[25]" string => "false"; + "group[25]" string => "bin"; + "create[25]" string => "true"; + "initial[25]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[25]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[25]" string => "audit"; + "status[25]" string => "error"; + "execute[25]" string => "true"; + + "files[26]" string => "file6/subfile1"; + "create[26]" string => "true"; + "initial[26]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "all:r" }; + "exp_lines[26]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + + "files[27]" string => "file27/."; + "recursive[27]" string => "false"; + "group[27]" string => "bin"; + "create[27]" string => "true"; + "initial[27]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[27]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[27]" string => "audit"; + "status[27]" string => "error"; + "execute[27]" string => "true"; + + # As in the error tests + "files[28]" string => "fileThatDoesNotExist28"; + "recursive[28]" string => "false"; + "group[28]" string => "bin"; + "create[28]" string => "false"; + "mode[28]" string => "enforce"; + "status[28]" string => "error"; + "execute[28]" string => "true"; + + "files[29]" string => "fileThatDoesNotExist29"; + "recursive[29]" string => "true"; + "group[29]" string => "bin"; + "create[29]" string => "false"; + "mode[29]" string => "enforce"; + "status[29]" string => "error"; + "execute[29]" string => "true"; + + + # REGEX entry test + "files[30]" string => "rfile30"; + "create[30]" string => "true"; + "initial[30]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[30]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[30]" slist => { "group:bin*" }; + "execute[30]" string => "false"; + + "files[31]" string => "rfile31/."; + "create[31]" string => 
"true"; + "initial[31]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[31]" slist => { "user::rwx", "group::r-x", "user:bin:---", "other::r-x" }; + "unexp_lines[31]" slist => { "group:bin*" }; + "execute[31]" string => "false"; + + "files[32]" string => "rfile32/subfile1"; + "create[32]" string => "true"; + "initial[32]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[32]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "other::r--" }; + "unexp_lines[32]" slist => { "group:bin*" }; + + "files[33]" string => "rfile*"; + "recursive[33]" string => "true"; + "group[33]" string => "bin"; + "create[33]" string => "false"; + "mode[33]" string => "enforce"; + "status[33]" string => "repaired"; + "execute[33]" string => "true"; + + "indices" slist => getindices("files"); + + "printable_lines[${indices}]" string => join("${const.endl}", "exp_lines[${indices}]"); + "printable_unexpected_lines[${indices}]" string => join("${const.endl}", "unexp_lines[${indices}]"); + + classes: + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${tmp}/${files[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => access_generic("@{initial[${indices}]}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "${init.recursive[${init.indices}]}", "${init.group[${init.indices}]}"}; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_group_acl_absent", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("permissions_group_acl_absent", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + # Do not execute 3 + "ph4" usebundle => apply_gm("permissions_group_acl_absent", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + "ph5" usebundle => apply_gm("permissions_group_acl_absent", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + "ph6" usebundle => apply_gm("permissions_group_acl_absent", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + # Do not execute 7 + "ph8" usebundle => apply_gm("permissions_group_acl_absent", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + #SUCCESS + "ph9" usebundle => apply_gm("permissions_group_acl_absent", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}" ); + "ph10" usebundle => apply_gm("permissions_group_acl_absent", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + "ph11" usebundle => apply_gm("permissions_group_acl_absent", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}" ); + # Do not execute 12 + "ph13" usebundle => apply_gm("permissions_group_acl_absent", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}" ); + #ERROR + "ph14" usebundle => apply_gm("permissions_group_acl_absent", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}" ); + "ph15" usebundle => apply_gm("permissions_group_acl_absent", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}" ); + #AUDIT SUCCESS + "ph16" usebundle => apply_gm("permissions_group_acl_absent", @{args16}, "${init.status[16]}", "ph16", "${init.mode[16]}" ); + "ph17" usebundle => apply_gm("permissions_group_acl_absent", @{args17}, "${init.status[17]}", "ph17", "${init.mode[17]}" ); + "ph18" usebundle => 
apply_gm("permissions_group_acl_absent", @{args18}, "${init.status[18]}", "ph18", "${init.mode[18]}" ); + # Do not execute 19 + "ph20" usebundle => apply_gm("permissions_group_acl_absent", @{args20}, "${init.status[20]}", "ph20", "${init.mode[20]}" ); + #AUDIT ERROR + "ph21" usebundle => apply_gm("permissions_group_acl_absent", @{args21}, "${init.status[21]}", "ph21", "${init.mode[21]}" ); + # Do not execute 22 + "ph23" usebundle => apply_gm("permissions_group_acl_absent", @{args23}, "${init.status[23]}", "ph23", "${init.mode[23]}" ); + "ph24" usebundle => apply_gm("permissions_group_acl_absent", @{args24}, "${init.status[24]}", "ph24", "${init.mode[24]}" ); + "ph25" usebundle => apply_gm("permissions_group_acl_absent", @{args25}, "${init.status[25]}", "ph25", "${init.mode[25]}" ); + # Do not execute 26 + "ph27" usebundle => apply_gm("permissions_group_acl_absent", @{args27}, "${init.status[27]}", "ph27", "${init.mode[27]}" ); + "ph28" usebundle => apply_gm("permissions_group_acl_absent", @{args28}, "${init.status[28]}", "ph28", "${init.mode[28]}" ); + "ph29" usebundle => apply_gm("permissions_group_acl_absent", @{args29}, "${init.status[29]}", "ph29", "${init.mode[29]}" ); + # REGEX entry test + "ph33" usebundle => apply_gm("permissions_group_acl_absent", @{args33}, "${init.status[33]}", "ph33", "${init.mode[33]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + pass3:: + "expected_matches_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "unexpected_matches_failed_${init.indices}" expression => regcmp(".*${init.unexp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "lines_not_ok" expression => or("expected_matches_failed_${init.indices}", "unexpected_matches_failed_${init.indices}"); + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3:: + "Test for file nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok.execute_${init.indices}"; + + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "expected_matches_failed_${init.indices}"; + + "####################${const.endl}Found at least one of the following unexpected lines:${const.endl}${init.printable_unexpected_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "unexpected_matches_failed_${init.indices}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_present.cf 
b/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_present.cf new file mode 100644 index 00000000000..e1be64a053b --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_group_acl_present.cf @@ -0,0 +1,752 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # REPAIRED + ## recursive + "files[1]" string => "file1"; # args of the GM + "recursive[1]" string => "true"; # \\ + "group[1]" string => "bin"; # \\ + "ace[1]" string => "+rwx"; # \\ + "create[1]" string => "true"; # if "true", the file will be created at init + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[1]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r--" }; + # -> list of regex that must match the output of getfacl on the file at the en of the test + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "files[2]" string => "file2"; + "recursive[2]" string => "true"; + "group[2]" string => "bin"; + "ace[2]" string => "=rw"; + "create[2]" string => "true"; + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[2]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "files[3]" string => "file3"; + "recursive[3]" string => "true"; + "group[3]" string => "bin"; + "ace[3]" string => "-rw"; + "create[3]" string => "true"; + "initial[3]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[3]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + "mode[3]" string => "enforce"; + "status[3]" string => "repaired"; + "execute[3]" string => "true"; + + "files[4]" string => "file4/."; + "recursive[4]" string => "true"; + "group[4]" string => "bin"; + "ace[4]" string => "-rw"; + "create[4]" string => "true"; + "initial[4]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[4]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r-x" }; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + "files[5]" string => "file4/1/2/3/4/5/subfile1"; + "create[5]" string => "true"; + "initial[5]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[5]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + + "files[6]" string => "file6/."; + "recursive[6]" string => "true"; + "group[6]" string => "bin"; + "ace[6]" string => "-rw"; + "create[6]" string => "true"; + "initial[6]" slist => { "user:*:rwx", "group:*:rx", 
"group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[6]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[6]" string => "enforce"; + "status[6]" string => "repaired"; + "execute[6]" string => "true"; + + # non-recursive + "files[7]" string => "file7"; + "recursive[7]" string => "false"; + "group[7]" string => "bin"; + "ace[7]" string => "+rwx"; + "create[7]" string => "true"; + "initial[7]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[7]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r--" }; + "mode[7]" string => "enforce"; + "status[7]" string => "repaired"; + "execute[7]" string => "true"; + + "files[8]" string => "file8"; + "recursive[8]" string => "false"; + "group[8]" string => "bin"; + "ace[8]" string => "=rw"; + "create[8]" string => "true"; + "initial[8]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[8]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + "mode[8]" string => "enforce"; + "status[8]" string => "repaired"; + "execute[8]" string => "true"; + + "files[9]" string => "file9"; + "recursive[9]" string => "false"; + "group[9]" string => "bin"; + "ace[9]" string => "-rw"; + "create[9]" string => "true"; + "initial[9]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[9]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + "mode[9]" string => "enforce"; + "status[9]" string => "repaired"; + "execute[9]" string => "true"; + + "files[10]" string => "file10/."; + "recursive[10]" string => "false"; + "group[10]" string => "bin"; + "ace[10]" string => "-rw"; + "create[10]" string => "true"; + "initial[10]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:rx" }; + "exp_lines[10]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[10]" string => "enforce"; + "status[10]" string => "repaired"; + "execute[10]" string => "true"; + + "files[11]" string => "file10/1/2/3/4/5/subfile1"; + "create[11]" string => "true"; + "initial[11]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[11]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + + "files[12]" string => "file12/."; + "recursive[12]" string => "false"; + "group[12]" string => "bin"; + "ace[12]" string => "-rw"; + "create[12]" string => "true"; + "initial[12]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[12]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[12]" string => "enforce"; + "status[12]" string => "repaired"; + "execute[12]" string => "true"; + + # SUCCESS + "files[13]" string => "file13"; + "recursive[13]" string => "true"; + "group[13]" string => "bin"; + "ace[13]" string => "+rwx"; + "create[13]" string => "true"; + "initial[13]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:r" }; + "exp_lines[13]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r--" }; + "mode[13]" string => "enforce"; + "status[13]" string => "success"; + "execute[13]" string => "true"; + + + "files[14]" string => "file14"; + "recursive[14]" string => "true"; + "group[14]" string => "bin"; + "ace[14]" string => "-rw"; + "create[14]" string => "true"; + 
"initial[14]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:r" }; + "exp_lines[14]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r--" }; + "mode[14]" string => "enforce"; + "status[14]" string => "success"; + "execute[14]" string => "true"; + + + "files[15_bis]" string => "file15/1/2/3/4/5/subfile1"; + "create[15_bis]" string => "true"; + "initial[15_bis]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:r" }; + "exp_lines[15_bis]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r--" }; + + + "files[15]" string => "file15/."; + "recursive[15]" string => "true"; + "group[15]" string => "bin"; + "ace[15]" string => "-rw"; + "create[15]" string => "true"; + "initial[15]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[15]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r", "other::r-x" }; + "mode[15]" string => "enforce"; + "status[15]" string => "success"; + "execute[15]" string => "true"; + + + "files[17]" string => "file17/."; + "recursive[17]" string => "true"; + "group[17]" string => "bin"; + "ace[17]" string => "-rw"; + "create[17]" string => "true"; + "initial[17]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:rx" }; + "exp_lines[17]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[17]" string => "enforce"; + "status[17]" string => "success"; + "execute[17]" string => "true"; + + # non-recursive + "files[18]" string => "file18"; + "recursive[18]" string => "false"; + "group[18]" string => "*"; + "ace[18]" string => "+rx"; + "create[18]" string => "true"; + "initial[18]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[18]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[18]" string => "enforce"; + "status[18]" string => "success"; + "execute[18]" string => "true"; + + "files[19]" string => "file19"; + "recursive[19]" string => "false"; + "group[19]" string => "bin"; + "ace[19]" string => "=rw"; + "create[19]" string => "true"; + "initial[19]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[19]" slist => { "user::rwx", "group::r-x", "group:bin:rw", "user:bin:r", "other::r" }; + "mode[19]" string => "enforce"; + "status[19]" string => "success"; + "execute[19]" string => "true"; + + "files[20]" string => "file20"; + "recursive[20]" string => "false"; + "group[20]" string => "bin"; + "ace[20]" string => "-rw"; + "create[20]" string => "true"; + "initial[20]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:r" }; + "exp_lines[20]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + "mode[20]" string => "enforce"; + "status[20]" string => "success"; + "execute[20]" string => "true"; + + "files[21]" string => "file21/."; + "recursive[21]" string => "false"; + "group[21]" string => "bin"; + "ace[21]" string => "-rw"; + "create[21]" string => "true"; + "initial[21]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:rx" }; + "exp_lines[21]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[21]" string => "enforce"; + "status[21]" string => "success"; + "execute[21]" string => "true"; + + "files[22]" string => "file21/1/2/3/4/5/subfile1"; + "create[22]" string => "true"; + 
"initial[22]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[22]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + + "files[23]" string => "file23/."; + "recursive[23]" string => "false"; + "group[23]" string => "bin"; + "ace[23]" string => "-rw"; + "create[23]" string => "true"; + "initial[23]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[23]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r-x" }; + "mode[23]" string => "enforce"; + "status[23]" string => "success"; + "execute[23]" string => "true"; + + "files[24]" string => "file24"; + "recursive[24]" string => "true"; + "group[24]" string => "bin"; + "ace[24]" string => "=rw"; + "create[24]" string => "true"; + "initial[24]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[24]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + "mode[24]" string => "enforce"; + "status[24]" string => "success"; + "execute[24]" string => "true"; + + # ERROR + "files[25]" string => "fileThatDoesNotExist"; + "recursive[25]" string => "false"; + "group[25]" string => "bin"; + "ace[25]" string => "-rw"; + "create[25]" string => "false"; + "mode[25]" string => "enforce"; + "status[25]" string => "error"; + "execute[25]" string => "true"; + + "files[26]" string => "fileThatDoesNotExist2"; + "recursive[26]" string => "true"; + "group[26]" string => "bin"; + "ace[26]" string => "-rw"; + "create[26]" string => "false"; + "mode[26]" string => "enforce"; + "status[26]" string => "error"; + "execute[26]" string => "true"; + + "files[27]" string => "file27"; + "recursive[27]" string => "false"; + "group[27]" string => "aGroupThatDoesNotExist"; + "ace[27]" string => "-rw"; + "create[27]" string => "true"; + "initial[27]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[27]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[27]" string => "enforce"; + "status[27]" string => "error"; + "execute[27]" string => "true"; + + # AUDIT SUCCESS + # Copy of success audit part + "files[28]" string => "file28"; + "recursive[28]" string => "true"; + "group[28]" string => "bin"; + "ace[28]" string => "+rwx"; + "create[28]" string => "true"; + "initial[28]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:r" }; + "exp_lines[28]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r--" }; + "mode[28]" string => "audit"; + "status[28]" string => "success"; + "execute[28]" string => "true"; + + + "files[29]" string => "file29"; + "recursive[29]" string => "true"; + "group[29]" string => "bin"; + "ace[29]" string => "-rw"; + "create[29]" string => "true"; + "initial[29]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:r" }; + "exp_lines[29]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r--" }; + "mode[29]" string => "audit"; + "status[29]" string => "success"; + "execute[29]" string => "true"; + + "files[30_bis]" string => "file30/1/2/3/4/5/subfile1"; + "create[30_bis]" string => "true"; + "initial[30_bis]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:r" }; + "exp_lines[30_bis]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r--" }; + + "files[30]" string => "file30/."; + 
"recursive[30]" string => "true"; + "group[30]" string => "bin"; + "ace[30]" string => "-rw"; + "create[30]" string => "true"; + "initial[30]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[30]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r", "other::r-x" }; + "mode[30]" string => "audit"; + "status[30]" string => "success"; + "execute[30]" string => "true"; + + "files[32]" string => "file32/."; + "recursive[32]" string => "true"; + "group[32]" string => "bin"; + "ace[32]" string => "-rw"; + "create[32]" string => "true"; + "initial[32]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:rx" }; + "exp_lines[32]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[32]" string => "audit"; + "status[32]" string => "success"; + "execute[32]" string => "true"; + + # non-recursive + "files[33]" string => "file33"; + "recursive[33]" string => "false"; + "group[33]" string => "*"; + "ace[33]" string => "+rx"; + "create[33]" string => "true"; + "initial[33]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[33]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[33]" string => "audit"; + "status[33]" string => "success"; + "execute[33]" string => "true"; + + "files[34]" string => "file34"; + "recursive[34]" string => "false"; + "group[34]" string => "bin"; + "ace[34]" string => "=rw"; + "create[34]" string => "true"; + "initial[34]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[34]" slist => { "user::rwx", "group::r-x", "group:bin:rw", "user:bin:r", "other::r" }; + "mode[34]" string => "audit"; + "status[34]" string => "success"; + "execute[34]" string => "true"; + + "files[35]" string => "file35"; + "recursive[35]" string => "false"; + "group[35]" string => "bin"; + "ace[35]" string => "-rw"; + "create[35]" string => "true"; + "initial[35]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:r" }; + "exp_lines[35]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + "mode[35]" string => "audit"; + "status[35]" string => "success"; + "execute[35]" string => "true"; + + "files[36]" string => "file36/."; + "recursive[36]" string => "false"; + "group[36]" string => "bin"; + "ace[36]" string => "-rw"; + "create[36]" string => "true"; + "initial[36]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:rx" }; + "exp_lines[36]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::r-x" }; + "mode[36]" string => "audit"; + "status[36]" string => "success"; + "execute[36]" string => "true"; + + "files[37]" string => "file36/1/2/3/4/5/subfile1"; + "create[37]" string => "true"; + "initial[37]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[37]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + + "files[38]" string => "file38/."; + "recursive[38]" string => "false"; + "group[38]" string => "bin"; + "ace[38]" string => "-rw"; + "create[38]" string => "true"; + "initial[38]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[38]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r-x" }; + "mode[38]" string => "audit"; + "status[38]" string => "success"; + "execute[38]" string => "true"; 
+ + "files[39]" string => "file39"; + "recursive[39]" string => "true"; + "group[39]" string => "bin"; + "ace[39]" string => "=rw"; + "create[39]" string => "true"; + "initial[39]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[39]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + "mode[39]" string => "audit"; + "status[39]" string => "success"; + "execute[39]" string => "true"; + + # AUDIT ERROR + ## recursive + "files[40]" string => "file40"; + "recursive[40]" string => "true"; + "group[40]" string => "bin"; + "ace[40]" string => "+rwx"; + "create[40]" string => "true"; + "initial[40]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[40]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[40]" string => "audit"; + "status[40]" string => "error"; + "execute[40]" string => "true"; + + "files[41]" string => "file41"; + "recursive[41]" string => "true"; + "group[41]" string => "bin"; + "ace[41]" string => "=rw"; + "create[41]" string => "true"; + "initial[41]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[41]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[41]" string => "audit"; + "status[41]" string => "error"; + "execute[41]" string => "true"; + + "files[42]" string => "file42"; + "recursive[42]" string => "true"; + "group[42]" string => "bin"; + "ace[42]" string => "-rw"; + "create[42]" string => "true"; + "initial[42]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[42]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[42]" string => "audit"; + "status[42]" string => "error"; + "execute[42]" string => "true"; + + "files[43]" string => "file43/."; + "recursive[43]" string => "true"; + "group[43]" string => "bin"; + "ace[43]" string => "-rw"; + "create[43]" string => "true"; + "initial[43]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[43]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r", "other::r-x" }; + "mode[43]" string => "audit"; + "status[43]" string => "error"; + "execute[43]" string => "true"; + + "files[44]" string => "file43/1/2/3/4/5/subfile1"; + "create[44]" string => "true"; + "initial[44]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[44]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + + "files[45]" string => "file45/."; + "recursive[45]" string => "true"; + "group[45]" string => "bin"; + "ace[45]" string => "-rw"; + "create[45]" string => "true"; + "initial[45]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[45]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[45]" string => "audit"; + "status[45]" string => "error"; + "execute[45]" string => "true"; + + # non-recursive + "files[46]" string => "file46"; + "recursive[46]" string => "false"; + "group[46]" string => "bin"; + "ace[46]" string => "+rwx"; + "create[46]" string => "true"; + "initial[46]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[46]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[46]" string => "audit"; + "status[46]" 
string => "error"; + "execute[46]" string => "true"; + + "files[47]" string => "file47"; + "recursive[47]" string => "false"; + "group[47]" string => "bin"; + "ace[47]" string => "=rw"; + "create[47]" string => "true"; + "initial[47]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[47]" slist => { "user::rwx", "group::r-x", "group:bin:r", "user:bin:r", "other::r" }; + "mode[47]" string => "audit"; + "status[47]" string => "error"; + "execute[47]" string => "true"; + + "files[48]" string => "file48"; + "recursive[48]" string => "false"; + "group[48]" string => "bin"; + "ace[48]" string => "-rw"; + "create[48]" string => "true"; + "initial[48]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[48]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[48]" string => "audit"; + "status[48]" string => "error"; + "execute[48]" string => "true"; + + "files[49]" string => "file49/."; + "recursive[49]" string => "false"; + "group[49]" string => "bin"; + "ace[49]" string => "-rw"; + "create[49]" string => "true"; + "initial[49]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:rx" }; + "exp_lines[49]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[49]" string => "audit"; + "status[49]" string => "error"; + "execute[49]" string => "true"; + + "files[50]" string => "file49/1/2/3/4/5/subfile1"; + "create[50]" string => "true"; + "initial[50]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[50]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + + "files[51]" string => "file51/."; + "recursive[51]" string => "false"; + "group[51]" string => "bin"; + "ace[51]" string => "-rw"; + "create[51]" string => "true"; + "initial[51]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[51]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[51]" string => "audit"; + "status[51]" string => "error"; + "execute[51]" string => "true"; + + # As in the error tests + "files[52]" string => "fileThatDoesNotExist52"; + "recursive[52]" string => "false"; + "group[52]" string => "bin"; + "ace[52]" string => "-rw"; + "create[52]" string => "false"; + "mode[52]" string => "audit"; + "status[52]" string => "error"; + "execute[52]" string => "true"; + + "files[53]" string => "fileThatDoesNotExist53"; + "recursive[53]" string => "true"; + "group[53]" string => "bin"; + "ace[53]" string => "-rw"; + "create[53]" string => "false"; + "mode[53]" string => "audit"; + "status[53]" string => "error"; + "execute[53]" string => "true"; + + "files[54]" string => "file54"; + "recursive[54]" string => "false"; + "group[54]" string => "aGroupThatDoesNotExist"; + "ace[54]" string => "+r"; + "create[54]" string => "true"; + "initial[54]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[54]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[54]" string => "audit"; + "status[54]" string => "error"; + "execute[54]" string => "true"; + + # REGEX entry test + "files[55]" string => "rfile55"; + "create[55]" string => "true"; + "initial[55]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[55]" slist => { "user::rwx", "group::r-x", "group:bin:---", 
"user:bin:r--", "other::r--" }; + "execute[55]" string => "false"; + + "files[56]" string => "rfile56/."; + "create[56]" string => "true"; + "initial[56]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[56]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r-x" }; + "execute[56]" string => "false"; + + "files[57]" string => "rfile4/1/2/3/4/5/subfile1"; + "create[57]" string => "true"; + "initial[57]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[57]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::r--" }; + + "files[58]" string => "rfile*"; + "recursive[58]" string => "true"; + "group[58]" string => "bin"; + "ace[58]" string => "-rwx"; + "create[58]" string => "false"; + "mode[58]" string => "enforce"; + "status[58]" string => "repaired"; + "execute[58]" string => "true"; + + "indices" slist => getindices("files"); + # Needed for success + recursive + "indices_recursive" slist => { "15", "30" }; + + "printable_lines[${indices}]" string => join("${const.endl}", "exp_lines[${indices}]"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${tmp}/${files[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => access_generic("@{initial[${indices}]}"); + + pass3:: + "${tmp}/${files[${indices_recursive}]}" + create => "true", + depth_search => recurse_with_base("3"), + acl => access_generic("@{initial[${indices_recursive}]}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "${init.recursive[${init.indices}]}", "${init.group[${init.indices}]}", "${init.ace[${init.indices}]}" }; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_group_acl_present", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("permissions_group_acl_present", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph3" usebundle => apply_gm("permissions_group_acl_present", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm("permissions_group_acl_present", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + # Do not execute 5 + "ph6" usebundle => apply_gm("permissions_group_acl_present", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + "ph7" usebundle => apply_gm("permissions_group_acl_present", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}" ); + "ph8" usebundle => apply_gm("permissions_group_acl_present", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + "ph9" usebundle => apply_gm("permissions_group_acl_present", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}" ); + "ph10" usebundle => apply_gm("permissions_group_acl_present", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + # Do not execute 11 + "ph12" usebundle => apply_gm("permissions_group_acl_present", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}" ); + #SUCCESS + "ph13" usebundle => apply_gm("permissions_group_acl_present", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}" ); + "ph14" usebundle => apply_gm("permissions_group_acl_present", @{args14}, 
"${init.status[14]}", "ph14", "${init.mode[14]}" ); + "ph15" usebundle => apply_gm("permissions_group_acl_present", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}" ); + # Do not execute 16 + "ph17" usebundle => apply_gm("permissions_group_acl_present", @{args17}, "${init.status[17]}", "ph17", "${init.mode[17]}" ); + "ph18" usebundle => apply_gm("permissions_group_acl_present", @{args18}, "${init.status[18]}", "ph18", "${init.mode[18]}" ); + "ph19" usebundle => apply_gm("permissions_group_acl_present", @{args19}, "${init.status[19]}", "ph19", "${init.mode[19]}" ); + "ph20" usebundle => apply_gm("permissions_group_acl_present", @{args20}, "${init.status[20]}", "ph20", "${init.mode[20]}" ); + "ph21" usebundle => apply_gm("permissions_group_acl_present", @{args21}, "${init.status[21]}", "ph21", "${init.mode[21]}" ); + # Do not execute 22 + "ph23" usebundle => apply_gm("permissions_group_acl_present", @{args23}, "${init.status[23]}", "ph23", "${init.mode[23]}" ); + "ph24" usebundle => apply_gm("permissions_group_acl_present", @{args24}, "${init.status[24]}", "ph24", "${init.mode[24]}" ); + #ERROR + "ph25" usebundle => apply_gm("permissions_group_acl_present", @{args25}, "${init.status[25]}", "ph25", "${init.mode[25]}" ); + "ph26" usebundle => apply_gm("permissions_group_acl_present", @{args26}, "${init.status[26]}", "ph26", "${init.mode[26]}" ); + "ph27" usebundle => apply_gm("permissions_group_acl_present", @{args27}, "${init.status[27]}", "ph27", "${init.mode[27]}" ); + #AUDIT SUCCESS + "ph28" usebundle => apply_gm("permissions_group_acl_present", @{args28}, "${init.status[28]}", "ph28", "${init.mode[28]}" ); + "ph29" usebundle => apply_gm("permissions_group_acl_present", @{args29}, "${init.status[29]}", "ph29", "${init.mode[29]}" ); + "ph30" usebundle => apply_gm("permissions_group_acl_present", @{args30}, "${init.status[30]}", "ph30", "${init.mode[30]}" ); + # Do not execute 31 + "ph32" usebundle => apply_gm("permissions_group_acl_present", @{args32}, "${init.status[32]}", "ph32", "${init.mode[32]}" ); + "ph33" usebundle => apply_gm("permissions_group_acl_present", @{args33}, "${init.status[33]}", "ph33", "${init.mode[33]}" ); + "ph34" usebundle => apply_gm("permissions_group_acl_present", @{args34}, "${init.status[34]}", "ph34", "${init.mode[34]}" ); + "ph35" usebundle => apply_gm("permissions_group_acl_present", @{args35}, "${init.status[35]}", "ph35", "${init.mode[35]}" ); + "ph36" usebundle => apply_gm("permissions_group_acl_present", @{args36}, "${init.status[36]}", "ph36", "${init.mode[36]}" ); + # Do not execute 37 + "ph38" usebundle => apply_gm("permissions_group_acl_present", @{args38}, "${init.status[38]}", "ph38", "${init.mode[38]}" ); + "ph39" usebundle => apply_gm("permissions_group_acl_present", @{args39}, "${init.status[39]}", "ph39", "${init.mode[39]}" ); + #AUDIT ERROR + ## Copy of the repaired + "ph40" usebundle => apply_gm("permissions_group_acl_present", @{args40}, "${init.status[40]}", "ph40", "${init.mode[40]}" ); + "ph41" usebundle => apply_gm("permissions_group_acl_present", @{args41}, "${init.status[41]}", "ph41", "${init.mode[41]}" ); + "ph42" usebundle => apply_gm("permissions_group_acl_present", @{args42}, "${init.status[42]}", "ph42", "${init.mode[42]}" ); + "ph43" usebundle => apply_gm("permissions_group_acl_present", @{args43}, "${init.status[43]}", "ph43", "${init.mode[43]}" ); + # Do not execute 44 + "ph45" usebundle => apply_gm("permissions_group_acl_present", @{args45}, "${init.status[45]}", "ph45", "${init.mode[45]}" ); + "ph46" usebundle => 
apply_gm("permissions_group_acl_present", @{args46}, "${init.status[46]}", "ph46", "${init.mode[46]}" ); + "ph47" usebundle => apply_gm("permissions_group_acl_present", @{args47}, "${init.status[47]}", "ph47", "${init.mode[47]}" ); + "ph48" usebundle => apply_gm("permissions_group_acl_present", @{args48}, "${init.status[48]}", "ph48", "${init.mode[48]}" ); + "ph49" usebundle => apply_gm("permissions_group_acl_present", @{args49}, "${init.status[49]}", "ph49", "${init.mode[49]}" ); + # Do not execute 40 + "ph51" usebundle => apply_gm("permissions_group_acl_present", @{args51}, "${init.status[51]}", "ph51", "${init.mode[51]}" ); + ## Copy of the error + "ph52" usebundle => apply_gm("permissions_group_acl_present", @{args52}, "${init.status[52]}", "ph52", "${init.mode[52]}" ); + "ph53" usebundle => apply_gm("permissions_group_acl_present", @{args53}, "${init.status[53]}", "ph53", "${init.mode[53]}" ); + "ph54" usebundle => apply_gm("permissions_group_acl_present", @{args54}, "${init.status[54]}", "ph54", "${init.mode[54]}" ); + + # REGEX entry test + # Do not execute 55, 56, 57 + "ph58" usebundle => apply_gm("permissions_group_acl_present", @{args58}, "${init.status[58]}", "ph58", "${init.mode[58]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + pass3:: + "lines_matches_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "lines_not_ok" expression => "lines_matches_failed_${init.indices}"; + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3:: + "Test for file nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok.execute_${init.indices}"; + + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_failed_${init.indices}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_dirs.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_dirs.cf new file mode 100644 index 00000000000..dd0127d9510 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_dirs.cf @@ -0,0 +1,94 @@ +####################################################### +# +# Test checking if three directory permissions can be changed recursively +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" 
}; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon" string => canonify("${directory}"); + "mode" string => "750"; + "owner" string => "bin"; + "group" string => "bin"; + "type" string => "directories"; + "recursion" string => "inf"; + + files: + "${directory}/." + create => "true", + perms => mog("000", "root", "0"); + + "${directory}/subdir/." + create => "true", + perms => mog("000", "root", "0"); + + "${directory}/subdir2/." + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_type_recursion("${init.directory}", "${init.mode}", "${init.owner}", "${init.group}", "${init.type}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -perm ${init.mode} | wc -l) = \"3\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -user ${init.owner} | wc -l) = \"3\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -group ${init.group} | wc -l) = \"3\""; + + classes: + # By default, permissions_type_recursion should create the directory if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.directory_canon}_ok.!permissions_${init.directory_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_files.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_files.cf new file mode 100644 index 00000000000..131a4bcb73d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_multiple_existing_files.cf @@ -0,0 +1,94 @@ +####################################################### +# +# Test checking if two file permissions in a directory can be changed recursively +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { 
configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon" string => canonify("${directory}"); + "mode" string => "750"; + "owner" string => "bin"; + "group" string => "bin"; + "type" string => "files"; + "recursion" string => "inf"; + + files: + "${directory}/." + create => "true", + perms => mog("000", "root", "0"); + + "${directory}/file1" + create => "true", + perms => mog("000", "root", "0"); + + "${directory}/file2" + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_type_recursion("${init.directory}", "${init.mode}", "${init.owner}", "${init.group}", "${init.type}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -perm ${init.mode} | wc -l) = \"2\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -user ${init.owner} | wc -l) = \"2\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -group ${init.group} | wc -l) = \"2\""; + + classes: + # By default, permissions_type_recursion should create the directory if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.directory_canon}_ok.!permissions_${init.directory_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_dir.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_dir.cf new file mode 100644 index 00000000000..09319d9b354 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_dir.cf @@ -0,0 +1,88 @@ +####################################################### +# +# Test checking if a directory / directory has the right mode +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => 
"1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon" string => canonify("${directory}"); + "mode" string => "750"; + "owner" string => "bin"; + "group" string => "bin"; + "type" string => "directories"; + "recursion" string => "0"; + + files: + "${directory}/." + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_type_recursion("${init.directory}", "${init.mode}", "${init.owner}", "${init.group}", "${init.type}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "group_id" int => getgid("${init.group}"); + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(${test_utils.file_perms} ${init.directory}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.directory}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.directory}) = \"${group_id}\""; + + classes: + # By default, permissions_type_recursion should create the directory if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.directory_canon}_ok.!permissions_${init.directory_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_file.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_file.cf new file mode 100644 index 00000000000..dc12e8c9423 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_existing_file.cf @@ -0,0 +1,88 @@ +####################################################### +# +# Test checking if a file / directory has the right mode +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => 
"${tmp}/test"; + "file_canon" string => canonify("${file}"); + "mode" string => "640"; + "owner" string => "bin"; + "group" string => "bin"; + "type" string => "files"; + "recursion" string => "0"; + + files: + "${file}" + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_type_recursion("${init.file}", "${init.mode}", "${init.owner}", "${init.group}", "${init.type}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "group_id" int => getgid("${init.group}"); + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(${test_utils.file_perms} ${init.file}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(${test_utils.file_owner} ${init.file}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(${test_utils.file_group} ${init.file}) = \"${group_id}\""; + + classes: + # By default, permissions_type_recursion should create the file if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.file}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.file}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.file}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.file_canon}_ok.!permissions_${init.file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_nonexisting_file.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_nonexisting_file.cf new file mode 100644 index 00000000000..9317d66e8b3 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_mode_single_nonexisting_file.cf @@ -0,0 +1,64 @@ +####################################################### +# +# Test checking if using permissions methods on a non existent file causes an error +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/testthatshouldneverexist"; + "file_canon" string => canonify("${file}"); + "mode" string => "640"; + "owner" string => "bin"; + "group" string => "bin"; + + files: + "${file}" + delete => tidy; 
+ +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions("${init.file}", "${init.mode}", "${init.owner}", "${init.group}"); +} + +####################################################### + +bundle agent check +{ + + classes: + # Make sure the file still doesn't exist - we don't want CFEngine creating it or this test getting confused + "permissions_test_exists" + expression => fileexists("${init.file}"); + + "ok" expression => "!permissions_test_exists.permissions_${init.file_canon}_reached.permissions_${init.file_canon}_error.!permissions_${init.file_canon}_ok.!permissions_${init.file_canon}_repaired"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_other_acl_present.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_other_acl_present.cf new file mode 100644 index 00000000000..13a6f3123eb --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_other_acl_present.cf @@ -0,0 +1,683 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # REPAIRED + ## recursive + "files[1]" string => "file1"; # args of the GM + "recursive[1]" string => "true"; # \\ + "ace[1]" string => "+rwx"; # \\ + "create[1]" string => "true"; # if "true", the file will be created at init + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[1]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::rwx" }; + # -> list of regexes that must match the output of getfacl on the file at the end of the test + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "files[2]" string => "file2"; + "recursive[2]" string => "true"; + "ace[2]" string => "=rw"; + "create[2]" string => "true"; + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[2]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::rw-" }; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "files[3]" string => "file3"; + "recursive[3]" string => "true"; + "ace[3]" string => "-rw"; + "create[3]" string => "true"; + "initial[3]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[3]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::---" }; + "mode[3]" string => "enforce"; + "status[3]" string => "repaired"; + "execute[3]" string => "true"; + + "files[4]" string => "file4/."; + "recursive[4]" string => "true"; + "ace[4]" string => "-rw"; + "create[4]" string => "true"; + "initial[4]" slist => { "user:*:rwx",
"group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[4]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::--x" }; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + "files[5]" string => "file4/1/2/3/4/5/subfile1"; + "create[5]" string => "true"; + "initial[5]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[5]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::---" }; + + "files[6]" string => "file6/."; + "recursive[6]" string => "true"; + "ace[6]" string => "-rw"; + "create[6]" string => "true"; + "initial[6]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[6]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::--x" }; + "mode[6]" string => "enforce"; + "status[6]" string => "repaired"; + "execute[6]" string => "true"; + + # non-recursive + "files[7]" string => "file7"; + "recursive[7]" string => "false"; + "ace[7]" string => "+rwx"; + "create[7]" string => "true"; + "initial[7]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[7]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::rwx" }; + "mode[7]" string => "enforce"; + "status[7]" string => "repaired"; + "execute[7]" string => "true"; + + "files[8]" string => "file8"; + "recursive[8]" string => "false"; + "ace[8]" string => "=rw"; + "create[8]" string => "true"; + "initial[8]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[8]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::rw-" }; + "mode[8]" string => "enforce"; + "status[8]" string => "repaired"; + "execute[8]" string => "true"; + + "files[9]" string => "file9"; + "recursive[9]" string => "false"; + "ace[9]" string => "-rw"; + "create[9]" string => "true"; + "initial[9]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[9]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::---" }; + "mode[9]" string => "enforce"; + "status[9]" string => "repaired"; + "execute[9]" string => "true"; + + "files[10]" string => "file10/."; + "recursive[10]" string => "false"; + "ace[10]" string => "-rw"; + "create[10]" string => "true"; + "initial[10]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:rx" }; + "exp_lines[10]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::--x" }; + "mode[10]" string => "enforce"; + "status[10]" string => "repaired"; + "execute[10]" string => "true"; + + "files[11]" string => "file10/1/2/3/4/5/subfile1"; + "create[11]" string => "true"; + "initial[11]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[11]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + + "files[12]" string => "file12/."; + "recursive[12]" string => "false"; + "ace[12]" string => "-rw"; + "create[12]" string => "true"; + "initial[12]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[12]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::--x" }; + "mode[12]" string => "enforce"; + "status[12]" string => "repaired"; + "execute[12]" string => "true"; + + # SUCCESS + "files[13]" string => "file13"; + "recursive[13]" string => "true"; + 
"ace[13]" string => "+rwx"; + "create[13]" string => "true"; + "initial[13]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rwx" }; + "exp_lines[13]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::rwx" }; + "mode[13]" string => "enforce"; + "status[13]" string => "success"; + "execute[13]" string => "true"; + + + "files[14]" string => "file14"; + "recursive[14]" string => "true"; + "ace[14]" string => "-rw"; + "create[14]" string => "true"; + "initial[14]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:x" }; + "exp_lines[14]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::--x" }; + "mode[14]" string => "enforce"; + "status[14]" string => "success"; + "execute[14]" string => "true"; + + "files[15_bis]" string => "file15/1/2/3/4/5/subfile1"; + "create[15_bis]" string => "true"; + "initial[15_bis]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:x" }; + "exp_lines[15_bis]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::--x" }; + + "files[15]" string => "file15/."; + "recursive[15]" string => "true"; + "ace[15]" string => "-rw"; + "create[15]" string => "true"; + "initial[15]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:-rwx" }; + "exp_lines[15]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r", "other::---" }; + "mode[15]" string => "enforce"; + "status[15]" string => "success"; + "execute[15]" string => "true"; + + + "files[17]" string => "file17/."; + "recursive[17]" string => "true"; + "ace[17]" string => "-rw"; + "create[17]" string => "true"; + "initial[17]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", "user:bin:r", "all:x" }; + "exp_lines[17]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::--x" }; + "mode[17]" string => "enforce"; + "status[17]" string => "success"; + "execute[17]" string => "true"; + + # non-recursive + "files[18]" string => "file18"; + "recursive[18]" string => "false"; + "ace[18]" string => "+rx"; + "create[18]" string => "true"; + "initial[18]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:rx" }; + "exp_lines[18]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r-x" }; + "mode[18]" string => "enforce"; + "status[18]" string => "success"; + "execute[18]" string => "true"; + + "files[19]" string => "file19"; + "recursive[19]" string => "false"; + "ace[19]" string => "=rw"; + "create[19]" string => "true"; + "initial[19]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rx", "user:bin:r", "all:rw" }; + "exp_lines[19]" slist => { "user::rwx", "group::r-x", "group:bin:r-x", "user:bin:r--", "other::rw-" }; + "mode[19]" string => "enforce"; + "status[19]" string => "success"; + "execute[19]" string => "true"; + + "files[20]" string => "file20"; + "recursive[20]" string => "false"; + "ace[20]" string => "-rw"; + "create[20]" string => "true"; + "initial[20]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:x" }; + "exp_lines[20]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::--x" }; + "mode[20]" string => "enforce"; + "status[20]" string => "success"; + "execute[20]" string => "true"; + + "files[21]" string => "file21/."; + "recursive[21]" string => "false"; + "ace[21]" string => "-rw"; + "create[21]" string => "true"; + "initial[21]" slist => { "user:*:rwx", "group:*:rx", "group:bin:x", 
"user:bin:r", "all:x" }; + "exp_lines[21]" slist => { "user::rwx", "group::r-x", "group:bin:--x", "user:bin:r--", "other::--x" }; + "mode[21]" string => "enforce"; + "status[21]" string => "success"; + "execute[21]" string => "true"; + + "files[22]" string => "file21/1/2/3/4/5/subfile1"; + "create[22]" string => "true"; + "initial[22]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[22]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + + "files[23]" string => "file23/."; + "recursive[23]" string => "false"; + "ace[23]" string => "-rw"; + "create[23]" string => "true"; + "initial[23]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:x" }; + "exp_lines[23]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::--x" }; + "mode[23]" string => "enforce"; + "status[23]" string => "success"; + "execute[23]" string => "true"; + + "files[24]" string => "file24"; + "recursive[24]" string => "true"; + "ace[24]" string => "=x"; + "create[24]" string => "true"; + "initial[24]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:x" }; + "exp_lines[24]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::--x" }; + "mode[24]" string => "enforce"; + "status[24]" string => "success"; + "execute[24]" string => "true"; + + # ERROR + "files[25]" string => "fileThatDoesNotExist"; + "recursive[25]" string => "false"; + "ace[25]" string => "-rw"; + "create[25]" string => "false"; + "mode[25]" string => "enforce"; + "status[25]" string => "error"; + "execute[25]" string => "true"; + + "files[26]" string => "fileThatDoesNotExist2"; + "recursive[26]" string => "true"; + "ace[26]" string => "-rw"; + "create[26]" string => "false"; + "mode[26]" string => "enforce"; + "status[26]" string => "error"; + "execute[26]" string => "true"; + + # AUDIT SUCCESS + # Copy of success enforce part + "files[28]" string => "file28"; + "recursive[28]" string => "true"; + "ace[28]" string => "+rwx"; + "create[28]" string => "true"; + "initial[28]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rwx" }; + "exp_lines[28]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::rwx" }; + "mode[28]" string => "enforce"; + "status[28]" string => "success"; + "execute[28]" string => "true"; + + + "files[29]" string => "file29"; + "recursive[29]" string => "true"; + "ace[29]" string => "-rw"; + "create[29]" string => "true"; + "initial[29]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rx", "user:bin:r", "all:x" }; + "exp_lines[29]" slist => { "user::rwx", "group::r-x", "group:bin:r-x", "user:bin:r--", "other::--x" }; + "mode[29]" string => "enforce"; + "status[29]" string => "success"; + "execute[29]" string => "true"; + + + "files[30_bis]" string => "file30/1/2/3/4/5/subfile1"; + "create[30_bis]" string => "true"; + "initial[30_bis]" slist => { "user:*:rwx", "group:*:rx", "group:bin:wx", "user:bin:r", "all:x" }; + "exp_lines[30_bis]" slist => { "user::rwx", "group::r-x", "group:bin:-wx", "user:bin:r--", "other::--x" }; + + "files[30]" string => "file30/."; + "recursive[30]" string => "true"; + "ace[30]" string => "-rw"; + "create[30]" string => "true"; + "initial[30]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:x" }; + "exp_lines[30]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r", "other::--x" }; + "mode[30]" string => "enforce"; + "status[30]" string 
=> "success"; + "execute[30]" string => "true"; + + + "files[32]" string => "file32/."; + "recursive[32]" string => "true"; + "ace[32]" string => "-rw"; + "create[32]" string => "true"; + "initial[32]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rx", "user:bin:r", "all:x" }; + "exp_lines[32]" slist => { "user::rwx", "group::r-x", "group:bin:r-x", "user:bin:r--", "other::--x" }; + "mode[32]" string => "enforce"; + "status[32]" string => "success"; + "execute[32]" string => "true"; + + # non-recursive + "files[33]" string => "file33"; + "recursive[33]" string => "false"; + "ace[33]" string => "+rx"; + "create[33]" string => "true"; + "initial[33]" slist => { "user:*:rwx", "group:*:rw", "group:bin:r", "user:bin:r", "all:rx" }; + "exp_lines[33]" slist => { "user::rwx", "group::rw-", "group:bin:r--", "user:bin:r--", "other::r-x" }; + "mode[33]" string => "enforce"; + "status[33]" string => "success"; + "execute[33]" string => "true"; + + "files[34]" string => "file34"; + "recursive[34]" string => "false"; + "ace[34]" string => "=rw"; + "create[34]" string => "true"; + "initial[34]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rx", "user:bin:r", "all:rw" }; + "exp_lines[34]" slist => { "user::rwx", "group::r-x", "group:bin:r-x", "user:bin:r--", "other::rw-" }; + "mode[34]" string => "enforce"; + "status[34]" string => "success"; + "execute[34]" string => "true"; + + "files[35]" string => "file35"; + "recursive[35]" string => "false"; + "ace[35]" string => "-rw"; + "create[35]" string => "true"; + "initial[35]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:-rwx" }; + "exp_lines[35]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::---" }; + "mode[35]" string => "enforce"; + "status[35]" string => "success"; + "execute[35]" string => "true"; + + "files[36]" string => "file36/."; + "recursive[36]" string => "false"; + "ace[36]" string => "-r"; + "create[36]" string => "true"; + "initial[36]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rx", "user:bin:r", "all:x" }; + "exp_lines[36]" slist => { "user::rwx", "group::r-x", "group:bin:r-x", "user:bin:r--", "other::--x" }; + "mode[36]" string => "enforce"; + "status[36]" string => "success"; + "execute[36]" string => "true"; + + "files[37]" string => "file36/1/2/3/4/5/subfile1"; + "create[37]" string => "true"; + "initial[37]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rw", "user:bin:r", "all:r" }; + "exp_lines[37]" slist => { "user::rwx", "group::r-x", "group:bin:rw-", "user:bin:r--", "other::r--" }; + + "files[38]" string => "file38/."; + "recursive[38]" string => "false"; + "ace[38]" string => "-rw"; + "create[38]" string => "true"; + "initial[38]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:x" }; + "exp_lines[38]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::--x" }; + "mode[38]" string => "enforce"; + "status[38]" string => "success"; + "execute[38]" string => "true"; + + "files[39]" string => "file39"; + "recursive[39]" string => "true"; + "ace[39]" string => "=rw"; + "create[39]" string => "true"; + "initial[39]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rw" }; + "exp_lines[39]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::rw-" }; + "mode[39]" string => "enforce"; + "status[39]" string => "success"; + "execute[39]" string => "true"; + + # AUDIT ERROR + ## recursive + "files[40]" string => "file40"; + "recursive[40]" string => "true"; + 
"ace[40]" string => "+rwx"; + "create[40]" string => "true"; + "initial[40]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[40]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[40]" string => "audit"; + "status[40]" string => "error"; + "execute[40]" string => "true"; + + "files[41]" string => "file41"; + "recursive[41]" string => "true"; + "ace[41]" string => "=rw"; + "create[41]" string => "true"; + "initial[41]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[41]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[41]" string => "audit"; + "status[41]" string => "error"; + "execute[41]" string => "true"; + + "files[42]" string => "file42"; + "recursive[42]" string => "true"; + "ace[42]" string => "-rw"; + "create[42]" string => "true"; + "initial[42]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[42]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[42]" string => "audit"; + "status[42]" string => "error"; + "execute[42]" string => "true"; + + "files[43]" string => "file43/."; + "recursive[43]" string => "true"; + "ace[43]" string => "-rw"; + "create[43]" string => "true"; + "initial[43]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:x" }; + "exp_lines[43]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r", "other::--x" }; + "mode[43]" string => "audit"; + "status[43]" string => "error"; + "execute[43]" string => "true"; + + "files[44]" string => "file43/1/2/3/4/5/subfile1"; + "create[44]" string => "true"; + "initial[44]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[44]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + + "files[45]" string => "file45/."; + "recursive[45]" string => "true"; + "ace[45]" string => "-rw"; + "create[45]" string => "true"; + "initial[45]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:wx" }; + "exp_lines[45]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::-wx" }; + "mode[45]" string => "audit"; + "status[45]" string => "error"; + "execute[45]" string => "true"; + + # non-recursive + "files[46]" string => "file46"; + "recursive[46]" string => "false"; + "ace[46]" string => "+rwx"; + "create[46]" string => "true"; + "initial[46]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[46]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[46]" string => "audit"; + "status[46]" string => "error"; + "execute[46]" string => "true"; + + "files[47]" string => "file47"; + "recursive[47]" string => "false"; + "ace[47]" string => "=rw"; + "create[47]" string => "true"; + "initial[47]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[47]" slist => { "user::rwx", "group::r-x", "group:bin:r", "user:bin:r", "other::r" }; + "mode[47]" string => "audit"; + "status[47]" string => "error"; + "execute[47]" string => "true"; + + "files[48]" string => "file48"; + "recursive[48]" string => "false"; + "ace[48]" string => "-rw"; + "create[48]" string => "true"; + "initial[48]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[48]" slist => { "user::rwx", 
"group::r-x", "group:bin:r--", "user:bin:r--", "other::r--" }; + "mode[48]" string => "audit"; + "status[48]" string => "error"; + "execute[48]" string => "true"; + + "files[49]" string => "file49/."; + "recursive[49]" string => "false"; + "ace[49]" string => "-rw"; + "create[49]" string => "true"; + "initial[49]" slist => { "user:*:rwx", "group:*:rx", "group:bin:+rwx", "user:bin:r", "all:r" }; + "exp_lines[49]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r--" }; + "mode[49]" string => "audit"; + "status[49]" string => "error"; + "execute[49]" string => "true"; + + "files[50]" string => "file49/1/2/3/4/5/subfile1"; + "create[50]" string => "true"; + "initial[50]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:x" }; + "exp_lines[50]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::--x" }; + + "files[51]" string => "file51/."; + "recursive[51]" string => "false"; + "ace[51]" string => "-rw"; + "create[51]" string => "true"; + "initial[51]" slist => { "user:*:rwx", "group:*:rx", "group:bin:rwx", "user:bin:r", "all:rx" }; + "exp_lines[51]" slist => { "user::rwx", "group::r-x", "group:bin:rwx", "user:bin:r--", "other::r-x" }; + "mode[51]" string => "audit"; + "status[51]" string => "error"; + "execute[51]" string => "true"; + + # As in the error tests + "files[52]" string => "fileThatDoesNotExist52"; + "recursive[52]" string => "false"; + "ace[52]" string => "-rw"; + "create[52]" string => "false"; + "mode[52]" string => "audit"; + "status[52]" string => "error"; + "execute[52]" string => "true"; + + "files[53]" string => "fileThatDoesNotExist53"; + "recursive[53]" string => "true"; + "ace[53]" string => "-rw"; + "create[53]" string => "false"; + "mode[53]" string => "audit"; + "status[53]" string => "error"; + "execute[53]" string => "true"; + + # REGEX entry test + "files[55]" string => "rfile55"; + "create[55]" string => "true"; + "initial[55]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[55]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::---" }; + "execute[55]" string => "false"; + + "files[56]" string => "rfile56/."; + "create[56]" string => "true"; + "initial[56]" slist => { "user:*:rwx", "group:*:rx", "group:bin:-rwx", "user:bin:r", "all:rx" }; + "exp_lines[56]" slist => { "user::rwx", "group::r-x", "group:bin:---", "user:bin:r--", "other::---" }; + "execute[56]" string => "false"; + + "files[57]" string => "rfile4/1/2/3/4/5/subfile1"; + "create[57]" string => "true"; + "initial[57]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:bin:r", "all:r" }; + "exp_lines[57]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "user:bin:r--", "other::---" }; + + "files[58]" string => "rfile*"; + "recursive[58]" string => "true"; + "ace[58]" string => "-rwx"; + "create[58]" string => "false"; + "mode[58]" string => "enforce"; + "status[58]" string => "repaired"; + "execute[58]" string => "true"; + + "indices" slist => getindices("files"); + # Needed for success + recursive + "indices_recursive" slist => { "15", "30" }; + + "printable_lines[${indices}]" string => join("${const.endl}", "exp_lines[${indices}]"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${tmp}/${files[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => 
access_generic("@{initial[${indices}]}"); + + pass3:: + "${tmp}/${files[${indices_recursive}]}" + create => "true", + depth_search => recurse_with_base("3"), + acl => access_generic("@{initial[${indices_recursive}]}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "${init.recursive[${init.indices}]}", "${init.ace[${init.indices}]}" }; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_other_acl_present", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("permissions_other_acl_present", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph3" usebundle => apply_gm("permissions_other_acl_present", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm("permissions_other_acl_present", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + # Do not execute 5 + "ph6" usebundle => apply_gm("permissions_other_acl_present", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + "ph7" usebundle => apply_gm("permissions_other_acl_present", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}" ); + "ph8" usebundle => apply_gm("permissions_other_acl_present", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + "ph9" usebundle => apply_gm("permissions_other_acl_present", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}" ); + "ph10" usebundle => apply_gm("permissions_other_acl_present", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + # Do not execute 11 + "ph12" usebundle => apply_gm("permissions_other_acl_present", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}" ); + #SUCCESS + "ph13" usebundle => apply_gm("permissions_other_acl_present", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}" ); + "ph14" usebundle => apply_gm("permissions_other_acl_present", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}" ); + "ph15" usebundle => apply_gm("permissions_other_acl_present", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}" ); + # Do not execute 16 + "ph17" usebundle => apply_gm("permissions_other_acl_present", @{args17}, "${init.status[17]}", "ph17", "${init.mode[17]}" ); + "ph18" usebundle => apply_gm("permissions_other_acl_present", @{args18}, "${init.status[18]}", "ph18", "${init.mode[18]}" ); + "ph19" usebundle => apply_gm("permissions_other_acl_present", @{args19}, "${init.status[19]}", "ph19", "${init.mode[19]}" ); + "ph20" usebundle => apply_gm("permissions_other_acl_present", @{args20}, "${init.status[20]}", "ph20", "${init.mode[20]}" ); + "ph21" usebundle => apply_gm("permissions_other_acl_present", @{args21}, "${init.status[21]}", "ph21", "${init.mode[21]}" ); + # Do not execute 22 + "ph23" usebundle => apply_gm("permissions_other_acl_present", @{args23}, "${init.status[23]}", "ph23", "${init.mode[23]}" ); + "ph24" usebundle => apply_gm("permissions_other_acl_present", @{args24}, "${init.status[24]}", "ph24", "${init.mode[24]}" ); + #ERROR + "ph25" usebundle => apply_gm("permissions_other_acl_present", @{args25}, "${init.status[25]}", "ph25", "${init.mode[25]}" ); + "ph26" usebundle => apply_gm("permissions_other_acl_present", @{args26}, "${init.status[26]}", "ph26", "${init.mode[26]}" ); + #AUDIT SUCCESS + "ph28" usebundle => apply_gm("permissions_other_acl_present", @{args28}, 
"${init.status[28]}", "ph28", "${init.mode[28]}" ); + "ph29" usebundle => apply_gm("permissions_other_acl_present", @{args29}, "${init.status[29]}", "ph29", "${init.mode[29]}" ); + "ph30" usebundle => apply_gm("permissions_other_acl_present", @{args30}, "${init.status[30]}", "ph30", "${init.mode[30]}" ); + # Do not execute 31 + "ph32" usebundle => apply_gm("permissions_other_acl_present", @{args32}, "${init.status[32]}", "ph32", "${init.mode[32]}" ); + "ph33" usebundle => apply_gm("permissions_other_acl_present", @{args33}, "${init.status[33]}", "ph33", "${init.mode[33]}" ); + "ph34" usebundle => apply_gm("permissions_other_acl_present", @{args34}, "${init.status[34]}", "ph34", "${init.mode[34]}" ); + "ph35" usebundle => apply_gm("permissions_other_acl_present", @{args35}, "${init.status[35]}", "ph35", "${init.mode[35]}" ); + "ph36" usebundle => apply_gm("permissions_other_acl_present", @{args36}, "${init.status[36]}", "ph36", "${init.mode[36]}" ); + # Do not execute 37 + "ph38" usebundle => apply_gm("permissions_other_acl_present", @{args38}, "${init.status[38]}", "ph38", "${init.mode[38]}" ); + "ph39" usebundle => apply_gm("permissions_other_acl_present", @{args39}, "${init.status[39]}", "ph39", "${init.mode[39]}" ); + #AUDIT ERROR + ## Copy of the repaired + "ph40" usebundle => apply_gm("permissions_other_acl_present", @{args40}, "${init.status[40]}", "ph40", "${init.mode[40]}" ); + "ph41" usebundle => apply_gm("permissions_other_acl_present", @{args41}, "${init.status[41]}", "ph41", "${init.mode[41]}" ); + "ph42" usebundle => apply_gm("permissions_other_acl_present", @{args42}, "${init.status[42]}", "ph42", "${init.mode[42]}" ); + "ph43" usebundle => apply_gm("permissions_other_acl_present", @{args43}, "${init.status[43]}", "ph43", "${init.mode[43]}" ); + # Do not execute 44 + "ph45" usebundle => apply_gm("permissions_other_acl_present", @{args45}, "${init.status[45]}", "ph45", "${init.mode[45]}" ); + "ph46" usebundle => apply_gm("permissions_other_acl_present", @{args46}, "${init.status[46]}", "ph46", "${init.mode[46]}" ); + "ph47" usebundle => apply_gm("permissions_other_acl_present", @{args47}, "${init.status[47]}", "ph47", "${init.mode[47]}" ); + "ph48" usebundle => apply_gm("permissions_other_acl_present", @{args48}, "${init.status[48]}", "ph48", "${init.mode[48]}" ); + "ph49" usebundle => apply_gm("permissions_other_acl_present", @{args49}, "${init.status[49]}", "ph49", "${init.mode[49]}" ); + # Do not execute 40 + "ph51" usebundle => apply_gm("permissions_other_acl_present", @{args51}, "${init.status[51]}", "ph51", "${init.mode[51]}" ); + ## Copy of the error + "ph52" usebundle => apply_gm("permissions_other_acl_present", @{args52}, "${init.status[52]}", "ph52", "${init.mode[52]}" ); + "ph53" usebundle => apply_gm("permissions_other_acl_present", @{args53}, "${init.status[53]}", "ph53", "${init.mode[53]}" ); + + # REGEX entry test + # Do not execute 55, 56, 57 + "ph58" usebundle => apply_gm("permissions_other_acl_present", @{args58}, "${init.status[58]}", "ph58", "${init.mode[58]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => 
strcmp("${init.execute[${init.indices}]}", "true"); + pass3:: + "lines_matches_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "lines_not_ok" expression => "lines_matches_failed_${init.indices}"; + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3:: + "Test for file nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok.execute_${init.indices}"; + + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_failed_${init.indices}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent.error.cf new file mode 100644 index 00000000000..0bddaed8307 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent.error.cf @@ -0,0 +1,85 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Apply permissions_posix_acls_absent in enforce mode with recursion on +# unknown_file1 +# Apply permissions_posix_acls_absent in enforce mode without recursion on +# unknown_file* +# They all should fail since they do not exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "files[1]" string => "unknown_file1"; + "files[2]" string => "unknown_file*"; + + "indices" slist => { "1", "2" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_old_class_prefix[1]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[1]}"); + "expected_class_prefix[1]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[1]}_true"); + + "expected_old_class_prefix[2]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[2]}"); + "expected_class_prefix[2]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[2]}_false"); + + + methods: + "file${indices}" usebundle => define_expected_classes("${expected_old_class_prefix[${indices}]}", "error", "old_${indices}"); + "file${indices}" usebundle => define_expected_classes("${expected_class_prefix[${indices}]}", "error", "${indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[1]}", "true"); + "ph2" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[2]}", "false"); +} + +####################################################### + +bundle agent check +{ + classes: + 
"old_class_${init.indices}" expression => "${define_expected_classes.report_string_old_${init.indices}}"; + "class_${init.indices}" expression => "${define_expected_classes.report_string_${init.indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_2"; + "class_prefix_ok" expression => "class_1.class_2"; + + "ok" expression => "class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Missing expected old class for ${init.files[${init.indices}]}" + ifvarclass => "!old_class_${init.indices}"; + + "Missing expected class for ${init.files[${init.indices}]}" + ifvarclass => "!class_${init.indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.error.cf new file mode 100644 index 00000000000..d9434dac0ac --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.error.cf @@ -0,0 +1,131 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/sub_dir2/file1 +# acl_absent_test/file1 +# +# Set ACLs on every file and dir except acl_absent_test/sub_dir1/file1 +# Apply permissions_posix_acls_absent in audit mode without recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/sub_dir2 +# acl_absent_test/file1 +# unknown_file +# acl_absent_test/* +# They all should fail since they have ACls set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/sub_dir2/."; + "files[5]" string => "acl_absent_test/sub_dir2/file1"; + "files[6]" string => "unknown_file"; + "files[7]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4", "5", "6", "7" }; + "create_indices" slist => { "1", "2", "3", "4", "5" }; + "effective_indices" slist => { "1", "3", "4", "6", "7" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_old_class_prefix[${indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${indices}]}"); + "expected_class_prefix[${indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${indices}]}_false"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + # Do not put ACLs on the sub file to check if it detects correctly the acls in sub files + "acls_set_1" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[1]}", "useshell"), + scope => "namespace"; + "acls_set_2" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[2]}", "useshell"), + scope => "namespace"; + 
"acls_set_3" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[3]}", "useshell"), + scope => "namespace"; + "acls_set_4" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[4]}", "useshell"), + scope => "namespace"; + "acls_set_5" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[5]}", "useshell"), + scope => "namespace"; + + + files: + # Do not create the unknown_file + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.effective_indices}" slist => {"${init.tmp}/${init.files[${init.effective_indices}]}", "false"}; + + methods: + "ph1" usebundle => apply_gm("permissions_posix_acls_absent", @{args1}, "error", "ph1", "audit"); + "ph3" usebundle => apply_gm("permissions_posix_acls_absent", @{args3}, "error", "ph3", "audit"); + "ph4" usebundle => apply_gm("permissions_posix_acls_absent", @{args4}, "error", "ph4", "audit"); + "ph6" usebundle => apply_gm("permissions_posix_acls_absent", @{args6}, "error", "ph6", "audit"); + "ph7" usebundle => apply_gm("permissions_posix_acls_absent", @{args7}, "error", "ph7", "audit"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.create_indices}"; + + classes: + "classes_ok" expression => "ph1_ok.ph3_ok.ph4_ok.ph6_ok.ph7_ok"; + + # We want the output to be not empty except on 2 + # For the regex entry, it is checked if all others files are already ok + "cleanup_1" not => strcmp("", "${getfacl_output[1]}"); + "cleanup_2" expression => strcmp("", "${getfacl_output[2]}"); + "cleanup_3" not => strcmp("", "${getfacl_output[3]}"); + "cleanup_4" not => strcmp("", "${getfacl_output[4]}"); + "cleanup_5" not => strcmp("", "${getfacl_output[5]}"); + # No cleanup check on unknown file + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3.cleanup_4.cleanup_5"; + + "ok" expression => "all_cleanup.classes_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Unexpected ACLs change on ${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.success.cf new file mode 100644 index 00000000000..6fe9ec37124 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.audit.success.cf @@ -0,0 +1,126 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. 
+# and acl_absent_test/file1 +# Add some on acl_absent_test/sub_dir1/file1 +# +# Apply permissions_posix_acls_absent in audit mode without recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/file1 +# acl_absent_test/* +# They should all succeed since they have no ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + "file_list" slist => { "acl_absent_test/sub_dir1/.", "acl_absent_test/sub_dir1/file1", "acl_absent_test/file1" }; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4" }; + "create_indices" slist => { "1", "2", "3" }; + "effective_indices" slist => { "1", "3", "4" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}_false"); + "expected_old_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_1" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[1]}", "useshell"), + scope => "namespace"; + # Set acl on sub file, it should be not be removed + "acls_set_2" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[2]}", "useshell"), + scope => "namespace"; + "acls_set_3" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[3]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "any" usebundle => define_expected_classes("${expected_old_class_prefix[${effective_indices}]}", "success", "old_${effective_indices}"); + "any" usebundle => define_expected_classes("${expected_class_prefix[${effective_indices}]}", "success", "${effective_indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph${init.effective_indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.effective_indices}]}", "false"); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.create_indices}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" 
expression => "old_class_1.old_class_3.old_class_4"; + "class_prefix_ok" expression => "class_1.class_3.class_4"; + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "cleanup_2" not => strcmp("${getfacl_output[2]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "ACLs change found on ${init.tmp}/${init.files[${init.crceate_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.repaired.cf new file mode 100644 index 00000000000..d3df504ff97 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.repaired.cf @@ -0,0 +1,116 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# acl_absent_test/sub_dir2/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. +# Add some on acl_absent_test/sub_dir1/file1 +# on acl_absent_test/file1 +# and acl_absent_test/sub_dir2/. +# +# Apply permissions_posix_acls_absent in enforce mode without recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# acl_absent_test/*1 +# acl_absent_test/*2 +# +# They should all repaired since they have ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*1"; + "files[5]" string => "acl_absent_test/sub_dir2/."; + "files[6]" string => "acl_absent_test/*2"; + + "indices" slist => { "1", "2", "3", "4", "5", "6" }; + "create_indices" slist => { "1", "2", "3", "5" }; + "effective_indices" slist => { "1", "2", "3", "4", "6" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_${files_canon[${create_indices}]}" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[${create_indices}]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); +} + +####################################################### + 
+bundle agent test +{ + # We can assume the repaired were not recursive if we have a repaired on each file. + vars: + "args${init.effective_indices}" slist => {"${init.tmp}/${init.files[${init.effective_indices}]}", "false"}; + + methods: + "ph1" usebundle => apply_gm("permissions_posix_acls_absent", @{args1}, "repaired", "ph1", "enforce"); + "ph2" usebundle => apply_gm("permissions_posix_acls_absent", @{args2}, "repaired", "ph2", "enforce"); + "ph3" usebundle => apply_gm("permissions_posix_acls_absent", @{args3}, "repaired", "ph3", "enforce"); + "ph4" usebundle => apply_gm("permissions_posix_acls_absent", @{args4}, "success", "ph4", "enforce"); + "ph6" usebundle => apply_gm("permissions_posix_acls_absent", @{args6}, "repaired", "ph6", "enforce"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.files_canon[${init.create_indices}]}"; + + classes: + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph6_ok"; + + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3.cleanup_5"; + + "ok" expression => "all_cleanup.classes_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Unexpected ACLs change on ${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.success.cf new file mode 100644 index 00000000000..d880adac44c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_non_recursive.success.cf @@ -0,0 +1,123 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. 
+# and acl_absent_test/file1 +# Add some on acl_absent_test/sub_dir1/file1 +# +# Apply permissions_posix_acls_absent in enforce mode without recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/file1 +# acl_absent_test/* +# They should all succeed since they have no ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4" }; + "create_indices" slist => { "1", "2", "3" }; + "effective_indices" slist => { "1", "3", "4" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}_false"); + "expected_old_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_1" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[1]}", "useshell"), + scope => "namespace"; + # Set acl on sub file, it should be not be removed + "acls_set_2" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[2]}", "useshell"), + scope => "namespace"; + "acls_set_3" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[3]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "any" usebundle => define_expected_classes("${expected_old_class_prefix[${effective_indices}]}", "success", "old_${effective_indices}"); + "any" usebundle => define_expected_classes("${expected_class_prefix[${effective_indices}]}", "success", "${effective_indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph${init.indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.indices}]}", "false"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.create_indices}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_3.old_class_4"; + "class_prefix_ok" expression => "class_1.class_3.class_4"; + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "cleanup_2" not => 
strcmp("${getfacl_output[2]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "ACLs change found on ${init.tmp}/${init.files[${init.crceate_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.error.cf new file mode 100644 index 00000000000..e1e93beab3c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.error.cf @@ -0,0 +1,129 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. +# Add some on acl_absent_test/sub_dir1/file1 +# and acl_absent_test/file1 +# +# Apply permissions_posix_acls_absent in audit mode with recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/file1 +# acl_absent_test/* +# unknown_file +# They should all fail since they have ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "unknown_file"; + "files[5]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4", "5" }; + "create_indices" slist => { "1", "2", "3" }; + "effective_indices" slist => { "1", "3", "4", "5" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_old_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}"); + "expected_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}_true"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + # Do not put ACLs on the directory to check if it detects correctly the acls in sub files + "acls_set_1" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[1]}", "useshell"), + scope => "namespace"; + "acls_set_2" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[2]}", "useshell"), + scope => "namespace"; + "acls_set_3" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[3]}", "useshell"), + scope => "namespace"; + + files: + any:: + 
"${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "file${effective_indices}" usebundle => define_expected_classes("${expected_old_class_prefix[${effective_indices}]}", "error", "old_${effective_indices}"); + "file${effective_indices}" usebundle => define_expected_classes("${expected_class_prefix[${effective_indices}]}", "error", "${effective_indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph${init.effective_indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.effective_indices}]}", "true"); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.create_indices}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_3.old_class_4.old_class_5"; + "class_prefix_ok" expression => "class_1.class_3.class_4.class_5"; + + # We want the output to be not empty on 2 and 3, empty on 1 + "cleanup_1" expression => strcmp("${getfacl_output[1]}", ""); + "cleanup_2" not => strcmp("${getfacl_output[2]}", ""); + "cleanup_3" not => strcmp("${getfacl_output[3]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Unexpected ACLs change on ${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.success.cf new file mode 100644 index 00000000000..333090217bc --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.audit.success.cf @@ -0,0 +1,118 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. 
+# on acl_absent_test/sub_dir1/file1 +# and acl_absent_test/file1 + +# Apply permissions_posix_acls_absent in audit mode with recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/file1 +# acl_absent_test/* +# They should all succeed since they have no ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4" }; + "create_indices" slist => { "1", "2", "3" }; + "effective_indices" slist => { "1", "3", "4"}; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_class_prefix[${indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${indices}]}_true"); + "expected_old_class_prefix[${indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_${files_canon[${create_indices}]}" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[${create_indices}]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "any" usebundle => define_expected_classes("${expected_old_class_prefix[${indices}]}", "success", "old_${indices}"); + "any" usebundle => define_expected_classes("${expected_class_prefix[${indices}]}", "success", "${indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph${init.effective_indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.effective_indices}]}", "true"); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.files_canon[${init.create_indices}]}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_3.old_class_4"; + "class_prefix_ok" expression => "class_1.class_3.class_4"; + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Unexpected ACLs change on 
${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.repaired.cf new file mode 100644 index 00000000000..85948a2bf5f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.repaired.cf @@ -0,0 +1,136 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# acl_absent_test/sub_dir2/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. +# Add some on acl_absent_test/sub_dir1/file1 +# on acl_absent_test/file1 +# and acl_absent_test/sub_dir2/. +# +# Apply permissions_posix_acls_absent in enforce mode with recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# acl_absent_test/*1 +# acl_absent_test/*2 +# +# They should all repaired since they have ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "user" string => "bin"; + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*1"; + "files[5]" string => "acl_absent_test/sub_dir2/."; + "files[6]" string => "acl_absent_test/*2"; + + "indices" slist => { "1", "2", "3", "4", "5", "6" }; + "create_indices" slist => { "1", "2", "3", "5" }; + "effective_indices" slist => { "1", "2", "3", "4", "6" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}_true"); + "expected_old_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_${files_canon[${create_indices}]}" expression => returnszero("${paths.setfacl} -m u:${user}:rx ${tmp}/${files[${create_indices}]}", "useshell"), + scope => "namespace"; + # Remove ACLs on the directory to check if it detects correctly the acls in sub files + "acls_set_1" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[1]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "any" usebundle => define_expected_classes("${expected_old_class_prefix[${effective_indices}]}", "repaired", 
"old_${effective_indices}"); + "any" usebundle => define_expected_classes("${expected_class_prefix[${effective_indices}]}", "repaired", "${effective_indices}"); + + # Checking that recursivity worked + "any" usebundle => define_expected_classes("${expected_old_class_prefix[2]}", "success", "old_2"); + "any" usebundle => define_expected_classes("${expected_class_prefix[2]}", "success", "2"); + + # Checking that the check are well re-evaluated + "any" usebundle => define_expected_classes("${expected_old_class_prefix[4]}", "success", "old_4"); + "any" usebundle => define_expected_classes("${expected_class_prefix[4]}", "success", "4"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph${init.effective_indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.effective_indices}]}", "true"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.files_canon[${init.create_indices}]}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_2.old_class_3.old_class_4.old_class_6"; + "class_prefix_ok" expression => "class_1.class_2.class_3.class_4.class_6"; + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3.cleanup_5"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "Unexpected ACLs change on ${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.success.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.success.cf new file mode 100644 index 00000000000..8df342cba03 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_absent_recursive.success.cf @@ -0,0 +1,116 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +# Create: acl_absent_test/sub_dir1/file1 +# acl_absent_test/file1 +# +# Remove ACLs on acl_absent_test/sub_dir1/. 
+# on acl_absent_test/sub_dir1/file1 +# and acl_absent_test/file1 +# +# Apply permissions_posix_acls_absent in enforce mode with recursion on +# acl_absent_test/sub_dir1 +# acl_absent_test/file1 +# acl_absent_test/* +# They should all succeed since they have no ACLs set +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + "files[1]" string => "acl_absent_test/sub_dir1/."; + "files[2]" string => "acl_absent_test/sub_dir1/file1"; + "files[3]" string => "acl_absent_test/file1"; + "files[4]" string => "acl_absent_test/*"; + + "indices" slist => { "1", "2", "3", "4" }; + "create_indices" slist => { "1", "2", "3" }; + "effective_indices" slist => { "1", "3", "4" }; + + "files_canon[${indices}]" string => canonify("${files[${indices}]}"); + + "expected_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}_true"); + "expected_old_class_prefix[${effective_indices}]" string => canonify("permissions_posix_acls_absent_${tmp}/${files[${effective_indices}]}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "acls_set_${files_canon[${create_indices}]}" expression => returnszero("${paths.setfacl} -b ${tmp}/${files[${create_indices}]}", "useshell"), + scope => "namespace"; + + files: + any:: + "${tmp}/${files[${create_indices}]}" + create => "true", + perms => mog("555", "root", "0"); + + methods: + "any" usebundle => define_expected_classes("${expected_old_class_prefix[${effective_indices}]}", "success", "old_${effective_indices}"); + "any" usebundle => define_expected_classes("${expected_class_prefix[${effective_indices}]}", "success", "${effective_indices}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph${init.effective_indices}" usebundle => permissions_posix_acls_absent("${init.tmp}/${init.files[${init.effective_indices}]}", "true"); +} + +####################################################### + +bundle agent check +{ + vars: + "getfacl_output[${init.create_indices}]" string => execresult("${paths.getfacl} --skip-base ${init.tmp}/${init.files[${init.create_indices}]}", "useshell"), + ifvarclass => "acls_set_${init.files_canon[${init.create_indices}]}"; + + classes: + "old_class_${init.effective_indices}" expression => "${define_expected_classes.report_string_old_${init.effective_indices}}"; + "class_${init.effective_indices}" expression => "${define_expected_classes.report_string_${init.effective_indices}}"; + + "old_class_prefix_ok" expression => "old_class_1.old_class_3.old_class_4"; + "class_prefix_ok" expression => "class_1.class_3.class_4"; + + "cleanup_${init.create_indices}" expression => strcmp("${getfacl_output[${init.create_indices}]}", ""); + "all_cleanup" expression => "cleanup_1.cleanup_2.cleanup_3"; + + "ok" expression => "all_cleanup.class_prefix_ok.old_class_prefix_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + any:: + "ACLs change found on 
${init.tmp}/${init.files[${init.create_indices}]}" + ifvarclass => "!cleanup_${init.create_indices}"; + + "Missing expected old class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!old_class_${init.effective_indices}"; + + "Missing expected class for ${init.files[${init.effective_indices}]}" + ifvarclass => "!class_${init.effective_indices}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_entry_parent.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_entry_parent.cf new file mode 100644 index 00000000000..ad86f550dfe --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_posix_acls_entry_parent.cf @@ -0,0 +1,158 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, initialization, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "split_tmp" slist => { @(getParent_py.parentDirectories) }; + + # exp_lines = expected lines in the getfacl run on the target, in the check bundle + # exp_parent_lines = expected lines in the getfacl run on the parents, in the check bundle + # parents = list of parent dirs to audit + # REPAIRED + ## recursive + "path[1]" string => "${tmp}/file1"; + "recursive[1]" string => "true"; + "user[1]" string => ""; + "group[1]" string => "bin:+rwx"; + "other[1]" string => ""; + "parent_permissions_user[1]" string => ""; + "parent_permissions_group[1]" string => "bin:+rx"; + "parent_permissions_other[1]" string => ""; + "create[1]" string => "true"; # if "true", the file will be created at init + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:*:r", "all:r" }; + "exp_lines[1]" slist => { "group:bin:rwx" }; + "exp_parent_lines[1]" slist => { "group:bin:r-x" }; + "parents[1]" slist => { @(split_tmp) }; + + "path[2]" string => "${tmp}/file1*"; # Same as 1, but with a different path resolution to avoid class conflicts + "recursive[2]" string => "true"; + "user[2]" string => ""; + "group[2]" string => "bin:+rwx"; + "other[2]" string => ""; + "parent_permissions_user[2]" string => ""; + "parent_permissions_group[2]" string => "bin:+rx"; + "parent_permissions_other[2]" string => ""; + "create[2]" string => "false"; # if "true", the file will be created at init + "mode[2]" string => "enforce"; # mode, "enforce" or "audit" + "status[2]" string => "success"; # expected status, "repaired", "success" or "error" + "execute[2]" string => "true"; # Tell if an execution is needed + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "user:*:r", "all:r" }; + "exp_lines[2]" slist => { "group:bin:rwx" }; + "exp_parent_lines[2]" slist => { "group:bin:r-x" }; + "parents[2]" slist => { @(split_tmp) }; + + pass1:: + "indices" slist => getindices("path"); + + classes: + "pass3" expression => "pass2"; + 
"pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${path[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => access_generic("@{initial[${indices}]}"); + + commands: + pass1:: + # Pre compute the ENV path is pretty hard, so we are using the module to split it. + # This is fine since the module has its own dedicated test set. + "${sys.workdir}/modules/promises/getParent.py" + args => "\"${tmp}\"", + module => "true"; +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { + "${init.path[${init.indices}]}", + "${init.recursive[${init.indices}]}", + "${init.user[${init.indices}]}", + "${init.group[${init.indices}]}", + "${init.other[${init.indices}]}", + "${init.parent_permissions_user[${init.indices}]}", + "${init.parent_permissions_group[${init.indices}]}", + "${init.parent_permissions_other[${init.indices}]}" + }; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm_v4("permissions_posix_acl_entry_parent", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + # This one does not work, might be from a troncated class since the code seems correct? + "ph2" usebundle => apply_gm_v4("permissions_posix_acl_entry_parent", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_target_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.path[${init.indices}]}", "useshell"); + "getfacl_parents_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.path[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + + # Reverse grep exit code as we want a class when it does not match + # ! getfacl /tmp 2>/dev/null | grep -q 'group::rwx' + "parent_acls_failed_${init.indices}" expression => returnszero("! 
${paths.getfacl} ${init.parents[${init.indices}]} 2>/dev/null | grep -q '${init.exp_parent_lines[${init.indices}]}'", "useshell"); + + pass3:: + "target_acl_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*", "${getfacl_target_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + "parents_acl_failed_${init.indices}" not => regcmp(".*${init.exp_parent_lines[${init.indices}]}.*", "${getfacl_parents_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "lines_not_ok" expression => "target_acl_failed_${init.indices}|parent_acls_failed_${init.indices}"; + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_recursive.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_recursive.cf new file mode 100644 index 00000000000..d4a16153d8f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_recursive.cf @@ -0,0 +1,92 @@ +####################################################### +# +# Test checking that the permissions of two files in a directory can be changed recursively +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "directory" string => "${tmp}/test"; + "directory_canon" string => canonify("${directory}"); + "mode" string => "750"; + "owner" string => "bin"; + "group" string => "bin"; + + files: + "${directory}/." 
+ create => "true", + perms => mog("000", "root", "0"); + + "${directory}/file1" + create => "true", + perms => mog("000", "root", "0"); + + "${directory}/file2" + create => "true", + perms => mog("000", "root", "0"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => permissions_recursive("${init.directory}", "${init.mode}", "${init.owner}", "${init.group}"); +} + +####################################################### + +bundle agent check +{ + vars: + "permissions_test_mode" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -perm ${init.mode} | wc -l) = \"3\""; + "permissions_test_owner" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -user ${init.owner} | wc -l) = \"3\""; + "permissions_test_group" string => "/usr/bin/test ${const.dollar}(/usr/bin/find ${init.directory} -group ${init.group} | wc -l) = \"3\""; + + classes: + # By default, permissions_type_recursion should create the directory if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("permissions_${init.directory}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.permissions_${init.directory_canon}_ok.!permissions_${init.directory_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_absent.cf new file mode 100644 index 00000000000..718f902190d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_absent.cf @@ -0,0 +1,447 @@ +####################################################### +# +# Test checking if ACLs are absent or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # REPAIRED + ## recursive + "files[1]" string => "file1"; # args of the GM + "recursive[1]" string => "true"; # \\ + "user[1]" string => "bin"; # \\ + "create[1]" string => "true"; # if "true", the file will be created at init + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[1]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + 
"unexp_lines[1]" slist => { "user:bin*" }; + # -> list of regex that {must|must not} match the output of getfacl on the file at the en of the test + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "files[2]" string => "file2/."; + "recursive[2]" string => "true"; + "user[2]" string => "bin"; + "create[2]" string => "true"; + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:rx" }; + "exp_lines[2]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[2]" slist => { "user:bin*" }; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "files[3]" string => "file2/subfile1"; + "create[3]" string => "true"; + "initial[3]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[3]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[3]" slist => { "user:bin*" }; + + "files[4]" string => "file4/."; + "recursive[4]" string => "true"; + "user[4]" string => "bin"; + "create[4]" string => "true"; + "initial[4]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[4]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[4]" slist => { "user:bin*" }; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + # non-recursive + "files[5]" string => "file5"; + "recursive[5]" string => "false"; + "user[5]" string => "bin"; + "create[5]" string => "true"; + "initial[5]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[5]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[5]" slist => { "user:bin*" }; + "mode[5]" string => "enforce"; + "status[5]" string => "repaired"; + "execute[5]" string => "true"; + + "files[6]" string => "file6/."; + "recursive[6]" string => "false"; + "user[6]" string => "bin"; + "create[6]" string => "true"; + "initial[6]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[6]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[6]" slist => { "user:bin*" }; + "mode[6]" string => "enforce"; + "status[6]" string => "repaired"; + "execute[6]" string => "true"; + + "files[7]" string => "file6/subfile1"; + "create[7]" string => "true"; + "initial[7]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[7]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[8]" string => "file8/."; + "recursive[8]" string => "false"; + "user[8]" string => "bin"; + "create[8]" string => "true"; + "initial[8]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[8]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[8]" slist => { "user:bin*" }; + "mode[8]" string => "enforce"; + "status[8]" string => "repaired"; + "execute[8]" string => "true"; + + # SUCCESS + "files[9]" string => "file9"; + "recursive[9]" string => "true"; + "user[9]" string => "bin"; + "create[9]" string => "true"; + "initial[9]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:r" }; + "exp_lines[9]" slist => { "user::rwx", "group::r-x", "group:bin:r--", 
"other::r--" }; + "unexp_lines[9]" slist => { "user:bin*" }; + "mode[9]" string => "enforce"; + "status[9]" string => "success"; + "execute[9]" string => "true"; + + "files[10]" string => "file10/."; + "recursive[10]" string => "true"; + "user[10]" string => "bin"; + "create[10]" string => "true"; + "initial[10]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:r" }; + "exp_lines[10]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[10]" slist => { "user:bin*" }; + "mode[10]" string => "enforce"; + "status[10]" string => "success"; + "execute[10]" string => "true"; + + # non-recursive + "files[11]" string => "file11/."; + "recursive[11]" string => "false"; + "user[11]" string => "bin"; + "create[11]" string => "true"; + "initial[11]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:rx" }; + "exp_lines[11]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[11]" slist => { "user:bin*" }; + "mode[11]" string => "enforce"; + "status[11]" string => "success"; + "execute[11]" string => "true"; + + "files[12]" string => "file11/subfile1"; + "create[12]" string => "true"; + "initial[12]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[12]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[13]" string => "file13"; + "recursive[13]" string => "false"; + "user[13]" string => "aUserThatDoesNotExist"; + "create[13]" string => "true"; + "initial[13]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[13]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "unexp_lines[11]" slist => { "user:aUserThatDoesNotExist*" }; + "mode[13]" string => "enforce"; + "status[13]" string => "success"; + "execute[13]" string => "true"; + + # ERROR + "files[14]" string => "fileThatDoesNotExist"; + "recursive[14]" string => "false"; + "user[14]" string => "bin"; + "create[14]" string => "false"; + "mode[14]" string => "enforce"; + "status[14]" string => "error"; + "execute[14]" string => "true"; + + "files[15]" string => "fileThatDoesNotExist2"; + "recursive[15]" string => "true"; + "user[15]" string => "bin"; + "create[15]" string => "false"; + "mode[15]" string => "enforce"; + "status[15]" string => "error"; + "execute[15]" string => "true"; + + # AUDIT SUCCESS + # Copy of success enforce part + "files[16]" string => "file16"; + "recursive[16]" string => "true"; + "user[16]" string => "bin"; + "create[16]" string => "true"; + "initial[16]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:r" }; + "exp_lines[16]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[16]" slist => { "user:bin*" }; + "mode[16]" string => "audit"; + "status[16]" string => "success"; + "execute[16]" string => "true"; + + "files[17]" string => "file17/."; + "recursive[17]" string => "true"; + "user[17]" string => "bin"; + "create[17]" string => "true"; + "initial[17]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:r" }; + "exp_lines[17]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[17]" slist => { "user:bin*" }; + "mode[17]" string => "audit"; + "status[17]" string => "success"; + "execute[17]" string => "true"; + + # non-recursive + "files[18]" string => "file18/."; + "recursive[18]" string => "false"; + "user[18]" string => "bin"; + "create[18]" string => "true"; + "initial[18]" slist => 
{ "user:*:rwx", "group:*:rx", "group:bin:r", "all:rx" }; + "exp_lines[18]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[18]" slist => { "user:bin*" }; + "mode[18]" string => "audit"; + "status[18]" string => "success"; + "execute[18]" string => "true"; + + "files[19]" string => "file18/subfile1"; + "create[19]" string => "true"; + "initial[19]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[19]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[20]" string => "file20"; + "recursive[20]" string => "false"; + "user[20]" string => "aUserThatDoesNotExist"; + "create[20]" string => "true"; + "initial[20]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[20]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "unexp_lines[20]" slist => { "user:aUserThatDoesNotExist*" }; + "mode[20]" string => "audit"; + "status[20]" string => "success"; + "execute[20]" string => "true"; + + # AUDIT ERROR + # As in the repaired tests + "files[21]" string => "file21/."; + "recursive[21]" string => "true"; + "user[21]" string => "bin"; + "create[21]" string => "true"; + "initial[21]" slist => { "user:*:rwx", "group:*:rx", "group:bin:r", "all:rx" }; + "exp_lines[21]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[21]" slist => { "user:bin*" }; + "mode[21]" string => "audit"; + "status[21]" string => "error"; + "execute[21]" string => "true"; + + "files[22]" string => "file21/subfile1"; + "create[22]" string => "true"; + "initial[22]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[22]" slist => { "user::rwx", "group::r-x", "user:bin:r", "group:bin:r--", "other::r--" }; + + "files[23]" string => "file23/."; + "recursive[23]" string => "true"; + "user[23]" string => "bin"; + "create[23]" string => "true"; + "initial[23]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[23]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[23]" string => "audit"; + "status[23]" string => "error"; + "execute[23]" string => "true"; + + # non-recursive + "files[24]" string => "file24"; + "recursive[24]" string => "false"; + "user[24]" string => "bin"; + "create[24]" string => "true"; + "initial[24]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[24]" slist => { "user::rwx", "group::r-x", "user:bin:r", "group:bin:r--", "other::r--" }; + "mode[24]" string => "audit"; + "status[24]" string => "error"; + "execute[24]" string => "true"; + + "files[25]" string => "file25/."; + "recursive[25]" string => "false"; + "user[25]" string => "bin"; + "create[25]" string => "true"; + "initial[25]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[25]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[25]" string => "audit"; + "status[25]" string => "error"; + "execute[25]" string => "true"; + + "files[26]" string => "file6/subfile1"; + "create[26]" string => "true"; + "initial[26]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[26]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[27]" string => "file27/."; + "recursive[27]" string => 
"false"; + "user[27]" string => "bin"; + "create[27]" string => "true"; + "initial[27]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[27]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[27]" string => "audit"; + "status[27]" string => "error"; + "execute[27]" string => "true"; + + # As in the error tests + "files[28]" string => "fileThatDoesNotExist28"; + "recursive[28]" string => "false"; + "user[28]" string => "bin"; + "create[28]" string => "false"; + "mode[28]" string => "enforce"; + "status[28]" string => "error"; + "execute[28]" string => "true"; + + "files[29]" string => "fileThatDoesNotExist29"; + "recursive[29]" string => "true"; + "user[29]" string => "bin"; + "create[29]" string => "false"; + "mode[29]" string => "enforce"; + "status[29]" string => "error"; + "execute[29]" string => "true"; + + + # REGEX entry test + "files[30]" string => "rfile30"; + "create[30]" string => "true"; + "initial[30]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[30]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[30]" slist => { "user:bin*" }; + "execute[30]" string => "false"; + + "files[31]" string => "rfile31/."; + "create[31]" string => "true"; + "initial[31]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[31]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r-x" }; + "unexp_lines[31]" slist => { "user:bin*" }; + "execute[31]" string => "false"; + + "files[32]" string => "rfile32/subfile1"; + "create[32]" string => "true"; + "initial[32]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[32]" slist => { "user::rwx", "group::r-x", "group:bin:r--", "other::r--" }; + "unexp_lines[32]" slist => { "user:bin*" }; + + "files[33]" string => "rfile*"; + "recursive[33]" string => "true"; + "user[33]" string => "bin"; + "create[33]" string => "false"; + "mode[33]" string => "enforce"; + "status[33]" string => "repaired"; + "execute[33]" string => "true"; + + "indices" slist => getindices("files"); + + "printable_lines[${indices}]" string => join("${const.endl}", "exp_lines[${indices}]"); + "printable_unexpected_lines[${indices}]" string => join("${const.endl}", "unexp_lines[${indices}]"); + + classes: + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${tmp}/${files[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => access_generic("@{initial[${indices}]}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "${init.recursive[${init.indices}]}", "${init.user[${init.indices}]}"}; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_user_acl_absent", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("permissions_user_acl_absent", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + # Do not execute 3 + "ph4" usebundle => apply_gm("permissions_user_acl_absent", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + "ph5" usebundle => apply_gm("permissions_user_acl_absent", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + "ph6" usebundle => 
apply_gm("permissions_user_acl_absent", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + # Do not execute 7 + "ph8" usebundle => apply_gm("permissions_user_acl_absent", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + #SUCCESS + "ph9" usebundle => apply_gm("permissions_user_acl_absent", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}" ); + "ph10" usebundle => apply_gm("permissions_user_acl_absent", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + "ph11" usebundle => apply_gm("permissions_user_acl_absent", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}" ); + # Do not execute 12 + "ph13" usebundle => apply_gm("permissions_user_acl_absent", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}" ); + #ERROR + "ph14" usebundle => apply_gm("permissions_user_acl_absent", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}" ); + "ph15" usebundle => apply_gm("permissions_user_acl_absent", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}" ); + #AUDIT SUCCESS + "ph16" usebundle => apply_gm("permissions_user_acl_absent", @{args16}, "${init.status[16]}", "ph16", "${init.mode[16]}" ); + "ph17" usebundle => apply_gm("permissions_user_acl_absent", @{args17}, "${init.status[17]}", "ph17", "${init.mode[17]}" ); + "ph18" usebundle => apply_gm("permissions_user_acl_absent", @{args18}, "${init.status[18]}", "ph18", "${init.mode[18]}" ); + # Do not execute 19 + "ph20" usebundle => apply_gm("permissions_user_acl_absent", @{args20}, "${init.status[20]}", "ph20", "${init.mode[20]}" ); + #AUDIT ERROR + "ph21" usebundle => apply_gm("permissions_user_acl_absent", @{args21}, "${init.status[21]}", "ph21", "${init.mode[21]}" ); + # Do not execute 22 + "ph23" usebundle => apply_gm("permissions_user_acl_absent", @{args23}, "${init.status[23]}", "ph23", "${init.mode[23]}" ); + "ph24" usebundle => apply_gm("permissions_user_acl_absent", @{args24}, "${init.status[24]}", "ph24", "${init.mode[24]}" ); + "ph25" usebundle => apply_gm("permissions_user_acl_absent", @{args25}, "${init.status[25]}", "ph25", "${init.mode[25]}" ); + # Do not execute 26 + "ph27" usebundle => apply_gm("permissions_user_acl_absent", @{args27}, "${init.status[27]}", "ph27", "${init.mode[27]}" ); + "ph28" usebundle => apply_gm("permissions_user_acl_absent", @{args28}, "${init.status[28]}", "ph28", "${init.mode[28]}" ); + "ph29" usebundle => apply_gm("permissions_user_acl_absent", @{args29}, "${init.status[29]}", "ph29", "${init.mode[29]}" ); + # REGEX entry test + "ph33" usebundle => apply_gm("permissions_user_acl_absent", @{args33}, "${init.status[33]}", "ph33", "${init.mode[33]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + pass3:: + "expected_matches_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "unexpected_matches_failed_${init.indices}" expression => regcmp(".*${init.unexp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => 
"create_${init.indices}"; + + "lines_not_ok" expression => or("expected_matches_failed_${init.indices}", "unexpected_matches_failed_${init.indices}"); + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3:: + "Test for file nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok.execute_${init.indices}"; + + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "expected_matches_failed_${init.indices}"; + + "####################${const.endl}Found at least one of the following unexpected lines:${const.endl}${init.printable_unexpected_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "unexpected_matches_failed_${init.indices}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_present.cf b/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_present.cf new file mode 100644 index 00000000000..56dc235ef0d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/permissions_user_acl_present.cf @@ -0,0 +1,751 @@ +####################################################### +# +# Test checking if ACLs are present or not +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # REPAIRED + ## recursive + "files[1]" string => "file1"; # args of the GM + "recursive[1]" string => "true"; # \\ + "user[1]" string => "bin"; # \\ + "ace[1]" string => "+rwx"; # \\ + "create[1]" string => "true"; # if "true", the file will be created at init + "initial[1]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[1]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r--" }; + # -> list of regex that must match the output of getfacl on the file at the en of the test + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "files[2]" string => "file2"; + "recursive[2]" string => "true"; + "user[2]" string => "bin"; + "ace[2]" string => "=rw"; + "create[2]" string => "true"; + "initial[2]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[2]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "files[3]" string => "file3"; + "recursive[3]" string => "true"; + "user[3]" string => "bin"; + "ace[3]" string => "-rw"; 
+ "create[3]" string => "true"; + "initial[3]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[3]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + "mode[3]" string => "enforce"; + "status[3]" string => "repaired"; + "execute[3]" string => "true"; + + "files[4]" string => "file4/."; + "recursive[4]" string => "true"; + "user[4]" string => "bin"; + "ace[4]" string => "-rw"; + "create[4]" string => "true"; + "initial[4]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[4]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r-x" }; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + "files[5]" string => "file4/1/2/3/4/5/subfile1"; + "create[5]" string => "true"; + "initial[5]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[5]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + + "files[6]" string => "file6/."; + "recursive[6]" string => "true"; + "user[6]" string => "bin"; + "ace[6]" string => "-rw"; + "create[6]" string => "true"; + "initial[6]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[6]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[6]" string => "enforce"; + "status[6]" string => "repaired"; + "execute[6]" string => "true"; + + # non-recursive + "files[7]" string => "file7"; + "recursive[7]" string => "false"; + "user[7]" string => "bin"; + "ace[7]" string => "+rwx"; + "create[7]" string => "true"; + "initial[7]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[7]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r--" }; + "mode[7]" string => "enforce"; + "status[7]" string => "repaired"; + "execute[7]" string => "true"; + + "files[8]" string => "file8"; + "recursive[8]" string => "false"; + "user[8]" string => "bin"; + "ace[8]" string => "=rw"; + "create[8]" string => "true"; + "initial[8]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[8]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + "mode[8]" string => "enforce"; + "status[8]" string => "repaired"; + "execute[8]" string => "true"; + + "files[9]" string => "file9"; + "recursive[9]" string => "false"; + "user[9]" string => "bin"; + "ace[9]" string => "-rw"; + "create[9]" string => "true"; + "initial[9]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[9]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + "mode[9]" string => "enforce"; + "status[9]" string => "repaired"; + "execute[9]" string => "true"; + + "files[10]" string => "file10/."; + "recursive[10]" string => "false"; + "user[10]" string => "bin"; + "ace[10]" string => "-rw"; + "create[10]" string => "true"; + "initial[10]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[10]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[10]" string => "enforce"; + "status[10]" string => "repaired"; + "execute[10]" string => "true"; + + "files[11]" string => "file10/1/2/3/4/5/subfile1"; + "create[11]" string => "true"; + "initial[11]" slist => { "user:*:rwx", 
"group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[11]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[12]" string => "file12/."; + "recursive[12]" string => "false"; + "user[12]" string => "bin"; + "ace[12]" string => "-rw"; + "create[12]" string => "true"; + "initial[12]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[12]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[12]" string => "enforce"; + "status[12]" string => "repaired"; + "execute[12]" string => "true"; + + # SUCCESS + "files[13]" string => "file13"; + "recursive[13]" string => "true"; + "user[13]" string => "bin"; + "ace[13]" string => "+rwx"; + "create[13]" string => "true"; + "initial[13]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:r" }; + "exp_lines[13]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r--" }; + "mode[13]" string => "enforce"; + "status[13]" string => "success"; + "execute[13]" string => "true"; + + + "files[14]" string => "file14"; + "recursive[14]" string => "true"; + "user[14]" string => "bin"; + "ace[14]" string => "-rw"; + "create[14]" string => "true"; + "initial[14]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:r" }; + "exp_lines[14]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r--" }; + "mode[14]" string => "enforce"; + "status[14]" string => "success"; + "execute[14]" string => "true"; + + "files[15_bis]" string => "file15/1/2/3/4/5/subfile1"; + "create[15_bis]" string => "true"; + "initial[15_bis]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:r" }; + "exp_lines[15_bis]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r--" }; + + "files[15]" string => "file15/."; + "recursive[15]" string => "true"; + "user[15]" string => "bin"; + "ace[15]" string => "-rw"; + "create[15]" string => "true"; + "initial[15]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[15]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r", "other::r-x" }; + "mode[15]" string => "enforce"; + "status[15]" string => "success"; + "execute[15]" string => "true"; + + + "files[17]" string => "file17/."; + "recursive[17]" string => "true"; + "user[17]" string => "bin"; + "ace[17]" string => "-rw"; + "create[17]" string => "true"; + "initial[17]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:rx" }; + "exp_lines[17]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[17]" string => "enforce"; + "status[17]" string => "success"; + "execute[17]" string => "true"; + + # non-recursive + "files[18]" string => "file18"; + "recursive[18]" string => "false"; + "user[18]" string => "*"; + "ace[18]" string => "+rwx"; + "create[18]" string => "true"; + "initial[18]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[18]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[18]" string => "enforce"; + "status[18]" string => "success"; + "execute[18]" string => "true"; + + "files[19]" string => "file19"; + "recursive[19]" string => "false"; + "user[19]" string => "bin"; + "ace[19]" string => "=rw"; + "create[19]" string => "true"; + "initial[19]" slist => { "user:*:rwx", "group:*:rx", 
"user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[19]" slist => { "user::rwx", "group::r-x", "user:bin:rw", "group:bin:r", "other::r" }; + "mode[19]" string => "enforce"; + "status[19]" string => "success"; + "execute[19]" string => "true"; + + "files[20]" string => "file20"; + "recursive[20]" string => "false"; + "user[20]" string => "bin"; + "ace[20]" string => "-rw"; + "create[20]" string => "true"; + "initial[20]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:r" }; + "exp_lines[20]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + "mode[20]" string => "enforce"; + "status[20]" string => "success"; + "execute[20]" string => "true"; + + "files[21]" string => "file21/."; + "recursive[21]" string => "false"; + "user[21]" string => "bin"; + "ace[21]" string => "-rw"; + "create[21]" string => "true"; + "initial[21]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:rx" }; + "exp_lines[21]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[21]" string => "enforce"; + "status[21]" string => "success"; + "execute[21]" string => "true"; + + "files[22]" string => "file21/1/2/3/4/5/subfile1"; + "create[22]" string => "true"; + "initial[22]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[22]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + + "files[23]" string => "file23/."; + "recursive[23]" string => "false"; + "user[23]" string => "bin"; + "ace[23]" string => "-rw"; + "create[23]" string => "true"; + "initial[23]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[23]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r-x" }; + "mode[23]" string => "enforce"; + "status[23]" string => "success"; + "execute[23]" string => "true"; + + "files[24]" string => "file24"; + "recursive[24]" string => "true"; + "user[24]" string => "bin"; + "ace[24]" string => "=rw"; + "create[24]" string => "true"; + "initial[24]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[24]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + "mode[24]" string => "enforce"; + "status[24]" string => "success"; + "execute[24]" string => "true"; + + # ERROR + "files[25]" string => "fileThatDoesNotExist"; + "recursive[25]" string => "false"; + "user[25]" string => "bin"; + "ace[25]" string => "-rw"; + "create[25]" string => "false"; + "mode[25]" string => "enforce"; + "status[25]" string => "error"; + "execute[25]" string => "true"; + + "files[26]" string => "fileThatDoesNotExist2"; + "recursive[26]" string => "true"; + "user[26]" string => "bin"; + "ace[26]" string => "-rw"; + "create[26]" string => "false"; + "mode[26]" string => "enforce"; + "status[26]" string => "error"; + "execute[26]" string => "true"; + + "files[27]" string => "file27"; + "recursive[27]" string => "false"; + "user[27]" string => "aUserThatDoesNotExist"; + "ace[27]" string => "-rw"; + "create[27]" string => "true"; + "initial[27]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[27]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[27]" string => "enforce"; + "status[27]" string => "error"; + "execute[27]" string => "true"; + + # AUDIT SUCCESS + # Copy of success enforce part + "files[28]" 
string => "file28"; + "recursive[28]" string => "true"; + "user[28]" string => "bin"; + "ace[28]" string => "+rwx"; + "create[28]" string => "true"; + "initial[28]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:r" }; + "exp_lines[28]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r--" }; + "mode[28]" string => "enforce"; + "status[28]" string => "success"; + "execute[28]" string => "true"; + + + "files[29]" string => "file29"; + "recursive[29]" string => "true"; + "user[29]" string => "bin"; + "ace[29]" string => "-rw"; + "create[29]" string => "true"; + "initial[29]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:r" }; + "exp_lines[29]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r--" }; + "mode[29]" string => "enforce"; + "status[29]" string => "success"; + "execute[29]" string => "true"; + + "files[30_bis]" string => "file30/1/2/3/4/5/subfile1"; + "create[30_bis]" string => "true"; + "initial[30_bis]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:r" }; + "exp_lines[30_bis]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r--" }; + + "files[30]" string => "file30/."; + "recursive[30]" string => "true"; + "user[30]" string => "bin"; + "ace[30]" string => "-rw"; + "create[30]" string => "true"; + "initial[30]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[30]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r", "other::r-x" }; + "mode[30]" string => "enforce"; + "status[30]" string => "success"; + "execute[30]" string => "true"; + + + "files[32]" string => "file32/."; + "recursive[32]" string => "true"; + "user[32]" string => "bin"; + "ace[32]" string => "-rw"; + "create[32]" string => "true"; + "initial[32]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:rx" }; + "exp_lines[32]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[32]" string => "enforce"; + "status[32]" string => "success"; + "execute[32]" string => "true"; + + # non-recursive + "files[33]" string => "file33"; + "recursive[33]" string => "false"; + "user[33]" string => "*"; + "ace[33]" string => "+rwx"; + "create[33]" string => "true"; + "initial[33]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[33]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[33]" string => "enforce"; + "status[33]" string => "success"; + "execute[33]" string => "true"; + + "files[34]" string => "file34"; + "recursive[34]" string => "false"; + "user[34]" string => "bin"; + "ace[34]" string => "=rw"; + "create[34]" string => "true"; + "initial[34]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[34]" slist => { "user::rwx", "group::r-x", "user:bin:rw", "group:bin:r", "other::r" }; + "mode[34]" string => "enforce"; + "status[34]" string => "success"; + "execute[34]" string => "true"; + + "files[35]" string => "file35"; + "recursive[35]" string => "false"; + "user[35]" string => "bin"; + "ace[35]" string => "-rw"; + "create[35]" string => "true"; + "initial[35]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:r" }; + "exp_lines[35]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + "mode[35]" string => "enforce"; + "status[35]" string => 
"success"; + "execute[35]" string => "true"; + + "files[36]" string => "file36/."; + "recursive[36]" string => "false"; + "user[36]" string => "bin"; + "ace[36]" string => "-rw"; + "create[36]" string => "true"; + "initial[36]" slist => { "user:*:rwx", "group:*:rx", "user:bin:x", "group:bin:r", "all:rx" }; + "exp_lines[36]" slist => { "user::rwx", "group::r-x", "user:bin:--x", "group:bin:r--", "other::r-x" }; + "mode[36]" string => "enforce"; + "status[36]" string => "success"; + "execute[36]" string => "true"; + + "files[37]" string => "file36/1/2/3/4/5/subfile1"; + "create[37]" string => "true"; + "initial[37]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[37]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + + "files[38]" string => "file38/."; + "recursive[38]" string => "false"; + "user[38]" string => "bin"; + "ace[38]" string => "-rw"; + "create[38]" string => "true"; + "initial[38]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[38]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r-x" }; + "mode[38]" string => "enforce"; + "status[38]" string => "success"; + "execute[38]" string => "true"; + + "files[39]" string => "file39"; + "recursive[39]" string => "true"; + "user[39]" string => "bin"; + "ace[39]" string => "=rw"; + "create[39]" string => "true"; + "initial[39]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rw", "group:bin:r", "all:r" }; + "exp_lines[39]" slist => { "user::rwx", "group::r-x", "user:bin:rw-", "group:bin:r--", "other::r--" }; + "mode[39]" string => "enforce"; + "status[39]" string => "success"; + "execute[39]" string => "true"; + + # AUDIT ERROR + ## recursive + "files[40]" string => "file40"; + "recursive[40]" string => "true"; + "user[40]" string => "bin"; + "ace[40]" string => "+rwx"; + "create[40]" string => "true"; + "initial[40]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[40]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[40]" string => "audit"; + "status[40]" string => "error"; + "execute[40]" string => "true"; + + "files[41]" string => "file41"; + "recursive[41]" string => "true"; + "user[41]" string => "bin"; + "ace[41]" string => "=rw"; + "create[41]" string => "true"; + "initial[41]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[41]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[41]" string => "audit"; + "status[41]" string => "error"; + "execute[41]" string => "true"; + + "files[42]" string => "file42"; + "recursive[42]" string => "true"; + "user[42]" string => "bin"; + "ace[42]" string => "-rw"; + "create[42]" string => "true"; + "initial[42]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[42]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[42]" string => "audit"; + "status[42]" string => "error"; + "execute[42]" string => "true"; + + "files[43]" string => "file43/."; + "recursive[43]" string => "true"; + "user[43]" string => "bin"; + "ace[43]" string => "-rw"; + "create[43]" string => "true"; + "initial[43]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[43]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r", "other::r-x" }; + "mode[43]" 
string => "audit"; + "status[43]" string => "error"; + "execute[43]" string => "true"; + + "files[44]" string => "file43/1/2/3/4/5/subfile1"; + "create[44]" string => "true"; + "initial[44]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[44]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[45]" string => "file45/."; + "recursive[45]" string => "true"; + "user[45]" string => "bin"; + "ace[45]" string => "-rw"; + "create[45]" string => "true"; + "initial[45]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[45]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[45]" string => "audit"; + "status[45]" string => "error"; + "execute[45]" string => "true"; + + # non-recursive + "files[46]" string => "file46"; + "recursive[46]" string => "false"; + "user[46]" string => "bin"; + "ace[46]" string => "+rwx"; + "create[46]" string => "true"; + "initial[46]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[46]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[46]" string => "audit"; + "status[46]" string => "error"; + "execute[46]" string => "true"; + + "files[47]" string => "file47"; + "recursive[47]" string => "false"; + "user[47]" string => "bin"; + "ace[47]" string => "=rw"; + "create[47]" string => "true"; + "initial[47]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[47]" slist => { "user::rwx", "group::r-x", "user:bin:r", "group:bin:r", "other::r" }; + "mode[47]" string => "audit"; + "status[47]" string => "error"; + "execute[47]" string => "true"; + + "files[48]" string => "file48"; + "recursive[48]" string => "false"; + "user[48]" string => "bin"; + "ace[48]" string => "-rw"; + "create[48]" string => "true"; + "initial[48]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[48]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + "mode[48]" string => "audit"; + "status[48]" string => "error"; + "execute[48]" string => "true"; + + "files[49]" string => "file49/."; + "recursive[49]" string => "false"; + "user[49]" string => "bin"; + "ace[49]" string => "-rw"; + "create[49]" string => "true"; + "initial[49]" slist => { "user:*:rwx", "group:*:rx", "user:bin:+rwx", "group:bin:r", "all:rx" }; + "exp_lines[49]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[49]" string => "audit"; + "status[49]" string => "error"; + "execute[49]" string => "true"; + + "files[50]" string => "file49/1/2/3/4/5/subfile1"; + "create[50]" string => "true"; + "initial[50]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[50]" slist => { "user::rwx", "group::r-x", "user:bin:r--", "group:bin:r--", "other::r--" }; + + "files[51]" string => "file51/."; + "recursive[51]" string => "false"; + "user[51]" string => "bin"; + "ace[51]" string => "-rw"; + "create[51]" string => "true"; + "initial[51]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[51]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[51]" string => "audit"; + "status[51]" string => "error"; + "execute[51]" string => "true"; + + # As in the error tests + "files[52]" string => 
"fileThatDoesNotExist52"; + "recursive[52]" string => "false"; + "user[52]" string => "bin"; + "ace[52]" string => "-rw"; + "create[52]" string => "false"; + "mode[52]" string => "audit"; + "status[52]" string => "error"; + "execute[52]" string => "true"; + + "files[53]" string => "fileThatDoesNotExist53"; + "recursive[53]" string => "true"; + "user[53]" string => "bin"; + "ace[53]" string => "-rw"; + "create[53]" string => "false"; + "mode[53]" string => "audit"; + "status[53]" string => "error"; + "execute[53]" string => "true"; + + "files[54]" string => "file54"; + "recursive[54]" string => "false"; + "user[54]" string => "aUserThatDoesNotExist"; + "ace[54]" string => "+r"; + "create[54]" string => "true"; + "initial[54]" slist => { "user:*:rwx", "group:*:rx", "user:bin:rwx", "group:bin:r", "all:rx" }; + "exp_lines[54]" slist => { "user::rwx", "group::r-x", "user:bin:rwx", "group:bin:r--", "other::r-x" }; + "mode[54]" string => "audit"; + "status[54]" string => "error"; + "execute[54]" string => "true"; + + # REGEX entry test + "files[55]" string => "rfile55"; + "create[55]" string => "true"; + "initial[55]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[55]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + "execute[55]" string => "false"; + + "files[56]" string => "rfile56/."; + "create[56]" string => "true"; + "initial[56]" slist => { "user:*:rwx", "group:*:rx", "user:bin:-rwx", "group:bin:r", "all:rx" }; + "exp_lines[56]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r-x" }; + "execute[56]" string => "false"; + + "files[57]" string => "rfile4/1/2/3/4/5/subfile1"; + "create[57]" string => "true"; + "initial[57]" slist => { "user:*:rwx", "group:*:rx", "user:bin:r", "group:bin:r", "all:r" }; + "exp_lines[57]" slist => { "user::rwx", "group::r-x", "user:bin:---", "group:bin:r--", "other::r--" }; + + "files[58]" string => "rfile*"; + "recursive[58]" string => "true"; + "user[58]" string => "bin"; + "ace[58]" string => "-rwx"; + "create[58]" string => "false"; + "mode[58]" string => "enforce"; + "status[58]" string => "repaired"; + "execute[58]" string => "true"; + + "indices" slist => getindices("files"); + # Needed for success + recursive + "indices_recursive" slist => { "15", "30" }; + + "printable_lines[${indices}]" string => join("${const.endl}", "exp_lines[${indices}]"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + files: + "${tmp}/${files[${indices}]}" + create => "true", + ifvarclass => "create_${indices}", + acl => access_generic("@{initial[${indices}]}"); + + pass3:: + "${tmp}/${files[${indices_recursive}]}" + create => "true", + depth_search => recurse_with_base("3"), + acl => access_generic("@{initial[${indices_recursive}]}"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.tmp}/${init.files[${init.indices}]}", "${init.recursive[${init.indices}]}", "${init.user[${init.indices}]}", "${init.ace[${init.indices}]}" }; + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("permissions_user_acl_present", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("permissions_user_acl_present", @{args2}, 
"${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph3" usebundle => apply_gm("permissions_user_acl_present", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm("permissions_user_acl_present", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + # Do not execute 5 + "ph6" usebundle => apply_gm("permissions_user_acl_present", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + "ph7" usebundle => apply_gm("permissions_user_acl_present", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}" ); + "ph8" usebundle => apply_gm("permissions_user_acl_present", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + "ph9" usebundle => apply_gm("permissions_user_acl_present", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}" ); + "ph10" usebundle => apply_gm("permissions_user_acl_present", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + # Do not execute 11 + "ph12" usebundle => apply_gm("permissions_user_acl_present", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}" ); + #SUCCESS + "ph13" usebundle => apply_gm("permissions_user_acl_present", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}" ); + "ph14" usebundle => apply_gm("permissions_user_acl_present", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}" ); + "ph15" usebundle => apply_gm("permissions_user_acl_present", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}" ); + # Do not execute 16 + "ph17" usebundle => apply_gm("permissions_user_acl_present", @{args17}, "${init.status[17]}", "ph17", "${init.mode[17]}" ); + "ph18" usebundle => apply_gm("permissions_user_acl_present", @{args18}, "${init.status[18]}", "ph18", "${init.mode[18]}" ); + "ph19" usebundle => apply_gm("permissions_user_acl_present", @{args19}, "${init.status[19]}", "ph19", "${init.mode[19]}" ); + "ph20" usebundle => apply_gm("permissions_user_acl_present", @{args20}, "${init.status[20]}", "ph20", "${init.mode[20]}" ); + "ph21" usebundle => apply_gm("permissions_user_acl_present", @{args21}, "${init.status[21]}", "ph21", "${init.mode[21]}" ); + # Do not execute 22 + "ph23" usebundle => apply_gm("permissions_user_acl_present", @{args23}, "${init.status[23]}", "ph23", "${init.mode[23]}" ); + "ph24" usebundle => apply_gm("permissions_user_acl_present", @{args24}, "${init.status[24]}", "ph24", "${init.mode[24]}" ); + #ERROR + "ph25" usebundle => apply_gm("permissions_user_acl_present", @{args25}, "${init.status[25]}", "ph25", "${init.mode[25]}" ); + "ph26" usebundle => apply_gm("permissions_user_acl_present", @{args26}, "${init.status[26]}", "ph26", "${init.mode[26]}" ); + "ph27" usebundle => apply_gm("permissions_user_acl_present", @{args27}, "${init.status[27]}", "ph27", "${init.mode[27]}" ); + #AUDIT SUCCESS + "ph28" usebundle => apply_gm("permissions_user_acl_present", @{args28}, "${init.status[28]}", "ph28", "${init.mode[28]}" ); + "ph29" usebundle => apply_gm("permissions_user_acl_present", @{args29}, "${init.status[29]}", "ph29", "${init.mode[29]}" ); + "ph30" usebundle => apply_gm("permissions_user_acl_present", @{args30}, "${init.status[30]}", "ph30", "${init.mode[30]}" ); + # Do not execute 31 + "ph32" usebundle => apply_gm("permissions_user_acl_present", @{args32}, "${init.status[32]}", "ph32", "${init.mode[32]}" ); + "ph33" usebundle => apply_gm("permissions_user_acl_present", @{args33}, "${init.status[33]}", "ph33", "${init.mode[33]}" ); + "ph34" usebundle => apply_gm("permissions_user_acl_present", @{args34}, "${init.status[34]}", "ph34", 
"${init.mode[34]}" ); + "ph35" usebundle => apply_gm("permissions_user_acl_present", @{args35}, "${init.status[35]}", "ph35", "${init.mode[35]}" ); + "ph36" usebundle => apply_gm("permissions_user_acl_present", @{args36}, "${init.status[36]}", "ph36", "${init.mode[36]}" ); + # Do not execute 37 + "ph38" usebundle => apply_gm("permissions_user_acl_present", @{args38}, "${init.status[38]}", "ph38", "${init.mode[38]}" ); + "ph39" usebundle => apply_gm("permissions_user_acl_present", @{args39}, "${init.status[39]}", "ph39", "${init.mode[39]}" ); + #AUDIT ERROR + ## Copy of the repaired + "ph40" usebundle => apply_gm("permissions_user_acl_present", @{args40}, "${init.status[40]}", "ph40", "${init.mode[40]}" ); + "ph41" usebundle => apply_gm("permissions_user_acl_present", @{args41}, "${init.status[41]}", "ph41", "${init.mode[41]}" ); + "ph42" usebundle => apply_gm("permissions_user_acl_present", @{args42}, "${init.status[42]}", "ph42", "${init.mode[42]}" ); + "ph43" usebundle => apply_gm("permissions_user_acl_present", @{args43}, "${init.status[43]}", "ph43", "${init.mode[43]}" ); + # Do not execute 44 + "ph45" usebundle => apply_gm("permissions_user_acl_present", @{args45}, "${init.status[45]}", "ph45", "${init.mode[45]}" ); + "ph46" usebundle => apply_gm("permissions_user_acl_present", @{args46}, "${init.status[46]}", "ph46", "${init.mode[46]}" ); + "ph47" usebundle => apply_gm("permissions_user_acl_present", @{args47}, "${init.status[47]}", "ph47", "${init.mode[47]}" ); + "ph48" usebundle => apply_gm("permissions_user_acl_present", @{args48}, "${init.status[48]}", "ph48", "${init.mode[48]}" ); + "ph49" usebundle => apply_gm("permissions_user_acl_present", @{args49}, "${init.status[49]}", "ph49", "${init.mode[49]}" ); + # Do not execute 40 + "ph51" usebundle => apply_gm("permissions_user_acl_present", @{args51}, "${init.status[51]}", "ph51", "${init.mode[51]}" ); + ## Copy of the error + "ph52" usebundle => apply_gm("permissions_user_acl_present", @{args52}, "${init.status[52]}", "ph52", "${init.mode[52]}" ); + "ph53" usebundle => apply_gm("permissions_user_acl_present", @{args53}, "${init.status[53]}", "ph53", "${init.mode[53]}" ); + "ph54" usebundle => apply_gm("permissions_user_acl_present", @{args54}, "${init.status[54]}", "ph54", "${init.mode[54]}" ); + + # REGEX entry test + # Do not execute 55, 56, 57 + "ph58" usebundle => apply_gm("permissions_user_acl_present", @{args58}, "${init.status[58]}", "ph58", "${init.mode[58]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "getfacl_output[${init.indices}]" string => execresult("${paths.getfacl} ${init.tmp}/${init.files[${init.indices}]}", "useshell"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "create_${init.indices}" expression => strcmp("${init.create[${init.indices}]}", "true"); + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + pass3:: + "lines_matches_failed_${init.indices}" not => regcmp(".*${init.exp_lines[${init.indices}]}.*","${getfacl_output[${init.indices}]}"), + ifvarclass => "create_${init.indices}"; + + "lines_not_ok" expression => "lines_matches_failed_${init.indices}"; + + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + ifvarclass => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!lines_not_ok"; + + reports: + pass3:: + "Test for file nb ${init.indices} FAILED" + ifvarclass => 
"!ph${init.indices}_ok.execute_${init.indices}"; + + "####################${const.endl}Missing at least one of the following lines:${const.endl}${init.printable_lines[${init.indices}]} ${const.endl}in the following output: ${const.endl}${getfacl_output[${init.indices}]}${const.endl} ####################" + ifvarclass => "lines_matches_failed_${init.indices}"; + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/schedule_simple.cf b/policies/lib/tests/acceptance/30_generic_methods/schedule_simple.cf new file mode 100644 index 00000000000..a2c3390a170 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/schedule_simple.cf @@ -0,0 +1,197 @@ +####################################################### +# +# Test the simple generic scheduler +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ +} + +####################################################### + +# Bundle all tests into this one +bundle agent test +{ + vars: + "job_already_run_out_of_time" string => "test21"; + "job_already_run" string => "test22"; + + classes: + #Pretent that job21 & 22 was already ran + "job_${job_already_run_out_of_time}_persist" expression => "any", + scope => "namespace"; + "job_${job_already_run}_persist" expression => "any", + scope => "namespace"; + + methods: + any:: + + # 01/01/01 is a monday + # id date time splay ag_p sp_m sp_h min hrs dow p_m p_h p_d mode cid rid err must_run + # fail - expected periodicity is 3, max delay btw tasks is 30 + "any" usebundle => schedule_test("test1", "01/01/01 00:01", "89478486", "3", "30", "0", "0", "0", "1", "3", "0", "0", "", "", "", "ko", ""); + # fail - null agent periodicity + expected periodicity is 3, max delay tasks is 10 + "any" usebundle => schedule_test("test2", "01/01/01 00:01", "89478486", "0", "10", "0", "0", "0", "1", "3", "0", "0", "", "", "", "ko", ""); + # fail - null agent periodicity + correct delays + "any" usebundle => schedule_test("test3", "01/01/01 00:01", "89478486", "0", "10", "0", "0", "0", "1", "30", "0", "0", "", "", "", "ko", ""); + # fail - expected periodicity is 3, max delay tasks is 10 + "any" usebundle => schedule_test("test4", "01/01/01 00:01", "89478486", "9", "10", "0", "0", "0", "1", "3", "0", "0", "", "", "", "ko", ""); + + # success - no run - schedule at next execution + "any" usebundle => schedule_test("test11", "01/01/01 00:01", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "", "0", "1", "ok", ""); + # success - must run now + "any" usebundle => schedule_test("test12", "01/01/01 00:04", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "", "1", "1", "ok", ""); + # success - must run now + "any" usebundle => schedule_test("test13", "01/01/01 00:01", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "1", "", "1", "1", "ok", ""); + # success - must run now + "any" usebundle => schedule_test("test14", "01/01/18 01:34", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "", "1", "1", "ok", ""); + + # success - must run now as it "didn't" run + "any" 
usebundle => schedule_test("test15", "01/01/01 00:01", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "catchup", "1", "1", "ok", "true"); + + + # success - will not rerun, as it was already "ran" + "any" usebundle => schedule_test("${job_already_run_out_of_time}", "01/01/01 00:01", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "catchup", "0", "1", "ok", "false"); + # success - will not run, as it was already "ran" (same as 12, but with persist class set) + "any" usebundle => schedule_test("${job_already_run}", "01/01/01 00:04", "89478486", "3", "10", "0", "0", "0", "1", "30", "0", "0", "catchup", "0", "1", "ok", "false"); + + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "!global_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + +# A simple scheduler test +# parameters are the scheduler parameter plus : +# - test_id: a unique id for the test +# - test_now: the current date to use for the test +# - mode is by default stateless +# - exp_current_run: expected value of current_run +# - exp_run: expected value of the job run id +# - exp_error: "ko" is we expect an error and "ok" otherwise +# - must_run: if true, must have class _repaired, if false, must not have. If empty: don't use. +# overrides the exp_current_run measure +bundle agent schedule_test(test_id, test_now, test_splay, agent_periodicity, delay_minutes, delay_hours, + start_on_minutes, start_on_hours, start_on_day_of_week, + periodicity_minutes, periodicity_hours, periodicity_days, mode, + exp_current_run, exp_run, exp_error, must_run) +{ + vars: + any:: + "date" string => execresult("date --date='${test_now}' +%s", "useshell"); + + classes: + "must_run_true" expression => strcmp("true", "${must_run}"); + "must_run_false" expression => strcmp("false", "${must_run}"); + "use_must_run" expression => "must_run_true|must_run_false"; + pass2:: + "ok_run" expression => strcmp("${exp_current_run}", "${schedule_generic.current_run_id}"); + "ok_job" expression => strcmp("${exp_run}", "${schedule_generic.job_run_id}"); + "expect_error" expression => strcmp("${exp_error}", "ko"); + pass2.expect_error:: + "ok_error" expression => "job_${test_id}_error"; + pass2.!expect_error:: + "ok_error" expression => "!job_${test_id}_error"; + + # if use_must_run is set, we are dealing with catchup or others + pass2.!use_must_run:: + "global_error" expression => "(expect_error.!ok_error)|(!expect_error.(!ok_run|!ok_job|!ok_error))", + scope => "namespace"; + + pass2.must_run_true:: + "global_error" expression => "(expect_error.!ok_error)|(!expect_error.(!schedule_simple_${test_id}_repaired|!ok_job|!ok_error))", + scope => "namespace"; + + pass2.must_run_false:: + "global_error" expression => "(expect_error.!ok_error)|(!expect_error.(schedule_simple_${test_id}_repaired|!ok_job|!ok_error))", + scope => "namespace"; + + any:: + "pass3" expression => "pass2"; + "pass2" expression => "any"; + + methods: + "date" usebundle => ncf_date("${date}"); + "splay" usebundle => ncf_splay("${test_splay}"); + "test" usebundle => schedule_simple("${test_id}", "${agent_periodicity}", "${delay_minutes}", "${delay_hours}", + "${start_on_minutes}", "${start_on_hours}", "${start_on_day_of_week}", + "${periodicity_minutes}", "${periodicity_hours}", "${periodicity_days}", "${mode}"); + + reports: +# any:: +# "-- test -- ${test_id}"; +# " prefix ${test_id} ${schedule_generic.test}"; +# " timestamp ${test_id} = 
${ncf_date.timestamp}"; +# " splay_dec ${test_id} = ${ncf_splay.splay}"; +# " max_execution_delay ${test_id} = ${schedule_simple_generic.max_execution_delay}"; +# " run_shift ${test_id} = ${schedule_generic.run_shift}"; +# " max_run_id ${test_id} = ${schedule_generic.max_run_id}"; +# " job_run_id ${test_id} = ${schedule_generic.job_run_id}"; +# " run_id ${test_id} = ${schedule_generic.job_run_id}"; +# " elapsed ${test_id} = ${schedule_generic.elapsed}"; +# " current_run_id ${test_id} = ${schedule_generic.current_run_id}"; +# +# " splay ${test_id} = ${schedule_simple_generic.splay}"; +# " periodicity ${test_id} = ${schedule_simple_generic.job_periodicity}"; +# " now ${test_id} = ${schedule_simple_generic.now}"; +# +# " error_null_agent_periodicity for ${test_id}" ifvarclass => "job_${test_id}_error_null_agent_periodicity"; +# " error_interval_too_big for ${test_id}" ifvarclass => "job_${test_id}_error_splay_too_big"; +# " error_interval_too_small for ${test_id}" ifvarclass => "job_${test_id}_error_splay_too_small"; +# " Error for ${test_id} !" ifvarclass => "job_{test_id}_error"; +# " Must RUN !" ifvarclass => "job_${test_id}_run"; +# " After run ${test_id}!" ifvarclass => "job_${test_id}_after_run"; +# " After splay ${test_id}!" ifvarclass => "job_${test_id}_after_splay"; +# +# must_run_true:: +# " Job ${test_id} must run"; +# +# must_run_false:: +# " Job ${test_id} must not run"; + +# pass3:: +# " Job run schedule_simple_${test_id}_repaired" ifvarclass => "schedule_simple_${test_id}_repaired"; + + # test expectations from parameters +# pass3.expect_error.ok_error:: # we expect an error and got one +# "TEST result OK - correctly got an error ${test_id}"; +# pass3.expect_error.!ok_error:: +# "TEST result ERROR ${test_id}: error ${test_id}_error false"; +# pass3.!expect_error.ok_run.ok_job.ok_error:: +# "TEST result OK ${test_id}"; # we expected no error and got the correct result +# pass3.!expect_error.!ok_run:: +# "TEST result ERROR ${test_id}: current_run ${schedule_generic.current_run_id} != ${exp_current_run}"; +# pass3.!expect_error.!ok_job:: +# "TEST result ERROR ${test_id}: run ${schedule_generic.run_id} != ${exp_run}"; +# pass3.!expect_error.!ok_error:: +# "TEST result ERROR ${test_id}: error ${test_id}_error true"; + +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source.cf new file mode 100644 index 00000000000..71707c660f7 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source.cf @@ -0,0 +1,87 @@ +####################################################### +# +# Test checking if a file can be copied from a remote source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + + "mode" string => "644"; + "owner" string => "root"; + "group" string => "0"; 
+ + files: + "${source_file}" + create => "true", + perms => mog("${mode}", "${owner}", "${group}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_copy_from_remote_source("${init.source_file}", "${init.destination_file}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "permissions_test_mode" string => "/usr/bin/test $(${test_utils.file_perms} ${init.destination_file}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test $(${test_utils.file_owner} ${init.destination_file}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test $(${test_utils.file_group} ${init.destination_file}) = \"${init.group}\""; + + classes: + # By default, file_copy_from_remote_source_type_recursion should create the file if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.file_copy_from_remote_source_${init.destination_file_canon}_ok.!file_copy_from_remote_source_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source_recursion.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source_recursion.cf new file mode 100644 index 00000000000..ea54cfe33db --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/file_copy_from_remote_source_recursion.cf @@ -0,0 +1,88 @@ +####################################################### +# +# Test checking if a file can be copied from a remote source +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "source_file" string => "${tmp}/source_test"; + "destination_file" string => "${tmp}/destination_test"; + "destination_file_canon" string => canonify("${destination_file}"); + "recursion" string => "0"; + + "mode" string => "644"; + "owner" string => "root"; + "group" string => "0"; + + files: + "${source_file}" + create => "true", + 
perms => mog("${mode}", "${owner}", "${group}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => file_copy_from_remote_source_recursion("${init.source_file}", "${init.destination_file}", "${init.recursion}"); +} + +####################################################### + +bundle agent check +{ + vars: + "owner_id" int => getuid("${init.owner}"); + "permissions_test_mode" string => "/usr/bin/test $(${test_utils.file_perms} ${init.destination_file}) = \"${init.mode}\""; + "permissions_test_owner" string => "/usr/bin/test $(${test_utils.file_owner} ${init.destination_file}) = \"${owner_id}\""; + "permissions_test_group" string => "/usr/bin/test $(${test_utils.file_group} ${init.destination_file}) = \"${init.group}\""; + + classes: + # By default, file_copy_from_remote_source_type_recursion should create the file if it doesn't exist + "permissions_test_mode_ok" + expression => returnszero("${permissions_test_mode}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + "permissions_test_owner_ok" + expression => returnszero("${permissions_test_owner}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + "permissions_test_group_ok" + expression => returnszero("${permissions_test_group}", "useshell"), + ifvarclass => canonify("file_copy_from_remote_source_${init.destination_file}_reached"); + + + "ok" expression => "permissions_test_mode_ok.permissions_test_owner_ok.permissions_test_group_ok.file_copy_from_remote_source_${init.destination_file_canon}_ok.!file_copy_from_remote_source_${init.destination_file_canon}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !permissions_test_mode_ok:: + "test command doesn't return 0 for command: ${permissions_test_mode}"; + !permissions_test_owner_ok:: + "test command doesn't return 0 for command: ${permissions_test_owner}"; + !permissions_test_group_ok:: + "test command doesn't return 0 for command: ${permissions_test_group}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/service_check_started_at_boot.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/service_check_started_at_boot.cf new file mode 100644 index 00000000000..9e3170df07d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/service_check_started_at_boot.cf @@ -0,0 +1,53 @@ +####################################################### +# +# Test if cron is started at boot +# +# Test in staging because the generic method doesn't support +# Debian systems +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "service_name" string => "cron"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_started_at_boot("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => 
"(promise_kept_service_check_started_at_boot_${init.service_name}.service_check_started_at_boot_${init.service_name}_kept.service_check_started_at_boot_${init.service_name}_ok.service_check_started_at_boot_${init.service_name}_not_repaired.service_check_started_at_boot_${init.service_name}_reached)"; + "ok_repaired" expression => "!(promise_repaired_service_check_started_at_boot_${init.service_name}|service_check_started_at_boot_${init.service_name}_repaired)"; + "ok_error" expression => "!(service_check_started_at_boot_${init.service_name}_not_kept|repair_failed_service_check_started_at_boot_${init.service_name}|service_check_started_at_boot_${init.service_name}_failed|service_check_started_at_boot_${init.service_name}_not_ok)"; + "ok" and => { "ok_success", "ok_repaired", "ok_error" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_block_device.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_block_device.cf new file mode 100644 index 00000000000..f887251f4ea --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_block_device.cf @@ -0,0 +1,101 @@ +######################################### +# +# Test checking if a block device or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "block_device" string => "${tmp}/block_device"; + "block_device_canon" string => canonify("${block_device}"); + + "file_list" slist => { "${file}", "${block_device}" }; + solaris:: + "mknod" string => "/usr/sbin/mknod"; + debian:: + "mknod" string => "/bin/mknod"; + !solaris.!debian:: + "mknod" string => "/usr/bin/mknod"; + + files: + "${file}" + create => "true"; + + commands: + "${mknod}" + args => "\"${block_device}\" b 7 0", # /dev/loop0 + if => "solaris"; + + "${mknod}" + args => "-m 640 \"${block_device}\" b 7 0", # /dev/loop0 + if => "!solaris"; + +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_block_device("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check block_device type + "test_block_device_type" string => "/usr/bin/file -b \"${init.block_device}\""; + + classes: + "test_file_is_block_device" + expression => regcmp("block special.*", execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_block_device_${init.file_canon}_reached"; + + "test_block_device_is_block_device" + expression => regcmp("block special.*", execresult("${test_block_device_type}", "noshell") ), + ifvarclass => "file_check_block_device_${init.block_device_canon}_reached"; + + "ok" expression => 
"test_block_device_is_block_device.file_check_block_device_${init.block_device_canon}_ok.!test_file_is_block_device.!file_check_block_device_${init.file_canon}_ok.file_check_block_device_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !test_block_device_is_block_device:: + "${init.block_device} is not a block_device as expected. It is '${test_block_device_type}'"; + test_file_is_block_device:: + "${init.file} is a block_device, not as expected. It is '${test_file_type}'"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_block_device_${init.file_canon}_reached"; + "Check of ${init.block_device} is not reached" + ifvarclass => "!file_check_block_device_${init.block_device_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_character_device.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_character_device.cf new file mode 100644 index 00000000000..973a0ddeaa0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/file_check_character_device.cf @@ -0,0 +1,89 @@ +######################################### +# +# Test checking if a character device or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "character_device" string => "${tmp}/character_device"; + "character_device_canon" string => canonify("${character_device}"); + + "file_list" slist => { "${file}", "${character_device}" }; + + files: + "${file}" + create => "true"; + + commands: + "/bin/mknod" + args => "-m 640 \"${character_device}\" c 5 1"; # /dev/console +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_character_device("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check character_device type + "test_character_device_type" string => "/usr/bin/file -b \"${init.character_device}\""; + + classes: + "test_file_is_character_device" + expression => regcmp("character special.*", execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_character_device_${init.file_canon}_reached"; + + "test_character_device_is_character_device" + expression => regcmp("character special.*", execresult("${test_character_device_type}", "noshell") ), + ifvarclass => "file_check_character_device_${init.character_device_canon}_reached"; + + "ok" expression => "test_character_device_is_character_device.file_check_character_device_${init.character_device_canon}_ok.!test_file_is_character_device.!file_check_character_device_${init.file_canon}_ok.file_check_character_device_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + 
!test_character_device_is_character_device:: + "${init.character_device} is not a character_device as expected. It is '${test_character_device_type}'"; + test_file_is_character_device:: + "${init.file} is a character_device, not as expected. It is '${test_file_type}'"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_character_device_${init.file_canon}_reached"; + "Check of ${init.character_device} is not reached" + ifvarclass => "!file_check_character_device_${init.character_device_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/service_ensure_started_at_boot.cf b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/service_ensure_started_at_boot.cf new file mode 100644 index 00000000000..91c7c7468e0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/staging/unsafe/service_ensure_started_at_boot.cf @@ -0,0 +1,54 @@ +####################################################### +# +# Make sure to have ssh started at boot +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + !redhat:: + "service_name" string => "ssh"; + redhat:: + "service_name" string => "sshd"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_ensure_at_boot("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => "(promise_kept_service_ensure_at_boot_${init.service_name}.service_ensure_at_boot_${init.service_name}_kept.service_ensure_at_boot_${init.service_name}_ok.service_ensure_at_boot_${init.service_name}_not_repaired.service_ensure_at_boot_${init.service_name}_reached)"; + "ok_repaired" expression => "!(promise_repaired_service_ensure_at_boot_${init.service_name}|service_ensure_at_boot_${init.service_name}_repaired)"; + "ok_error" expression => "!(service_ensure_at_boot_${init.service_name}_not_kept|repair_failed_service_ensure_at_boot_${init.service_name}|service_ensure_at_boot_${init.service_name}_failed|service_ensure_at_boot_${init.service_name}_not_ok)"; + "ok" and => { "ok_success", "ok_repaired", "ok_error" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_FIFO_pipe.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_FIFO_pipe.cf new file mode 100644 index 00000000000..724ad99f411 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_FIFO_pipe.cf @@ -0,0 +1,89 @@ +######################################### +# +# Test checking if a file is a FIFO or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + 
version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "FIFO_pipe" string => "${tmp}/FIFO_pipe"; + "FIFO_pipe_canon" string => canonify("${FIFO_pipe}"); + + "file_list" slist => { "${file}", "${FIFO_pipe}" }; + + files: + "${file}" + create => "true"; + + commands: + "/usr/bin/mkfifo" + args => "-m 640 \"${FIFO_pipe}\""; +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_FIFO_pipe("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check FIFO_pipe type + "test_FIFO_pipe_type" string => "/usr/bin/file -b \"${init.FIFO_pipe}\""; + + classes: + "test_file_is_FIFO_pipe" + expression => strcmp("fifo (named pipe)", execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_FIFO_pipe_${init.file_canon}_reached"; + + "test_FIFO_pipe_is_FIFO_pipe" + expression => strcmp("fifo (named pipe)", execresult("${test_FIFO_pipe_type}", "noshell") ), + ifvarclass => "file_check_FIFO_pipe_${init.FIFO_pipe_canon}_reached"; + + "ok" expression => "test_FIFO_pipe_is_FIFO_pipe.file_check_FIFO_pipe_${init.FIFO_pipe_canon}_ok.!test_file_is_FIFO_pipe.!file_check_FIFO_pipe_${init.file_canon}_ok.file_check_FIFO_pipe_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !test_FIFO_pipe_is_FIFO_pipe:: + "${init.FIFO_pipe} is not a FIFO_pipe as expected. It is '${test_FIFO_pipe_type}'"; + test_file_is_FIFO_pipe:: + "${init.file} is a FIFO_pipe, not as expected. 
It is '${test_file_type}'"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_FIFO_pipe_${init.file_canon}_reached"; + "Check of ${init.FIFO_pipe} is not reached" + ifvarclass => "!file_check_FIFO_pipe_${init.FIFO_pipe_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_regular.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_regular.cf new file mode 100644 index 00000000000..8df5a8e7352 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_regular.cf @@ -0,0 +1,88 @@ +######################################### +# +# Test checking if a file is a regular file or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "symlink" string => "${tmp}/symlink"; + "symlink_canon" string => canonify("${symlink}"); + + "file_list" slist => { "${file}", "${symlink}" }; + + files: + "${file}" + create => "true"; + + "${symlink}" + link_from => ln_s("${file}"); +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_regular("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check symlink type + "test_symlink_type" string => "/usr/bin/file -b \"${init.symlink}\""; + + classes: + "test_file_is_regular" + expression => strcmp("empty", execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_regular_${init.file_canon}_reached"; + + "test_symlink_is_regular" + expression => regcmp("empty", execresult("${test_symlink_type}", "noshell") ), + ifvarclass => "file_check_regular_${init.symlink_canon}_reached"; + + "ok" expression => "test_file_is_regular.file_check_regular_${init.file_canon}_ok.!test_symlink_is_regular.!file_check_regular_${init.symlink_canon}_ok.file_check_regular_${init.symlink_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + test_symlink_is_regular:: + "${init.symlink} is a regular, not as expected."; + !test_file_is_regular:: + "${init.file} is not a regular as expected."; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_regular_${init.file_canon}_reached"; + "Check of ${init.symlink} is not reached" + ifvarclass => "!file_check_regular_${init.symlink_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlink.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlink.cf new file mode 100644 index 00000000000..61f772d26cc --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlink.cf @@ -0,0 +1,88 @@ +######################################### +# +# Test checking if a file is a symlink or not +# +######################################### + +bundle common acc_path +{ 
+ vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "symlink" string => "${tmp}/symlink"; + "symlink_canon" string => canonify("${symlink}"); + + "file_list" slist => { "${file}", "${symlink}" }; + + files: + "${file}" + create => "true"; + + "${symlink}" + link_from => ln_s("${file}"); +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_symlink("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check symlink type + "test_symlink_type" string => "/usr/bin/file -b \"${init.symlink}\""; + + classes: + "test_file_is_symlink" + expression => regcmp("^symbolic link to.*", execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_symlink_${init.file_canon}_reached"; + + "test_symlink_is_symlink" + expression => regcmp("^symbolic link to.*", execresult("${test_symlink_type}", "noshell") ), + ifvarclass => "file_check_symlink_${init.symlink_canon}_reached"; + + "ok" expression => "test_symlink_is_symlink.file_check_symlink_${init.symlink_canon}_ok.!test_file_is_symlink.!file_check_symlink_${init.file_canon}_ok.file_check_symlink_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !test_symlink_is_symlink:: + "${init.symlink} is not a symlink as expected. It is '${test_symlink_type}'"; + test_file_is_symlink:: + "${init.file} is a symlink, not as expected. 
It is '${test_file_type}'"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_symlink_${init.file_canon}_reached"; + "Check of ${init.symlink} is not reached" + ifvarclass => "!file_check_symlink_${init.symlink_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlinkto.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlinkto.cf new file mode 100644 index 00000000000..3b3c0d5a0aa --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/file_check_symlinkto.cf @@ -0,0 +1,96 @@ +######################################### +# +# Test checking if a file is a symlink to a file or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + + "symlink_good_name" string => "${tmp}/symlink_good"; + "symlink_good_target" string => "${file}"; + "symlink_good_canon" string => canonify("${symlink_good_name}"); + + "symlink_bad_name" string => "${tmp}/symlink_bad"; + "symlink_bad_target" string => "/dev/null"; + "symlink_bad_canon" string => canonify("${symlink_bad_name}"); + + "symlink_list" slist => { "${symlink_good_name}", "${symlink_bad_name}" }; + + files: + "${file}" + create => "true"; + + "${symlink_good_name}" + link_from => ln_s("${symlink_good_target}"); + + "${symlink_bad_name}" + link_from => ln_s("${symlink_bad_target}"); + +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_symlinkto("${init.symlink_list}", "${init.file}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check symlink_good target + "test_good_target" string => "/usr/bin/file -b \"${init.symlink_good_name}\""; + + # Check symlink type + "test_bad_target" string => "/usr/bin/file -b \"${init.symlink_bad_name}\""; + + classes: + "symlink_good_is_ok" + expression => regcmp("symbolic link to `*${init.symlink_good_target}'*", execresult("${test_good_target}", "noshell") ), + ifvarclass => "file_check_symlinkto_${init.symlink_good_canon}_reached"; + + "symlink_bad_is_ok" + expression => regcmp("symbolic link to `*${init.symlink_good_target}'*", execresult("${test_bad_target}", "noshell") ), + ifvarclass => "file_check_symlinkto_${init.symlink_bad_canon}_reached"; + + "ok" expression => "symlink_good_is_ok.file_check_symlinkto_${init.symlink_good_canon}_ok.!symlink_bad_is_ok.!file_check_symlinkto_${init.symlink_bad_canon}_ok.file_check_symlinkto_${init.symlink_bad_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !symlink_good_is_ok:: + "${init.symlink_good_name} is a symlink not pointing to where expected"; + symlink_bad_is_ok:: + "${init.symlink_bad_name} is a symlink pointing to where not expected."; + + cfengine:: + "Check of ${init.symlink_good_name} is not reached" + ifvarclass => "!file_check_symlinkto_${init.symlink_good_canon}_reached"; + "Check of ${init.symlink_bad_name} is not reached" + ifvarclass => 
"!file_check_symlinkto_${init.symlink_bad_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_absent.cf new file mode 100644 index 00000000000..f93adfd2da3 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_absent.cf @@ -0,0 +1,72 @@ +####################################################### +# +# Test checking if a group exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "group1" string => "group1"; + "group2" string => "group2"; + + commands: + "/usr/sbin/groupadd ${init.group2}" handle => "a2"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => group_absent("${init.group1}"); + "ph2" usebundle => group_absent("${init.group2}"); +} + +####################################################### + +bundle agent check +{ + classes: + "group1_ok" not => groupexists("${init.group1}"); + "group2_ok" not => groupexists("${init.group2}"); + + "ph1_ok" expression => "group_absent_${init.group1}_kept.!group_absent_${init.group1}_repaired.!group_absent_${init.group1}_error"; + "ph2_ok" expression => "!group_absent_${init.group2}_kept.group_absent_${init.group2}_repaired.!group_absent_${init.group2}_error"; + + + "ok" expression => "group1_ok.group2_ok.ph1_ok.ph2_ok"; + + commands: + "/usr/sbin/groupdel ${init.group1}" handle => "h1"; + "/usr/sbin/groupdel ${init.group2}" handle => "h2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +# group1_ok:: +# "group1_ok"; +# group2_ok:: +# "group2_ok"; +# ph1_ok:: +# "ph1_ok"; +# ph2_ok:: +# "ph2_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_present.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_present.cf new file mode 100644 index 00000000000..89b2b6abfa0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/group_present.cf @@ -0,0 +1,76 @@ +####################################################### +# +# Test checking if a group exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "group1" string => "group1"; + "group2" string => "group2"; + + commands: + "/usr/sbin/groupadd ${init.group2}" handle => "a2"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => group_present("${init.group1}"); + "ph2" usebundle => group_present("${init.group2}"); +} + +####################################################### + +bundle agent check +{ + classes: + "group1_ok" expression => 
groupexists("${init.group1}"); + "group2_ok" expression => groupexists("${init.group2}"); + + "ph1_ok" expression => "!group_present_${init.group1}_kept.group_present_${init.group1}_repaired.!group_present_${init.group1}_error"; + "ph2_ok" expression => "group_present_${init.group2}_kept.!group_present_${init.group2}_repaired.!group_present_${init.group2}_error"; + + + "ok" expression => "group1_ok.group2_ok.ph1_ok.ph2_ok"; + + commands: + "/usr/sbin/groupdel ${init.group1}" handle => "h1"; + "/usr/sbin/groupdel ${init.group2}" handle => "h2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + group1_ok:: + "group1_ok"; + group2_ok:: + "group2_ok"; + ph1_ok:: + "ph1_ok"; + ph2_ok:: + "ph2_ok"; + cfengine:: + "group_present_${init.group1}_kept" ifvarclass => "group_present_${init.group1}_kept"; + "group_present_${init.group1}_repaired" ifvarclass => "group_present_${init.group1}_repaired"; + "group_present_${init.group1}_error" ifvarclass => "group_present_${init.group1}_error"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/install_package_existent.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/install_package_existent.cf new file mode 100644 index 00000000000..bba8ae92712 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/install_package_existent.cf @@ -0,0 +1,58 @@ +####################################################### +# +# Test adding a package that exists +# In unsafe because: +#  - Will install the "htop" package on the machine +#  - Or will install the "dos2unix" package on the machine +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian:: + "package_name" string => "htop"; + !debian:: + "package_name" string => "dos2unix"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => package_install("${init.package_name}"); + +} + +####################################################### + +bundle agent check +{ + classes: + !redhat:: + "ok" expression => "package_install_${init.package_name}_reached.(package_install_${init.package_name}_ok|package_install_${init.package_name}_kept).!package_install_${init.package_name}_error"; + redhat:: + "ok" expression => "any"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL (Package installation failed or no OS support)"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_installed_even_if_upgradable.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_installed_even_if_upgradable.cf new file mode 100644 index 00000000000..fbd4a2b0c30 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_installed_even_if_upgradable.cf @@ -0,0 +1,131 @@ +####################################################### +# +# Test checking that a package is installed, and make +# sure that even if the package is upgradable, is 
still +# reports as installed (not repaired!) +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; + # Do not cache returnszero results + cache_system_functions => false; +} + +####################################################### + +bundle agent init +{ + vars: + "package_name" string => "rudder-techniques"; + "c_package_name" string => canonify("${package_name}"); + + debian:: + "package_info_cmd" string => "/usr/bin/dpkg -l ${init.package_name}"; + rhel:: + "package_info_cmd" string => "/bin/rpm -q ${init.package_name}"; + + debian_7:: + "repo_file_1" string => "/etc/apt/sources.list.d/ncf-tests1.list"; + "repo_file_2" string => "/etc/apt/sources.list.d/ncf-tests2.list"; + centos_6:: + "repo_file_1" string => "/etc/yum.repos.d/ncf-tests1.repo"; + "repo_file_2" string => "/etc/yum.repos.d/ncf-tests2.repo"; + + # Do this once the methods below have been run + pass_two:: + "package_info" string => execresult("${package_info_cmd}", "noshell"); + + classes: + "pass_two" expression => "any"; + + methods: + # First set up repos to download an *old* version of the test package + any:: + "create repo file 1" usebundle => file_create("${init.repo_file_1}"); + debian_7:: + "remove package" usebundle => command_execution("$(debian_knowledge.call_apt_get) purge -y ${init.package_name}"); + "add repo key" usebundle => command_execution("$(debian_knowledge.apt_prefix) /usr/bin/apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 474A19E8"); + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_1}", "deb http://www.rudder-project.org/apt-stable/ wheezy main"); + "update package list" usebundle => command_execution("$(debian_knowledge.call_apt_get) update"); + "install test package" usebundle => command_execution("$(debian_knowledge.call_apt_get) -y install ${init.package_name}"); + centos_6:: + "remove package" usebundle => command_execution("/bin/rpm -e ${init.package_name}"); + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_1}", "[ncf-tests]${const.n}name=ncf-test${const.n}baseurl=http://www.rudder-project.org/rpm-stable/RHEL_6/${const.n}enabled=1${const.n}gpgcheck=1${const.n}gpgcheck=http://www.rudder-project.org/rpm-stable/RHEL_6/repodata/repomd.xml.key"); + "update package list" usebundle => command_execution("${redhat_knowledge.yum_options} clean all"); + "install test package" usebundle => command_execution("${redhat_knowledge.yum_options} -y install ${init.package_name}"); + + any:: + "clean up repo file 1" usebundle => file_remove("${init.repo_file_1}"); + + # Replace repo with one containing a newer version of the test package + "create repo file 2" usebundle => file_create("${init.repo_file_2}"); + + debian_7:: + "update repo to use" usebundle => file_ensure_lines_present("${init.repo_file_2}", "deb http://www.rudder-project.org/apt-nightly/ wheezy main"); + "update packages" usebundle => command_execution("$(debian_knowledge.call_aptitude) update"); + centos_6:: + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_2}", 
"[ncf-tests]${const.n}name=ncf-test${const.n}baseurl=http://www.rudder-project.org/rpm-nightly/RHEL_6/${const.n}enabled=1${const.n}gpgcheck=1${const.n}gpgcheck=http://www.rudder-project.org/rpm-nightly/RHEL_6/repodata/repomd.xml.key"); + "update package list" usebundle => command_execution("/usr/bin/yum ${redhat_knowledge.yum_options} check-update"); + + # Nuke CFEngine's cache file to force it to run "dpkg -l" or the rpm equivalent again + any:: + "remove the packages cache" usebundle => file_remove("${sys.workdir}/state/software_packages.csv"); + +} + +####################################################### + +bundle agent test +{ + + methods: + "test it" usebundle => package_check_installed("${init.package_name}"); + +} + +####################################################### + +bundle agent check +{ + vars: + "suffix" slist => { "ok", "not_ok", "repaired", "kept", "error", "reached" }; + + # Read in info about the package now to confirm it hasn't changed during the test + "package_info" + string => execresult("${init.package_info_cmd}", "noshell"), + ifvarclass => canonify("package_check_installed_${init.package_name}_reached"); + + classes: + # Make sure the package is actually installed + "package_installed_ok" + expression => returnszero("${init.package_info_cmd}", "noshell"); + + # Compare the package info we got now to what we had before running the test, to make sure this didn't change anything + "package_info_ok" expression => strcmp("${init.package_info}", "${package_info}"); + + + "ok" expression => "package_installed_ok.package_info_ok.package_check_installed_${init.c_package_name}_ok.package_check_installed_${init.c_package_name}_reached.!package_check_installed_${init.c_package_name}_not_ok.!package_check_installed_${init.c_package_name}_repaired"; + + methods: + "clean up repo file 1" usebundle => file_remove("${init.repo_file_1}"); + "clean repo file 2" usebundle => file_remove("${init.repo_file_2}"); + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + cfengine:: + "class package_install_${init.c_package_name}_${suffix} defined" ifvarclass => "package_check_installed_${init.c_package_name}_${suffix}"; + "class package_check_installed_${init.c_package_name}_${suffix} defined" ifvarclass => "package_check_installed_${init.c_package_name}_${suffix}"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_upgradable.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_upgradable.cf new file mode 100644 index 00000000000..dae8763faa6 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_check_upgradable.cf @@ -0,0 +1,127 @@ +####################################################### +# +# Test checking that a package is installed, and make +# sure that even if the package is upgradable, is still +# reports as upgradable (not ok!) +# +# Doesn't work currently because CFEngine 3.5 doesn't figure out that the package can be upgraded. Duh. 
+# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "package_name" string => "rudder-techniques"; + "c_package_name" string => canonify("${package_name}"); + + debian_7:: + "repo_file_1" string => "/etc/apt/sources.list.d/ncf-tests1.list"; + "repo_file_2" string => "/etc/apt/sources.list.d/ncf-tests2.list"; + centos_6:: + "repo_file_1" string => "/etc/yum.repos.d/ncf-tests1.repo"; + "repo_file_2" string => "/etc/yum.repos.d/ncf-tests2.repo"; + + debian:: + "package_info_cmd" string => "/usr/bin/dpkg -l ${init.package_name}"; + rhel:: + "package_info_cmd" string => "/bin/rpm -q ${init.package_name}"; + + # Do this once the methods below have been run + pass_two:: + "package_info" string => execresult("${package_info_cmd}", "noshell"); + + classes: + "pass_two" expression => "any"; + + methods: + # First set up repos to download an *old* version of the test package + any:: + "create repo file 1" usebundle => file_create("${init.repo_file_1}"); + debian_7:: + "remove package" usebundle => command_execution("$(debian_knowledge.call_apt_get) purge -y ${init.package_name}"); + "add repo key" usebundle => command_execution("$(debian_knowledge.apt_prefix) /usr/bin/apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 474A19E8"); + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_1}", "deb http://www.rudder-project.org/apt-stable/ wheezy main"); + "update package list" usebundle => command_execution("$(debian_knowledge.call_apt_get) update"); + "install test package" usebundle => command_execution("$(debian_knowledge.call_apt_get) install -y ${init.package_name}"); + centos_6:: + "remove package" usebundle => command_execution("/bin/rpm -e ${init.package_name}"); + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_1}", "[ncf-tests]${const.n}name=ncf-test${const.n}baseurl=http://www.rudder-project.org/rpm-stable/RHEL_6/${const.n}enabled=1${const.n}gpgcheck=1${const.n}gpgcheck=http://www.rudder-project.org/rpm-stable/RHEL_6/repodata/repomd.xml.key"); + "update package list" usebundle => command_execution("/usr/bin/yum ${redhat_knowledge.yum_options} clean all"); + "install test package" usebundle => command_execution("/usr/bin/yum ${redhat_knowledge.yum_options} install -y ${init.package_name}"); + + any:: + + "clean up repo file 1" usebundle => file_remove("${init.repo_file_1}"); + + # Replace repo with one containing a newer version of the test package + "create repo file 2" usebundle => file_create("${init.repo_file_2}"); + + debian_7:: + "update repo to use" usebundle => file_ensure_lines_present("${init.repo_file_2}", "deb http://www.rudder-project.org/apt-nightly/ wheezy main"); + "update packages" usebundle => command_execution("$(debian_knowledge.call_aptitude) update"); + centos_6:: + "set up repo" usebundle => file_ensure_lines_present("${init.repo_file_2}", "[ncf-tests]${const.n}name=ncf-test${const.n}baseurl=http://www.rudder-project.org/rpm-nightly/RHEL_6/${const.n}enabled=1${const.n}gpgcheck=1${const.n}gpgcheck=http://www.rudder-project.org/rpm-nightly/RHEL_6/repodata/repomd.xml.key"); + "update packages" 
usebundle => command_execution("/usr/bin/yum ${redhat_knowledge.yum_options} check-update"); + + # Nuke CFEngine's cache file to force it to run "aptitude update" or the yum equivalent again + any:: + "remove the packages cache" usebundle => file_remove("${sys.workdir}/state/software_packages.csv"); + +} + +####################################################### + +bundle agent test +{ + + methods: + "test it" usebundle => package_verify("${init.package_name}"); + +} + +####################################################### + +bundle agent check +{ + vars: + # Read in info about the package now to confirm it hasn't changed during the test + "package_info" + string => execresult("${init.package_info_cmd}", "noshell"), + ifvarclass => canonify("package_install_${init.package_name}_reached"); + + classes: + # Make sure the package is actually installed + "package_installed_ok" + expression => returnszero("${init.package_info_cmd}", "noshell"), + ifvarclass => canonify("package_install_${init.package_name}_reached"); + + # Compare the package info we got now to what we had before running the test, to make sure this didn't change anything + "package_info_ok" expression => strcmp("${init.package_info}", "${package_info}"); + + "ok" expression => "package_installed_ok.package_info_ok.!package_install_${init.c_package_name}_kept.package_check_installed_${init.c_package_name}_reached.!package_check_installed_${init.c_package_name}_not_ok.package_check_installed_${init.c_package_name}_repaired"; + + methods: + "clean up repo file 1" usebundle => file_remove("${init.repo_file_1}"); + #"clean repo file 2" usebundle => file_remove("${init.repo_file_2}"); + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_install.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_install.cf new file mode 100644 index 00000000000..49f491ce552 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_install.cf @@ -0,0 +1,52 @@ +####################################################### +# +# Install the htop or dos2unix package +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "canonified_package_name" string => canonify("${package_name}"); + debian:: + "package_name" string => "htop"; + !debian:: + "package_name" string => "dos2unix"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => package_install("${init.package_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "package_install_${init.canonified_package_name}_ok.!package_install_${init.canonified_package_name}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_remove.cf 
b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_remove.cf new file mode 100644 index 00000000000..72f4063b571 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/network/staging/package_remove.cf @@ -0,0 +1,57 @@ +####################################################### +# +# Remove the htop or dos2unix package +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "canonified_package_name" string => canonify("${package_name}"); + debian:: + "package_name" string => "htop"; + !debian:: + "package_name" string => "dos2unix"; +} + +####################################################### + +bundle agent test +{ + methods: + # Test is skipped on old debian due to a known bug in aptitude, see #13305 and #6696 + !(debian_7|debian_8):: + "ph1" usebundle => package_remove("${init.package_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + !(debian_7|debian_8):: + "ok" expression => "package_remove_${init.canonified_package_name}_ok.!package_remove_${init.canonified_package_name}_error"; + debian_7|debian_8:: + "ok" expression => "any"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/file_check_socket.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/file_check_socket.cf new file mode 100644 index 00000000000..6e4acebdb97 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/file_check_socket.cf @@ -0,0 +1,86 @@ +######################################### +# +# Test checking if a file is a socket or not +# +######################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + + "socket" string => execresult("/bin/readlink -f /dev/log", "noshell"); # this should always point to a socket + "socket_canon" string => canonify("${socket}"); + + "file_list" slist => { "${file}", "${socket}" }; + + files: + "${file}" + create => "true"; + +} + +####################################################### + +bundle agent test +{ + methods: + "fce" usebundle => file_check_socket("${init.file_list}"); +} + +####################################################### + +bundle agent check +{ + vars: + # Check file type + "test_file_type" string => "/usr/bin/file -b \"${init.file}\""; + + # Check socket type + "test_socket_type" string => "/usr/bin/file -b \"${init.socket}\""; + + classes: + "test_file_is_socket" + expression => strcmp("socket", 
execresult("${test_file_type}", "noshell") ), + ifvarclass => "file_check_socket_${init.file_canon}_reached"; + + "test_socket_is_socket" + expression => strcmp("socket", execresult("${test_socket_type}", "noshell") ), + ifvarclass => "file_check_socket_${init.socket_canon}_reached"; + + "ok" expression => "test_socket_is_socket.file_check_socket_${init.socket_canon}_ok.!test_file_is_socket.!file_check_socket_${init.file_canon}_ok.file_check_socket_${init.file_canon}_reached"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !test_socket_is_socket:: + "${init.socket} is not a socket as expected. It is '${test_socket_type}'"; + test_file_is_socket:: + "${init.file} is a socket, not as expected. It is '${test_file_type}'"; + + cfengine:: + "Check of ${init.file} is not reached" + ifvarclass => "!file_check_socket_${init.file_canon}_reached"; + "Check of ${init.socket} is not reached" + ifvarclass => "!file_check_socket_${init.socket_canon}_reached"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_disabled_at_boot.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_disabled_at_boot.cf new file mode 100644 index 00000000000..c5208e2d781 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_disabled_at_boot.cf @@ -0,0 +1,62 @@ +####################################################### +# +# Test if disable cron at boot, and try with an unknown service to check error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + ubuntu:: + "service_name" string => "ntp"; + (debian|suse).!ubuntu:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_disabled_at_boot("${init.service_name}"); + "ph3" usebundle => service_check_disabled_at_boot("${init.unknown_service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_check_disabled_at_boot_${init.service_name}_ok.!service_check_disabled_at_boot_${init.service_name}_error.service_check_disabled_at_boot_${init.unknown_service_name}_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running.cf new file mode 100644 index 00000000000..cda6bba1ae5 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running.cf @@ -0,0 +1,56 @@ 
+####################################################### +# +# Test if cron is started +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "sshd"; + !(redhat|aix):: + "service_name" string => "cron"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_running("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success" expression => "(promise_kept_service_check_running_${init.service_name}.service_check_running_${init.service_name}_kept.service_check_running_${init.service_name}_ok.service_check_running_${init.service_name}_not_repaired.service_check_running_${init.service_name}_reached)"; + "ok_repaired" expression => "!(promise_repaired_service_check_running_${init.service_name}|service_check_running_${init.service_name}_repaired)"; + "ok_error" expression => "!(service_check_running_${init.service_name}_not_kept|repair_failed_service_check_running_${init.service_name}|service_check_running_${init.service_name}_failed|service_check_running_${init.service_name}_not_ok)"; + "ok" and => { "ok_success", "ok_repaired", "ok_error" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running_ps.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running_ps.cf new file mode 100644 index 00000000000..245802e322c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_running_ps.cf @@ -0,0 +1,64 @@ +####################################################### +# +# Test if cron is started using ps +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + !ubuntu.!redhat:: + "service_regex_1" string => "/usr/sbin/cron"; + ubuntu:: + "service_regex_1" string => "cron"; + redhat:: + "service_regex_1" string => "crond"; + any:: + "service_regex_2" string => "thisisadummyservice"; + "canonified_service_regex_1" string => canonify("${service_regex_1}"); + "canonified_service_regex_2" string => canonify("${service_regex_2}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_running_ps("${init.service_regex_1}"); + "ph2" usebundle => service_check_running_ps("${init.service_regex_2}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_success_1" 
expression => "(promise_kept_service_check_running_${init.canonified_service_regex_1}.service_check_running_${init.canonified_service_regex_1}_kept.service_check_running_${init.canonified_service_regex_1}_ok.service_check_running_${init.canonified_service_regex_1}_not_repaired.service_check_running_${init.canonified_service_regex_1}_reached)"; + "ok_repaired_1" expression => "!(promise_repaired_service_check_running_${init.canonified_service_regex_1}|service_check_running_${init.canonified_service_regex_1}_repaired)"; + "ok_error_1" expression => "!(service_check_running_${init.canonified_service_regex_1}_not_kept|repair_failed_service_check_running_${init.canonified_service_regex_1}|service_check_running_${init.canonified_service_regex_1}_failed|service_check_running_${init.canonified_service_regex_1}_not_ok)"; + "ok_success_2" expression => "!(promise_kept_service_check_running_${init.canonified_service_regex_2}|service_check_running_${init.canonified_service_regex_2}_kept|service_check_running_${init.canonified_service_regex_2}_ok)"; + "ok_repaired_2" expression => "!(promise_repaired_service_check_running_${init.canonified_service_regex_2}|service_check_running_${init.canonified_service_regex_2}_repaired)"; + "ok_error_2" expression => "repair_failed_service_check_running_${init.canonified_service_regex_2}.service_check_running_${init.canonified_service_regex_2}_failed.service_check_running_${init.canonified_service_regex_2}_not_ok.service_check_running_${init.canonified_service_regex_2}_not_kept"; + "ok" and => { "ok_success_1", "ok_repaired_1", "ok_error_1", "ok_success_2", "ok_repaired_2", "ok_error_2" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.error.cf new file mode 100644 index 00000000000..d0d9e3770d1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.error.cf @@ -0,0 +1,71 @@ +####################################################### +# +# Test if cron is started at boot +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_check_started_at_boot_${service_name}", "error", "1"); + "any" usebundle => define_expected_classes("service_check_started_at_boot_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => 
set_dry_run_mode("true"); + "ph1" usebundle => service_check_started_at_boot("${init.service_name}"); + "ph2" usebundle => service_check_started_at_boot("${init.unknown_service_name}"); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => "${define_expected_classes.report_string_1}"; + "ok_2" expression => "${define_expected_classes.report_string_2}"; + + "ok" expression => "ok_1.ok_2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.success.cf new file mode 100644 index 00000000000..c7a79a10915 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.audit.success.cf @@ -0,0 +1,66 @@ +####################################################### +# +# Test if cron is started at boot +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_check_started_at_boot_${service_name}", "success", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_check_started_at_boot("${init.service_name}"); + "disable" usebundle => set_dry_run_mode("false"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => "${define_expected_classes.report_string_1}"; + + "ok" expression => "ok_1"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.error.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.error.cf new file mode 100644 index 00000000000..59f19a3a828 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.error.cf @@ -0,0 +1,69 @@ +####################################################### +# +# Test if cron is started at boot +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", 
"@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_check_started_at_boot_${service_name}", "error", "1"); + "any" usebundle => define_expected_classes("service_check_started_at_boot_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_started_at_boot("${init.service_name}"); + "ph2" usebundle => service_check_started_at_boot("${init.unknown_service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => "${define_expected_classes.report_string_1}"; + "ok_2" expression => "${define_expected_classes.report_string_2}"; + + "ok" expression => "ok_1.ok_2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.success.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.success.cf new file mode 100644 index 00000000000..7626cd44998 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_check_started_at_boot.enforce.success.cf @@ -0,0 +1,63 @@ +####################################################### +# +# Test if cron is started at boot +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + methods: + "any" usebundle => define_expected_classes("service_check_started_at_boot_${service_name}", "success", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_check_started_at_boot("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok_1" expression => "${define_expected_classes.report_string_1}"; + + "ok" expression => "ok_1"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git 
a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.error.cf new file mode 100644 index 00000000000..28e57c0d881 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.error.cf @@ -0,0 +1,66 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_disabled_${service_name}", "error", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_disabled("${init.service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_enabled.py", "service_still_enabled", "0"); +} + +####################################################### +bundle agent check +{ + classes: + "service_ok" expression => "${define_expected_classes.report_string_1}"; + "ok" expression => "service_ok.service_still_enabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.success.cf new file mode 100644 index 00000000000..bb92e26df0f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.audit.success.cf @@ -0,0 +1,73 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", 
"useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_disabled_${service_name}", "success", "1"); + "any" usebundle => define_expected_classes("service_disabled_${unknown_service_name}", "success", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_disabled("${init.service_name}"); + "ph2" usebundle => service_disabled("${init.unknown_service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_disabled.py", "service_still_disabled", "0"); +} + +####################################################### +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + "services_ok" expression => "service_${init.unknown_service_name}_ok.service_${init.service_name}_ok"; + + "ok" expression => "services_ok.service_still_disabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.cf new file mode 100644 index 00000000000..aea146a808e --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.cf @@ -0,0 +1,70 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_disabled_${service_name}", "success", "1"); + "any" usebundle => define_expected_classes("service_disabled_${unknown_service_name}", "success", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_disabled("${init.service_name}"); + "ph2" usebundle => service_disabled("${init.unknown_service_name}"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_disabled.py", "service_still_disabled", "0"); +} + +####################################################### + +bundle agent check +{ + classes: + "success_ok" expression => "${define_expected_classes.report_string_1}"; + "error_ok" expression => 
"${define_expected_classes.report_string_2}"; + "ok" expression => "success_ok.error_ok.service_still_disabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.repaired.cf new file mode 100644 index 00000000000..8c7df31729d --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_disabled.enforce.repaired.cf @@ -0,0 +1,64 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_disabled_${service_name}", "repaired", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_disabled("${init.service_name}"); + "testinfra" usebundle => execute_testinfra("${init.service_name}_disabled.py", "service_now_disabled", "0"); +} + +####################################################### + +bundle agent check +{ + classes: + "success_ok" expression => "${define_expected_classes.report_string_1}"; + "ok" expression => "success_ok.service_now_disabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.error.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.error.cf new file mode 100644 index 00000000000..061cd69fc5c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.error.cf @@ -0,0 +1,72 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + 
"unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_enabled_${service_name}", "error", "1"); + "any" usebundle => define_expected_classes("service_enabled_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_enabled("${init.service_name}"); + "ph2" usebundle => service_enabled("${init.unknown_service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_disabled.py", "service_still_disabled", "0"); +} + +####################################################### +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + "services_ok" expression => "service_${init.service_name}_ok.service_${init.unknown_service_name}_ok"; + "ok" expression => "services_ok.service_still_disabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.success.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.success.cf new file mode 100644 index 00000000000..b0223fb8156 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.audit.success.cf @@ -0,0 +1,67 @@ +####################################################### +# +# Disable cron at boot, also try with an unknown service to test error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_enabled_${service_name}", "success", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_enabled("${init.service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_enabled.py", "service_still_enabled", "0"); +} + +####################################################### +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; 
+ +  "ok" expression => "service_${init.service_name}_ok.service_still_enabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.cf new file mode 100644 index 00000000000..49acba02ef9 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.cf @@ -0,0 +1,70 @@ +####################################################### +# +# Enable cron at boot, also try with an unknown service to test the error case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + any:: + "unknown_service_name" string => "unknown"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} on", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} enable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_enabled_${service_name}", "success", "1"); + "any" usebundle => define_expected_classes("service_enabled_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_enabled("${init.service_name}"); + "ph2" usebundle => service_enabled("${init.unknown_service_name}"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_enabled.py", "service_still_enabled", "0"); +} + +####################################################### + +bundle agent check +{ + classes: + "success_ok" expression => "${define_expected_classes.report_string_1}"; + "error_ok" expression => "${define_expected_classes.report_string_2}"; + "ok" expression => "success_ok.error_ok.service_still_enabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.repaired.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.repaired.cf new file mode 100644 index 00000000000..034d7e106a9 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_enabled.enforce.repaired.cf @@ -0,0 +1,64 @@ +####################################################### +# +# Enable cron at boot after disabling it, to test the repaired case +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + 
+####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_name" string => "crond"; + !redhat.!ubuntu:: + "service_name" string => "cron"; + ubuntu:: + "service_name" string => "ntp"; + + classes: + redhat|suse:: + "enabled" expression => returnszero("chkconfig ${service_name} off", "useshell"); + debian:: + "enabled" expression => returnszero("update-rc.d ${service_name} disable", "useshell"); + + methods: + "any" usebundle => define_expected_classes("service_enabled_${service_name}", "repaired", "1"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_enabled("${init.service_name}"); + "testinfra" usebundle => execute_testinfra("${init.service_name}_enabled.py", "service_now_enabled", "0"); +} + +####################################################### + +bundle agent check +{ + classes: + "repaired_ok" expression => "${define_expected_classes.report_string_1}"; + "ok" expression => "repaired_ok.service_now_enabled"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_management.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_management.cf new file mode 100644 index 00000000000..ff4bae8c59c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_management.cf @@ -0,0 +1,57 @@ +####################################################### +# +# Test managing a service (reload ssh) +# In unsafe because: +#  - Will reload the "ssh" service on the machine +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat|suse:: + "service" string => "sshd"; + aix:: + "service" string => "syslogd"; + !(redhat|aix|suse):: + "service" string => "ssh"; +} + +####################################################### + +bundle agent test +{ + + methods: + + "ph" usebundle => service_action("${init.service}", "reload"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_action_${init.service}_reached.service_action_${init.service}_repaired.!service_action_${init.service}_kept.!service_action_${init.service}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_reload.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_reload.cf new file mode 100644 index 00000000000..0564262b27e --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_reload.cf @@ -0,0 +1,68 @@ +####################################################### +# +# Test reloading a service +# In unsafe because: +#  - Will reload the "cron" service on the machine +####################################################### + +bundle common acc_path +{ + vars: + "root" string => 
getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_script" string => "crond"; + aix:: + "service_script" string => "syslogd"; + systemd.(debian|suse):: + "service_script" string => "dbus"; + !systemd.(debian|suse):: + "service_script" string => "cron"; + + any:: + "c_service_script" string => canonify("${service_script}"); +} + +####################################################### + +bundle agent test +{ + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass2:: + "ph" usebundle => service_reload("${init.service_script}"); + + commands: + "${paths.service} ${init.service_script} start"; +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_reload_${init.c_service_script}_reached.service_reload_${init.c_service_script}_repaired.!service_reload_${init.c_service_script}_kept.!service_reload_${init.c_service_script}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL (Service verification failed)"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart.cf new file mode 100755 index 00000000000..79280e6ca20 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart.cf @@ -0,0 +1,56 @@ +####################################################### +# +# Test restarting a service +# In unsafe because: +#  - Will restart the "cron" service on the machine +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_script" string => "crond"; + aix:: + "service_script" string => "syslogd"; + !(redhat|aix):: + "service_script" string => "cron"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph" usebundle => service_restart("${init.service_script}"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "(service_restart_${init.service_script}_ok.service_restart_${init.service_script}_reached.service_restart_${init.service_script}_repaired.promise_repaired_service_restart_${init.service_script}).!(service_restart_${init.service_script}_kept|service_restart_${init.service_script}_not_repaired|promise_kept_service_restart_${init.service_script})"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL (Service restart failed)"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart_if.cf 
b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart_if.cf new file mode 100755 index 00000000000..c6bec0c52d1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_restart_if.cf @@ -0,0 +1,62 @@ +####################################################### +# +# Test restarting a service with conditions +# In unsafe because: +#  - Will restart the "cron" service on the machine +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + redhat:: + "service_script_1" string => "crond"; + aix:: + "service_script_1" string => "syslogd"; + !redhat.!aix:: + "service_script_1" string => "cron"; + any:: + "service_script_2" string => "my_service"; +} + +####################################################### + +bundle agent test +{ + + methods: + "ph1" usebundle => service_restart_if("${init.service_script_1}", "any"); + "ph2" usebundle => service_restart_if("${init.service_script_2}", "!any"); + +} + +####################################################### + +bundle agent check +{ + classes: + "ok_service_1" expression => "(service_restart_${init.service_script_1}_ok.service_restart_${init.service_script_1}_reached.service_restart_${init.service_script_1}_repaired.promise_repaired_service_restart_${init.service_script_1}).!(service_restart_${init.service_script_1}_kept|service_restart_${init.service_script_1}_not_repaired|promise_kept_service_restart_${init.service_script_1})"; + "ok_service_2" expression => "(service_restart_${init.service_script_2}_ok.service_restart_${init.service_script_2}_reached.service_restart_${init.service_script_2}_kept.service_restart_${init.service_script_2}_not_repaired.promise_kept_service_restart_${init.service_script_2}).!(service_restart_${init.service_script_2}_repaired|promise_repaired_service_restart_${init.service_script_2})"; + + "ok" and => { "ok_service_1", "ok_service_2" }; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL (Service restart failed)"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_start.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_start.cf new file mode 100644 index 00000000000..526cea9458c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_start.cf @@ -0,0 +1,57 @@ +####################################################### +# +# Start cron +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + + 
commands: + "${paths.service} ${service_name} stop"; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_start("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_start_${init.service_name}_ok.!service_start_${init.service_name}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.audit.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.audit.cf new file mode 100644 index 00000000000..3e098cabb39 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.audit.cf @@ -0,0 +1,74 @@ +####################################################### +# +# Test if cron is started and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + any:: + "unknown_service_name" string => "unknown_service_name"; + + methods: + "testinfra" usebundle => execute_testinfra("${init.service_name}_started.py", "service_already_started", "0"); + service_already_started:: + "any" usebundle => define_expected_classes("service_started_${service_name}", "success", "1"); + !service_already_started:: + "any" usebundle => define_expected_classes("service_started_${service_name}", "error", "1"); + any:: + "any" usebundle => define_expected_classes("service_started_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_started("${init.service_name}"); + "ph2" usebundle => service_started("${init.unknown_service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_started.py", "service_started", "1"); +} + +####################################################### + +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + + "services_ok" expression => "service_${init.service_name}_ok.service_${init.unknown_service_name}_ok"; + "service_status_not_changed" expression => "(service_already_started.service_started)|(!service_already_started.!service_started)"; + "ok" expression => "services_ok.service_status_not_changed"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.cf new file mode 
100644 index 00000000000..bda69d0a399 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.cf @@ -0,0 +1,53 @@ +####################################################### +# +# Test if cron is started and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_started("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_started_${init.service_name}_ok.!service_started_${init.service_name}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.enforce.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.enforce.cf new file mode 100644 index 00000000000..24da979860a --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started.enforce.cf @@ -0,0 +1,70 @@ +####################################################### +# +# Test if cron is started and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + any:: + "unknown_service_name" string => "unknown_service_name"; + + methods: + "testinfra" usebundle => execute_testinfra("${init.service_name}_started.py", "service_already_started", "0"); + service_already_started:: + "any" usebundle => define_expected_classes("service_started_${service_name}", "success", "1"); + !service_already_started:: + "any" usebundle => define_expected_classes("service_started_${service_name}", "repaired", "1"); + any:: + "any" usebundle => define_expected_classes("service_started_${unknown_service_name}", "error", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_started("${init.service_name}"); + "ph2" usebundle => service_started("${init.unknown_service_name}"); + "testinfra" usebundle => execute_testinfra("${init.service_name}_started.py", "service_started", "1"); +} + +####################################################### + +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => 
"${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + + "services_ok" expression => "service_${init.service_name}_ok.service_${init.unknown_service_name}_ok"; + "ok" expression => "services_ok.service_started"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_path.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_path.cf new file mode 100644 index 00000000000..e9dfdfb6129 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_path.cf @@ -0,0 +1,58 @@ +####################################################### +# +# Test if cf-serverd is started and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian:: + "service_name" string => "ssh"; + redhat|suse:: + "service_name" string => "sshd"; + any:: + # here we don't use the full path to support multiple version of agent + # plus, the check on the path is not anchored, ensuring that it will + # find the right process + "service_path" string => "/sbin/sshd"; + "cservice_name" string => canonify("${service_name}"); + "cservice_path" string => canonify("${service_path}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_started_path("${init.service_name}", "${init.service_path}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_started_${init.cservice_name}_ok.!service_started_${init.cservice_name}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_start.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_start.cf new file mode 100644 index 00000000000..fd1a3b48621 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_started_start.cf @@ -0,0 +1,66 @@ +####################################################### +# +# Manually stop service, start it using ncf and check +# if it's been started +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + !aix.!redhat:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + + commands: + "${paths.service} ${service_name} stop"; 
+} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_started("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + vars: + "command_ps" string => "/bin/ps afux | ${paths.path[grep]} ${init.service_name} | ${paths.path[grep]} -v grep"; + + classes: + "service_running" expression => returnszero("${command_ps}", "useshell"), + ifvarclass => "service_started_${init.service_name}_reached"; + + "ok" expression => "service_running.(service_started_${init.service_name}_repaired.!service_started_${init.service_name}_error)"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + !service_running:: + "Service ${init.service_name} was not detected as running using '${command_ps}' command"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.audit.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.audit.cf new file mode 100644 index 00000000000..14d2c642a86 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.audit.cf @@ -0,0 +1,74 @@ +####################################################### +# +# Test if cron is stopped and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + any:: + "unknown_service_name" string => "unknown_service_name"; + + methods: + "testinfra" usebundle => execute_testinfra("${init.service_name}_stopped.py", "service_already_stopped", "0"); + service_already_stopped:: + "any" usebundle => define_expected_classes("service_stopped_${service_name}", "success", "1"); + !service_already_stopped:: + "any" usebundle => define_expected_classes("service_stopped_${service_name}", "error", "1"); + any:: + "any" usebundle => define_expected_classes("service_stopped_${unknown_service_name}", "success", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "enable" usebundle => set_dry_run_mode("true"); + "ph1" usebundle => service_stopped("${init.service_name}"); + "ph2" usebundle => service_stopped("${init.unknown_service_name}"); + "disable" usebundle => set_dry_run_mode("false"); + + "testinfra" usebundle => execute_testinfra("${init.service_name}_stopped.py", "service_stopped", "1"); +} + +####################################################### + +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + + "services_ok" expression => "service_${init.service_name}_ok.service_${init.unknown_service_name}_ok"; + "service_status_not_changed" expression => "(service_already_stopped.service_stopped)|(!service_already_stopped.!service_stopped)"; + "ok" expression 
=> "services_ok.service_status_not_changed"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.cf new file mode 100644 index 00000000000..34f8d7637ce --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.cf @@ -0,0 +1,48 @@ +####################################################### +# +# Test if cron is started and if yes, stop it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "service_name" string => "cron"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_stopped("${init.service_name}"); +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => "service_stopped_${init.service_name}_ok.!service_stopped_${init.service_name}_error"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.enforce.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.enforce.cf new file mode 100644 index 00000000000..a219fc0e3f1 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/service_stopped.enforce.cf @@ -0,0 +1,70 @@ +####################################################### +# +# Test if cron is stopped and if no, start it +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + debian|suse:: + "service_name" string => "cron"; + redhat:: + "service_name" string => "crond"; + aix:: + "service_name" string => "syslogd"; + any:: + "unknown_service_name" string => "unknown_service_name"; + + methods: + "testinfra" usebundle => execute_testinfra("${init.service_name}_stopped.py", "service_already_stopped", "0"); + service_already_stopped:: + "any" usebundle => define_expected_classes("service_stopped_${service_name}", "success", "1"); + !service_already_stopped:: + "any" usebundle => define_expected_classes("service_stopped_${service_name}", "repaired", "1"); + any:: + "any" usebundle => define_expected_classes("service_stopped_${unknown_service_name}", "success", "2"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => service_stopped("${init.service_name}"); + "ph2" usebundle => service_stopped("${init.unknown_service_name}"); + 
"testinfra" usebundle => execute_testinfra("${init.service_name}_stopped.py", "service_stopped", "1"); +} + +####################################################### + +bundle agent check +{ + classes: + "service_${init.service_name}_ok" expression => "${define_expected_classes.report_string_1}"; + "service_${init.unknown_service_name}_ok" expression => "${define_expected_classes.report_string_2}"; + + "services_ok" expression => "service_${init.service_name}_ok.service_${init.unknown_service_name}_ok"; + "ok" expression => "services_ok.service_stopped"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/sysctl_value.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/sysctl_value.cf new file mode 100644 index 00000000000..c1f3e9f767a --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/non_container/sysctl_value.cf @@ -0,0 +1,189 @@ +####################################################### +# +# Test sysctl value setting +# All values are put in 00_rudder.conf file and it ought to be +# cleaned in post execution +# +####################################################### + + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +body agent control { + default_repository => "/tmp/modified-files"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + + "exp1" string => execresult("${paths.sysctl} vm.swappiness > ${tmp}/backup.conf", "useshell"); + "exp2" string => execresult("${paths.sysctl} vm.dirty_ratio >> ${tmp}/backup.conf", "useshell"); + "exp3" string => execresult("${paths.sysctl} vm.lowmem_reserve_ratio >> ${tmp}/backup.conf", "useshell"); + "exp4" string => execresult("${paths.sysctl} vm.oom_dump_tasks >> ${tmp}/backup.conf", "useshell"); + "exp5" string => execresult("${paths.sysctl} vm.stat_interval >> ${tmp}/backup.conf", "useshell"); + + + # REPAIR + + "key[1]" string => "vm.swappiness"; + "value[1]" string => "1"; + "expected[1]" string => "1"; + "filename[1]" string => "00_rudder"; + "option[1]" string => "default"; + "mode[1]" string => "enforce"; # mode, "enforce" or "audit" + "status[1]" string => "repaired"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "key[2]" string => "vm.dirty_ratio"; + "value[2]" string => "31"; + "expected[2]" string => "31"; + "filename[2]" string => "00_rudder"; + "option[2]" string => "default"; + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + "execute[2]" string => "true"; + + "key[4]" string => "vm.oom_dump_tasks"; + "value[4]" string => "99999"; # usually this is 1 + "expected[4]" string => "99999"; + "filename[4]" string => "00_rudder"; + "option[4]" string => "min"; + "mode[4]" string => "enforce"; + "status[4]" string => "repaired"; + "execute[4]" string => "true"; + + "key[5]" string => "vm.stat_interval"; + "value[5]" string => "0"; # default is 1 + "expected[5]" string => "0"; + "filename[5]" string => "00_rudder"; + "option[5]" string => "max"; + "mode[5]" string => "enforce"; + "status[5]" string 
=> "repaired"; + "execute[5]" string => "true"; + + # non existent nor creatable key, should fail + "key[6]" string => "rudder.key6"; + "value[6]" string => "abcf.fgfggf.fgfgfgde"; + "expected[6]" string => "abcf.fgfggf.fgfgfgde"; + "filename[6]" string => "00_rudder"; + "option[6]" string => "max"; + "mode[6]" string => "enforce"; + "status[6]" string => "error"; + "execute[6]" string => "true"; + + + # Audit. + # Key not present, should fail + "key[7]" string => "rudder.key7"; + "value[7]" string => "256 256 32"; + "expected[7]" string => ""; + "filename[7]" string => "00_rudder"; + "option[7]" string => "max"; + "mode[7]" string => "audit"; + "status[7]" string => "error"; + "execute[7]" string => "false"; + + # Success for an already modifie value + "key[8]" string => "vm.oom_dump_tasks"; + "value[8]" string => "99999"; # usually this is 1 + "expected[8]" string => "99999"; + "filename[8]" string => "00_rudder"; + "option[8]" string => "default"; + "mode[8]" string => "enforce"; + "status[8]" string => "success"; + "execute[8]" string => "true"; + + "indices" slist => {1, 2, 3, 4, 5, 6, 7, 8}; + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + # need to store existing values + commands: + +} +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.key[${init.indices}]}", "${init.value[${init.indices}]}", "${init.filename[${init.indices}]}", "${init.option[${init.indices}]}" }; + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("sysctl_value", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("sysctl_value", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph4" usebundle => apply_gm("sysctl_value", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + "ph5" usebundle => apply_gm("sysctl_value", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + # ENFORCE ERROR + "ph6" usebundle => apply_gm("sysctl_value", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + # AUDIT ERROR + "ph7" usebundle => apply_gm("sysctl_value", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}" ); + # SUCCESS + "ph8" usebundle => apply_gm("sysctl_value", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "sysctl_output[${init.indices}]" string => execresult("${paths.sysctl} -n ${init.key[${init.indices}]}", "useshell"); + + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + + + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + "result_nok" not => strcmp("${sysctl_output[${init.indices}]}", "${init.expected[${init.indices}]}"), + if => "execute_${init.indices}"; + + # classes_ok is just a placeholder. 
What we really want is find the not ok + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + if => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!result_nok"; + + commands: + pass3:: + "/bin/rm -rf /etc/sysctl.d/00_rudder.conf"; + # restore old values + "${paths.sysctl} -p ${init.tmp}/backup.conf"; + + reports: + pass3:: + "Test for sysctl nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/sysctl_value.audit.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/sysctl_value.audit.cf new file mode 100644 index 00000000000..e10b9fd208f --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/sysctl_value.audit.cf @@ -0,0 +1,169 @@ +####################################################### +# +# Test sysctl value setting in audit mode +# All values are put in 00_rudder.conf file and it ought to be +# cleaned in post execution +# +####################################################### + + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +body agent control { + default_repository => "/tmp/modified-files"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # Store initial values + "exp1" string => execresult("${paths.sysctl} vm.swappiness > ${tmp}/backup.conf", "useshell"); + "exp2" string => execresult("${paths.sysctl} vm.dirty_ratio >> ${tmp}/backup.conf", "useshell"); + "exp3" string => execresult("${paths.sysctl} vm.lowmem_reserve_ratio >> ${tmp}/backup.conf", "useshell"); + "exp4" string => execresult("${paths.sysctl} vm.oom_dump_tasks >> ${tmp}/backup.conf", "useshell"); + "exp5" string => execresult("${paths.sysctl} vm.stat_interval >> ${tmp}/backup.conf", "useshell"); + + + "key[1]" string => "vm.swappiness"; + "value[1]" string => execresult("${paths.sysctl} -n vm.swappiness", "useshell"); + "expected[1]" string => "${value[1]}"; + "filename[1]" string => "00_rudder"; + "option[1]" string => "default"; + "mode[1]" string => "audit"; # mode, "enforce" or "audit" + "status[1]" string => "success"; # expected status, "repaired", "success" or "error" + "execute[1]" string => "true"; # Tell if an execution is needed + + "key[2]" string => "vm.dirty_ratio"; + "value[2]" string => "aaaa"; + "expected[2]" string => "aaaa"; + "filename[2]" string => "00_rudder"; + "option[2]" string => "default"; + "mode[2]" string => "audit"; + "status[2]" string => "error"; + "execute[2]" string => "true"; + + "key[3]" string => "vm.lowmem_reserve_ratio"; + "value[3]" string => execresult("${paths.sysctl} -n vm.lowmem_reserve_ratio", "useshell"); + "expected[3]" string => "${value[3]}"; + "filename[3]" string => "00_rudder"; + "option[3]" string => "min"; + "mode[3]" string => "audit"; + "status[3]" string => "success"; + "execute[3]" string => "true"; + + "key[4]" string => "vm.oom_dump_tasks"; + "value[4]" string => "99999"; # usually this is 1 + "expected[4]" string => execresult("${paths.sysctl} -n vm.oom_dump_tasks", "useshell"); + "filename[4]" string => "00_rudder"; + 
"option[4]" string => "max"; + "mode[4]" string => "audit"; + "status[4]" string => "success"; + "execute[4]" string => "true"; + + "key[5]" string => "vm.stat_interval"; + "value[5]" string => "0"; # default is 1 + "expected[5]" string => execresult("${paths.sysctl} -n vm.stat_interval", "useshell"); + "filename[5]" string => "00_rudder"; + "option[5]" string => "min"; + "mode[5]" string => "audit"; + "status[5]" string => "success"; + "execute[5]" string => "true"; + + "key[6]" string => "vm.oom_dump_tasks"; + "value[6]" string => "999"; + "expected[6]" string => execresult("${paths.sysctl} -n vm.oom_dump_tasks", "useshell"); + "filename[6]" string => "00_rudder"; + "option[6]" string => "min"; + "mode[6]" string => "audit"; + "status[6]" string => "error"; + "execute[6]" string => "true"; + + "indices" slist => getindices("key"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + "create_${indices}" expression => strcmp("${create[${indices}]}", "true"); + + # need to store existing values + commands: + +} +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.key[${init.indices}]}", "${init.value[${init.indices}]}", "${init.filename[${init.indices}]}", "${init.option[${init.indices}]}" }; + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + "ph1" usebundle => apply_gm("sysctl_value", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("sysctl_value", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph3" usebundle => apply_gm("sysctl_value", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm("sysctl_value", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + "ph5" usebundle => apply_gm("sysctl_value", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + "ph6" usebundle => apply_gm("sysctl_value", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "sysctl_output[${init.indices}]" string => execresult("${paths.sysctl} -n ${init.key[${init.indices}]}", "useshell"); + + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + + + "execute_${init.indices}" expression => strcmp("${init.execute[${init.indices}]}", "true"); + "result_nok" not => strcmp("${sysctl_output[${init.indices}]}", "${init.expected[${init.indices}]}"), + if => "execute_${init.indices}"; + + # classes_ok is just a placeholder. 
What we really want is find the not ok + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"), + if => "execute_${init.indices}"; + "ok" expression => "!classes_not_ok.!result_nok"; + + commands: + pass3:: + "/bin/rm -rf /etc/sysctl.d/00_rudder.conf"; + # restore old values + "${paths.sysctl} -p ${init.tmp}/backup.conf"; + + reports: + pass3:: + "Test for sysctl nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_absent.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_absent.cf new file mode 100644 index 00000000000..e7a9502d8f2 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_absent.cf @@ -0,0 +1,83 @@ +####################################################### +# +# Test checking if a user exist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "user3" string => "user3"; + + commands: + "/usr/sbin/useradd ${init.user2}" handle => "a2"; + "/usr/sbin/useradd ${init.user3}" handle => "a3"; + "/usr/bin/perl -i -pe 's/^${init.user3}:!:/${init.user3}::/' /etc/shadow" handle => "p3"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_absent("${init.user1}"); + "ph2" usebundle => user_absent("${init.user2}"); + "ph3" usebundle => user_absent("${init.user3}"); +} + +####################################################### + +bundle agent check +{ + classes: + "user1_ok" not => userexists("${init.user1}"); + "user2_ok" not => userexists("${init.user2}"); + "user3_ok" not => userexists("${init.user3}"); + + "ph1_ok" expression => "user_absent_${init.user1}_kept.!user_absent_${init.user1}_repaired.!user_absent_${init.user1}_error"; + "ph2_ok" expression => "!user_absent_${init.user2}_kept.user_absent_${init.user2}_repaired.!user_absent_${init.user2}_error"; + "ph3_ok" expression => "!user_absent_${init.user3}_kept.user_absent_${init.user3}_repaired.!user_absent_${init.user3}_error"; + + + "ok" expression => "user1_ok.user2_ok.user3_ok.ph1_ok.ph2_ok.ph3_ok"; + + commands: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + "/usr/sbin/userdel ${init.user3}" handle => "h3"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +# user1_ok:: +# "user1_ok"; +# user2_ok:: +# "user2_ok"; +# user3_ok:: +# "user3_ok"; +# ph1_ok:: +# "ph1_ok"; +# ph2_ok:: +# "ph2_ok"; +# ph3_ok:: +# "ph3_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_create.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_create.cf new file mode 100644 index 00000000000..cf45bf39b21 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_create.cf @@ -0,0 +1,73 @@ +####################################################### +# +# Test checking user creation 
+# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user" string => "user1"; + "description" string => "the user"; + "home" string => "/tmp/userhome"; + "group" string => "bin"; + "shell" string => "/bin/sh"; + "locked" string => "false"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_create("${init.user}", "${init.description}", "${init.home}", "${init.group}", "${init.shell}", "${init.locked}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line" string => execresult("${paths.path[grep]} '^${init.user}:' /etc/passwd", "useshell"); + + classes: + pass1:: + "user1_ok" expression => regcmp("user1:x:\d+:\d+:the user:/tmp/userhome:/bin/sh", "${user_line}"); + + "ph1_ok" expression => "!user_create_${init.user}_kept.user_create_${init.user}_repaired.!user_create_${init.user}_error"; + + "ok" expression => "user1_ok.ph1_ok"; + + any:: + "pass1" expression => "any"; + + commands: + pass1:: + "/usr/sbin/userdel ${init.user}" handle => "h1"; + + reports: + pass1.ok:: + "$(this.promise_filename) Pass"; + pass1.!ok:: + "$(this.promise_filename) FAIL"; + pass1.user1_ok:: + "user1_ok"; + pass1.ph1_ok:: + "ph1_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_fullname.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_fullname.cf new file mode 100644 index 00000000000..14bf0671256 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_fullname.cf @@ -0,0 +1,88 @@ + +####################################################### +# +# Test checking user_fullname +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "fullname2" string => "fullname2"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_fullname("${init.user1}", "${init.fullname2}"); + "ph2" usebundle => user_present("${init.user2}"); + "ph3" usebundle => user_fullname("${init.user2}", "${init.fullname2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/passwd", "useshell"); + + classes: + + pass1:: + "user1_ok" expression => strcmp("", "${user_line1}"); + "user2_ok" expression => regcmp("^(${init.user2}):x:([0-9]+):([0-9]+):(${init.fullname2}):\/home\/\1:.*", "${user_line2}"); + + "ph1_ok" expression 
=> "!user_fullname_${init.user1}_kept.!user_fullname_${init.user1}_repaired.user_fullname_${init.user1}_error"; + "ph2_ok" expression => "!user_present_${init.user2}_kept.user_present_${init.user2}_repaired.!user_present_${init.user2}_error"; + "ph3_ok" expression => "!user_fullname_${init.user2}_kept.user_fullname_${init.user2}_repaired.!user_fullname_${init.user2}_error"; + + "ok" expression => "user1_ok.user2_ok.ph1_ok.ph2_ok.ph3_ok"; + + any:: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass3:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; + pass2.ph3_ok:: + "ph3_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_group.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_group.cf new file mode 100644 index 00000000000..f79a4b872ba --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_group.cf @@ -0,0 +1,199 @@ +####################################################### +# +# Test setting user secondary group +# Works with users rudder_{i} +# cleaned in post execution +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + # REPAIR + "user[1]" string => "rudder_1"; + "group[1]" string => "ruddergroup_1"; + "expected_group[1]" string => "ruddergroup_1"; + "init_group[1]" string => ""; # init group for user + "create_group[1]" string => "false"; # precreate group, yes or no + "create_user[1]" string => "true"; # precreate user, yes or no + "mode[1]" string => "enforce"; + "status[1]" string => "repaired"; + + "user[2]" string => "rudder_2"; + "group[2]" string => "ruddergroup_2"; + "expected_group[2]" string => "ruddergroup_1 ruddergroup_2"; + "init_group[2]" string => "ruddergroup_1"; # init group for user + "create_group[2]" string => "true"; # precreate group, yes or no + "create_user[2]" string => "true"; # precreate user, yes or no + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + + "user[2bis]" string => "rudder_2"; + "group[2bis]" string => "ruddergroup_2bis"; + "expected_group[2bis]" string => "ruddergroup_1 ruddergroup_2 ruddergroup_2bis"; + "init_group[2bis]" string => ""; + "create_group[2bis]" string => "false"; + "create_user[2bis]" string => "false"; + "mode[2bis]" string => "enforce"; + "status[2bis]" string => "repaired"; + + # SUCCESS + "user[3]" string => "rudder_3"; + "group[3]" string => "ruddergroup_3"; + "expected_group[3]" string => "ruddergroup_3"; + "init_group[3]" string => "ruddergroup_3"; # init group for user + "create_group[3]" string => "true"; # precreate group, yes or no + "create_user[3]" string => "true"; # precreate user, yes or no + "mode[3]" string => "enforce"; + "status[3]" string => "success"; + + "user[4]" string => "rudder_4"; + "group[4]" string => "ruddergroup_4"; + 
"expected_group[4]" string => "ruddergroup_4 ruddergroup_1"; + "init_group[4]" string => "ruddergroup_4,ruddergroup_1"; # init group for user + "create_group[4]" string => "true"; # precreate group, yes or no + "create_user[4]" string => "true"; # precreate user, yes or no + "mode[4]" string => "enforce"; + "status[4]" string => "success"; + + # ERROR + "user[5]" string => "rudder_5"; + "group[5]" string => "ruddergroup_5"; + "expected_group[5]" string => "ruddergroup_5"; + "init_group[5]" string => ""; # init group for user + "create_group[5]" string => "false"; # precreate group, yes or no + "create_user[5]" string => "false"; # user doesn't exist + "mode[5]" string => "enforce"; + "status[5]" string => "error"; + + "user[6]" string => "rudder_6"; + "group[6]" string => "ruddergroup_none"; + "expected_group[6]" string => "ruddergroup_none"; + "init_group[6]" string => ""; # init group for user + "create_group[6]" string => "false"; # group don't exist + "create_user[6]" string => "true"; # user exists + "mode[6]" string => "enforce"; + "status[6]" string => "error"; + + # Audit + "user[7]" string => "rudder_7"; + "group[7]" string => "ruddergroup_7"; + "expected_group[7]" string => "ruddergroup_7"; + "init_group[7]" string => "ruddergroup_7"; # init group for user + "create_group[7]" string => "true"; # group exists + "create_user[7]" string => "true"; # user exists + "mode[7]" string => "audit"; + "status[7]" string => "success"; + + "user[8]" string => "rudder_8"; + "group[8]" string => "ruddergroup_8"; + "expected_group[8]" string => "ruddergroup_8"; + "init_group[8]" string => ""; # init group for user + "create_group[8]" string => "false"; # group exists + "create_user[8]" string => "true"; # user exists + "mode[8]" string => "audit"; + "status[8]" string => "error"; + + "indices" slist => {1, 2, "2bis", 3, 4, 5, 6, 7, 8}; + + classes: + # define create class + "create_user_${indices}" expression => strcmp("${create_user[${indices}]}", "true"); + "create_group_${indices}" expression => strcmp("${create_group[${indices}]}", "true"); + + commands: + # create groups + "${paths.groupadd} ruddergroup_${indices}"; + + # create user + "${paths.useradd} ${user[${indices}]}" + if => "create_user_${indices}"; + + "${paths.usermod} -a -G ${init_group[${indices}]} ${user[${indices}]}" + if => "create_user_${indices}.create_group_${indices}"; + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.user[${init.indices}]}", "${init.group[${init.indices}]}" }; + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + #REPAIRED + "ph1" usebundle => apply_gm("user_group", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm("user_group", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph2bis" usebundle => apply_gm("user_group", @{args2bis}, "${init.status[2bis]}", "ph2bis", "${init.mode[2bis]}" ); + # SUCCESS + "ph3" usebundle => apply_gm("user_group", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm("user_group", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + # ERROR + "ph5" usebundle => apply_gm("user_group", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + "ph6" usebundle => apply_gm("user_group", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + # AUDIT + "ph7" usebundle => apply_gm("user_group", @{args7}, "${init.status[7]}", "ph7", 
"${init.mode[7]}" ); + "ph8" usebundle => apply_gm("user_group", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); +} + + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_group[${init.indices}]" string => execresult("${ncf_paths.path[id]} -Gn ${init.user[${init.indices}]}", "useshell"); + + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + # checking mode only for enforce mode + "execute_${init.indices}" expression => strcmp("${init.mode[${init.indices}]}", "enforce"); + "result_nok" not => strcmp("${user_group[${init.indices}]}", "${init.expected_group[${init.indices}]}"), + if => "execute_${init.indices}"; + + # classes_ok is just a placeholder. What we really want is find the not ok + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"); + "ok" expression => "!classes_not_ok.!result_nok"; + + commands: + pass3:: + "${paths.userdel} ${init.user[${init.indices}]}"; + "${paths.groupdel} ruddergroup_${init.indices}"; + + reports: + pass3:: + "Test for user_group nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} + +####################################################### diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_home.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_home.cf new file mode 100644 index 00000000000..ec09eb823fc --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_home.cf @@ -0,0 +1,86 @@ + +####################################################### +# +# Test checking user_home +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "home2" string => "/home/arandomhome"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_home("${init.user1}", "${init.home2}"); + "ph2" usebundle => user_present("${init.user2}"); + "ph3" usebundle => user_home("${init.user2}", "${init.home2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/passwd", "useshell"); + + classes: + + pass1:: + "user1_ok" expression => strcmp("", "${user_line1}"); + "user2_ok" expression => regcmp("^(${init.user2}):x:([0-9]+):([0-9]+)::(${init.home2}):.*", "${user_line2}"); + "home_does_not_exist" not => isdir("${init.home2}"); + + "ph1_ok" expression => "!user_home_${init.user1}_kept.!user_home_${init.user1}_repaired.user_home_${init.user1}_error"; + "ph2_ok" expression => "!user_present_${init.user2}_kept.user_present_${init.user2}_repaired.!user_present_${init.user2}_error"; + "ph3_ok" expression => 
"!user_home_${init.user2}_kept.user_home_${init.user2}_repaired.!user_home_${init.user2}_error"; + + "ok" expression => "user1_ok.user2_ok.home_does_not_exist.ph1_ok.ph2_ok.ph3_ok"; + + any:: + "pass2" expression => "any"; + "pass1" expression => "any"; + commands: + pass2:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; + pass2.ph3_ok:: + "ph3_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_locked.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_locked.cf new file mode 100644 index 00000000000..553bc851907 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_locked.cf @@ -0,0 +1,91 @@ + +####################################################### +# +# Test checking user_password_hash +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "password1" string => "$1$jp5rCMS4$mhvf4utonDubW5M00z0Ow0"; + "password1_reg" string => "\$1\$jp5rCMS4\$mhvf4utonDubW5M00z0Ow0"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_present("${init.user1}"); + "ph2" usebundle => user_password_hash("${init.user1}", "${init.password1}"); + "ph3" usebundle => user_locked("${init.user1}"); + "ph4" usebundle => user_locked("${init.user2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/shadow", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/shadow", "useshell"); + + classes: + pass1:: + "user1_ok" expression => regcmp("(${init.user1}):!(${init.password1_reg}):.*", "${user_line1}"); + "user2_ok" expression => strcmp("", "${user_line2}"); + + "ph1_ok" expression => "!user_present_${init.user1}_kept.user_present_${init.user1}_repaired.!user_present_${init.user1}_error"; + "ph2_ok" expression => "!user_password_hash_${init.user1}_kept.user_password_hash_${init.user1}_repaired.!user_password_hash_${init.user1}_error"; + "ph3_ok" expression => "!user_locked_${init.user1}_kept.user_locked_${init.user1}_repaired.!user_locked_${init.user1}_error"; + "ph4_ok" expression => "!user_locked_${init.user2}_kept.!user_locked_${init.user2}_repaired.user_locked_${init.user2}_error"; + + "ok" expression => "user1_ok.user2_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok"; + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass2:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + 
pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; + pass2.ph3_ok:: + "ph3_ok"; + pass2.ph4_ok:: + "ph4_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_password_hash.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_password_hash.cf new file mode 100644 index 00000000000..ce55a6cc328 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_password_hash.cf @@ -0,0 +1,102 @@ + +####################################################### +# +# Test checking user_password_hash +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "user3" string => "user3"; + "password1" string => ""; + "password2" string => "$1$jp5rCMS4$mhvf4utonDubW5M00z0Ow0"; + "password2_reg" string => "\$1\$jp5rCMS4\$mhvf4utonDubW5M00z0Ow0"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_present("${init.user1}"); + "ph2" usebundle => user_present("${init.user2}"); + "ph3" usebundle => user_password_hash("${init.user1}", "${init.password1}"); + "ph4" usebundle => user_password_hash("${init.user2}", "${init.password2}"); + "ph5" usebundle => user_password_hash("${init.user3}", "${init.password2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/shadow", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/shadow", "useshell"); + "user_line3" string => execresult("${paths.path[grep]} '^${init.user3}:' /etc/shadow", "useshell"); + + classes: + pass1:: + "user1_ok" expression => regcmp("(${init.user1}):x:.*", "${user_line1}"); + "user2_ok" expression => regcmp("(${init.user2}):(${init.password2_reg}):.*", "${user_line2}"); + "user3_ok" expression => strcmp("", "${user_line3}"); + + "ph1_ok" expression => "!user_present_${init.user1}_kept.user_present_${init.user1}_repaired.!user_present_${init.user1}_error"; + "ph2_ok" expression => "!user_present_${init.user2}_kept.user_present_${init.user2}_repaired.!user_present_${init.user2}_error"; + "ph3_ok" expression => "!user_password_hash_${init.user1}_kept.!user_password_hash_${init.user1}_repaired.user_password_hash_${init.user1}_error"; + "ph4_ok" expression => "!user_password_hash_${init.user2}_kept.user_password_hash_${init.user2}_repaired.!user_password_hash_${init.user2}_error"; + "ph5_ok" expression => "!user_password_hash_${init.user3}_kept.!user_password_hash_${init.user3}_repaired.user_password_hash_${init.user3}_error"; + + "ok" expression => "user1_ok.user2_ok.user3_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok"; + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass2:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + "/usr/sbin/userdel ${init.user3}" handle => "h3"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) 
FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + pass2.user3_ok:: + "user3_ok"; + pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; + pass2.ph3_ok:: + "ph3_ok"; + pass2.ph4_ok:: + "ph4_ok"; + pass2.ph5_ok:: + "ph5_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_present.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_present.cf new file mode 100644 index 00000000000..9f420d665c2 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_present.cf @@ -0,0 +1,81 @@ + +####################################################### +# +# Test checking user_present +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "users"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_present("${init.user1}"); + "ph2" usebundle => user_present("${init.user2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/passwd", "useshell"); + + classes: + pass1:: + "user1_ok" expression => regcmp("^(${init.user1}):x:([0-9]+):([0-9]+)::\/home\/\1:.*", "${user_line1}"); + "user2_ok" expression => strcmp("", "${user_line2}"); + + "ph1_ok" expression => "!user_present_${init.user1}_kept.user_present_${init.user1}_repaired.!user_present_${init.user1}_error"; + "ph2_ok" expression => "!user_present_${init.user2}_kept.!user_present_${init.user2}_repaired.user_present_${init.user2}_error"; + + "ok" expression => "user1_ok.user2_ok.ph1_ok.ph2_ok"; + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass2:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_primary_group.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_primary_group.cf new file mode 100644 index 00000000000..b91d6d76eed --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_primary_group.cf @@ -0,0 +1,163 @@ + +####################################################### +# +# Test checking user_primary_group +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + 
+####################################################### + +bundle agent init +{ + vars: + # Enforce mode + "user[1]" string => "user1"; + "primary_group[1]" string => "ncf_test_group1"; + "status[1]" string => "repaired"; + "mode[1]" string => "enforce"; + + "user[2]" string => "user2"; + "primary_group[2]" string => "ncf_test_group2"; + "status[2]" string => "success"; + "mode[2]" string => "enforce"; + + "user[3]" string => "user3"; + "primary_group[3]" string => "anonexistentgroup"; + "status[3]" string => "error"; + "mode[3]" string => "enforce"; + + "user[4]" string => "userthatdonotexist"; + "primary_group[4]" string => "ncf_test_group1"; + "status[4]" string => "error"; + "mode[4]" string => "enforce"; + + # Audit mode + "user[5]" string => "user5"; + "primary_group[5]" string => "ncf_test_group1"; + "status[5]" string => "error"; + "mode[5]" string => "audit"; + + "user[6]" string => "user6"; + "primary_group[6]" string => "ncf_test_group1"; + "status[6]" string => "success"; + "mode[6]" string => "audit"; + + "user[7]" string => "user7"; + "primary_group[7]" string => "anonexistentgroup"; + "status[7]" string => "error"; + "mode[7]" string => "audit"; + + + "indices" slist => getindices("user"); + + pass2:: + "line3" string => execresult("${paths.path[grep]} '^${user[3]}:' /etc/passwd", "useshell"); + "line5" string => execresult("${paths.path[grep]} '^${user[5]}:' /etc/passwd", "useshell"); + "line6" string => execresult("${paths.path[grep]} '^${user[6]}:' /etc/passwd", "useshell"); + "line7" string => execresult("${paths.path[grep]} '^${user[7]}:' /etc/passwd", "useshell"); + + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + "user_present" usebundle => user_present("${user[1]}"); + "user_present" usebundle => user_present("${user[2]}"); + "user_present" usebundle => user_present("${user[3]}"); + "user_present" usebundle => user_present("${user[5]}"); + "user_present" usebundle => user_present("${user[6]}"); + "user_present" usebundle => user_present("${user[7]}"); + + commands: + "${paths.groupadd} ${primary_group[1]}"; + "${paths.groupadd} ${primary_group[2]}"; + + "${paths.usermod} -g ${init.primary_group[2]} ${init.user[2]}"; + "${paths.usermod} -g ${init.primary_group[6]} ${init.user[6]}"; +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.user[${init.indices}]}", "${init.primary_group[${init.indices}]}" }; + + methods: + "ph1" usebundle => apply_gm("user_primary_group", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm("user_primary_group", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); + "ph3" usebundle => apply_gm("user_primary_group", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}"); + "ph4" usebundle => apply_gm("user_primary_group", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}"); + + "ph5" usebundle => apply_gm("user_primary_group", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}"); + "ph6" usebundle => apply_gm("user_primary_group", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}"); + "ph7" usebundle => apply_gm("user_primary_group", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}"); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "gid1" int => getgid("${init.primary_group[1]}"); + "gid2" int => getgid("${init.primary_group[2]}"); + "uid${init.indices}" int => 
getuid("${init.user[${init.indices}]}"); + + "user_line1" string => execresult("${paths.path[grep]} '^${init.user[1]}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user[2]}:' /etc/passwd", "useshell"); + "user_line3" string => execresult("${paths.path[grep]} '^${init.user[3]}:' /etc/passwd", "useshell"); + "user_line4" string => execresult("${paths.path[grep]} '^${init.user[4]}:' /etc/passwd", "useshell"); + "user_line5" string => execresult("${paths.path[grep]} '^${init.user[5]}:' /etc/passwd", "useshell"); + "user_line6" string => execresult("${paths.path[grep]} '^${init.user[6]}:' /etc/passwd", "useshell"); + "user_line7" string => execresult("${paths.path[grep]} '^${init.user[7]}:' /etc/passwd", "useshell"); + + classes: + pass1:: + "user1_ok" expression => regcmp("(${init.user[1]}):x:${uid1}:${gid1}::\/home\/\1:.*", "${user_line1}"); + "user2_ok" expression => regcmp("${init.user[2]}:x:${uid2}:${gid2}.*", "${user_line2}"); + "user3_ok" expression => strcmp("${init.line3}", "${user_line3}"); + "user4_ok" expression => regcmp("", "${user_line4}"); + "user5_ok" expression => regcmp("${init.line5}", "${user_line5}"); + "user6_ok" expression => regcmp("${init.line6}", "${user_line6}"); + "user7_ok" expression => regcmp("${init.line7}", "${user_line7}"); + + "users_ok" expression => "user1_ok.user2_ok.user3_ok.user4_ok.user5_ok.user6_ok.user7_ok"; + "classes_ok" expression => "ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok"; + "ok" expression => "users_ok.classes_ok"; + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass2:: + "${paths.userdel} ${init.user[${init.indices}]}" handle => "deluser${init.indices}"; + "${paths.groupdel} ${init.user[${init.indices}]}" handle => "delgroup${init.indices}"; + "${paths.groupdel} ${init.primary_group[1]}" handle => "del1"; + "${paths.groupdel} ${init.primary_group[2]}" handle => "del2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2:: + "user${init.indices}" + ifvarclass => "user${init.indices}_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_secondary_groups.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_secondary_groups.cf new file mode 100644 index 00000000000..d9370b1b1cd --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_secondary_groups.cf @@ -0,0 +1,256 @@ +####################################################### +# +# Test setting user secondary group +# Works with users rudder_{i} +# cleaned in post execution +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + # REPAIR + "user[1]" string => "rudder_1"; + "group[1]" string => "ruddergroup_1"; + "force[1]" string => "false"; + "expected_group[1]" string => "ruddergroup_1"; + "init_group[1]" string => ""; # init group for user + "create_group[1]" string => "false"; # precreate group, yes or no + "create_user[1]" string => "true"; # precreate user, yes or no + "mode[1]" string => "enforce"; + "status[1]" string => 
"repaired"; + + "user[2]" string => "rudder_2"; + "group[2]" string => "ruddergroup_2"; + "force[2]" string => "false"; + "expected_group[2]" string => "ruddergroup_1 ruddergroup_2"; + "init_group[2]" string => "ruddergroup_1"; # init group for user + "create_group[2]" string => "true"; # precreate group, yes or no + "create_user[2]" string => "true"; # precreate user, yes or no + "mode[2]" string => "enforce"; + "status[2]" string => "repaired"; + + "user[2bis]" string => "rudder_2"; + "group[2bis]" string => "ruddergroup_2bis"; + "force[2bis]" string => "false"; + "expected_group[2bis]" string => "ruddergroup_1 ruddergroup_2 ruddergroup_2bis"; + "init_group[2bis]" string => ""; + "create_group[2bis]" string => "false"; + "create_user[2bis]" string => "false"; + "mode[2bis]" string => "enforce"; + "status[2bis]" string => "repaired"; + + # SUCCESS + "user[3]" string => "rudder_3"; + "group[3]" string => "ruddergroup_3"; + "force[3]" string => "false"; + "expected_group[3]" string => "ruddergroup_3"; + "init_group[3]" string => "ruddergroup_3"; # init group for user + "create_group[3]" string => "true"; # precreate group, yes or no + "create_user[3]" string => "true"; # precreate user, yes or no + "mode[3]" string => "enforce"; + "status[3]" string => "success"; + + "user[4]" string => "rudder_4"; + "group[4]" string => "ruddergroup_4"; + "force[4]" string => "false"; + "expected_group[4]" string => "ruddergroup_4 ruddergroup_1"; + "init_group[4]" string => "ruddergroup_4,ruddergroup_1"; # init group for user + "create_group[4]" string => "true"; # precreate group, yes or no + "create_user[4]" string => "true"; # precreate user, yes or no + "mode[4]" string => "enforce"; + "status[4]" string => "success"; + + # ERROR + "user[5]" string => "rudder_5"; + "group[5]" string => "ruddergroup_5"; + "force[5]" string => "false"; + "expected_group[5]" string => "ruddergroup_5"; + "init_group[5]" string => ""; # init group for user + "create_group[5]" string => "false"; # precreate group, yes or no + "create_user[5]" string => "false"; # user doesn't exist + "mode[5]" string => "enforce"; + "status[5]" string => "error"; + + "user[6]" string => "rudder_6"; + "group[6]" string => "ruddergroup_none"; + "force[6]" string => "false"; + "expected_group[6]" string => "ruddergroup_none"; + "init_group[6]" string => ""; # init group for user + "create_group[6]" string => "false"; # group don't exist + "create_user[6]" string => "true"; # user exists + "mode[6]" string => "enforce"; + "status[6]" string => "error"; + + # Audit + "user[7]" string => "rudder_7"; + "group[7]" string => "ruddergroup_7"; + "force[7]" string => "false"; + "expected_group[7]" string => "ruddergroup_7"; + "init_group[7]" string => "ruddergroup_7"; # init group for user + "create_group[7]" string => "true"; # group exists + "create_user[7]" string => "true"; # user exists + "mode[7]" string => "audit"; + "status[7]" string => "success"; + + "user[8]" string => "rudder_8"; + "group[8]" string => "ruddergroup_8"; + "force[8]" string => "false"; + "expected_group[8]" string => "ruddergroup_8"; + "init_group[8]" string => ""; # init group for user + "create_group[8]" string => "false"; # group exists + "create_user[8]" string => "true"; # user exists + "mode[8]" string => "audit"; + "status[8]" string => "error"; + + # Force = true + "user[9]" string => "rudder_9"; + "group[9]" string => "ruddergroup_9"; + "force[9]" string => "true"; + "expected_group[9]" string => "ruddergroup_9"; + "init_group[9]" string => ""; # init group for user + 
"create_group[9]" string => "true"; # group exists + "create_user[9]" string => "true"; # user exists + "mode[9]" string => "enforce"; + "status[9]" string => "repaired"; + + "user[10]" string => "rudder_10"; + "group[10]" string => "ruddergroup_10"; + "force[10]" string => "true"; + "expected_group[10]" string => "ruddergroup_10"; + "init_group[10]" string => "games"; # init group for user + "create_group[10]" string => "true"; # group exists + "create_user[10]" string => "true"; # user exists + "mode[10]" string => "enforce"; + "status[10]" string => "repaired"; + + "user[11]" string => "rudder_11"; + "group[11]" string => "ruddergroup_11,games"; + "force[11]" string => "true"; + "expected_group[11]" string => "games"; + "init_group[11]" string => "games"; # init group for user + "create_group[11]" string => "true"; # group exists + "create_user[11]" string => "true"; # user exists + "mode[11]" string => "audit"; + "status[11]" string => "error"; + + "user[12]" string => "rudder_12"; + "group[12]" string => "ruddergroup_12,games"; + "force[12]" string => "true"; + "expected_group[12]" string => "games,ruddergroup_12"; + "init_group[12]" string => "games"; # init group for user + "create_group[12]" string => "true"; # group exists + "create_user[12]" string => "true"; # user exists + "mode[12]" string => "enforce"; + "status[12]" string => "repaired"; + + "indices" slist => {1, 2, "2bis", 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + + classes: + # define create class + "create_user_${indices}" expression => strcmp("${create_user[${indices}]}", "true"); + "create_group_${indices}" expression => strcmp("${create_group[${indices}]}", "true"); + + commands: + # create groups + "${paths.groupadd} ruddergroup_${indices}"; + + # create user + "${paths.useradd} ${user[${indices}]}" + if => "create_user_${indices}"; + + "${paths.usermod} -a -G ${init_group[${indices}]} ${user[${indices}]}" + if => "create_user_${indices}.create_group_${indices}"; + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.user[${init.indices}]}", "${init.group[${init.indices}]}", "${init.force[${init.indices}]}" }; + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1.!pass2:: + # Force = false + #REPAIRED + "ph1" usebundle => apply_gm_v4("user_secondary_groups", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}" ); + "ph2" usebundle => apply_gm_v4("user_secondary_groups", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}" ); + "ph2bis" usebundle => apply_gm_v4("user_secondary_groups", @{args2bis}, "${init.status[2bis]}", "ph2bis", "${init.mode[2bis]}" ); + # SUCCESS + "ph3" usebundle => apply_gm_v4("user_secondary_groups", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}" ); + "ph4" usebundle => apply_gm_v4("user_secondary_groups", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}" ); + # ERROR + "ph5" usebundle => apply_gm_v4("user_secondary_groups", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}" ); + "ph6" usebundle => apply_gm_v4("user_secondary_groups", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}" ); + # AUDIT + "ph7" usebundle => apply_gm_v4("user_secondary_groups", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}" ); + "ph8" usebundle => apply_gm_v4("user_secondary_groups", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}" ); + + # Force = true + "ph9" usebundle => apply_gm_v4("user_secondary_groups", @{args9}, "${init.status[9]}", "ph9", 
"${init.mode[9]}" ); + "ph10" usebundle => apply_gm_v4("user_secondary_groups", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}" ); + "ph11" usebundle => apply_gm_v4("user_secondary_groups", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}" ); + "ph12" usebundle => apply_gm_v4("user_secondary_groups", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}" ); +} + + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_secondary_groups[${init.indices}]" string => execresult("${ncf_paths.path[id]} -Gn ${init.user[${init.indices}]}", "useshell"); + + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + # checking mode only for enforce mode + "execute_${init.indices}" expression => strcmp("${init.mode[${init.indices}]}", "enforce"); + "result_nok" not => strcmp("${user_secondary_groups[${init.indices}]}", "${init.expected_group[${init.indices}]}"), + if => "execute_${init.indices}"; + + # classes_ok is just a placeholder. What we really want is find the not ok + "classes_not_ok" expression => or("classes_ok", "!ph${init.indices}_ok"); + "ok" expression => "!classes_not_ok.!result_nok"; + + commands: + pass3:: + "${paths.userdel} ${init.user[${init.indices}]}"; + "${paths.groupdel} ruddergroup_${init.indices}"; + + reports: + pass3:: + "Test for user_secondary_groups nb ${init.indices} FAILED" + ifvarclass => "!ph${init.indices}_ok"; + + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; +} + +####################################################### diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_shell.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_shell.cf new file mode 100644 index 00000000000..5662b41e709 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_shell.cf @@ -0,0 +1,85 @@ + +####################################################### +# +# Test checking user_shell +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "shell2" string => "/bin/zsh"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => user_shell("${init.user1}", "${init.shell2}"); + "ph2" usebundle => user_present("${init.user2}"); + "ph3" usebundle => user_shell("${init.user2}", "${init.shell2}"); +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/passwd", "useshell"); + + classes: + pass1:: + "user1_ok" expression => strcmp("", "${user_line1}"); + "user2_ok" expression => regcmp("^(${init.user2}):x:([0-9]+):([0-9]+)::\/home\/\1:(${init.shell2}).*", "${user_line2}"); + + "ph1_ok" expression => 
"!user_shell_${init.user1}_kept.!user_shell_${init.user1}_repaired.user_shell_${init.user1}_error"; + "ph2_ok" expression => "!user_present_${init.user2}_kept.user_present_${init.user2}_repaired.!user_present_${init.user2}_error"; + "ph3_ok" expression => "!user_shell_${init.user2}_kept.user_shell_${init.user2}_repaired.!user_shell_${init.user2}_error"; + + "ok" expression => "user1_ok.user2_ok.ph1_ok.ph2_ok.ph3_ok"; + + any:: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + commands: + pass2:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + + reports: + pass2.ok:: + "$(this.promise_filename) Pass"; + pass2.!ok:: + "$(this.promise_filename) FAIL"; + pass2.user1_ok:: + "user1_ok"; + pass2.user2_ok:: + "user2_ok"; + pass2.ph1_ok:: + "ph1_ok"; + pass2.ph2_ok:: + "ph2_ok"; + pass2.ph3_ok:: + "ph3_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_uid.cf b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_uid.cf new file mode 100644 index 00000000000..35097b204f3 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/unsafe/user_uid.cf @@ -0,0 +1,115 @@ +####################################################### +# +# Test checking user_uid +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "user1" string => "user1"; + "user2" string => "user2"; + "user3" string => "user3"; +} + +####################################################### + +bundle agent test +{ + vars: + "uid1b" string => "242424"; + pass3:: + "uid1" int => getuid("${init.user1}"); + "uid2" int => getuid("${init.user2}"); + + classes: + any:: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + methods: + pass1:: + "ph1" usebundle => user_present("${init.user1}"); + "ph2" usebundle => user_present("${init.user2}"); + pass3:: + "ph3" usebundle => user_uid("${init.user3}", "${uid2}"); + "ph4" usebundle => user_uid("${init.user2}", "${uid1}"); + "ph5" usebundle => user_uid("${init.user1}", "${uid1b}"); + +} + +####################################################### + +bundle agent check +{ + vars: + pass1:: + "user_line1" string => execresult("${paths.path[grep]} '^${init.user1}:' /etc/passwd", "useshell"); + "user_line2" string => execresult("${paths.path[grep]} '^${init.user2}:' /etc/passwd", "useshell"); + "user_line3" string => execresult("${paths.path[grep]} '^${init.user3}:' /etc/passwd", "useshell"); + + classes: + any:: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + "user1_ok" expression => regcmp("^(${init.user1}):x:${test.uid1b}:([0-9]+)::\/home\/\1:.*", "${user_line1}"); + "user2_ok" expression => regcmp("^(${init.user2}):x:${test.uid2}:([0-9]+)::\/home\/\1:.*", "${user_line2}"); + "user3_ok" expression => strcmp("", "${user_line3}"); + + "ph1_ok" expression => "!user_present_${init.user1}_kept.user_present_${init.user1}_repaired.!user_present_${init.user1}_error"; + "ph2_ok" expression => 
"!user_present_${init.user2}_kept.user_present_${init.user2}_repaired.!user_present_${init.user2}_error"; + "ph3_ok" expression => "!user_uid_${init.user3}_kept.!user_uid_${init.user3}_repaired.user_uid_${init.user3}_error"; + "ph4_ok" expression => "!user_uid_${init.user2}_kept.!user_uid_${init.user2}_repaired.user_uid_${init.user2}_error"; + "ph5_ok" expression => "!user_uid_${init.user1}_kept.user_uid_${init.user1}_repaired.!user_uid_${init.user1}_error"; + + "ok" expression => "user1_ok.user2_ok.user3_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok"; + + + commands: + pass3:: + "/usr/sbin/userdel ${init.user1}" handle => "h1"; + "/usr/sbin/userdel ${init.user2}" handle => "h2"; + "/usr/sbin/userdel ${init.user3}" handle => "h3"; + + reports: + pass3.ok:: + "$(this.promise_filename) Pass"; + pass3.!ok:: + "$(this.promise_filename) FAIL"; + pass3.user1_ok:: + "user1_ok"; + pass3.user2_ok:: + "user2_ok"; + pass3.user3_ok:: + "user3_ok"; + pass3.ph1_ok:: + "ph1_ok"; + pass3.ph2_ok:: + "ph2_ok"; + pass3.ph3_ok:: + "ph3_ok"; + pass3.ph4_ok:: + "ph4_ok"; + pass3.ph5_ok:: + "ph5_ok"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict.cf new file mode 100644 index 00000000000..3aa892f282e --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict.cf @@ -0,0 +1,63 @@ +####################################################### +# +# Create a dict variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: +} + +####################################################### + +bundle agent test +{ + vars: + "dict1" string => '{ "key1": "value1", "key2": "value2", "key3": { "keyx": "valuex" }, "key4": [ "valuey" ] }'; + "dict2" string => '"string"'; + "dict3" string => 'invalid'; + + methods: + "ph1" usebundle => variable_dict("prefix", "var1", "${dict1}"); + "ph2" usebundle => variable_dict("prefix", "var2", "${dict2}"); + "ph3" usebundle => variable_dict("prefix", "var3", "${dict3}"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_dict_var1_kept.!variable_dict_var1_repaired.!variable_dict_var1_error"; + "ok_2" expression => "!variable_dict_var2_kept.!variable_dict_var2_repaired.variable_dict_var2_error"; + "ok_3" expression => "!variable_dict_var3_kept.!variable_dict_var3_repaired.variable_dict_var3_error"; + "ok_key1" expression => strcmp("${prefix.var1[key1]}", "value1"); + "ok_key2" expression => strcmp("${prefix.var1[key2]}", "value2"); + "ok_key3" expression => strcmp("${prefix.var1[key3][keyx]}", "valuex"); + "ok_key4" expression => strcmp("${prefix.var1[key4][0]}", "valuey"); + + "ok" expression => "ok_1.ok_2.ok_3.ok_key1.ok_key2.ok_key3.ok_key4"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file.cf new file mode 100644 index 00000000000..0988fe870ed --- 
/dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file.cf @@ -0,0 +1,62 @@ +####################################################### +# +# Create a read a json file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "json" string => '{ "key1": "value1", "key2": "value2", "key3": { "keyx": "valuex" }, "key4": [ "valuey" ] }'; + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${json}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_dict_from_file("prefix", "var1", "${init.file}"); + "ph2" usebundle => variable_dict_from_file("prefix", "var2", "invalid"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_dict_from_file_var1_kept.!variable_dict_from_file_var1_repaired.!variable_dict_from_file_var1_error"; + "ok_2" expression => "!variable_dict_from_file_var2_kept.!variable_dict_from_file_var2_repaired.variable_dict_from_file_var2_error"; + "ok_key1" expression => strcmp("${prefix.var1[key1]}", "value1"); + "ok_key2" expression => strcmp("${prefix.var1[key2]}", "value2"); + "ok_key3" expression => strcmp("${prefix.var1[key3][keyx]}", "valuex"); + "ok_key4" expression => strcmp("${prefix.var1[key4][0]}", "valuey"); + + "ok" expression => "ok_1.ok_2.ok_key1.ok_key2.ok_key3.ok_key4"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file_type.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file_type.cf new file mode 100644 index 00000000000..6aaf75b9ebc --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_file_type.cf @@ -0,0 +1,66 @@ +####################################################### +# +# Create a read a json file +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "json" string => '{ "key1": "value1", "key2": "value2", "key3": { "keyx": "valuex" }, "key4": [ "valuey" ] }'; + "yaml" string => "- key1: value1"; + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}.json", "${json}"); + "ph3" usebundle => file_ensure_lines_present("${file}.yaml", "${yaml}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_dict_from_file_type("prefix", "var1", "${init.file}.json", "auto"); + "ph2" 
usebundle => variable_dict_from_file_type("prefix", "var2", "invalid", "auto"); + "ph3" usebundle => variable_dict_from_file_type("prefix", "var3", "${init.file}.yaml", "auto"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_dict_from_file_type_var1_kept.!variable_dict_from_file_type_var1_repaired.!variable_dict_from_file_type_var1_error"; + "ok_2" expression => "!variable_dict_from_file_type_var2_kept.!variable_dict_from_file_type_var2_repaired.variable_dict_from_file_type_var2_error"; + "ok_3" expression => "variable_dict_from_file_type_var3_kept.!variable_dict_from_file_type_var3_repaired.!variable_dict_from_file_type_var3_error"; + "ok_key1" expression => strcmp("${prefix.var1[key1]}", "value1"); + "ok_key2" expression => strcmp("${prefix.var1[key2]}", "value2"); + "ok_key3" expression => strcmp("${prefix.var1[key3][keyx]}", "valuex"); + "ok_key4" expression => strcmp("${prefix.var1[key4][0]}", "valuey"); + "ok_3_content" expression => strcmp("${prefix.var1[key1]}", "value1"); + "ok" expression => "ok_1.ok_2.ok_key1.ok_key2.ok_key3.ok_key4.ok_3.ok_3_content"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_osquery.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_osquery.cf new file mode 100644 index 00000000000..70ac2fb78ed --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_from_osquery.cf @@ -0,0 +1,51 @@ +####################################################### +# +# Create a read a json +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_dict_from_osquery("prefix", "var1", "select cpu_logical_cores from system_info;"); +} + +####################################################### + +bundle agent check +{ + classes: + "has_osquery" expression => isexecutable("/usr/bin/osqueryi"); + "ok_1" expression => strcmp("${prefix.var1[0][cpu_logical_cores]}", "${sys.cpus}"); + "ok_1_class" expression => "variable_dict_from_osquery_var1_kept.!variable_dict_from_osquery_var1_repaired.!variable_dict_from_osquery_var1_error"; + "ok" expression => "ok_1.ok_1_class"; + + reports: + ok|!has_osquery:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge.cf new file mode 100644 index 00000000000..1d96676afde --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge.cf @@ -0,0 +1,90 @@ +####################################################### +# +# Create a dict variable by merging two other variables +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => 
getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + + "dict1" data => parsejson('{ "key1": "value1", "key2": "value2", "key3": { "keyx": "valuex" }, "key4": [ "valuey" ] }'); + "dict2" data => parsejson('{ "key1": "value1bis", "key4": "value4" }'); + "ref" string => '{ + "key1": "value1bis", + "key2": "value2", + "key3": { + "keyx": "valuex" + }, + "key4": "value4" +}'; + # CFEngine 3.6 has different output + "ref2" string => '{ + "key2": "value2", + "key3": { + "keyx": "valuex" + }, + "key1": "value1bis", + "key4": "value4" +}'; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph3" usebundle => variable_dict_merge("prefix", "var3", "init.dict1", "init.dict2"); + "ph4" usebundle => variable_dict_merge("prefix", "var4", "init.undef", "init.dict2"); + "ph5" usebundle => variable_dict_merge("prefix", "var5", "init.dict1", "init.undef"); + "ph6" usebundle => variable_dict_merge("prefix", "var6", "init.undef", "init.undef"); +} + +####################################################### + +bundle agent check +{ + vars: + "result3" string => storejson("prefix.var3"); + + classes: + + "ok_class_3" expression => "variable_dict_merge_var3_kept.!variable_dict_merge_var3_repaired.!variable_dict_merge_var3_error"; + "ok_class_4" expression => "!variable_dict_merge_var4_kept.!variable_dict_merge_var4_repaired.variable_dict_merge_var4_error"; + "ok_class_5" expression => "!variable_dict_merge_var5_kept.!variable_dict_merge_var5_repaired.variable_dict_merge_var5_error"; + "ok_class_6" expression => "!variable_dict_merge_var6_kept.!variable_dict_merge_var6_repaired.variable_dict_merge_var6_error"; + "ok_value_3" expression => strcmp("${result3}", "${init.ref}"); + "ok_value_3" expression => strcmp("${result3}", "${init.ref2}"); + "ok_value_4" not => isvariable("prefix.var4"); + "ok_value_5" not => isvariable("prefix.var5"); + "ok_value_6" not => isvariable("prefix.var6"); + + "ok_3" expression => "ok_class_3.ok_value_3"; + "ok_4" expression => "ok_class_4.ok_value_4"; + "ok_5" expression => "ok_class_5.ok_value_5"; + "ok_6" expression => "ok_class_6.ok_value_6"; + + "ok" expression => "ok_3.ok_4.ok_5.ok_6"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge_tolerant.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge_tolerant.cf new file mode 100644 index 00000000000..40f777c8cfe --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_dict_merge_tolerant.cf @@ -0,0 +1,94 @@ +####################################################### +# +# Create a dict variable by merging two other variables +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent 
init +{ + vars: + + "dict1" data => parsejson('{ "key1": "value1", "key2": "value2", "key3": { "keyx": "valuex" }, "key4": [ "valuey" ] }'); + "dict2" data => parsejson('{ "key1": "value1bis", "key4": "value4" }'); + "ref" string => '{ + "key1": "value1bis", + "key2": "value2", + "key3": { + "keyx": "valuex" + }, + "key4": "value4" +}'; + # CFEngine 3.6 has different output + "ref2" string => '{ + "key2": "value2", + "key3": { + "keyx": "valuex" + }, + "key1": "value1bis", + "key4": "value4" +}'; + +} + +####################################################### + +bundle agent test +{ + methods: + "ph3" usebundle => variable_dict_merge_tolerant("prefix", "var3", "init.dict1", "init.dict2"); + "ph4" usebundle => variable_dict_merge_tolerant("prefix", "var4", "init.undef", "init.dict2"); + "ph5" usebundle => variable_dict_merge_tolerant("prefix", "var5", "init.dict1", "init.undef"); + "ph6" usebundle => variable_dict_merge_tolerant("prefix", "var6", "init.undef", "init.undef"); +} + +####################################################### + +bundle agent check +{ + vars: + "result3" string => storejson("prefix.var3"); + "result4" string => storejson("prefix.var4"); + "result5" string => storejson("prefix.var5"); + "dict1" string => storejson("init.dict1"); + "dict2" string => storejson("init.dict2"); + + classes: + + "ok_class_3" expression => "variable_dict_merge_tolerant_var3_kept.!variable_dict_merge_tolerant_var3_repaired.!variable_dict_merge_tolerant_var3_error"; + "ok_class_4" expression => "variable_dict_merge_tolerant_var4_kept.!variable_dict_merge_tolerant_var4_repaired.!variable_dict_merge_tolerant_var4_error"; + "ok_class_5" expression => "variable_dict_merge_tolerant_var5_kept.!variable_dict_merge_tolerant_var5_repaired.!variable_dict_merge_tolerant_var5_error"; + "ok_class_6" expression => "!variable_dict_merge_tolerant_var6_kept.!variable_dict_merge_tolerant_var6_repaired.variable_dict_merge_tolerant_var6_error"; + "ok_value_3" expression => strcmp("${result3}", "${init.ref}"); + "ok_value_3" expression => strcmp("${result3}", "${init.ref2}"); + "ok_value_4" expression => strcmp("${result4}", "${dict2}"); + "ok_value_5" expression => strcmp("${result5}", "${dict1}"); + "ok_value_6" not => isvariable("prefix.var6"); + + "ok_3" expression => "ok_class_3.ok_value_3"; + "ok_4" expression => "ok_class_4.ok_value_4"; + "ok_5" expression => "ok_class_5.ok_value_5"; + "ok_6" expression => "ok_class_6.ok_value_6"; + + "ok" expression => "ok_3.ok_4.ok_5.ok_6"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_iterator.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_iterator.cf new file mode 100644 index 00000000000..1dc67badc85 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_iterator.cf @@ -0,0 +1,59 @@ +####################################################### +# +# Create an slist variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: +} + 
+####################################################### + +bundle agent test +{ + vars: + "iterator1" string => 'a,b, c'; + + methods: + "ph1" usebundle => variable_iterator("prefix", "var1", "${iterator1}", ","); + "ph2" usebundle => variable_iterator("prefix", "var2", "${iterator1}", "\s*,\s*"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_iterator_var1_kept.!variable_iterator_var1_repaired.!variable_iterator_var1_error"; + "ok_2" expression => "variable_iterator_var2_kept.!variable_iterator_var2_repaired.!variable_iterator_var2_error"; + "ok_length1" expression => strcmp(length("prefix.var1"), "3"); + "ok_length2" expression => strcmp(length("prefix.var2"), "3"); + "ok_3rd_1" expression => strcmp(nth("prefix.var1", "2"), ' c'); + "ok_3rd_2" expression => strcmp(nth("prefix.var2", "2"), 'c'); + + "ok" expression => "ok_1.ok_2.ok_length1.ok_length2.ok_3rd_1.ok_3rd_2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_iterator_from_file.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_iterator_from_file.cf new file mode 100644 index 00000000000..3e9591b800c --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_iterator_from_file.cf @@ -0,0 +1,58 @@ +####################################################### +# +# Read a file into a slist +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "data" string => 'text1 with${const.n}#comment here${const.n}many${const.n}lines'; + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${data}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_iterator_from_file("prefix", "var1", "${init.file}", "\n", "\s*#.*?(?=\n)"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_iterator_from_file_var1_kept.!variable_iterator_from_file_var1_repaired.!variable_iterator_from_file_var1_error"; + "ok_length1" expression => strcmp(length("prefix.var1"), "3"); + "ok_3rd_1" expression => strcmp(nth("prefix.var1", "2"), "lines"); + + "ok" expression => "ok_1.ok_length1.ok_3rd_1"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string.cf new file mode 100644 index 00000000000..5420b8c59fc --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string.cf @@ -0,0 +1,58 @@ +####################################################### +# +# Create a string variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control 
+{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "var" string => "var2"; + "prefix" string => "prefix1"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_string("${init.prefix}", "var1", "${init.var}"); + "ph2" usebundle => variable_string("prefix1", "var2", "value"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_string_var1_kept.!variable_string_var1_repaired.!variable_string_var1_error"; + "ok_2" expression => "variable_string_var2_kept.!variable_string_var2_repaired.!variable_string_var2_error"; + "ok_var1" expression => strcmp("${prefix1.var1}", "${init.var}"); + "ok_var1_bis" expression => strcmp("${${init.prefix}.var1}", "${init.var}"); + "ok_var2" expression => strcmp("${prefix1.var2}", "value"); + "ok_var2_bis" expression => strcmp("${${init.prefix}.${${init.prefix}.var1}}", "value"); + + "ok" expression => "ok_1.ok_2.ok_var1.ok_var1_bis.ok_var2.ok_var2_bis"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_default.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_default.cf new file mode 100644 index 00000000000..51e4bf7cbcd --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_default.cf @@ -0,0 +1,61 @@ +####################################################### +# +# Create a string variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "var" string => "var2"; + "prefix" string => "prefix1"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_string_default("${init.prefix}", "var1", "init.var", "default1"); + "ph2" usebundle => variable_string_default("prefix1", "var2", "init.var", "default2"); + "ph3" usebundle => variable_string_default("prefix1", "var3", "init.undef", "default3"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_string_default_var1_kept.!variable_string_default_var1_repaired.!variable_string_default_var1_error"; + "ok_2" expression => "variable_string_default_var2_kept.!variable_string_default_var2_repaired.!variable_string_default_var2_error"; + "ok_3" expression => "variable_string_default_var3_kept.!variable_string_default_var3_repaired.!variable_string_default_var3_error"; + "ok_var1" expression => strcmp("${prefix1.var1}", "${init.var}"); + "ok_var1_bis" expression => strcmp("${${init.prefix}.var1}", "${init.var}"); + "ok_var2" expression => strcmp("${prefix1.var2}", "${init.var}"); + "ok_var2_bis" expression => 
strcmp("${${init.prefix}.${${init.prefix}.var2}}", "${init.var}"); + "ok_var3" expression => strcmp("${prefix1.var3}", "default3"); + + "ok" expression => "ok_1.ok_2.ok_var1.ok_var1_bis.ok_var2.ok_var2_bis.ok_3.ok_var3"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_escaped.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_escaped.cf new file mode 100644 index 00000000000..6fe80f7703a --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_escaped.cf @@ -0,0 +1,104 @@ +####################################################### +# +# Exit with a specific code +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # Enforce + "variable_name[0]" string => "my_prefix.variable0"; + "value[0]" string => "c.*t"; + "expected_value[0]" string => "c\.\*t"; + "status[0]" string => "success"; + "mode[0]" string => "enforce"; + + "variable_name[1]" string => "my_prefix.variable1"; + "value[1]" string => "cat"; + "expected_value[1]" string => "cat"; + "status[1]" string => "success"; + "mode[1]" string => "enforce"; + + "variable_name[2]" string => "my_prefix.variable_that_does_not_exist2"; + "status[2]" string => "error"; + "mode[2]" string => "enforce"; + + # Audit + "variable_name[3]" string => "my_prefix.variable3"; + "value[3]" string => "c.*t"; + "expected_value[3]" string => "c\.\*t"; + "status[3]" string => "success"; + "mode[3]" string => "audit"; + + "variable_name[4]" string => "my_prefix.variable4"; + "value[4]" string => "cat"; + "expected_value[4]" string => "cat"; + "status[4]" string => "success"; + "mode[4]" string => "audit"; + + "variable_name[5]" string => "my_prefix.variable_that_does_not_exist5"; + "status[5]" string => "error"; + "mode[5]" string => "audit"; + + "indices" slist => getindices("variable_name"); + + methods: + "method_call_${indices}" usebundle => variable_string("my_prefix", "variable${indices}", "${value[${indices}]}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.variable_name[${init.indices}]}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm("variable_string_escaped", @{args0}, "${init.status[0]}", "ph0", "${init.mode[0]}"); + "ph1" usebundle => apply_gm("variable_string_escaped", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm("variable_string_escaped", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); + # Audit + "ph3" usebundle => apply_gm("variable_string_escaped", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}"); + "ph4" usebundle => apply_gm("variable_string_escaped", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}"); + "ph5" usebundle => apply_gm("variable_string_escaped", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}"); + + +} + +####################################################### + +bundle agent check +{ + classes: + 
"values_not_ok" expression => or("values_not_ok", strcmp("${${init.variable_name[${init.indices}]}}", "${init.expected_value[${init.indices}]}")); + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok"; + + "ok" expression => "classes_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; + +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_augeas.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_augeas.cf new file mode 100644 index 00000000000..0cc3a8c8a93 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_augeas.cf @@ -0,0 +1,64 @@ + ####################################################### +# +# Read a file into a string +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "/etc/passwd"; + "lens" string => "Passwd"; + "path" string => "/etc/passwd/root/uid"; + "path2" string => "/etc/passwd/rot"; +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_string_from_augeas("prefix", "var1", "${init.path}", "${init.lens}", "${init.file}"); + "ph2" usebundle => variable_string_from_augeas("prefix", "var2", "${init.path}", "", ""); + "ph3" usebundle => variable_string_from_augeas("prefix", "var3", "${init.path2}", "", ""); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_string_from_augeas_var1_kept.!variable_string_from_augeas_var1_repaired.!variable_string_from_augeas_var1_error"; + "ok_2" expression => "variable_string_from_augeas_var2_kept.!variable_string_from_augeas_var2_repaired.!variable_string_from_augeas_var2_error"; + "ok_3" expression => "!variable_string_from_augeas_var3_kept.!variable_string_from_augeas_var3_repaired.variable_string_from_augeas_var3_error"; + "ok_var1" expression => isvariable("prefix.var1"); + "ok_var2" expression => isvariable("prefix.var2"); + "ok_var1_cont" expression => strcmp("${prefix.var1}","0"); + "ok_var2_cont" expression => strcmp("${prefix.var2}","0"); + "ok" expression => "ok_1.ok_var1.ok_var1_cont.ok_2.ok_var2.ok_var2_cont.ok_3"; + + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} + diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_command.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_command.cf new file mode 100644 index 00000000000..5e066cf24cf --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_command.cf @@ -0,0 +1,184 @@ +####################################################### +# +# Exit with a specific code +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", 
"@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # expected_value to "" will check if the variable is well undefined + # Enforce + "var_prefix[0]" string => "my"; + "var_name[0]" string => "var0"; + "command[0]" string => "echo teststring0"; + "status[0]" string => "success"; + "expected_value[0]" string => execresult("${command[0]}", "useshell"); + "mode[0]" string => "enforce"; + + "var_prefix[1]" string => "my"; + "var_name[1]" string => "var1"; + "command[1]" string => "/bin/echo teststring1"; + "status[1]" string => "success"; + "expected_value[1]" string => execresult("${command[1]}", "useshell"); + "mode[1]" string => "enforce"; + + "var_prefix[2]" string => "my"; + "var_name[2]" string => "var2"; + "command[2]" string => "/bin/false"; + "status[2]" string => "error"; + "expected_value[2]" string => ""; + "mode[2]" string => "enforce"; + + # with stderr output and failure command + "var_prefix[3]" string => "my"; + "var_name[3]" string => "var3"; + "command[3]" string => "cat afilethatdoesnotexists"; + "status[3]" string => "error"; + "expected_value[3]" string => ""; + "mode[3]" string => "enforce"; + + "var_prefix[4]" string => "my"; + "var_name[4]" string => "var4"; + "command[4]" string => "echo 'hey' #with a comment"; + "status[4]" string => "success"; + "expected_value[4]" string => "hey"; + "mode[4]" string => "enforce"; + + # with stderr, stdout and a successful command + "var_prefix[5]" string => "my"; + "var_name[5]" string => "var5"; + "command[5]" string => "${tmp}/test5.sh"; + "status[5]" string => "success"; + "expected_value[5]" string => "stdout"; + "mode[5]" string => "enforce"; + + + #Audit + "var_prefix[10]" string => "my"; + "var_name[10]" string => "var10"; + "command[10]" string => "echo teststring10"; + "status[10]" string => "success"; + "expected_value[10]" string => execresult("${command[10]}", "useshell"); + "mode[10]" string => "audit"; + + "var_prefix[11]" string => "my"; + "var_name[11]" string => "var11"; + "command[11]" string => "/bin/echo teststring11"; + "status[11]" string => "success"; + "expected_value[11]" string => execresult("${command[11]}", "useshell"); + "mode[11]" string => "audit"; + + "var_prefix[12]" string => "my"; + "var_name[12]" string => "var12"; + "command[12]" string => "/bin/false"; + "status[12]" string => "error"; + "expected_value[12]" string => ""; + "mode[12]" string => "audit"; + + "var_prefix[13]" string => "my"; + "var_name[13]" string => "var13"; + "command[13]" string => "cat afilethatdoesnotexists"; + "status[13]" string => "error"; + "expected_value[13]" string => ""; + "mode[13]" string => "audit"; + + "var_prefix[14]" string => "my"; + "var_name[14]" string => "var14"; + "command[14]" string => "echo 'hey' #with a comment"; + "status[14]" string => "success"; + "expected_value[14]" string => "hey"; + "mode[14]" string => "audit"; + + # with stderr, stdout and a successful command + "var_prefix[15]" string => "my"; + "var_name[15]" string => "var15"; + "command[15]" string => "${tmp}/test5.sh"; + "status[15]" string => "success"; + "expected_value[15]" string => "stdout"; + "mode[15]" string => "audit"; + + "indices" slist => getindices("var_prefix"); + + files: + "${tmp}/test5.sh" + create => "true", + edit_line => insert_lines("echo 'stdout' > /dev/stdout +echo 'stderr' > /dev/stderr"), + perms => 
mo("755", "root"); +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.var_prefix[${init.indices}]}", "${init.var_name[${init.indices}]}", "${init.command[${init.indices}]}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm("variable_string_from_command", @{args0}, "${init.status[0]}", "ph0", "${init.mode[0]}"); + "ph1" usebundle => apply_gm("variable_string_from_command", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm("variable_string_from_command", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); + "ph3" usebundle => apply_gm("variable_string_from_command", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}"); + "ph4" usebundle => apply_gm("variable_string_from_command", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}"); + "ph5" usebundle => apply_gm("variable_string_from_command", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}"); + + # Audit + "ph10" usebundle => apply_gm("variable_string_from_command", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}"); + "ph11" usebundle => apply_gm("variable_string_from_command", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}"); + "ph12" usebundle => apply_gm("variable_string_from_command", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}"); + "ph13" usebundle => apply_gm("variable_string_from_command", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}"); + "ph14" usebundle => apply_gm("variable_string_from_command", @{args14}, "${init.status[14]}", "ph14", "${init.mode[14]}"); + "ph15" usebundle => apply_gm("variable_string_from_command", @{args15}, "${init.status[15]}", "ph15", "${init.mode[15]}"); +} + +####################################################### + +bundle agent check +{ + vars: + "var_name[${init.indices}]" string => "${init.var_prefix[${init.indices}]}.${init.var_name[${init.indices}]}"; + + classes: + # On error the var must not be created + "must_not_be_defined_${init.indices}" expression => strcmp("${init.expected_value[${init.indices}]}", ""); + "error_${init.indices}_not_ok" expression => isvariable("${var_name[${init.indices}]}"), + ifvarclass => "must_not_be_defined_${init.indices}"; + + # On success, verify the value + "must_be_defined_${init.indices}" not => strcmp("${init.expected_value[${init.indices}]}", ""); + "success_${init.indices}_not_ok" not => strcmp("${${var_name[${init.indices}]}}", "${init.expected_value[${init.indices}]}"), + ifvarclass => "must_be_defined_${init.indices}"; + + + "error_not_ok" expression => "error_${init.indices}_not_ok"; + "success_not_ok" expression => "success_${init.indices}_not_ok"; + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph10_ok.ph11_ok.ph12_ok.ph13_ok.ph14_ok.ph15_ok"; + + "ok" expression => "classes_ok.!error_not_ok.!success_not_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "Unexpected value found for ${var_name[${init.indices}]}: ${${var_name[${init.indices}]}} instead of ${init.expected_value[${init.indices}]}" + ifvarclass => "error_${init.indices}_not_ok|success_${init.indices}_not_ok"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_file.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_file.cf new file mode 100644 index 00000000000..d0ac3085ce0 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_file.cf @@ -0,0 +1,57 @@ 
+####################################################### +# +# Read a file into a string +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "data" string => 'this is a test'; + + methods: + "ph1" usebundle => file_ensure_lines_present("${file}", "${data}"); +} + +####################################################### + +bundle agent test +{ + methods: + "ph1" usebundle => variable_string_from_file("prefix", "var1", "${init.file}"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_string_from_file_var1_kept.!variable_string_from_file_var1_repaired.!variable_string_from_file_var1_error"; + "ok_var1" expression => strcmp("${prefix.var1}", "${init.data}"); + + "ok" expression => "ok_1.ok_var1"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_math_expression.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_math_expression.cf new file mode 100644 index 00000000000..667ea1239c9 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_from_math_expression.cf @@ -0,0 +1,57 @@ +####################################################### +# +# Compute and format a math expression into a string variable +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "three" string => "3"; +} + +####################################################### + +bundle agent test +{ + methods: + + "ph1" usebundle => variable_string_from_math_expression("prefix", "var1", "${init.three}*(2.0+3.0)", "%d"); + "ph2" usebundle => variable_string_from_math_expression("prefix", "var2", "${init.three}*(2.0+4.0)", "%x"); +} + +####################################################### + +bundle agent check +{ + classes: + + "ok_1" expression => "variable_string_from_math_expression_var1_kept.!variable_string_from_math_expression_var1_repaired.!variable_string_from_math_expression_var1_error"; + "ok_2" expression => "variable_string_from_math_expression_var2_kept.!variable_string_from_math_expression_var2_repaired.!variable_string_from_math_expression_var2_error"; + "ok_var1" expression => strcmp("${prefix.var1}", "15"); + "ok_var2" expression => strcmp("${prefix.var2}", "12"); + + "ok" expression => "ok_1.ok_var1.ok_2.ok_var2"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git 
a/policies/lib/tests/acceptance/30_generic_methods/variable_string_match.cf b/policies/lib/tests/acceptance/30_generic_methods/variable_string_match.cf new file mode 100644 index 00000000000..9ffaae8bee4 --- /dev/null +++ b/policies/lib/tests/acceptance/30_generic_methods/variable_string_match.cf @@ -0,0 +1,171 @@ +####################################################### +# +# Exit with a specific code +# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { configuration, default("${this.promise_filename}") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + + # Enforce + "content[0]" string => "cat"; + "variable_name[0]" string => "my_prefix.variable0"; + "regex[0]" string => "c.*t"; + "status[0]" string => "success"; + "mode[0]" string => "enforce"; + + "content[1]" string => ""; + "variable_name[1]" string => "my_prefix.variable1"; + "regex[1]" string => ".*"; + "status[1]" string => "success"; + "mode[1]" string => "enforce"; + + "content[2]" string => ""; + "variable_name[2]" string => "my_prefix.variable1"; + "regex[2]" string => ".+"; + "status[2]" string => "error"; + "mode[2]" string => "enforce"; + + "content[3]" string => "cat +dog +and other animals"; + "variable_name[3]" string => "my_prefix.variable3"; + "regex[3]" string => "[\s\S]+"; + "status[3]" string => "success"; + "mode[3]" string => "enforce"; + + # \N is any char except newlines, while . is any char without exception + "content[4]" string => "cat +dog +and other animals"; + "variable_name[4]" string => "my_prefix.variable4"; + "regex[4]" string => "[^\n]+[\n]dog.*"; + "status[4]" string => "success"; + "mode[4]" string => "enforce"; + + "variable_name[5]" string => "my_prefix.undefined"; + "regex[5]" string => ".*"; + "status[5]" string => "error"; + "mode[5]" string => "enforce"; + + "content[6]" string => "cat"; + "variable_name[6]" string => "my_prefix.variable6"; + "regex[6]" string => "non matching regex .*"; + "status[6]" string => "error"; + "mode[6]" string => "enforce"; + + # Audit + "content[7]" string => "cat"; + "variable_name[7]" string => "my_prefix.variable7"; + "regex[7]" string => "c.*t"; + "status[7]" string => "success"; + "mode[7]" string => "audit"; + + "content[8]" string => ""; + "variable_name[8]" string => "my_prefix.variable8"; + "regex[8]" string => ".*"; + "status[8]" string => "success"; + "mode[8]" string => "audit"; + + "content[9]" string => ""; + "variable_name[9]" string => "my_prefix.variable9"; + "regex[9]" string => ".+"; + "status[9]" string => "error"; + "mode[9]" string => "audit"; + + "content[10]" string => "cat +dog +and other animals"; + "variable_name[10]" string => "my_prefix.variable10"; + "regex[10]" string => "[\s\S]+"; + "status[10]" string => "success"; + "mode[10]" string => "audit"; + + # \N is any char except newlines, while . 
is any char without exception + "content[11]" string => "cat +dog +and other animals"; + "variable_name[11]" string => "my_prefix.variable11"; + "regex[11]" string => "[^\n]+[\n]dog.*"; + "status[11]" string => "success"; + "mode[11]" string => "audit"; + + "variable_name[12]" string => "my_prefix.undefined12"; + "regex[12]" string => ".*"; + "status[12]" string => "error"; + "mode[12]" string => "audit"; + + "content[13]" string => "cat"; + "variable_name[13]" string => "my_prefix.variable13"; + "regex[13]" string => "non matching regex .*"; + "status[13]" string => "error"; + "mode[13]" string => "audit"; + + "indices" slist => getindices("variable_name"); + + methods: + # undefined one (test nb 5) will not be defined since content[5] does not exist + "method_call_${indices}" usebundle => variable_string("my_prefix", "variable${indices}", "${content[${indices}]}"); + +} + +####################################################### + +bundle agent test +{ + vars: + "args${init.indices}" slist => { "${init.variable_name[${init.indices}]}", "${init.regex[${init.indices}]}" }; + + methods: + # Enforce + "ph0" usebundle => apply_gm("variable_string_match", @{args0}, "${init.status[0]}", "ph0", "${init.mode[0]}"); + "ph1" usebundle => apply_gm("variable_string_match", @{args1}, "${init.status[1]}", "ph1", "${init.mode[1]}"); + "ph2" usebundle => apply_gm("variable_string_match", @{args2}, "${init.status[2]}", "ph2", "${init.mode[2]}"); + "ph3" usebundle => apply_gm("variable_string_match", @{args3}, "${init.status[3]}", "ph3", "${init.mode[3]}"); + "ph4" usebundle => apply_gm("variable_string_match", @{args4}, "${init.status[4]}", "ph4", "${init.mode[4]}"); + "ph5" usebundle => apply_gm("variable_string_match", @{args5}, "${init.status[5]}", "ph5", "${init.mode[5]}"); + "ph6" usebundle => apply_gm("variable_string_match", @{args6}, "${init.status[6]}", "ph6", "${init.mode[6]}"); + + # Audit + "ph7" usebundle => apply_gm("variable_string_match", @{args7}, "${init.status[7]}", "ph7", "${init.mode[7]}"); + "ph8" usebundle => apply_gm("variable_string_match", @{args8}, "${init.status[8]}", "ph8", "${init.mode[8]}"); + "ph9" usebundle => apply_gm("variable_string_match", @{args9}, "${init.status[9]}", "ph9", "${init.mode[9]}"); + "ph10" usebundle => apply_gm("variable_string_match", @{args10}, "${init.status[10]}", "ph10", "${init.mode[10]}"); + "ph11" usebundle => apply_gm("variable_string_match", @{args11}, "${init.status[11]}", "ph11", "${init.mode[11]}"); + "ph12" usebundle => apply_gm("variable_string_match", @{args12}, "${init.status[12]}", "ph12", "${init.mode[12]}"); + "ph13" usebundle => apply_gm("variable_string_match", @{args13}, "${init.status[13]}", "ph13", "${init.mode[13]}"); + +} + +####################################################### + +bundle agent check +{ + classes: + "classes_ok" expression => "ph0_ok.ph1_ok.ph2_ok.ph3_ok.ph4_ok.ph5_ok.ph6_ok.ph7_ok.ph8_ok.ph8_ok.ph9_ok.ph10_ok.ph11_ok.ph12_ok"; + + "ok" expression => "classes_ok"; + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/README b/policies/lib/tests/acceptance/README new file mode 100644 index 00000000000..2e704471a3f --- /dev/null +++ b/policies/lib/tests/acceptance/README @@ -0,0 +1,116 @@ +============================================================================== +CFEngine acceptance testsuite +============================================================================== + +CFEngine has an extensive testsuite which 
covers a lot of functionality that can
+be tested as a series of cf-agent runs.
+
+You are encouraged to run this testsuite on any new software/hardware
+configuration, in order to
+ * verify that CFEngine functions correctly
+ * provide developers with a reproducible way to fix any problems encountered
+   in new configurations / environments
+
+If you find a bug, you are encouraged to write a test in the testsuite format
+that demonstrates it, so the test can be added to this testsuite and checked
+for in the future.
+
+------------------------------------------------------------------------------
+Preparing for running tests
+------------------------------------------------------------------------------
+
+* Compile CFEngine.
+  - It is advised to use Tokyo Cabinet, as it gives much better performance
+    in the test suite than Berkeley DB.
+
+* Install fakeroot(1). If this tool is not available for your operating system,
+  you may use any other "fake root" environment or even sudo(1). Alternative
+  tools are specified with the --gainroot option of the `testall' script.
+
+------------------------------------------------------------------------------
+Running testsuite
+------------------------------------------------------------------------------
+
+All tests ought only to create files and directories in /tmp, and ought not to
+modify other files.
+
+Run
+
+  ./testall --agent=$workdir/bin/cf-agent
+
+e.g.
+
+  ./testall --agent=/var/cfengine/bin/cf-agent
+
+Testing will start. For every test case, the name and result (failed / passed)
+are printed, and a summary is provided at the end.
+
+The test runner creates the following log files:
+
+ * test.log contains detailed information about each test case (name, standard
+   output/error contents, return code, and test status).
+ * summary.log contains summary information, like the one displayed during the
+   testsuite run.
+
+A directory .succeeded is also created, containing a stamp for each passed
+test case, so test cases which passed before but fail in a subsequent run are
+additionally marked in the output as "(UNEXPECTED FAILURE)".
+
+You may run a subset of tests by passing either filenames:
+
+  ./testall --agent=$workdir/bin/cf-agent 01_vars/01_basic/001.cf 01_vars/01_basic/002.x.cf
+
+or directories to 'testall':
+
+  ./testall --agent=$workdir/bin/cf-agent 01_vars
+
+------------------------------------------------------------------------------
+Creating/editing test cases
+------------------------------------------------------------------------------
+
+Each test should be 100% standalone, and must contain at least 3 main bundles:
+  init    setup, create initial and hoped-for final states
+  test    the actual test code
+  check   the comparison of expected and actual results
+
+(sample_test.cf.sample in this directory provides a skeleton to copy when
+creating a new test.)
+
+Look in default.cf for some standard check bundles (for example, to compare
+files when testing file edits, or for cleaning up temporary files).
+
+Tests should be named with only digits (e.g., "001.cf") unless they are expected
+to crash (that is, if they contain syntax errors or other faults), in which case
+the filename should have an 'x' suffix (e.g., "001.x.cf").
+
+Tests which are not expected to pass yet (e.g. because a bug in the code
+prevents them from passing) should be placed in a 'staging' subdirectory of the
+test directory where they belong. Such test cases are only run if the --staging
+argument is passed to ./testall.
+
+Tests which need network connectivity should be placed in 'network'
+subdirectories.
Those tests may be disabled by passing --no-network option to +'testall'. + +NOTE: Since the class 'ok' is used in most tests, never create a persistent +class called 'ok' in any test. Persistent classes are cleaned up between test +runs, but better safe than sorry. + +------------------------------------------------------------------------------ +Glossary +------------------------------------------------------------------------------ + +For purposes of testing, here is what our terms mean: + +Pass: the test did what we expected (whether that was setting a variable, +editing a file, killing or starting a process, or correctly failing to do +these actions in the light of existing conditions or attributes). Note that +in the case of tests that end in an 'x', a Pass is generated when the test +abnormally terminates and we wanted it to do that. + +FAIL: not doing what we wanted: either test finished and returned "FAIL" from +check bundle, or something went wrong - cf-agent might have dropped core, +cf-promises may have denied execution of the promises, etc. + +FAILed to crash: test was expected to crash, but did not. This is another kind +of failure, split into separate kind due to low impact. + +Skipped: test is skipped due to be either explicitly disabled or being +Nova-specific and being run on Community cf-agent. diff --git a/policies/lib/tests/acceptance/default.cf.sub b/policies/lib/tests/acceptance/default.cf.sub new file mode 100644 index 00000000000..235c88015bc --- /dev/null +++ b/policies/lib/tests/acceptance/default.cf.sub @@ -0,0 +1,282 @@ +bundle common G +{ +classes: + "bin_$(cmds)" expression => fileexists("/bin/$(cmds)"); + "usr_bin_$(cmds)" expression => fileexists("/usr/bin/$(cmds)"); + "usr_local_bin_$(cmds)" expression => fileexists("/usr/local/bin/$(cmds)"); + "usr_contrib_bin_$(cmds)" expression => fileexists("/usr/contrib/bin/$(cmds)"); + "has_$(cmds)" or => { "bin_$(cmds)", "usr_bin_$(cmds)" }; + + "temp_declared" not => strcmp(getenv("TEMP", "65536"), ""); + +vars: + windows:: + "cwd" string => execresult("C:\windows\system32\cmd.exe /C cd", "noshell"); + # Dir separator + "DS" string => "\\"; + !windows:: + "cwd" string => execresult("/bin/pwd 2>/dev/null || /usr/bin/pwd", "useshell"); + # Dir separator + "DS" string => "/"; + + any:: + "cmds" slist => { "date", "diff", "echo", "false", "grep", "gzip", "hexdump", + "ln", "mkdir", "od", "perl", "printf", "pwd", "rm", "sort", "touch", + "true", "wc" }; + + + "$(cmds)" string => "/bin/$(cmds)", + ifvarclass => "bin_$(cmds)"; + "$(cmds)" string => "/usr/bin/$(cmds)", + ifvarclass => "!bin_$(cmds).usr_bin_$(cmds)"; + "$(cmds)" string => "/usr/local/bin/$(cmds)", + ifvarclass => "!bin_$(cmds).!usr_bin_$(cmds).usr_local_bin_$(cmds)"; + "$(cmds)" string => "/usr/contrib/bin/$(cmds)", + ifvarclass => "!bin_$(cmds).!usr_bin_$(cmds).!usr_local_bin_$(cmds).usr_contrib_bin_$(cmds)"; + + temp_declared:: + "testroot" string => getenv("TEMP", "65535"); + "testdir" string => concat(getenv("TEMP", "65535"), "$(DS)TEST.cfengine"); + "testfile" string => concat(getenv("TEMP", "65535"), "$(DS)TEST.cfengine"); + + !temp_declared:: + "testroot" string => "$(DS)tmp"; + "testdir" string => "$(DS)tmp$(DS)TEST.cfengine"; + "testfile" string => "$(DS)tmp$(DS)TEST.cfengine"; + +} + +bundle common paths_init(filename) +{ +classes: + "filename_absolute" expression => regcmp("/.*", "$(filename)"); +} + +bundle common paths2(filename) +{ +vars: + filename_absolute:: + "input_file" + string => "$(filename)"; + !filename_absolute:: + "input_file" + 
string => "$(G.cwd)/$(filename)"; +} + +bundle agent default(filename) +{ +vars: + "tests" slist => { "init", "test", "check" }; + +methods: + "any" + usebundle => paths_init("$(filename)"); + "any" + usebundle => paths2("$(filename)"); + + AUTO:: + "any" usebundle => "$(tests)"; + +reports: + !AUTO:: + "# You must either specify '-D AUTO' or run the following commands:"; + "cf-agent -f .$(DS)$(filename) -b $(tests)"; +} + +####################################################### + +bundle agent default_sort(infile, outfile) +{ +commands: + "$(G.sort) $(infile) > $(outfile)" + contain => default_shell_command; +} + +bundle agent default_check_diff(file1, file2, test) +{ +methods: + "any" usebundle => check_diff("$(file1)", "$(file2)", "$(test)", "no"); +} + +bundle agent sorted_check_diff(file1, file2, test) +{ +methods: + "any" usebundle => default_sort("$(file1)", "$(file1).sort"); + "any" usebundle => default_sort("$(file2)", "$(file2).sort"); + "any" usebundle => check_diff("$(file1).sort", "$(file2).sort", "$(test)", "no"); +} + +bundle agent xml_check_diff(file1, file2, test, expected_difference) +{ +vars: + DEBUG.check_ready.!no_difference:: + "file1r" string => execresult("$(G.cwd)/xml-c14nize $(file1)", "noshell"); + "file2r" string => execresult("$(G.cwd)/xml-c14nize $(file2)", "noshell"); + + DEBUG.check_ready.!no_difference.has_hexdump:: + "file1h" string => execresult("$(G.hexdump) -C $(file1)", "useshell"); + "file2h" string => execresult("$(G.hexdump) -C $(file2)", "useshell"); + + DEBUG.check_ready.!no_difference.!has_hexdump:: + "file1h" string => execresult("$(G.od) -c $(file1)", "useshell"); + "file2h" string => execresult("$(G.od) -c $(file2)", "useshell"); + + DEBUG.check_ready.!no_difference.has_unified_diff:: + "diffu" string => execresult("$(G.diff) -u $(file2) $(file1) 2>/dev/null", "useshell"); + DEBUG.check_ready.!no_difference.!has_unified_diff:: + "diffu" string => execresult("$(G.diff) -c $(file2) $(file1) 2>/dev/null", "useshell"); + +classes: + "has_unified_diff" expression => returnszero( + "$(G.diff) -u /dev/null /dev/null >/dev/null 2>/dev/null", "useshell"); + + c14n_files_created:: + "no_difference" expression => returnszero( + "$(G.diff) $(G.testfile).default-xml-check-diff-1 $(G.testfile).default-xml-check-diff-2 >/dev/null 2>/dev/null", + "useshell"); + + "expected_difference" expression => strcmp("$(expected_difference)", "yes"); + "check_ready" expression => "any"; + +commands: + "$(G.cwd)/xml-c14nize $(file1) > $(G.testfile).default-xml-check-diff-1" + contain => default_cf_sub_shell; + "$(G.cwd)/xml-c14nize $(file2) > $(G.testfile).default-xml-check-diff-2" + contain => default_cf_sub_shell, + classes => default_cf_sub_if_ok("c14n_files_created"); + +reports: + check_ready.no_difference.!expected_difference:: + "$(test) Pass"; + check_ready.!no_difference.expected_difference:: + "$(test) Pass"; + check_ready.!no_difference.!expected_difference:: + "$(test) FAIL"; + check_ready.no_difference.expected_difference:: + "$(test) FAIL"; + + DEBUG.check_ready.!no_difference.!expected_difference:: + "$(file1) and $(file2) differ:"; + "$(file1): <$(file1r)>"; + "$(file2): <$(file2r)>"; + "dump $(file1): +$(file1h)"; + "dump $(file2): +$(file2h)"; + "$(diffu)"; + DEBUG.check_ready.no_difference.expected_difference:: + "Contents of $(file1) and $(file) is the same."; +} + +body contain default_cf_sub_shell +{ +useshell => "true"; +} + +body classes default_cf_sub_if_ok(x) +{ +promise_repaired => { "$(x)" }; +promise_kept => { "$(x)" }; +} + +bundle agent 
check_diff(file1, file2, test, expected_difference) +{ +vars: + DEBUG.!no_difference:: + "file1r" string => readfile("$(file1)", "99999999"); + "file2r" string => readfile("$(file2)", "99999999"); + + DEBUG.!no_difference.has_hexdump:: + "file1h" string => execresult("$(G.hexdump) -C $(file1)", "useshell"); + "file2h" string => execresult("$(G.hexdump) -C $(file2)", "useshell"); + + DEBUG.!no_difference.!has_hexdump:: + "file1h" string => execresult("$(G.od) -c $(file1)", "useshell"); + "file2h" string => execresult("$(G.od) -c $(file2)", "useshell"); + + DEBUG.!no_difference.has_unified_diff:: + "diffu" string => execresult("$(G.diff) -u $(file2) $(file1) 2>/dev/null", "useshell"); + DEBUG.!no_difference.!has_unified_diff:: + "diffu" string => execresult("$(G.diff) -c $(file2) $(file1) 2>/dev/null", "useshell"); + +classes: + "has_unified_diff" expression => returnszero( + "$(G.diff) -u /dev/null /dev/null >/dev/null 2>/dev/null", "useshell"); + + "no_difference" expression => returnszero( + "$(G.diff) $(file1) $(file2) >/dev/null 2>/dev/null", + "useshell"); + + "expected_difference" expression => strcmp("$(expected_difference)", "yes"); + +reports: + no_difference.!expected_difference:: + "$(test) Pass"; + !no_difference.expected_difference:: + "$(test) Pass"; + !no_difference.!expected_difference:: + "$(test) FAIL"; + no_difference.expected_difference:: + "$(test) FAIL"; + + DEBUG.!no_difference.!expected_difference:: + "FILES DIFFER BUT SHOULD BE THE SAME"; + "CONTENTS OF $(file1): +$(file1r)"; + "CONTENTS OF $(file2): +$(file2r)"; +### Comment out hexdump to avoid cluttering output file +# "hexdump $(file1): +# $(file1h)"; +# "hexdump $(file2): +# $(file2h)"; +# "$(G.diffu)"; + DEBUG.no_difference.expected_difference:: + "Contents of $(file1) and $(file) are the same but should differ."; +} + +body contain default_shell_command +{ +useshell => "true"; +} + +####################################################### + +# Uses rm -rf instead of selecting and deleting files to avoid side-effects in +# tests due to problems in file deleletion promises. 
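+#
+# Illustrative call from a test's own cleanup bundle (the "cleanup" bundle
+# and "$(init.file)" variable are only example names, not part of this file):
+#
+#   bundle agent cleanup
+#   {
+#   methods:
+#       "any" usebundle => default_fini("$(init.file)");
+#   }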
+ +bundle agent default_fini(file) +{ +commands: + "$(G.rm) -rf $(file)*" + contain => useshell; + "$(G.rm) -rf $(sys.workdir)/state/cf_state.*" + contain => useshell; +} + +body contain useshell +{ + useshell => "true"; + chdir => "/"; +} + +####################################################### + +# Test based on whether two strings are the same + +bundle agent default_check_strcmp(strA, strB, test, expected_difference) +{ + classes: + "equal" expression => strcmp("$(strA)", "$(strB)"); + "expected_difference" or => { strcmp("$(expected_difference)", "yes"), + strcmp("$(expected_difference)", "true") }; + reports: + equal.!expected_difference:: + "$(test) Pass"; + equal.expected_difference:: + "$(test) FAIL"; + !equal.!expected_difference:: + "$(test) FAIL"; + !equal.expected_difference:: + "$(test) Pass"; +} diff --git a/policies/lib/tests/acceptance/default_ncf.cf.sub b/policies/lib/tests/acceptance/default_ncf.cf.sub new file mode 100644 index 00000000000..5517758d04c --- /dev/null +++ b/policies/lib/tests/acceptance/default_ncf.cf.sub @@ -0,0 +1,310 @@ +# Needed to have a path reference for the tests execution +bundle common ncf_configuration { + vars: + "ncf_configuration_basedir" string => dirname("${this.promise_filename}"); +} +# Create the list of all the base file to load to test +# It requires the NCF_TREE environment variable to be defined +bundle common ncf_inputs +{ + vars: + "ncf_tree" string => getenv("NCF_TREE", 1024); + + # Almost same as promises.cf, but only load necessary parts + "list_compatible_inputs" string => "/bin/sh ${ncf_tree}/10_ncf_internals/list-compatible-inputs"; + "capability_file" string => "/opt/rudder/etc/agent-capabilities"; + + "generic_framework" string => execresult("${list_compatible_inputs} --capability-file ${capability_file} --agent-version ${sys.cf_version} --ncf-path ${ncf_tree} 10_ncf_internals 20_cfe_basics 30_generic_methods 40_it_ops_knowledge", "useshell"); + + "default_files_relative" slist => splitstring("${generic_framework}", "\n", 10000); + "default_files" slist => maplist("${ncf_tree}/${this}", default_files_relative); + + classes: + # OS classes for compatibility + "SUSE" expression => "sles"; + "SuSE" expression => "sles"; + "suse" expression => "sles"; +} + +bundle common test_utils { + + vars: + have_perl:: + "file_perms" string => "${paths.perl} -e 'printf \"%03o\\n\", (stat)[2] & 07777, $_ for @ARGV'"; + "file_owner" string => "${paths.perl} -e 'printf \"%d\\n\", (stat)[4], $_ for @ARGV'"; + "file_group" string => "${paths.perl} -e 'printf \"%d\\n\", (stat)[5], $_ for @ARGV'"; + test_utils_pass2.!have_perl:: + "file_perms" string => "/usr/bin/stat -c %a ${file}"; + "file_owner" string => "/usr/bin/stat -c %u ${file}"; + "file_group" string => "/usr/bin/stat -c %g ${file}"; + + classes: + "have_perl" expression => fileexists("${paths.perl}"); + "test_utils_pass2" expression => "any"; +} + +bundle agent define_expected_classes(class_prefix, status, id) +{ + vars: + "complete_suffix" slist => {"not_kept", "kept", "not_ok", "ok", "not_repaired", "repaired", "failed", "error", "reached"}; + "error_suffix" slist => {"not_kept", "not_ok", "not_repaired", "failed", "error", "reached"}; + "repaired_suffix" slist => { "ok", "repaired", "not_kept", "reached" }; + "success_suffix" slist => { "ok", "kept", "not_repaired", "reached" }; + "na_suffix" slist => { "noop" }; + + "expected_string_${id}" string => join(".", maplist("${class_prefix}_${this}", "@{${status}_suffix}")); + "unexpected_string_${id}" string => join("|", 
maplist("${class_prefix}_${this}", difference(complete_suffix, "${status}_suffix"))); + + "report_string_${id}" string => "(${expected_string_${id}}).!(${unexpected_string_${id}})"; + "expected_classes_${id}" slist => maplist("${class_prefix}_${this}", "@{${status}_suffix}"); + "unexpected_classes_${id}" slist => maplist("${class_prefix}_${this}", difference(complete_suffix, "@{${status}_suffix}")); + + reports: + "[EXPECTED] ${expected_classes_${id}}"; +} + +bundle agent execute_testinfra(test_name, class_name, comment) +{ + classes: + "${class_name}" expression => returnszero("python -m pytest ${ncf_configuration.ncf_configuration_basedir}/spec/localhost/${test_name} #${comment}", "useshell"), + scope => "namespace"; +} + +# This bundle should be call when testing a generic method. +# It will call the target bundle with the given arguments, +# check that the resulting classes are correct and define +# a global class "${result_class}_ok" if everything succeed. +# +# name: name of the target bundle +# args: slist containing the target bundle parameter, in +# correct order. +# status: expected result status, can be: success, error, repaired +# result_class: prefix of the resulting class +bundle agent apply_gm(name, args, status, result_class, mode) +{ + vars: + "length" int => length("args"); + "arg0" string => nth("args", "0"); + "arg1" string => nth("args", "1"); + "arg2" string => nth("args", "2"); + "arg3" string => nth("args", "3"); + "arg4" string => nth("args", "4"); + "arg5" string => nth("args", "5"); + "arg6" string => nth("args", "6"); + "arg7" string => nth("args", "7"); + "arg8" string => nth("args", "8"); + + "old_class_prefix" string => "${${name}.old_class_prefix}"; + + "report_param" string => join("_", args); + "full_class_prefix" string => canonify("${name}_${report_param}"); + "class_prefix" string => string_head("${full_class_prefix}", "1000"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "audit" expression => strcmp("${mode}", "audit"); + + # Dynamic bundle call does not support undefined arg number + # We have to explicitly define each possibility + "1_arg" expression => strcmp("${length}", "1"); + "2_arg" expression => strcmp("${length}", "2"); + "3_arg" expression => strcmp("${length}", "3"); + "4_arg" expression => strcmp("${length}", "4"); + "5_arg" expression => strcmp("${length}", "5"); + "6_arg" expression => strcmp("${length}", "6"); + "7_arg" expression => strcmp("${length}", "7"); + "8_arg" expression => strcmp("${length}", "8"); + + pass2:: + "old_class_prefix_ok" expression => "${define_expected_classes.report_string_old_class_prefix_${result_class}}"; + "class_prefix_ok" expression => "${define_expected_classes.report_string_class_prefix_${result_class}}"; + "${result_class}_ok" expression => "old_class_prefix_ok.class_prefix_ok", + scope => "namespace"; + + + methods: + "expected_classes" usebundle => define_expected_classes("${class_prefix}", "${status}", "class_prefix_${result_class}"); + "expected_classes" usebundle => define_expected_classes("${old_class_prefix}", "${status}", "old_class_prefix_${result_class}"); + + audit.pass1.!pass2:: + "enable_${result_class}" usebundle => push_dry_run_mode("true"); + !audit.pass1.!pass2:: + "enable_${result_class}" usebundle => push_dry_run_mode("false"); + + 1_arg:: + "${result_class}" usebundle => ${name}("${arg0}"); + 2_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}"); + 3_arg:: + "${result_class}" usebundle => 
${name}("${arg0}", "${arg1}", "${arg2}"); + 4_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}"); + 5_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}"); + 6_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}"); + 7_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}", "${arg6}"); + 8_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}", "${arg6}", "${arg7}"); + + pass3:: + "disable_${result_class}" usebundle => pop_dry_run_mode(); + + pass3:: + "cancel_classes" usebundle => _classes_cancel("${class_prefix}"); + "cancel_classes" usebundle => _classes_cancel("${old_class_prefix}"); + + reports: + pass2.!pass3:: + "Missing expected old class ${define_expected_classes.expected_classes_old_class_prefix_${result_class}}" + ifvarclass => "!${define_expected_classes.expected_classes_old_class_prefix_${result_class}}"; + + "Missing expected class ${define_expected_classes.expected_classes_class_prefix_${result_class}}" + ifvarclass => "!${define_expected_classes.expected_classes_class_prefix_${result_class}}"; + + "Found unexpected old class ${define_expected_classes.unexpected_classes_old_class_prefix_${result_class}}" + ifvarclass => "${define_expected_classes.unexpected_classes_old_class_prefix_${result_class}}"; + + "Found unexpected class ${define_expected_classes.unexpected_classes_class_prefix_${result_class}}" + ifvarclass => "${define_expected_classes.unexpected_classes_class_prefix_${result_class}}"; +} + +# This bundle should be call when testing a generic method made with v4 +# It will call the target bundle with the given arguments, +# check that the resulting classes are correct and define +# a global class "${result_class}_ok" if everything succeed. +# +# name: name of the target bundle +# args: slist containing the target bundle parameter, in +# correct order. 
+# status: expected result status, can be: success, error, repaired +# result_class: prefix of the resulting class +# TODO: It does not yet takes into account the report_id +bundle agent apply_gm_v4(name, args, status, result_class, mode) +{ + vars: + "length" int => length("args"); + "arg0" string => nth("args", "0"); + "arg1" string => nth("args", "1"); + "arg2" string => nth("args", "2"); + "arg3" string => nth("args", "3"); + "arg4" string => nth("args", "4"); + "arg5" string => nth("args", "5"); + "arg6" string => nth("args", "6"); + "arg7" string => nth("args", "7"); + "arg8" string => nth("args", "8"); + + "class_prefix" string => "${${name}.class_prefix}"; + + "report_param" string => join("_", args); + pass1:: # need to be guarded by a class, and need an extra parameter to be executed at each call + "uuid" string => execresult("/usr/bin/uuidgen #${result_class}", "useshell"); + "c_uuid" string => canonify("${uuid}"); + "d_uuid" string => canonify("d_${uuid}"); + "r_uuid" string => canonify("r_${uuid}"); + + classes: + "pass3" expression => "pass2"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + "audit" expression => strcmp("${mode}", "audit"); + pass1.!pass2:: + # Dynamic bundle call does not support undefined arg number + # We have to explicitly define each possibility + "1_arg" expression => strcmp("${length}", "1"); + "2_arg" expression => strcmp("${length}", "2"); + "3_arg" expression => strcmp("${length}", "3"); + "4_arg" expression => strcmp("${length}", "4"); + "5_arg" expression => strcmp("${length}", "5"); + "6_arg" expression => strcmp("${length}", "6"); + "7_arg" expression => strcmp("${length}", "7"); + "8_arg" expression => strcmp("${length}", "8"); + + pass2:: + "class_prefix_ok" expression => "${define_expected_classes.report_string_class_prefix_${result_class}}", + comment => "${c_uuid}"; + "method_id_ok" expression => "${define_expected_classes.report_string_method_id_${result_class}}", + comment => "${c_uuid}"; + "${result_class}_ok" expression => "class_prefix_ok.method_id_ok", + scope => "namespace", + comment => "${c_uuid}"; + + + methods: + pass1.!pass2:: + "set_reporting" usebundle => rudder_reporting_context_v4("${d_uuid}", "${r_uuid}", "default_ncf", "${name}", "${name}", "${uuid}"); + "set_reporting" usebundle => _method_reporting_context_v4("${name}", "${name}","${uuid}"); # component value is not used for classes + pass2:: + "expected_classes" usebundle => define_expected_classes("${class_prefix}", "${status}", "class_prefix_${result_class}"), + comment => "${c_uuid}"; + "expected_classes" usebundle => define_expected_classes("${c_uuid}_0", "${status}", "method_id_${result_class}"), + comment => "${c_uuid}"; + + + audit.pass1.!pass2:: + "enable_${result_class}" usebundle => push_dry_run_mode("true"), + comment => "${c_uuid}"; + !audit.pass1.!pass2:: + "enable_${result_class}" usebundle => push_dry_run_mode("false"), + comment => "${c_uuid}"; + + 1_arg:: + "${result_class}" usebundle => ${name}("${arg0}"); + 2_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}"); + 3_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}"); + 4_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}"); + 5_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}"); + 6_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}"); + 7_arg:: + "${result_class}" usebundle => 
${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}", "${arg6}"); + 8_arg:: + "${result_class}" usebundle => ${name}("${arg0}", "${arg1}", "${arg2}", "${arg3}", "${arg4}", "${arg5}", "${arg6}", "${arg7}"); + + pass3:: + "disable_${result_class}" usebundle => pop_dry_run_mode(), + comment => "${c_uuid}"; + + pass3:: + "cancel_classes" usebundle => _classes_cancel("${class_prefix}"), + comment => "${c_uuid}"; + + reports: + pass2.!pass3:: + "Class prefix failed for ${result_class} with uuid ${c_uuid}" + if => "!class_prefix_ok"; + "Method id failed for ${result_class} with uuid ${c_uuid}" + if => "!method_id_ok"; + + "Missing method_id class ${define_expected_classes.expected_classes_method_id_${result_class}}" + if => "!${define_expected_classes.expected_classes_method_id_${result_class}}", + comment => "${c_uuid}"; + + "Missing expected class ${define_expected_classes.expected_classes_class_prefix_${result_class}}" + if => "!${define_expected_classes.expected_classes_class_prefix_${result_class}}", + comment => "${c_uuid}"; + + "Found unexpected method_id ${define_expected_classes.unexpected_classes_method_id_${result_class}}" + if => "${define_expected_classes.unexpected_classes_method_id_${result_class}}", + comment => "${c_uuid}"; + + "Found unexpected class ${define_expected_classes.unexpected_classes_class_prefix_${result_class}}" + if => "${define_expected_classes.unexpected_classes_class_prefix_${result_class}}", + comment => "${c_uuid}"; +} + +# Mimic default node properties values +bundle common node +{ + vars: + "properties" data => '{ "rudder": { "packages": { "installed_cache_expire": 60, "updates_cache_expire": 240 } } }'; +} diff --git a/policies/lib/tests/acceptance/sample_test.cf.sample b/policies/lib/tests/acceptance/sample_test.cf.sample new file mode 100644 index 00000000000..1b99eacd4fd --- /dev/null +++ b/policies/lib/tests/acceptance/sample_test.cf.sample @@ -0,0 +1,50 @@ +####################################################### +# +# This is a sample test file to copy when creating a +# new test. Please keep it updated with best practices. 
+# +####################################################### + +bundle common acc_path +{ + vars: + "root" string => getenv("NCF_TESTS_ACCEPTANCE", 1024); +} + +body common control +{ + inputs => { "${acc_path.root}/default.cf.sub", "${acc_path.root}/default_ncf.cf.sub", "@{ncf_inputs.default_files}" }; + bundlesequence => { default("$(this.promise_filename)") }; + version => "1.0"; +} + +####################################################### + +bundle agent init +{ + vars: + "tmp" string => getenv("TEMP", 1024); + "file" string => "${tmp}/test"; + "file_canon" string => canonify("${file}"); + "line_to_add" string => "This is a test line!"; +} + +####################################################### + +bundle agent test +{ +} + +####################################################### + +bundle agent check +{ + classes: + "ok" expression => + + reports: + ok:: + "$(this.promise_filename) Pass"; + !ok:: + "$(this.promise_filename) FAIL"; +} diff --git a/policies/lib/tests/acceptance/spec/localhost/cron_disabled.py b/policies/lib/tests/acceptance/spec/localhost/cron_disabled.py new file mode 100644 index 00000000000..e06e7b7d5c7 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/cron_disabled.py @@ -0,0 +1,3 @@ +def test_cron_disabled(host): + cron = host.service("cron") + assert not cron.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/cron_enabled.py b/policies/lib/tests/acceptance/spec/localhost/cron_enabled.py new file mode 100644 index 00000000000..bf1f89ad2f7 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/cron_enabled.py @@ -0,0 +1,3 @@ +def test_cron_enabled(host): + cron = host.service("cron") + assert cron.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/cron_started.py b/policies/lib/tests/acceptance/spec/localhost/cron_started.py new file mode 100644 index 00000000000..8f79b8723d9 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/cron_started.py @@ -0,0 +1,3 @@ +def test_cron_started(host): + cron = host.service("cron") + assert cron.is_running diff --git a/policies/lib/tests/acceptance/spec/localhost/cron_stopped.py b/policies/lib/tests/acceptance/spec/localhost/cron_stopped.py new file mode 100644 index 00000000000..c5f93b9f6de --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/cron_stopped.py @@ -0,0 +1,3 @@ +def test_cron_stopped(host): + cron = host.service("cron") + assert not cron.is_running diff --git a/policies/lib/tests/acceptance/spec/localhost/crond_disabled.py b/policies/lib/tests/acceptance/spec/localhost/crond_disabled.py new file mode 100644 index 00000000000..f1a79caeaeb --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/crond_disabled.py @@ -0,0 +1,3 @@ +def test_crond_disabled(host): + crond = host.service("crond") + assert not crond.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/crond_enabled.py b/policies/lib/tests/acceptance/spec/localhost/crond_enabled.py new file mode 100644 index 00000000000..04d837a3d57 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/crond_enabled.py @@ -0,0 +1,3 @@ +def test_crond_enabled(host): + crond = host.service("crond") + assert crond.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/crond_started.py b/policies/lib/tests/acceptance/spec/localhost/crond_started.py new file mode 100644 index 00000000000..44e99c4295a --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/crond_started.py @@ -0,0 +1,3 @@ +def test_crond_started(host): + crond = 
host.service("crond") + assert crond.is_running diff --git a/policies/lib/tests/acceptance/spec/localhost/crond_stopped.py b/policies/lib/tests/acceptance/spec/localhost/crond_stopped.py new file mode 100644 index 00000000000..56fdf02d444 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/crond_stopped.py @@ -0,0 +1,3 @@ +def test_crond_stopped(host): + crond = host.service("crond") + assert not crond.is_running diff --git a/policies/lib/tests/acceptance/spec/localhost/ntp_disabled.py b/policies/lib/tests/acceptance/spec/localhost/ntp_disabled.py new file mode 100644 index 00000000000..f7cf075542d --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/ntp_disabled.py @@ -0,0 +1,3 @@ +def test_ntp_disabled(host): + ntp = host.service("ntp") + assert not ntp.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/ntp_enabled.py b/policies/lib/tests/acceptance/spec/localhost/ntp_enabled.py new file mode 100644 index 00000000000..4c323b6f5e9 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/ntp_enabled.py @@ -0,0 +1,3 @@ +def test_ntp_enabled(host): + ntp = host.service("ntp") + assert ntp.is_enabled diff --git a/policies/lib/tests/acceptance/spec/localhost/syslogd_started.py b/policies/lib/tests/acceptance/spec/localhost/syslogd_started.py new file mode 100644 index 00000000000..73924b8df31 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/syslogd_started.py @@ -0,0 +1,3 @@ +def test_syslogd_started(host): + syslogd = host.service("syslogd") + assert syslogd.is_running diff --git a/policies/lib/tests/acceptance/spec/localhost/syslogd_stopped.py b/policies/lib/tests/acceptance/spec/localhost/syslogd_stopped.py new file mode 100644 index 00000000000..b3dc1328cb7 --- /dev/null +++ b/policies/lib/tests/acceptance/spec/localhost/syslogd_stopped.py @@ -0,0 +1,3 @@ +def test_syslogd_stopped(host): + syslogd = host.service("syslogd") + assert not syslogd.is_running diff --git a/policies/lib/tests/acceptance/testall b/policies/lib/tests/acceptance/testall new file mode 100755 index 00000000000..44060b377e7 --- /dev/null +++ b/policies/lib/tests/acceptance/testall @@ -0,0 +1,568 @@ +#!/bin/sh +# +# Copyright (C) CFEngine AS +# +# This file is part of CFEngine 3 - written and maintained by CFEngine AS. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA +# +# To the extent this program is licensed as part of the Enterprise +# versions of CFEngine, the applicable Commercial Open Source License +# (COSL) may apply to this file if you as a licensee so wish it. See +# included file COSL.txt. +# + +set -e + +# +# Detect and replace non-POSIX shell +# +try_exec() { + type "$1" > /dev/null 2>&1 && exec "$@" +} + +unset foo +(: ${foo%%bar}) 2> /dev/null +T1="$?" + +if test "$T1" != 0; then + try_exec /usr/xpg4/bin/sh "$0" "$@" + echo "No compatible shell script interpreter found." + echo "Please find a POSIX shell for your system." 
+ exit 42 +fi + +# +# Explicitly use POSIX tools if needed +# +if [ -f /usr/xpg4/bin/grep ]; then + PATH=/usr/xpg4/bin:$PATH + export PATH +fi + +# Use only newline as token separator, not spaces. +IFS=' +' + +# +# Unset environment variables which might break running acceptance tests +# +GREP_OPTIONS= +export GREP_OPTIONS + +# +# Defaults (overridden by command-line arguments) +# +LOG=test_debug.log +SUMMARY=test.log +XML=test.xml +XMLTMP=xml.tmp +BASE_WORKDIR="`pwd`/workdir" +QUIET= +STAGING_TESTS=${STAGING_TESTS:-0} +NETWORK_TESTS=${NETWORK_TESTS:-1} +UNSAFE_TESTS=${UNSAFE_TESTS:-0} + +# Only use fakeroot by default if we are are not already root +[ `id | cut -d\( -f2 | cut -d\) -f1` = 'root' ] && GAINROOT="" || GAINROOT=${GAINROOT:-fakeroot} + +PASSED_TESTS=0 +FAILED_TESTS=0 +FAILED_TO_CRASH_TESTS=0 +SKIPPED_TESTS=0 + +OUR_DIR=`dirname "$0"` +NCF_TREE=`cd "${OUR_DIR}" && pwd`/../../tree +NCF_TESTS_ACCEPTANCE=`cd "${OUR_DIR}" && pwd` +NCF_COMPATIBLE_INPUTS="${NCF_TREE}/10_ncf_internals/list-compatible-inputs" +CAPABILITY_FILE="/opt/rudder/etc/agent-capabilities" +# +# Many older platforms don't support date +%s, so check for compatibility +# and find Perl for the unix_seconds() routine below. (Mantis #1254) +# +HAVE_DATE_PCT_S= +date +%s >/dev/null 2>&1 +if [ $? -eq 0 ] ; then + HAVE_DATE_PCT_S=1 +fi +PERL=`which perl 2>/dev/null` + +# +# Obtain UNIX time(), using date +%s, Perl, or POSIX-compatible approach. +# +unix_seconds() { + if [ "$HAVE_DATE_PCT_S" ]; then + date +%s + return 0 + fi + + if [ "$PERL" ]; then + $PERL -e 'print time() . "\n"' 2>/dev/null + if [ $? -eq 0 ] ; then + return 0 + fi + fi + + # Last resort if Perl fails - the extended cpio interchange format has + # the file modification timestamp in columns 48-59, in octal. + : > $BASE_WORKDIR/x + echo "ibase=8;`pax -wx cpio $BASE_WORKDIR/$$.seconds | cut -c 48-59`" | bc 2>/dev/null + rm $BASE_WORKDIR/x +} + +echo +echo === Test environment: === +echo AGENT=$AGENT +echo CF_PROMISES=$CF_PROMISES +echo CF_SERVERD=$CF_SERVERD +echo CF_KEY=$CF_KEY +echo ========================= +echo + +usage() { + echo "testall [-q] [--gainroot=] [--agent=] [--cfpromises=] [--cfserverd=] [--cfkey=] [--no-nova] [--staging] [--unsafe] [--no-network] [--gdb] [--printlog] [--info] [--verbose] [ ...]" + echo + echo "If no test is given, all standard tests are run:" + echo " Tests with names of form .cf are expected to run successfully" + echo " Tests with names of form .x.cf are expected to crash" + echo + echo "If arguments are given, those are executed as tests" + echo + echo " -q makes script much quieter" + echo + echo " --gainroot= forces use of command to gain root privileges," + echo " otherwise fakeroot is used." + echo + echo " --agent provides a way to specify non-default cf-agent location," + echo " and defaults to $DEFAGENT." + echo + echo " --cfpromises provides a way to specify non-default cf-promises location," + echo " and defaults to $DEFCF_PROMISES." + echo + echo " --cfserverd provides a way to specify non-default cf-serverd location," + echo " and defaults to $DEFCF_SERVERD." + echo + echo " --cfkey provides a way to specify non-default cf-key location," + echo " and defaults to $DEFCF_KEY." + echo + echo " --libtool specify non-default libtool location (only needed for --gdb)." + echo " defaults to $DEFLIBTOOL." + echo + echo " --staging enable tests in staging directories. They are not expected to pass." + echo + echo " --unsafe enable tests in unsafe directories. WARNING! 
These tests modify the" + echo " system they're running on and can DAMAGE YOUR SYSTEM! DO NOT use" + echo " this option without a backup." + echo " If you use this option you should also use --gainroot=sudo," + echo " otherwise you will get incorrect results." + echo + echo " --no-network disable tests in network directories." + echo " --printlog print the full test.log output immediately." + echo + echo " --gdb Run test under GDB" + echo + echo " --info Run test in info mode" + echo + echo " --verbose Run test in verbose mode" +} + +runtest() { + AGENT="$1" + TEST="$2" + if [ -z "$QUIET" ]; then + printf "$TEST " + fi + + if echo "$TEST" | grep -q -F -e .x.cf ; then + EXPECTED_CRASH=1 + else + EXPECTED_CRASH= + fi + + if [ "x$STAGING_TESTS" = "x0" ] && echo "$TEST" | grep -q -e '/staging/'; then + SKIP=1 + SKIPREASON="Staging tests are disabled" + elif [ "x$UNSAFE_TESTS" != "x1" ] && echo "$TEST" | grep -q -e '/unsafe/'; then + SKIP=1 + SKIPREASON="Unsafe tests are disabled" + elif [ "x$NON_CONTAINER_TESTS" != "x1" ] && echo "$TEST" | grep -q -e '/non_container/'; then + SKIP=1 + SKIPREASON="Tests using systemd units are disabled" + elif [ "x$NETWORK_TESTS" = "x0" ] && echo "$TEST" | grep -q -e '/network/'; then + SKIP=1 + SKIPREASON="Network-dependent tests are disabled" + elif grep -q "testinfra" $TEST && ! python -c "import testinfra" > /dev/null; then + SKIP=1 + SKIPREASON="Testinfra python module required but not found" + else + SKIP= + SKIPREASON= + fi + + ( echo ---------------------------------------------------------------------- + echo "$TEST"${EXPECTED_CRASH:+ \(expected to crash\)}${SKIPREASON:+ \($SKIPREASON\)} + echo ---------------------------------------------------------------------- + ) >> "$LOG" + + if [ -z "$SKIP" ]; then + TEST_START_TIME=`unix_seconds` + + FLATNAME="`echo "$TEST" | sed 's,[./],_,g'`" + + # Prepare workdir + WORKDIR="$BASE_WORKDIR/$FLATNAME" + $GAINROOT rm -rf "$WORKDIR" + mkdir -p "$WORKDIR/bin" "$WORKDIR/tmp" "$WORKDIR/inputs" + chmod ugo+rwxt "$WORKDIR/tmp" + ln -sf "$AGENT" "$WORKDIR/bin" + ln -sf "$CF_PROMISES" "$WORKDIR/bin" + ln -sf "$CF_SERVERD" "$WORKDIR/bin" + ln -sf "$CF_KEY" "$WORKDIR/bin" + echo ".*" > $WORKDIR/inputs/ignore_interfaces.rx + if uname | grep MINGW > /dev/null; then + PLATFORM_WORKDIR="$(echo $WORKDIR | sed -e 's%^/\([a-cA-Z]\)/%\1:/%' | sed -e 's%/%\\%g')" + DS="\\" + else + PLATFORM_WORKDIR="$WORKDIR" + DS="/" + fi + + echo "#!/bin/sh +CFENGINE_TEST_OVERRIDE_WORKDIR=\"$PLATFORM_WORKDIR\" +TEMP=\"$PLATFORM_WORKDIR${DS}tmp\" +NCF_TREE=\"${NCF_TREE}\" +NCF_TESTS_ACCEPTANCE=\"${NCF_TESTS_ACCEPTANCE}\" +export CFENGINE_TEST_OVERRIDE_WORKDIR TEMP NCF_TREE NCF_TESTS_ACCEPTANCE + +" > "$WORKDIR/runtest" + + if [ "$GDB" = 1 ]; then + if grep -q libtool < "$AGENT"; then + printf "\"$LIBTOOL\" --mode=execute " >> "$WORKDIR/runtest" + fi + printf "gdb --args " >> "$WORKDIR/runtest" + fi + + printf "\"$AGENT\" $VERBOSE -Kf \"$TEST\" -D AUTO,DEBUG 2>&1\n" >> "$WORKDIR/runtest" + chmod +x "$WORKDIR/runtest" + + if [ "$GDB" = 1 ]; then + $GAINROOT "$WORKDIR/runtest" + else + OUT=`$GAINROOT "$WORKDIR/runtest"` + fi + RETVAL=$? + echo "$OUT" >> "$LOG" + echo >> "$LOG" + echo "Return code is $RETVAL." >> "$LOG" + + TEST_END_TIME=`unix_seconds` + echo " "time=\"$(($TEST_END_TIME - $TEST_START_TIME)) seconds\"\> >> "$XMLTMP" + + # Some states are output by default.cf.sub, therefore check for both TEST + # prefix and default.cf.sub prefix. 
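+        # (Illustration) for TEST=./01_vars/001.cf the pattern becomes
+        # "(\./01_vars/001\.cf|default\.cf\.sub)"; only dots are escaped by the
+        # sed call, and the result is matched against "R: ... Pass" lines below.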
+ ESCAPED_TEST="$(echo "($TEST|default.cf.sub)" | sed -e 's/\./\\./g')" + + if [ -z "$EXPECTED_CRASH" ]; then + if [ $RETVAL -eq 0 ] && echo "$OUT" | grep -qE "R: .*$ESCAPED_TEST Pass"; then + RESULT=Pass + else + RESULT=FAIL + fi + else + if [ $RETVAL -ne 0 ]; then + RESULT=Pass + else + RESULT="FAILed to crash" + fi + fi + + if [ "$RESULT" = "Pass" ]; then + $GAINROOT rm -rf "$WORKDIR" + fi + + if [ "$RESULT" != Pass ] && [ -e .succeeded/"$FLATNAME" ]; then + echo $RESULT $TEST '(UNEXPECTED FAILURE)' >> "$SUMMARY" + ( echo " "\ + echo " "\<\/failure\> + ) >> "$XMLTMP" + else + echo $RESULT $TEST >> "$SUMMARY" + if [ "$RESULT" = FAIL ]; then + ( echo " "\ + echo " "\<\/failure\> + ) >> "$XMLTMP" + elif [ "$RESULT" = "FAILed to crash" ]; then + ( echo " "\ + echo " "\<\/failure\> + ) >> "$XMLTMP" + fi + fi + + if [ -z "$QUIET" ]; then + if [ "$RESULT" != Pass ] && [ -e .succeeded/"$FLATNAME" ]; then + echo $RESULT '(UNEXPECTED FAILURE)' + else + echo $RESULT + fi + else + if [ "$RESULT" = Pass ]; then + printf '.' + else + if [ -n "$EXPECTED_CRASH" ]; then + printf '!' + else + printf 'x' + fi + fi + fi + + ( + echo + echo ' ==>' $RESULT + echo + ) >> "$LOG" + + if [ "$RESULT" = Pass ]; then + PASSED_TESTS=$(($PASSED_TESTS + 1)) + + mkdir -p '.succeeded' + touch .succeeded/"$FLATNAME" + elif [ "$RESULT" = FAIL ]; then + FAILED_TESTS=$(($FAILED_TESTS + 1)) + elif [ "$RESULT" = "FAILed to crash" ]; then + FAILED_TO_CRASH_TESTS=$(($FAILED_TO_CRASH_TESTS + 1)) + fi + else + echo " "time=\"NULL\"\> >> "$XMLTMP" + echo Skip $TEST >> "$SUMMARY" + ( echo " "\ + echo " "\<\/skipped\> + ) >> "$XMLTMP" + if [ -z "$QUIET" ]; then + echo Skipped + else + printf '-' + fi + SKIPPED_TESTS=$(($SKIPPED_TESTS + 1)) + fi +} + +# We assume we're running this script from $objdir, $objdir/tests/acceptance, +# or /var/cfengine/tests/acceptance. +find_default_binary() +{ + [ -x "`pwd`/$2/$2" ] && eval $1=\""`pwd`/$2/$2"\" + [ -x "`pwd`/../../$2/$2" ] && eval $1=\""`pwd`/../../$2/$2"\" + [ -x "`pwd`/../../bin/$2" ] && eval $1=\""`pwd`/../../bin/$2"\" + which $2 > /dev/null && eval $1=\""`which $2`"\" +} +find_default_binary DEFAGENT cf-agent +find_default_binary DEFCF_PROMISES cf-promises +find_default_binary DEFCF_SERVERD cf-serverd +find_default_binary DEFCF_KEY cf-key + +[ -x "`pwd`/libtool" ] && DEFLIBTOOL="`pwd`/libtool" +[ -x "`pwd`/../../libtool" ] && DEFLIBTOOL="`pwd`/../../libtool" + +while true; do + case "$1" in + --help) + usage + exit;; + -q) + QUIET=1;; + --gainroot=*) + GAINROOT=${1#--gainroot=};; + --staging) + STAGING_TESTS=1;; + --unsafe) + UNSAFE_TESTS=1;; + --non_container) + NON_CONTAINER_TESTS=1;; + --no-network) + NETWORK_TESTS=0;; + --agent=*) + AGENT=${1#--agent=};; + --cfpromises=*) + CF_PROMISES=${1#--cfpromises=};; + --cfserverd=*) + CF_SERVERD=${1#--cfserverd=};; + --cfkey=*) + CF_KEY=${1#--cfkey=};; + --libtool=*) + LIBTOOL=${1#--libtool=};; + --printlog) + PRINTLOG=1;; + --gdb) + GDB=1;; + --info) + VERBOSE="-I -D debug";; + --verbose) + VERBOSE="-v -D debug";; + -*) + echo "Unknown option: $1" + exit 1;; + *) + break;; + esac + shift +done + +AGENT=${AGENT:-${DEFAGENT}} +CF_PROMISES=${CF_PROMISES:-${DEFCF_PROMISES}} +CF_SERVERD=${CF_SERVERD:-${DEFCF_SERVERD}} +CF_KEY=${CF_KEY:-${DEFCF_KEY}} +LIBTOOL=${LIBTOOL:-${DEFLIBTOOL}} + +if [ ! -x "$AGENT" ]; then + echo "ERROR can't find cf-agent. Are you sure you're running this from OBJDIR or OBJDIR/tests/acceptance? Check cf-agent = '$AGENT'" + exit 1 +fi +if [ ! 
-x "$CF_PROMISES" ]; then + CF_PROMISES="`dirname $AGENT`/cf-promises" + if [ ! -x "$CF_PROMISES" ]; then + echo "ERROR can't find cf-promises. Are you sure you're running this from OBJDIR or OBJDIR/tests/acceptance? Check cf-promises = '$CF_PROMISES'" + exit 1 + fi +fi +if [ ! -x "$CF_SERVERD" ]; then + CF_SERVERD="`dirname $AGENT`/cf-serverd" + if [ ! -x "$CF_SERVERD" ]; then + echo "ERROR can't find cf-serverd. Are you sure you're running this from OBJDIR or OBJDIR/tests/acceptance? Check cf-serverd = '$CF_SERVERD'" + exit 1 + fi +fi +if [ ! -x "$CF_KEY" ]; then + CF_KEY="`dirname $AGENT`/cf-key" + if [ ! -x "$CF_KEY" ]; then + echo "ERROR can't find cf-key. Are you sure you're running this from OBJDIR or OBJDIR/tests/acceptance? Check cf-key = '$CF_KEY'" + exit 1 + fi +fi + +if [ "$UNSAFE_TESTS" = "1" -a "$GAINROOT" = "fakeroot" ]; then + echo "Unsafe tests do not play well together with fakeroot. Please use a different" + echo "--gainroot (like \"sudo\"), or you will get incorrect results." + exit 1 +fi + +# Get current agent version +AGENT_VERSION=`"${AGENT}" -V | cut -d' ' -f 3 | sed 's/\.[^.]*$//'` + +if [ $# -gt 0 ]; then + # We need to run all specified tests, but not unsafe ones. + STAGING_TESTS=1 + NETWORK_TESTS=1 + for test in "$@"; do + if ! expr "$test" : '[/.]' >/dev/null; then + test="./$test" + fi + + if [ -f $test ]; then + TESTS="$TESTS${TESTS:+ +}$test" + elif [ -d $test ]; then + ADDTESTS=`${NCF_COMPATIBLE_INPUTS} --capability-file ${CAPABILITY_FILE} --agent-version ${AGENT_VERSION} --ncf-path "${NCF_TESTS_ACCEPTANCE}/${test}" . | awk -v path="${test}" '{ print path"/"$0 }' | sort` + TESTS="$TESTS${TESTS:+ +}$ADDTESTS" + else + echo "Unable to open test file/directory: $test" + fi + done +else + MYDIR=`dirname $0` + TESTS=`${NCF_COMPATIBLE_INPUTS} --capability-file ${CAPABILITY_FILE} --agent-version ${AGENT_VERSION} --ncf-path "${MYDIR}" . 
| sort` +fi + +# +# fd 7 is a /dev/null for quiet execution and stdout for default one +# +if [ -z "$QUIET" ]; then + exec 7>&1 +else + exec 7>/dev/null +fi + +# +# Now run the tests +# + +TESTS_COUNT=`echo $TESTS | wc -w` +START_TIME=`unix_seconds` + +( echo ====================================================================== + echo Testsuite started at `date "+%F %T"` + echo ---------------------------------------------------------------------- + echo Total tests: $TESTS_COUNT + echo +) | tee "$LOG" | tee "$SUMMARY" >&7 + + +( echo \<\?xml version=\"1.0\" encoding=\"UTF-8\"\?\> + echo \ "$XML" + + echo -n "" > "$XMLTMP" + +for test in $TESTS; do + echo " "\> "$XMLTMP" + echo " "classname=\"$test\" >> "$XMLTMP" + if [ -n "$USE_VALGRIND" ]; then + runtest "valgrind --leak-check=full --show-reachable=yes $AGENT" "$test" || true + else + runtest $AGENT "$test" || true + fi + echo " "\<\/testcase\> >> "$XMLTMP" +done + +END_TIME=`unix_seconds` + +( echo + echo ====================================================================== + echo Testsuite finished at `date "+%F %T"` \($(($END_TIME - $START_TIME)) seconds\) +) | tee -a "$LOG" | tee -a "$SUMMARY" >&7 + +( echo + echo Passed tests: $PASSED_TESTS + echo Failed tests: $FAILED_TESTS + echo Failed to crash tests: $FAILED_TO_CRASH_TESTS + echo Skipped tests: $SKIPPED_TESTS +) | tee -a "$LOG" | tee -a "$SUMMARY" + +( echo " "failures=\"$FAILED_TESTS\" + echo " "skipped=\"$SKIPPED_TESTS\" + echo " "time=\"$(($END_TIME - $START_TIME)) seconds\"\> +) >> "$XML" + + cat "$XMLTMP" >> "$XML" + + echo \<\/testsuite\> >> "$XML" + +if [ -n "$PRINTLOG" ]; then + cat "$LOG" +fi + +if [ "$FAILED_TESTS" -ne 0 ] || [ "$FAILED_TO_CRASH_TESTS" -ne 0 ]; then + exit 1 +else + exit 0 +fi + diff --git a/policies/lib/tests/integration/command_execution_test.rs b/policies/lib/tests/integration/command_execution_test.rs new file mode 100644 index 00000000000..f95428391b0 --- /dev/null +++ b/policies/lib/tests/integration/command_execution_test.rs @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_is_not_applicable_in_audit_mode() { + let workdir = init_test(); + let file_path = workdir.path().join("target.txt"); + + let tested_method = MethodToTest::command_execution(format!( + "/bin/touch {}", + file_path.clone().to_str().unwrap() + )) + .audit(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::NA]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::NA); + assert!( + !file_path.exists(), + "The file '{}' should not have been created by the method execution", + file_path.display() + ); + end_test(workdir); +} +#[test] +fn it_repairs_in_enforced_mode_if_the_command_succeeds() { + let workdir = init_test(); + let file_path = workdir.path().join("target.txt"); + + let tested_method = MethodToTest::command_execution(format!( + "/bin/touch {}", + file_path.clone().to_str().unwrap() + )) + .enforce(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Repaired]); + 
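// Check both reporting channels: the legacy result classes asserted above and
+    // the log-v4 report asserted below must agree on the expected status.
+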
r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Repaired); + assert!( + file_path.exists(), + "The file '{}' should have been created by the method execution", + file_path.display() + ); + end_test(workdir); +} +#[test] +fn it_errors_in_enforced_mode_if_the_command_fails() { + let workdir = init_test(); + let file_path = workdir.path().join("nonexistingfolder/target.txt"); + + let tested_method = MethodToTest::command_execution(format!( + "/bin/touch {}", + file_path.clone().to_str().unwrap() + )) + .enforce(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Error]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Error); + assert!( + !file_path.exists(), + "The file '{}' should not have been created by the method execution", + file_path.display() + ); + end_test(workdir); +} diff --git a/policies/lib/tests/integration/condition_from_expression_test.rs b/policies/lib/tests/integration/condition_from_expression_test.rs new file mode 100644 index 00000000000..bc7cec78614 --- /dev/null +++ b/policies/lib/tests/integration/condition_from_expression_test.rs @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::given::Given; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_should_generate_the_true_conditions_if_needed() { + let workdir = init_test(); + let tested_method = + MethodToTest::condition_from_expression("plouf".to_string(), "(any.!false)".to_string()) + .enforce(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_true".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_false".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_generate_the_false_conditions_if_needed() { + let workdir = init_test(); + let tested_method = + MethodToTest::condition_from_expression("plouf".to_string(), "!(any.!false)".to_string()) + .enforce(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_false".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_true".to_string()]); + end_test(workdir); +} +#[test] +fn calls_should_be_independent() { + let workdir = init_test(); + let tested_method1 = + MethodToTest::condition_from_expression("plouf".to_string(), "!(any.!false)".to_string()) + .enforce(); + let tested_method2 = + MethodToTest::condition_from_expression("plouf".to_string(), "(any.!false)".to_string()) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(tested_method2)) + .when(tested_method1.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method1.clone(), vec![MethodStatus::Success]); + 
r.assert_log_v4_result_conditions(tested_method1.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_false".to_string(), "plouf_true".to_string()]); + end_test(workdir); +} diff --git a/policies/lib/tests/integration/condition_from_variable_existence_test.rs b/policies/lib/tests/integration/condition_from_variable_existence_test.rs new file mode 100644 index 00000000000..41d8db0bdd5 --- /dev/null +++ b/policies/lib/tests/integration/condition_from_variable_existence_test.rs @@ -0,0 +1,77 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::given::Given; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_should_generate_the_true_conditions_if_needed() { + let workdir = init_test(); + let variable_def = MethodToTest::variable_string( + "my_prefix".to_string(), + "my_name".to_string(), + "hello world".to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_existence( + "plouf".to_string(), + "my_prefix.my_name".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_true".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_false".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_generate_the_false_conditions_if_needed() { + let workdir = init_test(); + let variable_def = MethodToTest::variable_string( + "my_prefix".to_string(), + "my_name".to_string(), + "hello world".to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_existence( + "plouf".to_string(), + "my_prefix.my_wrong_name".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_false".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_true".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_work_on_dict_variable() { + let workdir = init_test(); + let variable_def = MethodToTest::variable_dict( + "my_prefix".to_string(), + "my_name".to_string(), + r#"{"key": "hello world"}"#.to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_existence( + "plouf".to_string(), + "my_prefix.my_name".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_true".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_false".to_string()]); + end_test(workdir); +} diff --git 
a/policies/lib/tests/integration/condition_from_variable_match_test.rs b/policies/lib/tests/integration/condition_from_variable_match_test.rs new file mode 100644 index 00000000000..16553ad044c --- /dev/null +++ b/policies/lib/tests/integration/condition_from_variable_match_test.rs @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::given::Given; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_should_generate_the_true_conditions_if_needed() { + let workdir = init_test(); + let variable_def = MethodToTest::variable_string( + "my_prefix".to_string(), + "my_name".to_string(), + "hello world".to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_match( + "plouf".to_string(), + "my_prefix.my_name".to_string(), + ".*".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_true".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_false".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_generate_the_false_conditions_if_needed() { + let workdir = init_test(); + let variable_def = MethodToTest::variable_string( + "my_prefix".to_string(), + "my_name".to_string(), + "hello world".to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_match( + "plouf".to_string(), + "my_prefix.my_name".to_string(), + "foo.*bar".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_false".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_true".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_be_in_error_if_the_variable_is_undefined() { + let workdir = init_test(); + let tested_method = MethodToTest::condition_from_variable_match( + "plouf".to_string(), + "my_prefix.my_name".to_string(), + "foo.*bar".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Error]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Error); + r.assert_conditions_are_defined(vec!["plouf_false".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_true".to_string()]); + end_test(workdir); +} +#[test] +fn it_should_not_work_on_dict_variable() { + // It succeeds as the variable is detected but the matching will always fail on a dict variable + let workdir = init_test(); + let variable_def = MethodToTest::variable_dict( + "my_prefix".to_string(), + "my_name".to_string(), + r#"{"key": "hello world"}"#.to_string(), + ); + let tested_method = MethodToTest::condition_from_variable_match( + "plouf".to_string(), + 
"my_prefix.my_name".to_string(), + ".*".to_string(), + ) + .enforce(); + let r = MethodTestSuite::new() + .given(Given::method_call(variable_def)) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method.clone(), vec![MethodStatus::Success]); + r.assert_log_v4_result_conditions(tested_method.clone(), MethodStatus::Success); + r.assert_conditions_are_defined(vec!["plouf_false".to_string()]); + r.assert_conditions_are_undefined(vec!["plouf_true".to_string()]); + end_test(workdir); +} diff --git a/policies/lib/tests/integration/file_absent_test.rs b/policies/lib/tests/integration/file_absent_test.rs new file mode 100644 index 00000000000..b5accc554af --- /dev/null +++ b/policies/lib/tests/integration/file_absent_test.rs @@ -0,0 +1,97 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::given::Given; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_repairs_in_enforce_when_the_target_file_exists() { + let workdir = init_test(); + let file = workdir.path().join("file_to_remove"); + let file_path = file.clone().to_string_lossy().into_owned(); + + let tested_method = MethodToTest::file_absent(file_path.clone()).enforce(); + let r = MethodTestSuite::new() + .given(Given::file_present(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Repaired]); + assert!( + !file.exists(), + "The file '{}' should have been removed by the method execution", + file.display() + ); + end_test(workdir); +} + +#[test] +fn it_errors_in_audit_when_the_target_file_exists() { + let workdir = init_test(); + let file = workdir.path().join("file_to_remove"); + let file_path = file.clone().to_string_lossy().into_owned(); + + let tested_method = MethodToTest::file_absent(file_path.clone()).audit(); + let r = MethodTestSuite::new() + .given(Given::file_present(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Error]); + assert!( + file.exists(), + "The file '{}' should NOT have been removed by the method execution", + file.display() + ); + end_test(workdir); +} + +#[test] +fn it_errors_in_enforce_when_the_target_exists_and_is_a_directory() { + let workdir = init_test(); + let dir = workdir.path().join("dir_to_remove"); + let file_path = dir.clone().to_string_lossy().into_owned(); + let tested_method = MethodToTest::file_absent(file_path.clone()).enforce(); + + let r = MethodTestSuite::new() + .given(Given::directory_present(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Error]); + assert!( + dir.exists(), + "The directory '{}' should not have been removed by the method execution", + dir.display() + ); + end_test(workdir); +} + +#[ignore] +#[test_log::test] +fn it_should_be_idempotent() { + let workdir = init_test(); + let file = workdir.path().join("file_to_remove"); + let file_path = file.clone().to_string_lossy().into_owned(); + let tested_method = MethodToTest::file_absent(file_path.clone()).enforce(); + + let r = MethodTestSuite::new() + 
.given(Given::file_present(file_path.clone())) + .when(tested_method.clone()) + .when(tested_method.clone()) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions( + tested_method, + vec![ + MethodStatus::Repaired, + MethodStatus::Success, + MethodStatus::Success, + ], + ); + assert!( + !file.exists(), + "The file '{}' should have been removed by the method execution", + file.display() + ); + end_test(workdir); +} diff --git a/policies/lib/tests/integration/file_check_exists_test.rs b/policies/lib/tests/integration/file_check_exists_test.rs new file mode 100644 index 00000000000..a042d6cef68 --- /dev/null +++ b/policies/lib/tests/integration/file_check_exists_test.rs @@ -0,0 +1,95 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::integration::{end_test, get_lib_path, init_test}; +use crate::testlib::given::Given; +use crate::testlib::method_test_suite::MethodTestSuite; +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; + +#[test] +fn it_succeeds_in_enforce_when_the_target_file_exists() { + let workdir = init_test(); + let file_path = workdir + .path() + .join("flag_file") + .to_string_lossy() + .into_owned(); + let tested_method = MethodToTest::file_check_exists(file_path.clone()).enforce(); + + let r = MethodTestSuite::new() + .given(Given::file_present(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Success]); + end_test(workdir); +} + +#[test] +fn it_fails_in_enforce_when_the_target_file_does_not_exist() { + let workdir = init_test(); + let file_path = workdir + .path() + .join("flag_file") + .to_string_lossy() + .into_owned(); + + let tested_method = MethodToTest::file_check_exists(file_path.clone()).enforce(); + let r = MethodTestSuite::new() + .given(Given::file_absent(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Error]); + end_test(workdir); +} + +#[test] +fn it_succeeds_in_audit_when_the_target_file_exists() { + let workdir = init_test(); + let file_path = workdir + .path() + .join("flag_file") + .to_string_lossy() + .into_owned(); + + let tested_method = MethodToTest::file_check_exists(file_path.clone()).audit(); + let r = MethodTestSuite::new() + .given(Given::file_present(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Success]); + end_test(workdir); +} +#[test] +fn it_succeeds_in_audit_when_the_target_file_does_not_exist() { + let workdir = init_test(); + let file_path = workdir + .path() + .join("flag_file") + .to_string_lossy() + .into_owned(); + + let tested_method = MethodToTest::file_check_exists(file_path.clone()).audit(); + let r = MethodTestSuite::new() + .given(Given::file_absent(file_path.clone())) + .when(tested_method.clone()) + .execute(get_lib_path(), workdir.path().to_path_buf()); + r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Error]); + end_test(workdir); +} +#[test] +fn it_succeeds_in_enforce_when_the_target_is_a_directory() { + let workdir = init_test(); + let dir_path = workdir + .path() + .join("directory") + .to_string_lossy() + .into_owned(); + + let tested_method = 
MethodToTest::file_check_exists(dir_path.clone()).enforce();
+    let r = MethodTestSuite::new()
+        .given(Given::directory_present(dir_path.clone()))
+        .when(tested_method.clone())
+        .execute(get_lib_path(), workdir.path().to_path_buf());
+    r.assert_legacy_result_conditions(tested_method, vec![MethodStatus::Success]);
+    end_test(workdir);
+}
diff --git a/policies/lib/tests/integration/mod.rs b/policies/lib/tests/integration/mod.rs
new file mode 100644
index 00000000000..f0567521718
--- /dev/null
+++ b/policies/lib/tests/integration/mod.rs
@@ -0,0 +1,46 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+#[cfg(test)]
+pub mod command_execution_test;
+#[cfg(test)]
+pub mod condition_from_expression_test;
+#[cfg(test)]
+pub mod condition_from_variable_existence_test;
+#[cfg(test)]
+pub mod condition_from_variable_match_test;
+#[cfg(test)]
+pub mod file_absent_test;
+#[cfg(test)]
+pub mod file_check_exists_test;
+
+use log::debug;
+use rudder_commons::methods::Methods;
+use std::mem::ManuallyDrop;
+use std::path::PathBuf;
+use std::sync::OnceLock;
+use tempfile::{TempDir, tempdir};
+
+const LIBRARY_PATH: &str = "./tree";
+pub fn get_lib() -> &'static Methods {
+    static LIB: OnceLock<Methods> = OnceLock::new();
+    LIB.get_or_init(|| {
+        rudderc::frontends::read_methods(&[get_lib_path()])
+            .unwrap()
+            .clone()
+    })
+}
+fn get_lib_path() -> PathBuf {
+    PathBuf::from(LIBRARY_PATH)
+}
+
+fn init_test() -> ManuallyDrop<TempDir> {
+    let _ = env_logger::try_init();
+    let workdir = tempdir().unwrap();
+    debug!("WORKDIR = {:?}", workdir.path());
+    ManuallyDrop::new(workdir)
+}
+
+fn end_test(workdir: ManuallyDrop<TempDir>) {
+    ManuallyDrop::into_inner(workdir);
+}
diff --git a/policies/lib/tests/mod.rs b/policies/lib/tests/mod.rs
new file mode 100644
index 00000000000..e500e030a22
--- /dev/null
+++ b/policies/lib/tests/mod.rs
@@ -0,0 +1,5 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+pub mod integration;
+pub mod testlib;
diff --git a/policies/lib/tests/ncf.py b/policies/lib/tests/ncf.py
new file mode 100644
index 00000000000..d0898dc5d69
--- /dev/null
+++ b/policies/lib/tests/ncf.py
@@ -0,0 +1,276 @@
+# -*- coding: utf-8 -*-
+# This is a Python module containing functions to parse and analyze ncf components
+
+# This module is designed to run on the latest major versions of the most popular
+# server OSes (Debian, Red Hat/CentOS, Ubuntu, SLES, ...)
+# At the time of writing (November 2013) these are Debian 7, Red Hat/CentOS 6,
+# Ubuntu 12.04 LTS, SLES 11, ...
+# The version of Python in all of these is >= 2.6, which is therefore what this +# module must support + +import re +import subprocess +import json +import os.path +import shutil +import sys +import os +import codecs +import uuid +from pprint import pprint + +# Verbose output +VERBOSE = 0 +CFPROMISES_PATH="/opt/rudder/bin/cf-promises" + +dirs = [ "10_ncf_internals", "20_cfe_basics", "30_generic_methods", "40_it_ops_knowledge", "50_techniques", "60_services", "ncf-hooks.d" ] + +tags = {} +common_tags = [ "name", "description", "parameter", "bundle_name", "bundle_args"] +tags["generic_method"] = [ "documentation", "class_prefix", "class_parameter", "class_parameter_id", "deprecated", "agent_requirements", "parameter_constraint", "parameter_type", "action", "rename", "parameter_rename" ] +[ value.extend(common_tags) for (k,value) in tags.items() ] + +optional_tags = {} +optional_tags["generic_method"] = [ "deprecated", "documentation", "parameter_constraint", "parameter_type", "agent_requirements", "action", "rename", "parameter_rename" ] +multiline_tags = [ "description", "documentation", "deprecated" ] + +class NcfError(Exception): + def __init__(self, message, details="", cause=None): + self.message = message + self.details = details + # try to get details from inner cause + try: + # Will not add to details if cause is None or message is None + self.details += " caused by : " + cause.message + # Will not add to details if details is None + self.details += "\n" + cause.details + except: + # We got an error while extending error details, just ignore it and keep current value + pass + + def __str__(self): + return repr(self.message) + + +def format_errors(error_list): + formatted_errors = [] + for error in error_list: + sys.stderr.write("ERROR: " + error.message + "\n") + sys.stderr.write(error.details + "\n") + formatted_errors.append( { "message": error.message, "details": error.details } ) + sys.stderr.flush() + return formatted_errors + + +def get_root_dir(): + return os.path.realpath(os.path.dirname(__file__) + "/../") + + +# This method emulates the behavior of subprocess check_output method. +# We aim to be compatible with Python 2.6, thus this method does not exist +# yet in subprocess. 
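+# It returns the command's decoded standard output, dropping any leading output lines
+# before the first line that starts with "{", and raises NcfError when the command
+# exits with a non-zero status.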
+def check_output(command, env = {}): + command_env = dict(env) + if VERBOSE == 1: + sys.stderr.write("VERBOSE: About to run command '" + " ".join(command) + "'\n") + command_env["PATH"] = os.environ['PATH'] + process = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=None, env=command_env) + output, error = process.communicate() + lines = output.decode("UTF-8", "ignore").split("\n") + for index, line in enumerate(lines): + if line.startswith("{"): + output = "\n".join(lines[index:]) + break; + error = error.decode("UTF-8", "ignore") + retcode = process.poll() + if retcode == 0: + sys.stderr.write(error) + else: + if VERBOSE == 1: + sys.stderr.write("VERBOSE: Exception triggered, Command returned error code " + str(retcode) + "\n") + raise NcfError("Error while running '" + " ".join(command) +"' command.", error) + + if VERBOSE == 1: + sys.stderr.write("VERBOSE: Command output: '" + output + "'" + "\n") + return output + + +def get_all_generic_methods_filenames(alt_path=None): + result = [] + if alt_path is None: + filelist1 = get_all_generic_methods_filenames_in_dir(get_root_dir() + "/tree/30_generic_methods") + filelist2 = get_all_generic_methods_filenames_in_dir("/var/rudder/configuration-repository/ncf/30_generic_methods") + result = filelist1 + filelist2 + else: + result = get_all_generic_methods_filenames_in_dir(alt_path) + + return result + +excluded_dirs = [ "applications", "fileConfiguration", "fileDistribution", "jobScheduling", "systemSettings", "system" ] + +def get_all_generic_methods_filenames_in_dir(parent_dir): + filenames = [] + filenames_add = filenames.append + for root, dirs, files in os.walk(parent_dir): + for dir in dirs: + if dir not in excluded_dirs: + filenames = filenames + get_all_generic_methods_filenames_in_dir(os.path.join(parent_dir,dir)) + + for file in files: + if not file.startswith("_") and file.endswith(".cf"): + filenames.append(os.path.join(root, file)) + return filenames + + +def parse_generic_method_metadata(content): + res = {} + warnings = [] + parameters = [] + param_names = set() + param_constraints = {} + param_types = {} + param_rename = [] + default_constraint = { + "allow_whitespace_string" : False + , "allow_empty_string" : False + , "max_length" : 16384 + } + + multiline = False + previous_tag = None + match_line = "" + + for line in content.splitlines(): + # line should already be unicode + #unicodeLine = unicode(line,"UTF-8") #line.decode('unicode-escape') + + # Parse metadata tag line + match = re.match("^\s*#\s*@(\w+)\s*(([a-zA-Z0-9_]+)?\s+(.*?)|.*?)\s*$", line, flags=re.UNICODE) + if match : + tag = match.group(1) + # Check if we are a valid tag + if tag in tags["generic_method"]: + # tag "parameter" may be multi-valued + if tag == "parameter": + param_name = match.group(3) + parameters.append({'name': param_name, 'description': match.group(4)}) + param_names.add(param_name) + elif tag == "parameter_constraint": + constraint = json.loads("{" + match.group(4).replace('\\', '\\\\') + "}") + # extend default_constraint if it was not already defined) + param_constraints.setdefault(match.group(3), default_constraint.copy()).update(constraint) + elif tag == "parameter_type": + param_type = match.group(4) + param_types[match.group(3)] = param_type + elif tag == "parameter_rename": + old_name = match.group(3) + new_name = match.group(4) + param_rename.append( { "new": new_name, "old" : old_name } ) + res['parameter_rename'] = param_rename + else: + res[tag] = match.group(2) + 
previous_tag = tag + continue + + # Parse line without tag, if previous tag was a multiline tag + if previous_tag is not None and previous_tag in multiline_tags: + match = re.match("^\s*# ?(.*)$", line, flags=re.UNICODE) + if match: + res[previous_tag] += "\n"+match.group(1) + continue + else: + previous_tag = None + + # manage multiline bundle definition + if multiline: + match_line += line + else: + match_line = line + if re.match("[^#]*bundle\s+agent\s+(\w+)\s*\([^)]*$", match_line, flags=re.UNICODE|re.MULTILINE|re.DOTALL): + multiline = True + + # read a complete bundle definition + match = re.match("[^#]*bundle\s+agent\s+(\w+)\s*(\(([^)]*)\))?\s*\{?\s*$", match_line, flags=re.UNICODE|re.MULTILINE|re.DOTALL) + if match: + multiline = False + res['bundle_name'] = match.group(1) + res['bundle_args'] = [] + + if match.group(3) is not None and len(match.group(3)): + res['bundle_args'] += [x.strip() for x in match.group(3).split(',')] + + # Any tags should come before the "bundle agent" declaration + break + + # The tag "class_parameter_id" is a magic tag, it's value is built from class_parameter and the list of args + if "class_parameter_id" in tags["generic_method"]: + try: + res['class_parameter_id'] = res['bundle_args'].index(res['class_parameter'])+1 + except: + res['class_parameter_id'] = 0 + name = res['bundle_name'] if 'bundle_name' in res else "unknown" + raise NcfError("The class_parameter name \"" + res['class_parameter'] + "\" does not seem to match any of the bundle's parameters in " + name) + + # Check that we don't have a constraint that is defined on a non existing parameter: + wrong_constraint_names = set(param_constraints.keys()) - param_names + if len(wrong_constraint_names) > 0: + warning_message = "In technique '' defining constraint on non existing parameters: "+ ", ".join(wrong_constraint_names) + print(warning_message) + warnings.append(warning_message) + + # Check that we don't have a type that is defined on a non existing parameter: + wrong_type_names = set(param_types.keys()) - param_names + if len(wrong_type_names) > 0: + warning_message = "In technique '' defining type on non existing parameters: "+ ", ".join(wrong_type_names) + print(warning_message) + warnings.append(warning_message) + + # If we found any parameters, store them in the res object + if len(parameters) > 0: + for param in parameters: + parameter_name = param["name"] + constraints = param_constraints.get(param["name"], default_constraint) + param_type = param_types.get(param["name"], "string") + param["constraints"] = constraints + param["type"] = param_type + + res['parameter'] = parameters + + # Remove trailing line breaks + for tag in multiline_tags: + if tag in res: + res[tag] = res[tag].strip('\n\r') + + all_tags = tags["generic_method"] + expected_tags = [ tag for tag in all_tags if not tag in optional_tags["generic_method"]] + if not set(res.keys()).issuperset(set(expected_tags)): + missing_keys = [mkey for mkey in expected_tags if mkey not in set(res.keys())] + name = res['bundle_name'] if 'bundle_name' in res else "unknown" + raise NcfError("One or more metadata tags not found before the bundle agent declaration (" + ", ".join(missing_keys) + ") in " + name) + + result = { "result" : res, "warnings" : warnings } + return result + + +def get_all_generic_methods_metadata(alt_path=None): + all_metadata = {} + + filenames = get_all_generic_methods_filenames(alt_path) + errors = [] + warnings = [] + + for file in filenames: + with codecs.open(file, encoding="utf-8") as fd: + content = 
fd.read() + try: + result = parse_generic_method_metadata(content) + metadata = result["result"] + warnings.extend(result["warnings"]) + all_metadata[metadata['bundle_name']] = metadata + except NcfError as e: + error = NcfError("Could not parse generic method in '" + file + "'", cause=e ) + errors.append(error) + continue # skip this file, it doesn't have the right tags in - yuk! + + return { "data": { "generic_methods" : all_metadata }, "errors": format_errors(errors), "warnings": warnings } + diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_bundles_should_be_commented_py b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_bundles_should_be_commented_py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_define_prefixes_py b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_define_prefixes_py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_have_only_one_bundle_agent_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_have_only_one_bundle_agent_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_use_intermediary_promisers_slist_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_use_intermediary_promisers_slist_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_use_new_style_log_calls_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_methods_should_use_new_style_log_calls_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_should_canonify_report_class_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_all_generic_should_canonify_report_class_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_bundles_starting_with_underscore_must_not_modify_the_system_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_bundles_starting_with_underscore_must_not_modify_the_system_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_dont_use_perms_on_group_root_sh b/policies/lib/tests/quick/.succeeded/__30_generic_methods_dont_use_perms_on_group_root_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__30_generic_methods_each_generic_method_name_should_be_unique_py b/policies/lib/tests/quick/.succeeded/__30_generic_methods_each_generic_method_name_should_be_unique_py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__tests_all_shell_test_should_be_set_e_sh b/policies/lib/tests/quick/.succeeded/__tests_all_shell_test_should_be_set_e_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__tests_markdown_in_docs_py b/policies/lib/tests/quick/.succeeded/__tests_markdown_in_docs_py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__tests_nbsp_is_not_allowed_sh 
b/policies/lib/tests/quick/.succeeded/__tests_nbsp_is_not_allowed_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/.succeeded/__tests_normal_ordering_sh b/policies/lib/tests/quick/.succeeded/__tests_normal_ordering_sh new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/all_shell_test_should_be_set_e.sh b/policies/lib/tests/quick/all_shell_test_should_be_set_e.sh new file mode 100755 index 00000000000..4e91de2b5c1 --- /dev/null +++ b/policies/lib/tests/quick/all_shell_test_should_be_set_e.sh @@ -0,0 +1,26 @@ +#!/bin/sh + +set -e +GIT_ROOT="$(git rev-parse --show-toplevel)" +NCF_TREE=$GIT_ROOT/policies/lib/tree + +# All tests written in shell should use "set -e". This test checks those tests. +# (It is thus somewhat a meta test test.) + +ALL_SHELL_TESTS=`find "${GIT_ROOT}/policies/lib/tests/quick" -name "*.sh"` + +ERRORS=0 +for file in ${ALL_SHELL_TESTS} +do + if ! grep -E "^[ ]*set[ ]+-e" ${file} > /dev/null; then + ERRORS=`expr ${ERRORS} + 1` + echo "Test ${file} is missing the \"set -e\" declaration" + fi +done + +if [ ${ERRORS} -eq 0 ]; then + echo "R: $0 Pass" +else + echo "R: $0 FAIL" +fi +exit $ERRORS diff --git a/policies/lib/tests/quick/assets/getParent/1/2/3/emptyfile.txt b/policies/lib/tests/quick/assets/getParent/1/2/3/emptyfile.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/policies/lib/tests/quick/assets/getParent/1/2/3/symlink_to_2 b/policies/lib/tests/quick/assets/getParent/1/2/3/symlink_to_2 new file mode 120000 index 00000000000..b870225aa05 --- /dev/null +++ b/policies/lib/tests/quick/assets/getParent/1/2/3/symlink_to_2 @@ -0,0 +1 @@ +../ \ No newline at end of file diff --git a/policies/lib/tests/quick/bundles_starting_with_underscore_must_not_modify_the_system.sh b/policies/lib/tests/quick/bundles_starting_with_underscore_must_not_modify_the_system.sh new file mode 100755 index 00000000000..395126347cd --- /dev/null +++ b/policies/lib/tests/quick/bundles_starting_with_underscore_must_not_modify_the_system.sh @@ -0,0 +1,40 @@ +#!/bin/sh + +set -e +GIT_ROOT="$(git rev-parse --show-toplevel)" +NCF_TREE=$GIT_ROOT/policies/lib/tree + +# Check that all generic_methods bundles that start with _ do not have any promises aside from "meta-promises" + +ALLOWED_TYPES="vars classes methods reports" + +ERRORS=0 +FILES_TO_CHECK=`find "${NCF_TREE}/30_generic_methods/" -name "_*.cf"` +for file in ${FILES_TO_CHECK} +do + PROMISE_TYPES=`cat ${file} | grep -Ev "^\s*#?\s*$" | grep -E "^\s*[a-z]+:(\s+|#|$)" | sed -e "s/^\s*\(.*\):.*$/\1/" | sort | uniq` + + for found_type in ${PROMISE_TYPES} + do + TYPE_OK=0 + for allowed_type in ${ALLOWED_TYPES} + do + if [ "z${found_type}" = "z${allowed_type}" ]; then + TYPE_OK=1 + fi + done + + if [ ${TYPE_OK} -ne 1 ]; then + echo "File ${file} contains a forbidden promise type (${found_type}) in an internal bundle" + ERRORS=`expr ${ERRORS} + 1` + fi + done + +done + +if [ ${ERRORS} -eq 0 ]; then + echo "R: $0 Pass" +else + echo "R: $0 FAIL" +fi +exit $ERRORS diff --git a/policies/lib/tests/quick/dont_use_perms_on_group_root.sh b/policies/lib/tests/quick/dont_use_perms_on_group_root.sh new file mode 100755 index 00000000000..262fa37c700 --- /dev/null +++ b/policies/lib/tests/quick/dont_use_perms_on_group_root.sh @@ -0,0 +1,42 @@ +#!/bin/sh + +##################################################################################### +# Copyright 2015 Normation SAS +##################################################################################### +# +# This 
program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### +set -e +GIT_ROOT="$(git rev-parse --show-toplevel)" +NCF_TREE=$GIT_ROOT/policies/lib/tree + +# Check that no tests use the group "root" for perms - this works on Linux but not on most UNIXes + +FILES_TO_CHECK=`find "${GIT_ROOT}/policies/lib/tests/" -name "*.cf"` +ERRORS=0 +for f in $FILES_TO_CHECK +do + if grep -Eq "^[^#]*perms\s*=>\s*mog\([^,]+,\s*[^,]+,\s*['\"]root['\"]\)" ${f}; then + echo "File $f uses 'root' group, will break tests on non-Linux OSes" + ERRORS=`expr $ERRORS + 1` + fi +done + +if [ $ERRORS -eq 0 ]; then + echo "R: $0 Pass" +else + echo "R: $0 Fail" +fi + +exit $ERRORS diff --git a/policies/lib/tests/quick/nbsp_is_not_allowed.sh b/policies/lib/tests/quick/nbsp_is_not_allowed.sh new file mode 100755 index 00000000000..51acd7af594 --- /dev/null +++ b/policies/lib/tests/quick/nbsp_is_not_allowed.sh @@ -0,0 +1,35 @@ +#!/bin/sh + +set -e +GIT_ROOT="$(git rev-parse --show-toplevel)" +NCF_TREE=$GIT_ROOT/policies/lib/tree + +# NBSP breaks scripts and cfengine code, they should never appear + +# Build fine arguments +FIND_ARGS="-type f -not -wholename */.git/* -not -wholename */api/flask/* -not -name *.png -not -name *.eot -not -name *.ttf -not -name *.woff -not -name *.woff2 -not -name *.otf -not -name *.js -not -name *.ico -not -name *.rpm -not -name *.pyc" + +# Automatically exclude anything from the .gitignore file +while read line +do + export FIND_ARGS="${FIND_ARGS} -not -wholename */${line}" +done < ../.gitignore + +ALL_TESTS=`find ${NCF_TREE}/.. ${FIND_ARGS}` + +ERRORS=0 +for file in ${ALL_TESTS} +do + # allow nbsp in comments + if grep -P '^[^#]*\xA0' ${file} > /dev/null; then + ERRORS=`expr ${ERRORS} + 1` + echo "Test ${file} has a non breaking space in it" + fi +done + +if [ ${ERRORS} -eq 0 ]; then + echo "R: $0 Pass" +else + echo "R: $0 FAIL" +fi +exit ${ERRORS} diff --git a/policies/lib/tests/quick/normal_ordering.sh b/policies/lib/tests/quick/normal_ordering.sh new file mode 100755 index 00000000000..5bdfe3c6886 --- /dev/null +++ b/policies/lib/tests/quick/normal_ordering.sh @@ -0,0 +1,27 @@ +#!/bin/sh + +set -e +GIT_ROOT="$(git rev-parse --show-toplevel)" +NCF_TREE=$GIT_ROOT/policies/lib/tree + +# Source code should respect normal ordering to avoid misunderstanding +# This can help detect bugs + +ALL_TESTS=`find ${NCF_TREE} -name '*.cf' | grep -v 20_cfe_basics/cfengine` + +ERRORS=0 +for file in ${ALL_TESTS} +do + ${NCF_TREE}/../tests/quick/ordering.pl ${file} + if [ $? 
-ne 0 ]; then + ERRORS=`expr ${ERRORS} + 1` + echo "Test ${file} has a normal ordering error" + fi +done + +if [ ${ERRORS} -eq 0 ]; then + echo "R: $0 Pass" +else + echo "R: $0 FAIL" +fi +exit ${ERRORS} diff --git a/policies/lib/tests/quick/ordering.pl b/policies/lib/tests/quick/ordering.pl new file mode 100755 index 00000000000..53ce50b51c9 --- /dev/null +++ b/policies/lib/tests/quick/ordering.pl @@ -0,0 +1,74 @@ +#!/usr/bin/perl +use warnings; +use strict; + +my $exit = 0; + +# ordering from cfengine documentation +# convert to a hash { promise_type => order_id } +my $orderings = { + agent => make_hash(qw/meta vars defaults classes users files packages guest_environments methods processes services commands storage databases reports/), + edit_line => make_hash(qw/meta vars defaults classes delete_lines field_edits insert_lines replace_patterns reports/), + server => make_hash(qw/vars classes access roles/), + monitor => make_hash(qw/vars classes measurements reports/), + common => make_hash(qw/vars classes reports/), +}; + +while(my $file = shift @ARGV) { + check_ordering($file); +} +exit $exit; + +sub check_ordering { + # open file + my $filename = shift; + + open(my $fh, "<$filename") or die "Can't open file $filename"; + + # work variables + my $bundle = "unknown"; + my $bundle_type = "unknown"; + my $type = ""; + + # scan the file + while(my $line = <$fh>) { + $line =~ s/#.*//; + + if($line =~ /^\s*bundle\s+(\w+)\s+(\w+)/) { + # detect bundles + + $bundle_type = $1; + $bundle = $2; + $type = ""; + + } elsif($line =~ /\W(\w+):\s*$/) { + # detect promise type + + my $new_type = $1; + if($type ne "") { + + # check ordering + die "Unknown bundle type $bundle_type" unless exists $orderings->{$bundle_type}; + my $ordering = $orderings->{$bundle_type}; + if (!exists($ordering->{$type})){ + print "Unknown promise type '$type' in '$filename' for 'bundle $bundle_type $bundle'\n"; + } elsif (!exists($ordering->{$new_type})){ + print "Unknown promise type '$new_type' in '$filename' for 'bundle $bundle_type $bundle'\n"; + } elsif($ordering->{$type} > $ordering->{$new_type}) { + print "Error in '$filename' in 'bundle $bundle_type $bundle' : $type before $new_type\n"; + $exit++; + } + + } + $type = $new_type; + } + } + close($fh); +} + +sub make_hash { + my $i=1; + my %ordering = map { $_ => $i++ } @_; + return \%ordering; +} + diff --git a/policies/lib/tests/quick/test_generic_methods.py b/policies/lib/tests/quick/test_generic_methods.py new file mode 100755 index 00000000000..2f9d6964887 --- /dev/null +++ b/policies/lib/tests/quick/test_generic_methods.py @@ -0,0 +1,123 @@ +#!/usr/bin/python3 +""" +Sanity test file for methods +""" + +import sys +import os +DIRNAME = os.path.dirname(os.path.abspath(__file__)) +TESTLIB_PATH = DIRNAME + '/../testlib' +sys.path.insert(0, TESTLIB_PATH) +import re +import unittest +import collections +import testlib +import avocado + +class TestNcfBundles(avocado.Test): +#class TestNcfBundles(unittest.TestCase): + """ + Sanity tests for methods + """ + def setUp(self): + """ + Tests setup + """ + self.methods = testlib.get_methods() + + def test_methods_should_have_a_metadata(self): + """ + Methods should have a metadata + """ + for method in self.methods: + with self.subTest(i=method.path): + self.assertIn('name', method.metadata) + self.assertIn('description', method.metadata) + # We have way too much method without documentation at the moment + # this will need a dedicated pr + #self.assertIn('documentation', method.metadata) + self.assertIn('parameter', 
method.metadata) + self.assertIn('class_prefix', method.metadata) + self.assertIn('class_parameter', method.metadata) + + def test_methods_should_have_only_one_agent_bundle(self): + """ + Methods should define a unique agent bundle + """ + for method in self.methods: + with self.subTest(i=method.path): + bundles = method.get_bundles() + self.assertEqual(1, len(bundles)) + + def test_methods_name_should_be_unique(self): + """ + Methods should @name should be unique + """ + names = [x.metadata['name'] for x in self.methods] + duplicates = [x for x, y in collections.Counter(names).items() if y > 1] + if [] != duplicates: + for method in self.methods: + with self.subTest(i=method.path): + self.assertNotIn(method.metadata['name'], duplicates) + + @avocado.skip('Should be reenabled once replaced') + def test_old_class_prefix(self): + """ + Methods should define an old_class_prefix in either one of the following formats: + "old_class_prefix" string => canonify("_${}"); + "old_class_prefix" string => "_${canonified_}"; + + In fact, we should force the first one. + """ + for method in self.methods: + with self.subTest(k=method.path): + class_prefix = method.metadata['class_prefix'] + class_parameter = method.metadata['class_parameter'] + + class_pattern1 = r"\"old_class_prefix\"\s+string\s+=>\s+canonify\(\"" + class_prefix + "_" + r"\${" + class_parameter + r"}\"\);" + class_pattern2 = r"\"old_class_prefix\"\s+string\s+=>\s+\"" + class_prefix + "_" + r"\${canonified_" + class_parameter + r"}\";" + + if not skip(method): + self.assertTrue(testlib.test_pattern_on_file(method.path, class_pattern1) is not None or testlib.test_pattern_on_file(method.path, class_pattern2) is not None) + + def test_methods_should_not_contain_unescaped_chars(self): + """ + Test if the documentation fields contain unescaped dollar characters that would break pdflatex + """ + for method in self.methods: + check_backquotes = re.compile(r'[^\`]*\$[^\`]*') + with self.subTest(i=method.path): + if 'documentation' in method.metadata: + self.assertFalse(check_backquotes.match(method.metadata['documentation'])) + + @avocado.skip('Lots of methods are not correct atm') + def test_methods_name_should_be_class_prefix(self): + """ + Methods prefix should be based on their name + """ + for method in self.methods: + with self.subTest(i=method.path): + class_prefix = method.metadata['class_prefix'] + path = method.path_basename + self.assertTrue(class_prefix == path, '\nprefix = %s\n path = %s'%(class_prefix, path)) + + +### Helper functions +def skip(method): + """ + In some tests, we need to skip some methods, either because they are not really a true method + and only a wrapper or because their inner logic make them exceptions + """ + to_skip = ['file_from_shared_folder', 'user_password_hash'] + result = False + if testlib.test_pattern_on_file(method.path, r'{\s+methods:\s+[^;]+;\s+}'): + result = True + elif method.path_basename in to_skip: + result = True + if result: + pass + return result + +#if __name__ == '__main__': +# #unittest.main() +# main() diff --git a/policies/lib/tests/quick/test_getparent_module.py b/policies/lib/tests/quick/test_getparent_module.py new file mode 100755 index 00000000000..34fb769d20b --- /dev/null +++ b/policies/lib/tests/quick/test_getparent_module.py @@ -0,0 +1,62 @@ +import unittest, os, sys, shutil, tempfile +from pathlib import Path + +DIRNAME = os.path.dirname(os.path.abspath(__file__)) +GIT_ROOT = DIRNAME + '/../..' 
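+# getParent is shipped as a promise module in the ncf tree; make its directory
+# importable so the module can be exercised directly by the unit tests below.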
+sys.path.insert(0, GIT_ROOT + '/tree/10_ncf_internals/modules/promises/') +import getParent + + +class TestGetParents(unittest.TestCase): + def setUp(self): + self.test_dir = tempfile.mkdtemp() + test_tree = os.path.dirname(os.path.abspath(__file__)) + "/assets/getParent" + shutil.copytree(test_tree, self.test_dir + '/', dirs_exist_ok=True, symlinks=True) + + def tearDown(self): + shutil.rmtree(self.test_dir) + + def test_module(self): + param_list = [ + # 1 standard case + ( + self.test_dir + '/1/2/3/emptyfile.txt', + [ + self.test_dir + '/1', + self.test_dir + '/1/2', + self.test_dir + '/1/2/3' + ] + ), + # 2 must follow symlink + ( + self.test_dir + '/1/2/3/symlink_to_2/3/emptyfile.txt', + [ + self.test_dir + '/1', + self.test_dir + '/1/2', + self.test_dir + '/1/2/3' + ] + ), + # 3 must support globbing + ( + self.test_dir + '/1/2/3/*', + [ + self.test_dir + '/1', + self.test_dir + '/1/2', + self.test_dir + '/1/2/3' + ] + ) + ] + for path, expected in param_list: + with self.subTest(): + # Since the tempdir can not be known before execution, assume it is correctly parsed and + # start the index after the tempdir + results = getParent.exec_module(path) + self.assertEqual(results[1 + results.index(self.test_dir):], expected) + self.assertNotIn('/', results) + + def test_module_on_non_existing_files_must_return_empty_list(self): + results = getParent.exec_module(self.test_dir + '/file_that_does_not_exist.txt') + self.assertEqual(results, []) + +if __name__ == '__main__': + unittest.main() diff --git a/policies/lib/tests/quick/test_ncf_api.py b/policies/lib/tests/quick/test_ncf_api.py new file mode 100755 index 00000000000..eba6b81c4f5 --- /dev/null +++ b/policies/lib/tests/quick/test_ncf_api.py @@ -0,0 +1,106 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +import sys +import os +DIRNAME = os.path.dirname(os.path.abspath(__file__)) +GIT_ROOT = DIRNAME + '/../..' 
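+# The ncf metadata parser lives in policies/lib/tests/ncf.py; extend sys.path so it
+# can be imported and its parsing helpers tested below.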
+TESTLIB_PATH = DIRNAME + '/../testlib' +sys.path.insert(0, GIT_ROOT + '/tests') +import ncf +import os.path +import subprocess +import shutil +import avocado + +class TestNcf(avocado.Test): + + def setUp(self): + self.dirname = os.path.dirname(os.path.realpath(__file__)) + self.test_generic_method_file = self.dirname + '/test_ncf_api_assets/test_generic_method.cf' + with open(self.test_generic_method_file) as fd: + self.generic_method_content = fd.read() + self.all_methods = ncf.get_all_generic_methods_metadata()["data"]["generic_methods"] + + all_tags = ncf.tags["generic_method"] + self.methods_expected_tags = [ tag for tag in all_tags if not tag in ncf.optional_tags["generic_method"] ] + + + def test_get_ncf_root_dir(self): + self.assertEqual(ncf.get_root_dir(), os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/../../")) + + ##################################### + # Generic tests for parsing .cf files + ##################################### + + def test_parse_bundlefile_empty(self): + """Attempting to parse an empty string should raise an exception""" + self.assertRaises(Exception, ncf.parse_generic_method_metadata, "") + + def test_parse_bundlefile_incomplete(self): + """Attempting to parse a bundle file with metadata after the bundle agent declaration should raise an exception""" + self.assertRaises(Exception, ncf.parse_generic_method_metadata, """# @name A name + bundle agent thingy { + } + # @description bla bla + # @version 1.0""") + + + ############################## + # Generic method parsing tests + ############################## + + def test_parse_generic_method(self): + """Parsing a generic method should return a dict with all defined generic_method tags""" + metadata = ncf.parse_generic_method_metadata(self.generic_method_content)['result'] + self.assertTrue(set(metadata.keys()).issuperset(set(self.methods_expected_tags))) + + def test_parse_generic_method_data(self): + """Parsing should return a dict with the data from the test generic_method""" + metadata = ncf.parse_generic_method_metadata(self.generic_method_content)['result'] + self.assertEqual(metadata['bundle_name'], "package_install_version") + self.assertEqual(metadata['bundle_args'], ["package_name", "package_version"]) + self.assertEqual(metadata['name'], "Package install") + self.assertEqual(metadata['description'], "Install a package by name from the default system package manager") + self.assertEqual(metadata['parameter'], [ { 'constraints': { "allow_empty_string": False, "allow_whitespace_string": False, "max_length" : 16384 }, 'type' : 'string', 'name': 'package_name', 'description': 'Name of the package to install'},{ 'constraints': { "allow_empty_string": False, "allow_whitespace_string": False, "max_length" : 16384 }, 'type': 'version', 'name': 'package_version', 'description': 'Version of the package to install'}]) + self.assertEqual(metadata['class_prefix'], "package_install") + self.assertEqual(metadata['class_parameter'], "package_name") + self.assertEqual(metadata['class_parameter_id'], 1) + self.assertEqual(len(metadata), len(self.methods_expected_tags)) + + ##################################### + # Tests for reading all metadata info + ##################################### + + def test_get_all_generic_methods_filenames(self): + """test_get_all_generic_methods_filenames should return a list of all generic_methods files""" + base_dir = ncf.get_root_dir() + "/tree/30_generic_methods" + conf_repo_dir = "/var/rudder/configuration-repository/ncf/30_generic_methods" + alternative_path = 
os.path.dirname(os.path.realpath(__file__)) + "/test_methods" + + # Get list of generic_methods without prefix "_" on the filesystem + list_methods_files = [] + ## Get recursively each promises in the basic path and the alternative one + list_methods_files += [os.path.join(full_path,filename) for full_path, dirname, files in os.walk(base_dir) for filename in files if not filename.startswith('_') and filename.endswith('.cf')] + list_methods_files += [os.path.join(full_path,filename) for full_path, dirname, files in os.walk(conf_repo_dir) for filename in files if not filename.startswith('_') and filename.endswith('.cf')] + + filenames = ncf.get_all_generic_methods_filenames() + + filenames.sort() + list_methods_files.sort() + + self.assertEqual(filenames, list_methods_files) + + def test_get_all_generic_methods_metadata(self): + """get_all_generic_methods_metadata should return a list of all generic_methods with all defined metadata tags""" + metadata = ncf.get_all_generic_methods_metadata()["data"]["generic_methods"] + + number_generic_methods = len(ncf.get_all_generic_methods_filenames()) + self.assertEqual(number_generic_methods, len(metadata)) + + def test_get_all_generic_methods_metadata_with_arg(self): + """get_all_generic_methods_metadata should return a list of all generic_methods with all defined metadata tags""" + metadata = ncf.get_all_generic_methods_metadata()["data"]["generic_methods"] + + number_generic_methods = len(ncf.get_all_generic_methods_filenames()) + self.assertEqual(number_generic_methods, len(metadata)) diff --git a/policies/lib/tests/quick/test_ncf_api_assets/test_generic_method.cf b/policies/lib/tests/quick/test_ncf_api_assets/test_generic_method.cf new file mode 100644 index 00000000000..1952a751ada --- /dev/null +++ b/policies/lib/tests/quick/test_ncf_api_assets/test_generic_method.cf @@ -0,0 +1,25 @@ +# This file contains a sample generic_method + +# generic method package_install +# +# This method is unlikely to be useful at all, however it does provide +# the basic syntaxic requirements for the structured metadata in comments +# above each generic method main bundle +# +# @name Package install +# @description Install a package by name from the default system package manager +# +# @parameter package_name Name of the package to install +# @parameter package_version Version of the package to install +# @parameter_type package_version version +# @class_prefix package_install +# @class_parameter package_name +# +# Please see the online documentation for more details. +bundle agent package_install_version(package_name, package_version) +{ + packages: + "${package_name}" + package_action => "add"; + +} diff --git a/policies/lib/tests/quick/test_ncf_api_assets/test_technique.cf b/policies/lib/tests/quick/test_ncf_api_assets/test_technique.cf new file mode 100644 index 00000000000..174db0aba8f --- /dev/null +++ b/policies/lib/tests/quick/test_ncf_api_assets/test_technique.cf @@ -0,0 +1,32 @@ +# This file contains a sample meta-technique + +# Meta-Technique bla +# +# This Technique is unlikely to be useful at all, however it does provide +# the basic syntaxic requirements for the structured metadata in comments +# above each meta-technique main bundle +# +# @name Bla Technique for evaluation of parsingness +# @description This meta-Technique is a sample only, allowing for testing. +# @version 0.1 +# +# Please see the online documentation for more details. 
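+# The bundle body below simply chains generic method calls (package_install_version,
+# service_start, package_install, command_execution, _logger) on top of a small vars:
+# section; it is only meant as a parsing fixture.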
+bundle agent bla { + vars: + redhat:: + "apache_package_name" string => "httpd"; + !redhat:: + "apache_package_name" string => "apache2"; + + methods: + any:: + "ph1" usebundle => package_install_version("${bla.apache_package_name}", "2.2.11"); + cfengine:: + "ph2" usebundle => service_start("${bla.apache_package_name}"); + "ph3" usebundle => package_install("openssh-server"); + "ph4" usebundle => command_execution("/bin/echo \"test\""); + !cfengine:: + "ph5" usebundle => _logger("NA", "NA"); + + +} diff --git a/policies/lib/tests/testlib/common.sh b/policies/lib/tests/testlib/common.sh new file mode 100755 index 00000000000..ef92d7cd0a2 --- /dev/null +++ b/policies/lib/tests/testlib/common.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +set -e +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +GIT_ROOT=$SCRIPT_DIR/../.. +NCF_TREE=$GIT_ROOT/policies/lib/tree + +export GIT_ROOT NCF_TREE diff --git a/policies/lib/tests/testlib/given.rs b/policies/lib/tests/testlib/given.rs new file mode 100755 index 00000000000..944bd7047ec --- /dev/null +++ b/policies/lib/tests/testlib/given.rs @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +pub mod directory_present; +pub mod file_absent; +pub mod file_present; +pub mod setup_state; + +use crate::testlib::given::file_absent::FileAbsentStruct; +use crate::testlib::given::file_present::FilePresentStruct; +use crate::testlib::method_to_test::MethodToTest; +use directory_present::DirectoryPresentStruct; +use setup_state::SetupState; +use setup_state::SetupState::{DirectoryPresent, FileAbsent, FilePresent}; + +#[derive(Clone)] +pub enum Given { + Setup(SetupState), + MethodCall(MethodToTest), +} + +impl Given { + pub fn method_call(m: MethodToTest) -> Given { + Given::MethodCall(m) + } + pub fn file_present(file_path: String) -> Given { + Given::Setup(FilePresent(FilePresentStruct { path: file_path })) + } + pub fn file_absent(file_path: String) -> Given { + Given::Setup(FileAbsent(FileAbsentStruct { path: file_path })) + } + pub fn directory_present(directory_path: String) -> Given { + Given::Setup(DirectoryPresent(DirectoryPresentStruct { + path: directory_path, + })) + } +} diff --git a/policies/lib/tests/testlib/given/directory_present.rs b/policies/lib/tests/testlib/given/directory_present.rs new file mode 100644 index 00000000000..98a162c6e95 --- /dev/null +++ b/policies/lib/tests/testlib/given/directory_present.rs @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::testlib::given::setup_state::TestSetup; +use crate::testlib::test_setup::TestSetupResult; +use anyhow::Error; +use log::debug; +use std::fs; + +#[derive(Clone, Debug)] +pub struct DirectoryPresentStruct { + pub path: String, +} +impl TestSetup for DirectoryPresentStruct { + fn resolve(&self) -> anyhow::Result { + debug!("Creating directory {}", self.path); + fs::create_dir(&self.path)?; + Ok(TestSetupResult::default()) + } +} diff --git a/policies/lib/tests/testlib/given/file_absent.rs b/policies/lib/tests/testlib/given/file_absent.rs new file mode 100644 index 00000000000..34919baac69 --- /dev/null +++ b/policies/lib/tests/testlib/given/file_absent.rs @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::testlib::given::setup_state::TestSetup; +use crate::testlib::test_setup::TestSetupResult; +use anyhow::{Error, bail}; +use log::debug; +use std::fs; +use std::io::ErrorKind; + 
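+/// Test fixture ensuring the given path is absent before the scenario runs;
+/// an already-missing file is treated as success rather than an error.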
+#[derive(Clone)]
+pub struct FileAbsentStruct {
+    pub path: String,
+}
+impl TestSetup for FileAbsentStruct {
+    fn resolve(&self) -> anyhow::Result<TestSetupResult> {
+        let r = TestSetupResult::default();
+        debug!("Removing file {}", self.path);
+        match fs::remove_file(&self.path) {
+            Ok(()) => Ok(r),
+            Err(e) if e.kind() == ErrorKind::NotFound => Ok(r),
+            Err(e) => bail!(e),
+        }
+    }
+}
diff --git a/policies/lib/tests/testlib/given/file_present.rs b/policies/lib/tests/testlib/given/file_present.rs
new file mode 100644
index 00000000000..a4dd40b8a71
--- /dev/null
+++ b/policies/lib/tests/testlib/given/file_present.rs
@@ -0,0 +1,20 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+use crate::testlib::given::setup_state::TestSetup;
+use crate::testlib::test_setup::TestSetupResult;
+use anyhow::Error;
+use log::debug;
+use std::fs::File;
+
+#[derive(Clone, Debug)]
+pub struct FilePresentStruct {
+    pub path: String,
+}
+impl TestSetup for FilePresentStruct {
+    fn resolve(&self) -> anyhow::Result<TestSetupResult> {
+        debug!("Creating file {}", self.path);
+        File::create(&self.path)?;
+        Ok(TestSetupResult::default())
+    }
+}
diff --git a/policies/lib/tests/testlib/given/setup_state.rs b/policies/lib/tests/testlib/given/setup_state.rs
new file mode 100755
index 00000000000..809b34ea739
--- /dev/null
+++ b/policies/lib/tests/testlib/given/setup_state.rs
@@ -0,0 +1,29 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+use crate::testlib::given::directory_present::DirectoryPresentStruct;
+use crate::testlib::given::file_absent::FileAbsentStruct;
+use crate::testlib::given::file_present::FilePresentStruct;
+use crate::testlib::test_setup::TestSetupResult;
+use anyhow::Error;
+
+pub trait TestSetup {
+    fn resolve(&self) -> anyhow::Result<TestSetupResult>;
+}
+
+#[derive(Clone)]
+pub enum SetupState {
+    FilePresent(FilePresentStruct),
+    FileAbsent(FileAbsentStruct),
+    DirectoryPresent(DirectoryPresentStruct),
+}
+
+impl TestSetup for SetupState {
+    fn resolve(&self) -> anyhow::Result<TestSetupResult> {
+        match self {
+            SetupState::FilePresent(fp) => fp.resolve(),
+            SetupState::FileAbsent(fa) => fa.resolve(),
+            SetupState::DirectoryPresent(dp) => dp.resolve(),
+        }
+    }
+}
diff --git a/policies/lib/tests/testlib/method_test_suite.rs b/policies/lib/tests/testlib/method_test_suite.rs
new file mode 100755
index 00000000000..57cd7a3fb5e
--- /dev/null
+++ b/policies/lib/tests/testlib/method_test_suite.rs
@@ -0,0 +1,133 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+use crate::testlib::given::Given;
+use crate::testlib::given::setup_state::TestSetup;
+use crate::testlib::method_to_test::MethodToTest;
+use crate::testlib::test_result::ExecutionResult;
+use log::debug;
+use rudder_commons::PolicyMode;
+use rudderc::backends::Backend;
+use rudderc::backends::unix::Unix;
+use rudderc::backends::unix::cfengine::cf_agent;
+use rudderc::ir::Technique;
+use rudderc::ir::technique::{ItemKind, TechniqueId};
+use std::fs;
+use std::path::PathBuf;
+use std::str::FromStr;
+
+#[derive(Clone)]
+pub struct MethodTestSuite {
+    given: Vec<Given>,
+    when: Vec<MethodToTest>,
+}
+impl Default for MethodTestSuite {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl MethodTestSuite {
+    pub fn new() -> MethodTestSuite {
+        MethodTestSuite {
+            given: Vec::new(),
+            when: Vec::new(),
+        }
+    }
+
+    pub fn given(&self, g: Given) -> MethodTestSuite {
+        let mut v = self.given.clone();
+        v.push(g);
+        MethodTestSuite {
+            given: v,
+            when: self.when.clone(),
+        }
+    }
+
+    pub fn when(&self, nm: MethodToTest) -> MethodTestSuite {
+        let mut v = self.when.clone();
+        v.push(nm);
+        MethodTestSuite {
+            given: self.given.clone(),
+            when: v,
+        }
+    }
+
+    pub fn generate_test_technique(&self, extra_items: Vec<ItemKind>) -> Technique {
+        let mut items = extra_items;
+        items.extend(self.when.iter().map(|nm| nm.clone().to_item_kind()));
+        Technique {
+            format: 0,
+            id: TechniqueId::from_str("method_test_technique").unwrap(),
+            name: "".to_string(),
+            version: "".to_string(),
+            tags: None,
+            category: None,
+            description: None,
+            documentation: None,
+            policy_types: Vec::new(),
+            items,
+            params: vec![],
+        }
+    }
+
+    pub fn execute(self, library_path: PathBuf, workdir: PathBuf) -> ExecutionResult {
+        debug!("[Starting a new method test]");
+        let mut conditions = vec![];
+        let mut policy_mode: PolicyMode = Default::default();
+        let mut extra_items: Vec<ItemKind> = vec![];
+        debug!("resolving the given");
+        self.given.iter().for_each(|g| match g {
+            Given::Setup(setup) => {
+                let setup_result = setup.resolve().unwrap();
+                if let Some(p) = setup_result.policy_mode {
+                    policy_mode = p;
+                }
+                conditions.extend(setup_result.conditions);
+            }
+            Given::MethodCall(method) => {
+                extra_items.push(method.clone().to_item_kind());
+            }
+        });
+        debug!("generating a YAML technique");
+        let test_technique = self.clone().generate_test_technique(extra_items);
+        fs::write(
+            workdir.join("technique.yml"),
+            serde_yaml::to_string(&test_technique.clone()).unwrap(),
+        )
+        .unwrap();
+        let resource_path = workdir.join("resources");
+        debug!("compiling the technique");
+        let compiled_technique_path = workdir.join("technique.cf");
+
+        debug!("converting it to a standalone policy");
+        let backend = Unix::new();
+        let standalone = backend
+            .generate(test_technique.clone(), resource_path.as_path(), true)
+            .unwrap();
+
+        fs::write(compiled_technique_path.clone(), standalone.clone()).unwrap();
+
+        debug!("executing the standalone technique");
+        let run_result = cf_agent(
+            &compiled_technique_path,
+            &compiled_technique_path,
+            &library_path,
+            &PathBuf::from("/opt/rudder/bin/"),
+            true,
+        )
+        .unwrap();
+        let cfengine_log_path = workdir.join("output.log");
+        fs::write(cfengine_log_path.clone(), run_result.output).unwrap();
+        fs::write(
+            workdir.join("datastate.json"),
+            serde_json::to_string(&run_result.datastate.clone()).unwrap(),
+        )
+        .unwrap();
+        ExecutionResult {
+            conditions: run_result.datastate.classes,
+            variables: run_result.datastate.vars,
+            reports: vec![],
+        }
+    }
+}
diff --git a/policies/lib/tests/testlib/method_to_test.rs b/policies/lib/tests/testlib/method_to_test.rs
new file mode 100644
index 00000000000..aab02f2a18f
--- /dev/null
+++ b/policies/lib/tests/testlib/method_to_test.rs
@@ -0,0 +1,253 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Normation SAS
+
+use crate::integration::get_lib;
+use anyhow::Context;
+use itertools::Itertools;
+use regex::Regex;
+use rudder_commons::PolicyMode;
+use rudder_commons::methods::method::MethodInfo;
+use rudderc::backends::unix::cfengine::cfengine_canonify;
+use rudderc::ir::technique::{Id, ItemKind, Method};
+use std::collections::HashMap;
+
+pub enum MethodStatus {
+    Success,
+    Repaired,
+    Error,
+    NA,
+}
+
+pub fn get_result_condition_suffixes(status: MethodStatus) -> Vec {
+    let v = match status {
+        MethodStatus::Success => vec!["ok", "kept", "not_repaired", "reached"],
+        MethodStatus::Repaired => vec!["ok", "repaired", "not_kept", "reached"],
+        MethodStatus::Error => vec![
+            "not_kept",
+            "not_ok",
+            "not_repaired",
+            "failed",
"error", + "reached", + ], + MethodStatus::NA => vec!["noop"], + }; + v.iter().map(|s| s.to_string()).collect() +} +#[derive(Clone)] +pub struct MethodToTest { + pub id: Id, + pub name: String, + pub params: HashMap, + pub method_info: &'static MethodInfo, + pub policy_mode: PolicyMode, +} + +impl Default for MethodToTest { + fn default() -> Self { + Self::new() + } +} + +impl MethodToTest { + pub fn new() -> MethodToTest { + MethodToTest { + id: Default::default(), + name: "file_absent".to_string(), + params: HashMap::from([("path".to_string(), "/tmp/default_target.txt".to_string())]), + method_info: get_lib() + .get("file_absent") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + policy_mode: Default::default(), + } + } + + pub fn audit(self) -> MethodToTest { + MethodToTest { + policy_mode: PolicyMode::Audit, + ..self + } + } + + pub fn enforce(self) -> MethodToTest { + MethodToTest { + policy_mode: PolicyMode::Enforce, + ..self + } + } + pub fn to_item_kind(&self) -> ItemKind { + ItemKind::Method(Method { + name: format!("Testing method {}", self.id), + id: self.id.clone(), + policy_mode_override: Some(self.policy_mode), + method: self.name.clone(), + params: self.params.clone(), + info: Some(self.method_info), + description: Default::default(), + documentation: Default::default(), + tags: Default::default(), + reporting: Default::default(), + condition: Default::default(), + resolved_foreach_state: Default::default(), + }) + } + pub fn get_result_condition_prefix(&self) -> String { + cfengine_canonify(&format!( + "{}_{}_", + self.method_info.class_prefix, + self.params.get(&self.method_info.class_parameter).unwrap() + )) + } + + pub fn log_v4_result_conditions(&self, status: MethodStatus) -> Vec { + get_result_condition_suffixes(status) + .into_iter() + .map(|s| { + Regex::new(&format!( + "^{}_{}_{}$", + cfengine_canonify(&self.id.to_string()), + r"\d+", + s + )) + .unwrap() + }) + .collect() + } + + // As legacy result condition can overlap between method calls, we have to handle + // combinations of expected statuses + pub fn legacy_result_conditions(&self, statuses: Vec) -> Vec { + let mut expected_suffixes: Vec = Vec::new(); + statuses.into_iter().for_each(|status| { + expected_suffixes.extend(get_result_condition_suffixes(status)); + }); + expected_suffixes + .into_iter() + .unique() + .map(|s| { + cfengine_canonify(&format!( + "{}_{}_{}", + &self.method_info.class_prefix, + self.params.get(&self.method_info.class_parameter).unwrap(), + s + )) + }) + .collect::>() + } + + // Below are the generic method constructors + pub fn command_execution(command: String) -> MethodToTest { + MethodToTest { + name: "command_execution".to_string(), + params: HashMap::from([("command".to_string(), command)]), + method_info: get_lib() + .get("command_execution") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn file_absent(path: String) -> MethodToTest { + MethodToTest { + name: "file_absent".to_string(), + params: HashMap::from([("path".to_string(), path)]), + method_info: get_lib() + .get("file_absent") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn file_check_exists(path: String) -> MethodToTest { + MethodToTest { + name: "file_check_exists".to_string(), + params: HashMap::from([("path".to_string(), path)]), + method_info: get_lib() + .get("file_check_exists") + .context("Looking for the method metadata from the parsed 
library") + .unwrap(), + ..Self::new() + } + } + pub fn condition_from_expression(condition: String, expression: String) -> MethodToTest { + MethodToTest { + name: "condition_from_expression".to_string(), + params: HashMap::from([ + ("condition".to_string(), condition), + ("expression".to_string(), expression), + ]), + method_info: get_lib() + .get("condition_from_expression") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn condition_from_variable_match( + condition: String, + variable_name: String, + expected_match: String, + ) -> MethodToTest { + MethodToTest { + name: "condition_from_variable_match".to_string(), + params: HashMap::from([ + ("condition".to_string(), condition), + ("variable_name".to_string(), variable_name), + ("expected_match".to_string(), expected_match), + ]), + method_info: get_lib() + .get("condition_from_variable_match") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn condition_from_variable_existence( + condition: String, + variable_name: String, + ) -> MethodToTest { + MethodToTest { + name: "condition_from_variable_existence".to_string(), + params: HashMap::from([ + ("condition".to_string(), condition), + ("variable_name".to_string(), variable_name), + ]), + method_info: get_lib() + .get("condition_from_variable_existence") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn variable_string(prefix: String, name: String, value: String) -> MethodToTest { + MethodToTest { + name: "variable_string".to_string(), + params: HashMap::from([ + ("prefix".to_string(), prefix), + ("name".to_string(), name), + ("value".to_string(), value), + ]), + method_info: get_lib() + .get("variable_string") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } + pub fn variable_dict(prefix: String, name: String, value: String) -> MethodToTest { + MethodToTest { + name: "variable_dict".to_string(), + params: HashMap::from([ + ("prefix".to_string(), prefix), + ("name".to_string(), name), + ("value".to_string(), value), + ]), + method_info: get_lib() + .get("variable_dict") + .context("Looking for the method metadata from the parsed library") + .unwrap(), + ..Self::new() + } + } +} diff --git a/policies/lib/tests/testlib/mod.rs b/policies/lib/tests/testlib/mod.rs new file mode 100644 index 00000000000..132f19bee6e --- /dev/null +++ b/policies/lib/tests/testlib/mod.rs @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +pub mod given; +pub mod method_test_suite; +pub mod method_to_test; +pub mod test_result; +pub mod test_setup; diff --git a/policies/lib/tests/testlib/test_result.rs b/policies/lib/tests/testlib/test_result.rs new file mode 100644 index 00000000000..7fe7a201e19 --- /dev/null +++ b/policies/lib/tests/testlib/test_result.rs @@ -0,0 +1,88 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use crate::testlib::method_to_test::{MethodStatus, MethodToTest}; +use log::debug; +use rudder_commons::report::Report; + +#[derive(Debug, Clone)] +pub struct ExecutionResult { + pub reports: Vec, + pub conditions: Vec, + pub variables: serde_json::Value, +} + +impl ExecutionResult { + pub fn assert_legacy_result_conditions( + &self, + method_call: MethodToTest, + expected_status: Vec, + ) { + let expected_conditions = 
method_call.legacy_result_conditions(expected_status); + for c in expected_conditions.clone() { + assert!( + self.conditions.contains(&c), + "Could not find the expected result condition '{}'", + c + ); + debug!("Found expected result condition '{}'", c); + } + let pattern = method_call.get_result_condition_prefix(); + let matching: Vec = self + .conditions + .clone() + .into_iter() + .filter(|c| c.starts_with(&pattern) && !expected_conditions.contains(c)) + .collect(); + assert!( + matching.is_empty(), + "Found unexpected result conditions in the datastate:\n[\n {}\n]", + matching.join(",\n ") + ); + } + + // When using the log v4, an incremental index is added to each method call, making + // the exact result condition difficult to compute, using patterns is easier + pub fn assert_log_v4_result_conditions( + &self, + method_call: MethodToTest, + expected_status: MethodStatus, + ) { + let expected_conditions = method_call.log_v4_result_conditions(expected_status); + for expected_pattern in expected_conditions.clone() { + assert!( + self.conditions.iter().any(|c| expected_pattern.is_match(c)), + "Could not find the expected result condition '{}'", + expected_pattern + ); + debug!( + "Found expected log v4 result condition '{}'", + expected_pattern.as_str() + ); + } + // In log v4 we expected result conditions under the form ___ + let matching = self + .conditions + .clone() + .into_iter() + .filter(|c| c.contains(&method_call.id.to_string())) + .collect::>(); + assert!( + matching.is_empty(), + "Found unexpected log v4 result conditions in the datastate:\n[\n {}\n]", + matching.join(",\n ") + ) + } + + pub fn assert_conditions_are_defined(&self, conditions: Vec) { + conditions + .iter() + .for_each(|c| assert!(self.conditions.contains(c))) + } + + pub fn assert_conditions_are_undefined(&self, conditions: Vec) { + conditions + .iter() + .for_each(|c| assert!(!self.conditions.contains(c))) + } +} diff --git a/policies/lib/tests/testlib/test_setup.rs b/policies/lib/tests/testlib/test_setup.rs new file mode 100755 index 00000000000..20f0cfdd38d --- /dev/null +++ b/policies/lib/tests/testlib/test_setup.rs @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Normation SAS + +use rudder_commons::PolicyMode; +use rudderc::ir::technique::ItemKind; + +/// Initial test environment to create before running a test +#[derive(Debug, Default)] +pub struct TestSetupResult { + pub conditions: Vec, + pub method_calls: Vec, + pub policy_mode: Option, +} +impl TestSetupResult { + pub fn push_condition(&mut self, condition: String) { + let mut v = self.conditions.clone(); + v.push(condition); + self.conditions = v; + } + + pub fn push_method_call(&mut self, method: ItemKind) { + self.method_calls.push(method); + } + + pub fn policy_mode(&mut self, mode: PolicyMode) { + self.policy_mode = Some(mode); + } +} diff --git a/policies/lib/tests/testlib/testlib/__init__.py b/policies/lib/tests/testlib/testlib/__init__.py new file mode 100644 index 00000000000..d2980cc04b4 --- /dev/null +++ b/policies/lib/tests/testlib/testlib/__init__.py @@ -0,0 +1 @@ +from .method import * diff --git a/policies/lib/tests/testlib/testlib/method.py b/policies/lib/tests/testlib/testlib/method.py new file mode 100755 index 00000000000..cb9afd77f81 --- /dev/null +++ b/policies/lib/tests/testlib/testlib/method.py @@ -0,0 +1,134 @@ +#!/usr/bin/python3 +""" +Lib used by the ncf sanity tests +Defines the Method class, and make the interactions +with the ncf python api to avoid using it in tests. 
+""" + +import os +import sys +import re +import codecs +from shutil import which +DIRNAME = os.path.dirname(os.path.abspath(__file__)) +GIT_ROOT = DIRNAME + '/../../..' +sys.path.insert(0, GIT_ROOT + '/tests/') +print(GIT_ROOT) +import ncf + +ncf.CFPROMISES_PATH = which("cf-promises") +NCF_TREE = os.getenv('NCF_TREE') +NCF_TREE = GIT_ROOT + '/tree' +#NCF_TREE = os.getenv('NCF_TREE') + +PROMISE_TYPES = [ + "meta", + "vars", + "defaults", + "classes", + "users", + "files", + "packages", + "guest_environments", + "methods", + "processes", + "services", + "commands", + "storage", + "databases", + "report" +] + +def canonify(string): + """ + Canonify a given string + """ + regex = re.compile(r'[^a-zA-Z0-9]') + return re.sub(regex, '_', string) + +class Method: + """ + Represent an ncf method, making it easier to manipulate + """ + def __init__(self, path): + self.path = path + self.path_basename = os.path.basename(self.path).split('.')[0] + self.raw_content = self.get_raw_content() + self.content = self.get_bundle_content() + self.metadata = ncf.parse_generic_method_metadata(self.raw_content)["result"] + + def get_raw_content(self): + """ + Return raw content of the whole file defining the method + """ + with codecs.open(self.path, encoding="utf-8") as file_descriptor: + content = file_descriptor.read() + return content + + def get_bundle_content(self): + """ + Return content of the method, which is the cfengine code without comments + """ + content = [] + raw = self.get_raw_content() + for line in raw.splitlines(): + match = re.match(r"^\s*#.*$", line, flags=re.UNICODE) + if not match: + content.append(line) + return content + + def get_bundles(self): + """ + Return each bundles called in the method content + """ + matches = [] + for line in self.content: + match = re.match(r'\s*bundle agent ([a-zA-Z_]+)\s*.*$', line, flags=re.UNICODE) + if match: + matches.append(match.group(1)) + return matches + + def get_bundle_name(self): + """ + Return the bundle name of the method + """ + return self.get_bundles()[0] + + def get_promise_types(self): + """ + Return all promise types used in the method + """ + matches = [] + promises_regex = '(' + '|'.join(PROMISE_TYPES) + ')' + for line in self.content: + match = re.match(r'^\s*%s:$'%promises_regex, line, flags=re.UNICODE) + if match: + matches.append(match.group(1)) + return matches + + +def get_methods(): + """ + Return an array of Method object containing all methods defined in the + tree/30_generic_methods of the repo + """ + gms = [] + methods_folder = NCF_TREE + '/30_generic_methods/' + # We do not parse non method files (starting by _) since the ncf lib can not parse them + # They are needed if we ever want to migrate the other tests to python + filenames = [ + methods_folder + x for x in os.listdir(methods_folder) + if x.endswith('.cf') and not x.startswith('_') + ] + + for method_file in filenames: + gms.append(Method(method_file)) + return gms + +def test_pattern_on_file(filename, pattern): + """ + Return the result of a search of a pattern on a whole file + """ + with codecs.open(filename, encoding="utf-8") as file_descriptor: + content = file_descriptor.read() + return re.search(pattern, content) diff --git a/policies/lib/tree/10_ncf_internals/README.md b/policies/lib/tree/10_ncf_internals/README.md new file mode 100644 index 00000000000..ae20ae30d3b --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/README.md @@ -0,0 +1,3 @@ +## 10_ncf_internals + +This directory contains all the code and glue to make the framework works. 
diff --git a/policies/lib/tree/10_ncf_internals/configuration.cf b/policies/lib/tree/10_ncf_internals/configuration.cf new file mode 100644 index 00000000000..745cb5cbc7c --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/configuration.cf @@ -0,0 +1,48 @@ +##################################################################################### +# Copyright 2013 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### + +# Global Config +# The variables will be usable in the context configuration.variablename +bundle common configuration +{ + vars: + "fatal" string => "[FATAL]"; + "error" string => "[ERROR]"; + "info" string => "[INFO]"; + "debug" string => "[DEBUG]"; + "trace" string => "[TRACE]"; + + "enabled_abort_handlers" slist => { "_abort_default", "abort_rudder" }; + "flag_file" string => "/var/rudder/agent-data/flags.json"; + + classes: + # Define verbosity classes according to classes + # defined by the agent after verbosity options (-I, -v, -d) + "info" expression => "inform_mode", + scope => "namespace"; + "debug" expression => "verbose_mode", + scope => "namespace"; + "trace" expression => "debug_mode", + scope => "namespace"; + + # Make log level incremental + "debug" expression => "trace", + scope => "namespace"; + "info" expression => "debug", + scope => "namespace"; +} diff --git a/policies/lib/tree/10_ncf_internals/initialization.cf b/policies/lib/tree/10_ncf_internals/initialization.cf new file mode 100644 index 00000000000..fa02feaae47 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/initialization.cf @@ -0,0 +1,68 @@ +##################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### + +# Initialization +# This bundle will make necessary steps to prepare ncf before running any configuration +# It needs to be called just after loading ncf and its configuration. 
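# (Illustration, not part of this file: in a hypothetical standalone policy the
# ordering described above would be expressed as
#   body common control {
#     bundlesequence => { "configuration", "initialization", "my_technique" };
#   }
# so that the classes and variables from configuration.cf exist before this bundle
# runs, and both exist before any user policy is evaluated.)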
+bundle agent initialization +{ + + vars: + # Reporting context for Rudder + "report_data.should_report" string => "true"; + # Global counter + "report_data.index" int => "0"; + + files: + "${sys.workdir}/modules" + copy_from => local_dcp("${this.promise_dirname}/modules"), + depth_search => recurse("inf"), + perms => m("700"), + action => immediate, + move_obstructions => "true", + classes => classes_generic("ncf_internals_modules_update"), + comment => "Update the ncf modules in CFEngine's workdir from ncf sources"; + + methods: + # the ncf_init_* classes can be used to get the aggregated result of ncf initialization + "copy classes for reporting" usebundle => _classes_copy("ncf_internals_modules_update", "ncf_init"); + "init dry_run context" usebundle => dry_run_initialization; + "init flag context" usebundle => flag_initialization; + "init method id" usebundle => rudder_method_id_reset; + + reports: + info:: + "${configuration.info} Starting CFEngine ${sys.cf_version} on host ${sys.fqhost} (${sys.flavour} ${sys.arch})"; +} + +# Copy additional modules from custom path +# The content of the path will be copied in the workdir's module path +# INFO: There is no purge in workdir's module path so you can add additional modules easily. +# WARNING: If synchronizing content that exists in standard path, you will end up with +# file changes at every run +bundle agent additional_modules(path) { + files: + "${sys.workdir}/modules" + copy_from => local_dcp("${path}/"), + depth_search => recurse("inf"), + perms => m("700"), + action => immediate, + move_obstructions => "true", + classes => classes_generic("ncf_internals_modules_update"), + comment => "Update the ncf modules in CFEngine's workdir from custom path"; +} diff --git a/policies/lib/tree/10_ncf_internals/list-compatible-inputs b/policies/lib/tree/10_ncf_internals/list-compatible-inputs new file mode 100755 index 00000000000..e298246f517 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/list-compatible-inputs @@ -0,0 +1,254 @@ +#!/bin/sh + +set -e + +# Need at least 3 parameters +if [ "$#" -lt 3 ] +then + echo "Usage: $0 [--capability-file ] [--agent-version ] --ncf-path [common|local]/ [ [common|local]/ [/[common|local] that are compatible with the given CFEngine version and the capability list" + exit 1 +fi + +# add a default path since it is emptied by cfengine, and busybox for android +PATH="/usr/gnu/bin:/usr/gnu/x86_64-pc-solaris2.11/bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/system/xbin:${PATH}" +export PATH + +# tag matching +# warning do not change regex without testing on aix +tag_regex='^#[ \t]*@agent_version[ \t]*' +reqs_regex='^#[ \t]*@agent_requirements[ \t]*' +cap_regex='"capabilities"[ \t]*:[ \t]*\[\(.*\)\]' +version_regex='\([0-9][0-9]*\)\.\([0-9][0-9]*\).*' +agent_regex='"agent_version"[ \t]*\([<>=]=*\)[ \t]*'${version_regex} + +if [ `printf "%.1s" "$1"` = "-" ] +then + # standard parsing + while true + do + if [ "$1" = "--capability-file" ] + then + capability_file="$2" + shift 2 + elif [ "$1" = "--agent-version" ] + then + cfengine_version="$2" + shift 2 + elif [ "$1" = "--ncf-path" ] + then + framework_path="$2" + shift 2 + else + break + fi + done +else + cfengine_version="$1" + framework_path="$2" + # see comment below on the last parameters + shift 2 +fi + +if [ -n "${cfengine_version}" ] +then + # split version numbers + cfengine_major=`printf "${cfengine_version}\n" | sed -e "s/${version_regex}/\\1/"` + cfengine_minor=`printf "${cfengine_version}\n" | sed -e "s/${version_regex}/\\2/"` 
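# (Worked example with a hypothetical version string: for cfengine_version="3.18.3-1"
# the two sed substitutions above yield cfengine_major=3 and cfengine_minor=18;
# the first capture group keeps the digits before the first dot, the second keeps
# the digits after it, and the trailing .* discards the rest of the string.)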
+fi + +# list capabilities +capabilities="" +if [ -f "${capability_file}" ] +then + capabilities=`cat "${capability_file}"` +else + # Empty the variable if the file does not exist + # This allows passing the file path everytime and not failing when it is not there + capability_file="" +fi + +# requirement validation function +validate_requirements() { + file="$1" + requirements=`sed -ne "s/${reqs_regex}//p" "${file}"` + # AIX's sed doesn't support '|' or '?' so we are limited to matching approximately valid expression + validity=`echo "${requirements}" | sed -ne "/\\(${cap_regex}\\)*[ \\t]*\\([|&][|&]\\)*[ \\t]*\\(${agent_regex}\\)*/p"` + if [ -z "${validity}" ] + then + echo "Invalid requirement ${requirements} in ${file}" 1>&2 + exit 1 + fi + + # extract capabilities from the expression + caps=`echo "${requirements}" | sed -ne "s/${cap_regex}.*/\\1/p" | tr -d '" \t' | tr ',' ' '` + include_caps="x" + if [ -n "${caps}" ] + then + include_caps=0 # true for a shell + # for all required capability + for cap in ${caps} + do + # manage ! for negation + count_wanted=1 + if [ `echo "${cap}" | cut -c 1` = '!' ] + then + count_wanted=0 + cap=`echo "${cap}" | cut -c 2-` + fi + + count=0 + # check if there is a matching agent capability + for c in ${capabilities} + do + if [ "${cap}" = "${c}" ] + then + count=1 + fi + done + + # exclude if there is no match + if [ ${count} != ${count_wanted} ] + then + include_caps=1 # false in shell + fi + done + fi + + # extract agent version from the expression + operator=`echo "${requirements}" | sed -ne "s/.*${agent_regex}[ \\t]*/\\1/p"` + major=`echo "${requirements}" | sed -ne "s/.*${agent_regex}[ \\t]*/\\2/p"` + minor=`echo "${requirements}" | sed -ne "s/.*${agent_regex}[ \\t]*/\\3/p"` + include_agent="x" + if [ -n "${operator}" ] + then + if [ "${operator}" = ">=" ] + then + [ "${cfengine_major}" -gt "${major}" ] || [ "${cfengine_major}" -eq "${major}" -a "${cfengine_minor}" -ge "${minor}" ] + include_agent=$? + elif [ "${operator}" = "<" ] + then + [ "${cfengine_major}" -lt "${major}" ] || [ "${cfengine_major}" -eq "${major}" -a "${cfengine_minor}" -lt "${minor}" ] + include_agent=$? + else + echo "Unknown operator ${operator} in ${file}" 1>&2 + exit 1 + fi + fi + + # extract binary operator + operator=`echo "${requirements}" | sed -ne "s/${cap_regex}[ \\t]*\\(..\\)[ \\t]*${agent_regex}/\\2/p"` + if [ "${operator}" = "&&" ] + then + [ ${include_caps} -eq 0 ] && [ ${include_agent} -eq 0 ] + elif [ "${operator}" = "||" ] + then + [ ${include_caps} -eq 0 ] || [ ${include_agent} -eq 0 ] + else + if [ "${include_caps}" = "x" ] + then + [ ${include_agent} -eq 0 ] + elif [ "${include_agent}" = "x" ] + then + [ ${include_caps} -eq 0 ] + else + echo "Evaluation error of ${requirements} in ${file}" + exit 1 + fi + fi +} + +# move into framework path +cd "${framework_path}" + +# Last parameters not named to keep them as a quoted array +for directory in "$@" +do + if [ "${NCF_CACHE_PATH}" = "" ] + then + # maintain compatibility with old callers + exclude_file="${framework_path}/${directory}/.ncf-exclude-cache-${cfengine_version}" + else + # take the cache directory from environment + exclude_basedir="${NCF_CACHE_PATH}/ncf-exclude-cache-${cfengine_version}" + [ -d "${exclude_basedir}" ] || mkdir "${exclude_basedir}" + canonified_path=`echo "${framework_path}/${directory}" | sed -e "s/\\//_/g"` + exclude_file="${exclude_basedir}/${canonified_path}" + fi + + # ignore directory if it doesn't exist + if [ ! 
-d "${framework_path}/${directory}" ]; then continue; fi + + # first remove obsolete cache for exclude list + if [ -f "${exclude_file}" ] + then + # Include capability_file in the search list, if it doesn't exist it will be ignored + # If it has been changed the cache will be considered obsolete + newer_files=`find "${directory}" ${capability_file} -type f -newer "${exclude_file}"` + if [ "${newer_files}" != "" ] + then + rm -f "${exclude_file}" + fi + fi + + # then create cache if it doesn't exist + if [ -f "${exclude_file}" ] + then + excludes=`cat "${exclude_file}"` + else + for file in `find "${directory}" -name '*.cf' -exec grep -l -e "${tag_regex}" -e "${reqs_regex}" '{}' \;` + do + operator="" + + # @agent_version + if grep -q "${tag_regex}>=" "${file}"; then operator=">="; fi + if grep -q "${tag_regex}<" "${file}"; then operator="<"; fi + if [ -z "${operator}" ] || [ -z "${cfengine_version}" ] + then + include_version=0 # true for a shell + else + major=`sed -ne "s/${tag_regex}${operator}[ \\t]*${version_regex}/\\1/p" "${file}"` + minor=`sed -ne "s/${tag_regex}${operator}[ \\t]*${version_regex}/\\2/p" "${file}"` + set +e + if [ "${operator}" = ">=" ] + then + [ "${cfengine_major}" -gt "${major}" ] || [ "${cfengine_major}" -eq "${major}" -a "${cfengine_minor}" -ge "${minor}" ] + include_version=$? + else # < + [ "${cfengine_major}" -lt "${major}" ] || [ "${cfengine_major}" -eq "${major}" -a "${cfengine_minor}" -lt "${minor}" ] + include_version=$? + fi + set -e + fi + + # @agent_requirements + if grep -q "${reqs_regex}" "${file}" + then + set +e + validate_requirements "${file}" + include_reqs="$?" + set -e + else + # no capabilities required = accept + include_reqs=0 # true for a shell + fi + + # if exclude because of version or exclude because of requirements + # if (include_version == false) || (include_reqs == false) + if [ ${include_version} -ne 0 ] || [ ${include_reqs} -ne 0 ] + then + # exclude + file_name=`basename ${file}` + # posix compliant syntax to exclude a file + excludes="${excludes} -name ${file_name} -prune -o" + fi + done + printf "${excludes}" > "${exclude_file}" + fi + + # eventually call find + # posix compliant version of find without -printf '%p\n', it should work with darwin and aix + find "${directory}" ${excludes} -name '*.cf' -print +done + diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/apk b/policies/lib/tree/10_ncf_internals/modules/packages/apk new file mode 100755 index 00000000000..6f92fb2ef47 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/apk @@ -0,0 +1,110 @@ +#!/bin/sh -e +get_package_data() { + file="${INPUT_File?File must be given to get-package-data}" + set +e + apk verify $file 2>/dev/null >/dev/null + rc=$? + set -e + if [ 0 = $rc ]; then + echo "PackageType=file" + name=$(basename $file) + echo $name | sed -e 's/-/ /' -e 's/.apk/ apk/' | awk ' +{ + printf("Name=%s\n",$1) + printf("Version=%s\n",$2) +}' + else + echo PackageType=repo + echo Name=$file + fi +} + +list_installed() { + # Example `apk list --installed` output: + # busybox-1.32.0-r3 x86_64 {busybox} (GPL-2.0-only) [installed] + # + # After rewrite: + # Name=busybox + # Version=1.32.0-r3 + # Architecture=x86_64 + apk list --installed | sed 's/-\([0-9]\)/ \1/' | awk ' +{ + printf("Name=%s\n",$1) + printf("Version=%s\n",$2) + printf("Architecture=%s\n",$3) +}' +} + +repo_install() { + name="${INPUT_Name?Name must be given to repo-install}" + version="${INPUT_Version}" + if [ ! 
-z "${INPUT_Version}" ]; then + apk add "$name=$version" 2>/dev/null >/dev/null + else + apk add --upgrade "$name" 2>/dev/null >/dev/null + fi +} + +file_install() { + file="${INPUT_File?File must be given to file-install}" + apk add $file 2>/dev/null >/dev/null +} + +list_updates() { +# for some odd reason --upgradable does not work where -u does + apk list -u | sed 's/-\([0-9]\)/ \1/' | awk ' +{ + printf("Name=%s\n",$1) + printf("Version=%s\n",$2) + printf("Architecture=%s\n",$3) +}' +} + +remove() { + name="${INPUT_Name?Name must be given to remove}" + apk del "$name" 2>/dev/null >/dev/null +} + +main() { + command=$1 + + # Output maybe contain backslashes, and we don't want those to end up escaping + # something so we use use -r with read. + while read -r line; do + # Note that line is a variable assignment, e.g. + # INPUT_File=syncthing + export INPUT_$line + done + + + case $command in + supports-api-version) + echo 1 + ;; + get-package-data) + get_package_data + ;; + list-installed) + list_installed + ;; + repo-install) + repo_install + ;; + file-install) + file_install + ;; + list-updates) + list_updates + ;; + list-updates-local) + list_updates + ;; + remove) + remove + ;; + *) + echo "ErrorMessage=Invalid operation" + esac +} + +main $1 diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/apt_get b/policies/lib/tree/10_ncf_internals/modules/packages/apt_get new file mode 100755 index 00000000000..92b3fcfebf3 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/apt_get @@ -0,0 +1,422 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python then python2 +if command -v python3 >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +import sys +import os +import subprocess +import re + +PY3 = sys.version_info > (3,) + +dpkg_options = ["--force-confold", "--force-confdef"] + +dpkg_cmd = os.environ.get('CFENGINE_TEST_DPKG_CMD', "/usr/bin/dpkg") +dpkg_deb_cmd = os.environ.get('CFENGINE_TEST_DPKG_DEB_CMD', "/usr/bin/dpkg-deb") +dpkg_query_cmd = os.environ.get('CFENGINE_TEST_DPKG_QUERY_CMD', "/usr/bin/dpkg-query") + +dpkg_output_format = "Name=${Package}\nVersion=${Version}\nArchitecture=${Architecture}\n" +dpkg_status_format = "Status=${Status}\n" + dpkg_output_format + +apt_get_cmd = os.environ.get('CFENGINE_TEST_APT_GET_CMD', "/usr/bin/apt-get") + +# Some options only work with specific versions of apt, so we must know the +# current version in order to do the right thing. 
+apt_version = subprocess.Popen([ apt_get_cmd , '-v'], + stdout=subprocess.PIPE, universal_newlines=True).communicate()[0] +apt_version = apt_version.splitlines()[0].split(' ')[1] + +apt_get_options = ["-o", "Dpkg::Options::=--force-confold", + "-o", "Dpkg::Options::=--force-confdef", + "-y"] + +# compare only the first two digits of the version so versions like 1.1.1ubuntu2 work +if [int(x) for x in apt_version.split(".")[0:2]] < [1, 1]: + apt_get_options.append("--force-yes") + +else: + # The --force-yes option was deprecated in apt-get 1.1 + apt_get_options.extend( [ "--allow-downgrades", + "--allow-remove-essential", + "--allow-change-held-packages"]) + +apt_get_list_update_options = [] + +# compare only the first two digits of the version so versions like 1.1.1ubuntu2 work +if [int(x) for x in apt_version.split(".")[0:2]] < [1, 0]: + apt_get_list_update_options.append("--with-new-pkgs") + +os.environ['DEBIAN_FRONTEND'] = "noninteractive" +os.environ['LC_ALL'] = "C" + +NULLFILE = open(os.devnull, 'w') + + +redirection_is_broken_cached = -1 + +def redirection_is_broken(): + # Older versions of Python have a bug where it is impossible to redirect + # stderr using subprocess, and any attempt at redirecting *anything*, not + # necessarily stderr, will result in it being closed instead. This is very + # bad, because RPM may then open its RPM database on file descriptor 2 + # (stderr), and will cause it to output error messages directly into the + # database file. Fortunately "stdout=subprocess.PIPE" doesn't have the bug, + # and that's good, because it would have been much more tricky to solve. + global redirection_is_broken_cached + if redirection_is_broken_cached == -1: + cmd_line = [sys.executable, sys.argv[0], "internal-test-stderr"] + if subprocess.call(cmd_line, stdout=sys.stderr) == 0: + redirection_is_broken_cached = 0 + else: + redirection_is_broken_cached = 1 + + return redirection_is_broken_cached + + +def subprocess_Popen(cmd, stdout=None, stderr=None): + if ((not redirection_is_broken()) + or (stdout is None and stderr is None) + or (stdout == subprocess.PIPE) + or (stderr == subprocess.PIPE)): + + return subprocess.Popen(cmd, stdout=stdout, stderr=stderr) + + old_stdout_fd = -1 + old_stderr_fd = -1 + + if stdout is not None: + old_stdout_fd = os.dup(1) + os.dup2(stdout.fileno(), 1) + + if stderr is not None: + old_stderr_fd = os.dup(2) + os.dup2(stderr.fileno(), 2) + + result = subprocess.Popen(cmd) + + if old_stdout_fd >= 0: + os.dup2(old_stdout_fd, 1) + os.close(old_stdout_fd) + + if old_stderr_fd >= 0: + os.dup2(old_stderr_fd, 2) + os.close(old_stderr_fd) + + return result + + +def subprocess_call(cmd, stdout=None, stderr=None): + process = subprocess_Popen(cmd, stdout, stderr) + return process.wait() + + + + + +def get_package_data(): + pkg_string = "" + for line in sys.stdin: + if line.startswith("File="): + pkg_string = line.split("=", 1)[1].rstrip() + # Don't break, we need to exhaust stdin. + + if not pkg_string: + return 1 + + if (pkg_string.startswith("/")): + # Absolute file. + sys.stdout.write("PackageType=file\n") + sys.stdout.flush() + return subprocess_call([dpkg_deb_cmd, "--showformat", dpkg_output_format, "-W", pkg_string]) + elif (re.search("([:,]|_[0-9])", pkg_string)): + # Contains either a version number or an illegal symbol. 
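# (Editorial examples with hypothetical package strings: "vim_8.2" and "libfoo:i386"
# are rejected by the check above because they embed a version number or an illegal
# character, a plain name such as "vim" falls through to the PackageType=repo branch,
# and "/tmp/vim.deb" was already handled as a file install above.)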
+ sys.stdout.write(line + "ErrorMessage: Package string with illegal format\n") + return 1 + else: + sys.stdout.write("PackageType=repo\n") + sys.stdout.write("Name=" + pkg_string + "\n") + return 0 + + +def list_installed(): + # Ignore everything. + sys.stdin.readlines() + + process = subprocess_Popen([dpkg_query_cmd, "--showformat", dpkg_status_format, "-W"], stdout=subprocess.PIPE) + installed_package = False + for line in process.stdout: + if PY3: + line = line.decode("utf-8") + line = line.rstrip("\n") + # 'Status=install ok ' or 'Status=hold ok ' + if line.startswith("Status=install") or line.startswith("Status=hold"): + state = line.split()[2] + if state in [ "installed", "half-configured", "half-installed" ]: + installed_package = True + else: + installed_package = False + elif line.startswith("Status="): + installed_package = False + elif installed_package: + sys.stdout.write(line + "\n") + + return 0 + + +def list_updates(online): + # Ignore everything. + sys.stdin.readlines() + + if online: + result = subprocess_call([apt_get_cmd] + apt_get_options + ["update"], stdout=NULLFILE) + if result != 0: + return result + + # We ignore held packages (--ignore-hold) so that all package updates + # available are listed. This makes package update listing compatible with + # debian 8 and highers `apt list --upgradeable` + + process = subprocess_Popen([apt_get_cmd] + apt_get_options + apt_get_list_update_options + ["--simulate", "--ignore-hold", "upgrade"], stdout=subprocess.PIPE) + for line in process.stdout: + if PY3: + line = line.decode("utf-8") + # Example of lines that we try to match: + # (name) (old version (ignored)) (new version) (repository(ies) (ignored)) (arch) + # | | | | | + # V V V V V + # Inst php5-cli [5.3.10-1ubuntu3.17] (5.3.10-1ubuntu3.18 Ubuntu:12.04/precise-updates [amd64]) [] + # + # Note architecture included in the name on this one: + # Inst php5-cli:i386 [5.3.10-1ubuntu3.17] (5.3.10-1ubuntu3.18 Ubuntu:12.04/precise-updates [i386]) [] + # + # Note multiple repositories in this one: + # Inst linux-libc-dev [2.6.32-48squeeze4] (2.6.32-48squeeze6 Debian:6.0.10/oldstable, Debian-Security:6.0/oldoldstable [amd64]) + # + # Another example (note the addition of jessie:jessie without a comma): + # Inst rudder-agent [4.1.0~rc1-jessie0] (4.1.0-jessie0 release/4.1.0-2 jessie:jessie [amd64]) + # + # name old version new version + # | | | + # /-------+-------\ /--+--\ /------+-------\ + match = re.match(r"^Inst\s+(?P[^\s:]+)(?::\S+)?\s+\[[^]\s]+\]\s+\((?P\S+)" + + + # repository(ies) arch (might be optional) + # | | + # /--+-\ /---------+---------\ + r"(?:\s+\S+)*?(\s+\[(?P[^]\s]+)\])?\).*", line) + + if match is not None: + sys.stdout.write("Name=" + match.group("name") + "\n") + sys.stdout.write("Version=" + match.group("version") + "\n") + + arch = match.group("arch") + if not arch: + arch = get_platform_arch() + sys.stdout.write("Architecture=" + arch + "\n") + + return 0 + + +def get_platform_arch(): + process = subprocess_Popen([dpkg_cmd, "--print-architecture"], stdout=subprocess.PIPE) + for line in process.stdout: + if PY3: + line = line.decode("utf-8") + return line.rstrip() + return None + +def one_package_argument(name, arch, version, is_apt_install): + args = [] + archs = [] + platform_arch = get_platform_arch() + + if arch: + archs.append(arch) + else: + # If we have existing architectures, operate on those, instead + # of the platform default. 
stderr is suppressed to avoid + # message pollution if the package is not be installed + process = subprocess_Popen([dpkg_query_cmd, "--showformat", "${Architecture}=${Status}\n", + "-W", name + ":*"], + stdout=subprocess.PIPE, stderr=NULLFILE) + for line in process.stdout: + if PY3: + line = line.decode("utf-8") + # The space before "installed" is important, because it can be "not-installed". + if "=" in line: + arch, stat = line.split("=", 1) + if stat.find(" installed") >= 0: + archs.append(arch) + + version_suffix = "" + if version != "": + version_suffix = "=" + version + + if archs: + for cur_arch in archs: + if cur_arch == platform_arch: + if is_apt_install: + # Store duplicated entry in tuple for simplicity of use in repo_install and remove + # functions. + args.append((name + version_suffix, name + version_suffix)) + else: + # For some distributions with multi arch support we must provide package name with ':architecture' + # postfix to remove package which architecture matches architecture of OS (Debian 7). + # This is not consistent behavior for all dpkg implementations. On Ubuntu 12 we have to + # use a package name only and adding ':architecture' postfix results in an error. + args.append((name + version_suffix, name + ':' + cur_arch + version_suffix)) + else: + # For managing packages which architecture doesn't match native OS architecture we always + # are using ':architecture' postfix added to package name. + args.append((name + ':' + cur_arch + version_suffix, name + ':' + cur_arch + version_suffix)) + else: + args.append((name + version_suffix, name + version_suffix)) + + return args + + +def package_arguments_builder(is_apt_install): + name = "" + version = "" + arch = "" + args = [] + for line in sys.stdin: + if line.startswith("Name="): + if name: + # Each new "Name=" triggers a new entry. + args.extend(one_package_argument(name, arch, version, is_apt_install)) + + version = "" + arch = "" + + name = line.split("=", 1)[1].rstrip() + + elif line.startswith("Version="): + version = line.split("=", 1)[1].rstrip() + + elif line.startswith("Architecture="): + arch = line.split("=", 1)[1].rstrip() + + elif line.startswith("options="): + global apt_get_options + option = line.split("=", 1)[1].rstrip() + if option: + apt_get_options.append(option) + + if name: + args.extend(one_package_argument(name, arch, version, is_apt_install)) + + return args + + +def repo_install(): + args = package_arguments_builder(True) + + cmd_line = [apt_get_cmd] + apt_get_options + ["install"] + + if (not args): + return 0 + + # Convert list of tuples into two lists so that first element of each + # tuple belongs to list 'a1' and the second one to list 'a2'. + a1, a2 = map(list, zip(*args)) + + # For 'repo_insrtall' both 'a1' and 'a2' should be equal so we can operate + # on 'a1' elements only. + if a1: + return subprocess_call(cmd_line + a1, stdout=NULLFILE) + return 0 + +def remove(): + args = package_arguments_builder(False) + + cmd_line = [apt_get_cmd] + apt_get_options + ["remove"] + + if (not args): + return 0 + + # Convert list of tuples into two lists so that first element of each + # tuple belongs to list 'a1' and the second one to list 'a2'. + # + # In case of multi arch support elements of 'a1' list should not contain + # packages names with ':architecture' suffix for all packages matching native + # OS architecture. 
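# (Editorial illustration with hypothetical packages: for a removal,
# package_arguments_builder(False) might return
#   [("vim", "vim:amd64"), ("libfoo:i386", "libfoo:i386")]
# i.e. for the native architecture a1 carries the bare name and a2 the ":arch"
# form, while foreign-architecture packages carry the suffix in both; the
# zip(*args) below splits those tuples back into the two candidate name lists.)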
+ a1, a2 = map(list, zip(*args)) + + # As there seems to be no unified method to remove packages matching + # native OS architecture we are trying first to remove packages providing + # just a package name and if this call is failing we are trying + # 'package_name:architecture' approach. + ret = subprocess_call(cmd_line + a1, stdout=NULLFILE) + if ret != 0 and a1 != a2: + ret = subprocess_call(cmd_line + a2, stdout=NULLFILE) + + return ret + +def file_install(): + cmd_line = [dpkg_cmd] + dpkg_options + ["-i"] + + found = False + for line in sys.stdin: + if line.startswith("File="): + found = True + cmd_line.append(line.split("=", 1)[1].rstrip()) + + if (not found): + return 0 + + return subprocess_call(cmd_line, stdout=NULLFILE) + + +def main(): + if len(sys.argv) < 2: + sys.stderr.write("Need to provide argument\n") + return 2 + + if sys.argv[1] == "internal-test-stderr": + # This will cause an exception if stderr is closed. + try: + os.fstat(2) + except OSError: + return 1 + return 0 + + elif sys.argv[1] == "supports-api-version": + sys.stdout.write("1\n") + return 0 + + elif sys.argv[1] == "get-package-data": + return get_package_data() + + elif sys.argv[1] == "list-installed": + return list_installed() + + elif sys.argv[1] == "list-updates": + return list_updates(True) + + elif sys.argv[1] == "list-updates-local": + return list_updates(False) + + elif sys.argv[1] == "repo-install": + return repo_install() + + elif sys.argv[1] == "remove": + return remove() + + elif sys.argv[1] == "file-install": + return file_install() + + else: + sys.stderr.write("Invalid operation\n") + return 2 + +sys.exit(main()) diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/freebsd_ports b/policies/lib/tree/10_ncf_internals/modules/packages/freebsd_ports new file mode 100755 index 00000000000..81d4d0647da --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/freebsd_ports @@ -0,0 +1,117 @@ +#!/bin/sh -e + +command=$1 + +while read -r line; do + export INPUT_$line +done + +get_package_data() { + name="${INPUT_File?File must be given to get-package-data}" + echo PackageType=repo + echo Name=$name +} + +list_installed() { + # Example pkg output: + # sudo-1.8.14p3 + # Name : sudo + # Version : 1.8.14p3 + # Installed on : Sun Aug 16 05:36:05 UTC 2015 + # Origin : security/sudo + # Architecture : freebsd:10:x86:64 + # + # After rewrite: + # Name=sudo + # Version=1.8.14p3 + # Architecture=none + pkg info -f -a | grep -E '^(Name|Version|Architecture)' | sed -e 's/[ ]*:[ ]*/=/' -e 's/^Architecture=.*/Architecture=none/' +} + +repo_install() { + name="${INPUT_Name?Name must be given to repo-install}" + version="${INPUT_Version}" + + export BATCH=1 + PORT_PATH=$(whereis -sq "$name") + + if [ -z "$PORT_PATH" ] + then + echo "ErrorMessage=Could not install $name, port does not exist" + exit 0 + fi + + cd "$PORT_PATH" + + if [ -n "$version" ] + then + available=$(make -V PKGVERSION) + if [ "$available" != "$version" ] + then + echo "ErrorMessage=Could not install $name $version, available version was $available" + exit 0 + fi + fi + + make deinstall reinstall >&2 +} + +update_ports_tree() { + portsnap --interactive fetch update >&2 +} + +list_updates_local() { + # Example pkg output: + # ca_root_nss-3.19.3 < needs updating (index has 3.20) + # + # After sed: + # Name=ca_root_nss + # Version=3.20 + # Architecture=none + pkg version -v -l "<" | sed -e 's/\([^ ]*\)-[^-]* .* \(.*\))/Name=\1\ +Version=\2\ +Architecture=none/' +} + +remove() { + name="${INPUT_Name?Name must be given to remove}" + 
export BATCH=1 + + PORT_PATH=$(whereis -sq "$name") + + if [ -z "$PORT_PATH" ] + then + echo "ErrorMessage=Could not remove $name, port does not exist" + exit 0 + fi + + cd "$PORT_PATH" + make deinstall >&2 +} + +case $command in + supports-api-version) + echo 1 + ;; + get-package-data) + get_package_data + ;; + list-installed) + list_installed + ;; + repo-install) + repo_install + ;; + list-updates) + update_ports_tree + list_updates_local + ;; + list-updates-local) + list_updates_local + ;; + remove) + remove + ;; + *) + echo "ErrorMessage=Invalid operation" +esac diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/ips b/policies/lib/tree/10_ncf_internals/modules/packages/ips new file mode 100755 index 00000000000..db17c8fbcc5 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/ips @@ -0,0 +1,85 @@ +#!/bin/sh + +LEVEL=0 + +fatal () { + echo "ErrorMessage=$@" + exit 2 +} + +warn () { + [ $LEVEL -gt 0 ] && echo "[TRACE]: $*" >&2 +} + +supports_api_version () { + echo 1 +} + +get_package_data() { + if [ -f "${File}" ] + then + echo "PackageType=file" + # this probably doesn't work + echo "Name=${File}" + else + echo "PackageType=repo" + pkg list -H --no-refresh "${File}" | head -n1 | awk '{print "Name=" $1}' + fi +} + +list_installed () { + arch=$(uname -p) + pkg list -H --no-refresh | awk '{print "Name=" $1 "\nVersion=" $2 "\nArchitecture=" "'${arch}'"}' +} + +list_updates () { + # The difference between list-updates and list-updates-local + # is that list-updates expects to refresh from the upstream repo. + pkg refresh -q + list_updates_local +} + +list_updates_local () { + arch=$(uname -p) + pkg list -H --no-refresh -u | awk '{print "Name=" $1 "\nVersion=" $2 "\nArchitecture=" "'${arch}'"}' +} + +repo_install () { + # ignore architecture + [ "${Version}" != "" ] && Name="${Name}@${Version}" + pkg install -q --accept --no-refresh "${Name}" +} + +file_install () { + fatal "File installation not supported" +} + +remove () { + # ignore architecture + [ "${Version}" != "" ] && Name="${Name}@${Version}" + pkg uninstall -q --ignore-missing "${Name}" +} + +# Cfengine passes data on STDIN. Absorb that and convert to shell variables. +while IFS= read -r line; do + eval "$line" + # options can be passed multiple times so we need to avoid clobbering + # previous instances. Plus, what we really want to eval is the value of + # each option. 
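# (Editorial note with a hypothetical attribute: a promise setting
# options => { "foo=bar" } reaches the module as the stdin line options=foo=bar;
# the eval below then defines the shell variable foo=bar so the operation
# functions can read it, exactly as the nimclient module documents further down.)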
+ if [ -n "$options" ]; then + eval "$options" + fi +done + +case "$1" in + supports-api-version) supports_api_version;; + repo-install) repo_install;; + file-install) file_install;; + remove) remove;; + list-installed) list_installed;; + list-updates) list_updates;; + list-updates-local) list_updates_local;; + get-package-data) get_package_data;; + *) fatal "Invalid operation";; +esac + diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/nimclient b/policies/lib/tree/10_ncf_internals/modules/packages/nimclient new file mode 100755 index 00000000000..7d2feb3c6ed --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/nimclient @@ -0,0 +1,191 @@ +#!/bin/sh + +## Licensed under: +## MIT Public License +## http://www.opensource.org/licenses/MIT + +## Copyright (c) 2015, Nick Anderson + +## nim package module for cfengine +## - Based on work done in pkgsrc module +## Installs packages using nimclient +## - Probably need to implement caching layer for nimclient, it can be slow +## and always requires remote connection +## Removes packages using installp + +export PATH=/opt/local/bin:/opt/local/sbin:$PATH + +LEVEL=0 + +fatal () { + echo "ErrorMessage=$@" + exit 2 +} + +warn () { + [ $LEVEL -gt 0 ] && echo "[TRACE]: $*" >&2 +} + +supports_api_version () { + echo 1 +} + +repo_install () { + # nimclient needs to know which lpp_source to install from. + # If lpp_source is provied its a fatal error + if [ -z "$lpp_source" ]; then + fatal "Error installing '${Name}': No lpp_source defined." + fi + nimclient_install_package +} + +file_install () { + # Must query the File to get the Name + # installp -d ${File} ${Name} + fatal "Error: File based installs not supported by nimclient" +} + +remove () { + # This function should make sure the specified package gets removed + remove_package +} + +list_installed () { + # This function should return the packages that are currently installed + # NAME= + # VERSION= + # ARCHITECTURE= + list_installed_packages | /usr/bin/grep -v "\s*#" | /usr/bin/awk -F':' '{print "Name=" $1 "\nVersion=" $3 "\nArchitecture=PPC"}' +} + +list_updates () { + # This function should hit the network + # This function should return the list of package updates that are available + # NAME= + # VERSION= + # ARCHITECTURE= + # If you can't get a list of updates available, then you can't use + # version=latest and mission portal won't report updates available. If + # there is no valid cached list, AND it is unable to get a list it should + # return nothing + # if it is able to get a valid listing it should update the local cache and return that + # - expects the cache is kept up to date with the installed state + # - if you hve an update and you set the version to latest, and it installs + # that package that package should be removed from the cache + # - If you don't then mission portal may show that there are updates + # available that are actually already installed (until the cache gets + # refreshed) + + # Since we don't yet know how to determine which packages have updates + # available we simply return true. 
+ /usr/bin/true +} + +list_updates_local () { + # This function should return the cached list of package updates availabel + # IF there is no cache then it should return nothing + # This function should avoid hitting the network for listing + # returns same info as list_updates + # CFEngine determines which one to call based on if_elapsed + + # see if showres can do offline listing, see if we can know which filesets are considered updates + # - only list the latest update + /usr/bin/true +} + +get_package_data () { + # NIM is only a REPO type install + # - Could add file based install for bff or rpm packages + + #if echo "${File}" | grep '^/' >/dev/null; then + # If there's a / in $File then we'll expec this to be a 'file' install. + # First we need to figure out if the package matches .bff or .rpm + # - If not fail + # - fatal "Unsupported Package Type" + # Next we need to query the package for the base name and version + # Finally spit out the stuff + # echo "PackageType=file" + # echo NAME= + # echo VERSION= + # echo ARCHITECUTE= + #echo "Name=$(echo "$File" | sed 's/.*\///g')" + #else + # If $File does not contain /, it must be in an existing remote repo + echo "PackageType=repo" + echo "Name=${File}" +} + +parse_pkg_data () { + # Emit package name and version, and arch based on output from nimclient + # showres. If file based install support is added then this will need to be + # improved to handle that case. + name=$(echo $1 | awk -F':' '{ print $2}') + version=$(echo $1 | awk -F':' '{ print $3 }') + + echo "Name=$name" + echo "Version=$version" + # ARCH is useless on AIX? + echo "Architecture=PPC" +} + +# Cfengine passes data on STDIN. Absorb that and convert to shell variables. +while IFS= read -r line; do + eval "$line" + # options can be passed multiple times so we need to avoid clobbering + # previous instances. Plus, what we really want to eval is the value of + # each option so that we can have a variable for each value. + # For example options => { "lpp_source=aix7783" } + # comes through the protocol as options=lpp_source=aix7783 + # and here we define lpp_source=aix7783 + if [ -n "$options" ]; then + eval "$options" + fi +done + +# Set up mock environment if necessary +# This is not well developed as I don't have continuous access to aix and nim +# nor am I an expert +CFENGINE_TEST_NIMCLIENT_MOCK=false +if [ -n "$CFENGINE_TEST_NIMCLIENT_MOCK" = "true" ]; then + list_installed_packages() { + cat ../../tests/unit/mock_lslpp_Lc + } + nimclient_showres() { + # This lists the AVAILABLE packages in the nim repo + cat ../../tests/unit/mock_nimclient_showres + } + nimclient_install_package() { + # Ugh, not sure what this should do to mock. I think that nimclient + # return codes kind of suck, might need to parse the output? 
+ echo nimclient -o cust -a lpp_source=${lpp_source} -a filesets=\"${Name}\" >&2 + } + remove_package() { + echo installp -u "${Name}" >&2 + } +else + list_installed_packages() { + lslpp -Lc + } + nimclient_showres() { + /usr/sbin/nimclient -o showres -a resource=${lpp_source} -a installp_flags=L + } + nimclient_install_package() { + /usr/sbin/nimclient -o cust -a lpp_source=${lpp_source} -a filesets=\"${Name}\" 1>&2 + } + remove_package() { + installp -u "${Name}" 1>&2 + } +fi + + +case "$1" in + supports-api-version) supports_api_version;; + repo-install) repo_install;; + file-install) file_install;; + remove) remove;; + list-installed) list_installed;; + list-updates) list_updates;; + list-updates-local) list_updates_local;; + get-package-data) get_package_data;; + *) fatal "Invalid operation";; +esac diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/pkg b/policies/lib/tree/10_ncf_internals/modules/packages/pkg new file mode 100755 index 00000000000..8c1b832bb55 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/pkg @@ -0,0 +1,226 @@ +#!/bin/sh + +## Licensed under +## BSD License (2-clause) +## https://opensource.org/licenses/BSD-2-Clause + +## Copyright (c) 2015, John D. "Trix" Farrar + +## pkgng module for CFEngine 3.7+ + + +if [ -n "$CFENGINE_TEST_PKGNG_MOCK" ]; then + alias pkg='./mock_pkgng' +fi + +CMD="$1" + +DEFARCH=`uname -m` +REPO='' +INPUTLIST='' + +fatal() { + echo "ErrorMessage=$*" + exit 1 +} + +parse_line() { + INPUTP="$1" + + echo "${INPUTP}" | awk -F= '{print "KEYWORD=" $1 "; VALUE=" $2;}' 2>/dev/null + + unset INPUTP + +} + +read_options() { + while read -r INPUT ; do + if [ -n "${INPUT}" ] ; then + eval `parse_line "${INPUT}"` + case "${KEYWORD}" in + File|Name) + VER='' + ARCH='' + if [ "${KEYWORD}" = 'File' ] ; then + PKG="${VALUE}" + else + NAM="${VALUE}" + fi + ;; + Version) + VER="${VALUE}" + ;; + Architecture) + ARCH=${VALUE:=${DEFARCH}} + ;; + options*) + if [ -n "${repository}" ] ; then + REPO="-r ${repository}" + fi + continue + ;; + PackageType*) + TYP="${VALUE}" + ;; + *) + fatal "Invalid input: '${KEYWORD}'." + ;; + esac + if [ -n "${KEYWORD}" ] && [ -n "${VALUE}" ] ; then + if [ -n "${INPUTLIST}" ] ; then + INPUTLIST="${INPUTLIST} ${KEYWORD}=${VALUE}" + else + INPUTLIST="${KEYWORD}=${VALUE}" + fi + fi + fi + done +} + +process_inputs() { + CMD="$1" ; shift + OPTIONS="$*" + + for OPT in ${OPTIONS} ; do + eval `parse_line ${OPT}` + case "${KEYWORD}" in + File*) + if [ "${CMD}" = 'file-install' ] ; then + PKG="${PKG} ${VALUE}" + else + fatal "Bad Input: '${OPT}' (should be Name=...)." + fi + VER='' + ;; + Name*) + if [ "${CMD}" != 'file-install' ] ; then + PKG="${PKG} ${VALUE}" + else + fatal "Bad Input: '${OPT}' (should be File=...)" + fi + VER='' + ;; + Version*) + PKG="${PKG}-${VALUE}" + ;; + Architecture*) + if [ -n "${VALUE}" ] && + [ "${VALUE}" != "${DEFARCH}" ] ; then + fatal "Bad Input: Architecture ${VALUE} does not match ${DEFARCH}." + fi + ;; + PackageType*) + TYP="${VALUE}" + ;; + *) + fatal "Invalid input: '${INPUT}'." + ;; + esac + done +} + +get_package_data() { + if [ -n "${NAM}" ] && [ -z "${PKG}" ] ; then + PKG="${NAM}" + fi + if [ -n "${PKG}" ] ; then + case "${PKG}" in + /*) # File Path. It's a file + TYP='file' + if [ ! -e "${PKG}" ] ; then + fatal "No such file: '${PKG}'." + fi + ;; + http://*|ftp://|file://) # URL, it's a file. 
+ TYP='file' + ;; + *) + TYP='repo' + ;; + esac + echo "PackageType=${TYP}" + if [ "${TYP}" = 'repo' ] ; then + PKGNAM=`basename "${PKG}" | sed -r -e 's/^([A-Za-z0-9_-]+)-[0-9a-z.,_]+/\1/'` + PKGVER=`basename "${PKG}" | sed -r -e 's/^[A-Za-z0-9_-]+-([0-9a-z.,_]+)/\1/' -e 's/\.(tgz|(tar\.)?gz|txz|zip)$//'` + if [ -z "`echo ${PKGVER} | grep -E '^[0-9]'`" ] ; then + if [ "${PKGNAM}" != "${PKGVER}" ] ; then + PKGNAM="${PKGNAM}-${PKGVER}" + fi + PKGVER='' + fi + echo "Name=${PKGNAM}" + # Returning version for PackageType=repo is not advised + # https://docs.cfengine.com/latest/reference-standard-library-package_modules.html + # if [ -n "${PKGVER}" ] ; then + # echo "Version=${PKGVER}" + # fi + else + pkg query --file "${PKG}" 'Name=%n\nVersion=%v\nArchitecture=%q' | \ + sed -e 's/=freebsd:[0-9]*:/=:/' \ + -e 's/:x86:32/i386/' \ + -e 's/:x86:64/amd64/' + + fi + fi +} + +if [ "${CMD}" = 'supports-api-version' ] ; then + echo '1' + exit 0 +else + read_options +fi + +case "${CMD}" in + get-package-data) + get_package_data + ;; + list-installed) + pkg query -a 'Name=%n\nVersion=%v\nArchitecture=%q' | \ + sed -e 's/=freebsd:[0-9]*:/=:/' \ + -e 's/:x86:32/i386/' \ + -e 's/:x86:64/amd64/' + + ;; + list-updates-local|list-updates) + if [ "${CMD}" = 'list-updates-local' ] ; then + LOCALUPDATE='-U' + else + LOCALUPDATE='' + fi + pkg version ${LOCALUPDATE} -vl'<' | \ + awk 'BEGIN { cmd="uname -m"; cmd | getline arch} +/ repositor(y|ies) / { next; } +{ + pkgname = $1; + pkgver = $7; + sub(/-[0-9_.,]*$/,"",pkgname); + sub(/\)/, "", pkgver); + print "Name=" pkgname; + print "Version=" pkgver; + print "Architecture=" arch; +}' + ;; + repo-install|file-install|remove) + process_inputs "${CMD}" "${INPUTLIST}" + case "${CMD}" in + repo-install) + pkg install -qy ${REPO} ${PKG} 2>/dev/null >/dev/null + ;; + file-install) + pkg install -qy ${PKG} 2>/dev/null >/dev/null + ;; + remove) + pkg delete -Rqy ${PKG} 2>/dev/null >/dev/null + ;; + esac + RC=$? + if [ ${RC} -ne 0 ] ; then + fatal "Package manager error" + exit $RC + fi + ;; + *) + fatal "Command '${CMD}' is either not valid or not implemented." + ;; +esac diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/pkgsrc b/policies/lib/tree/10_ncf_internals/modules/packages/pkgsrc new file mode 100755 index 00000000000..11229d46644 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/pkgsrc @@ -0,0 +1,163 @@ +#!/bin/sh + +## Licensed under: +## MIT Public License +## http://www.opensource.org/licenses/MIT + +## Copyright (c) 2015, Brian Bennett + +## pkgsrc package module for cfengine + +# Set up mock environment if necessary +if [ -n "$CFENGINE_TEST_PKGSRC_MOCK" ]; then + alias pkgin='./mock_pkgin' + alias pkg_info='./mock_pkg_info' +fi + +# /opt/local supports SmartOS +# /opt/pkg supports Mac OS X from Joyent, 2015Q2 or later +# /usr/pkg supports standard pkgsrc +# This should be sufficient to support all platforms supported by pgksrc. +# pkgsrc bootstrapped manually to non-standard locations is not currently supported. 
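+# The exports below derive the platform data this module needs; for example
+# (illustrative values, e.g. on SmartOS) `pkg_info -X pkg_install` prints build
+# variables such as MACHINE_ARCH=x86_64, and `pkg_info -pq pkg_install` prints
+# the prefix pkg_install was installed under, which is used to locate
+# pkg_install.conf.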
+export PATH=/opt/local/bin:/opt/local/sbin:/opt/pkg/bin:/opt/pkg/sbin:/usr/pkg/bin:/usr/pkg/sbin:$PATH +export MACHINE_ARCH=$(pkg_info -X pkg_install | grep MACHINE_ARCH | cut -d = -f 2) +export PKG_ROOT=$(pkg_info -pq pkg_install | cut -d ' ' -f 2 | sort -u) +export PKG_INSTALL_CONF=${PKG_ROOT}/etc/pkg_install.conf + +LEVEL=0 + +fatal () { + echo "ErrorMessage=$@" + exit 2 +} + +warn () { + [ $LEVEL -gt 0 ] && echo "[TRACE]: $*" >&2 +} + +supports_api_version () { + echo 1 +} + +repo_install () { + # If a version number is specified, insert a dash between the name and + # version + [ -n "$Version" ] && ver="-$Version" + pkgin -y in "${Name}${ver}" > /dev/null + if [ $? -gt 0 ]; then + fatal "Error installing ${Name}${ver}" + fi +} + +file_install () { + # The specified config file might, for example override signature reqs: + # VERIFIED_INSTALLATION=never + pkg_add -U -C "$PKG_INSTALL_CONF" "$File" > /dev/null + if [ $? -gt 0 ]; then + echo "Error installing ${File}" + fi +} + +remove () { + # If a version number is specified, insert a dash between the name and + # version + [ -n "$Version" ] && ver="-$Version" + pkgin -y rm "${Name}${ver}" > /dev/null +} + +list_installed () { + parse_pkg_data "$(pkgin -p list)" +} + +list_updates () { + # The difference between list-updates and list-updates-local, it seems + # is that list-updates expects to refresh from the upstream repo. + pkgin -f update >&2 + list_updates_local +} + +list_updates_local () { + parse_pkg_data "$(pkgin -pl '<' ls)" +} + +get_package_data () { + if echo "$File" | grep '/' >/dev/null; then + # If there's a / in $File then we'll expec this to be a 'file' install. + # This is reliable because 1) pkgsrc packages don't have / in the name + # and because cfengine can't install a PackageType=file from a relative + # path. + # + # The package will be installed with pkg_add later, which also supports + # arbitrary HTTP locations. + echo "PackageType=file" + # To appease cfengine, we'll take the basename of the package passed. + echo "Name=$(echo "$File" | sed 's/.*\///g')" + else + # If $File does not contain /, it must be in an existing remote repo, + # because cfengine can't install files from relative paths. + echo "PackageType=repo" + # Cfengine expects a *single* matching package. So sort and return the + # most recent. If a version is specified it can partial match, in which + # case we'll again take the latest. If there's no match on the name + # or version, return nothing. + # There's possibly a bug here because we're already emitting that the + # PackageType is repo. + parse_pkg_data "$(pkgin -pP avail | grep "^${File}-[0-9]" | grep "$Version;" | sort -n | tail -1)" | grep Name + fi +} + +parse_pkg_data () { + # This is a bit tricky. + # pkgin is called with parsable format and separates fields with ';'. + # Packages are further sub-split between name and version with '-', but + # package names may also contain '-'. To complicate matters, package + # versions can have '-' as well. + + # Take the example package mozilla-rootcerts-1.0.20141117nb1 + # $1 is the package-version compound. Discard the description in $2.. + # Split $1 on 'separator' and store in array 'package'. Return length 'l' + # 'version' is the last element of array 'package' + # Now the tricky bit. We've split the package name, so now must reassemble + # it with dashes in tact, without the version number. + # For each element less 1 in 'package', if this is the first iteration + # print the element. 
 On subsequent passes print "-element"
+    # Finally print the version and the machine architecture as well.
+    echo "$*" | awk -F';' '
+    {
+        separator="-"
+        l=split($1,package,separator)
+        version=package[l]
+        printf("Name=")
+        for (i=1; i<l; i++) {
+            if (i>1) {
+                printf("-")
+            }
+            printf("%s",package[i])
+        }
+        printf("\nVersion=%s\n",version)
+        printf("Architecture=%s\n",ENVIRON["MACHINE_ARCH"])
+    }'
+}
+
+# Cfengine passes data on STDIN. Absorb that and convert to shell variables.
+while IFS= read -r line; do
+    eval "$line"
+    # options can be passed multiple times so we need to avoid clobbering
+    # previous instances. Plus, what we really want to eval is the value of
+    # each option.
+    if [ -n "$options" ]; then
+        eval "$options"
+    fi
+done
+
+case "$1" in
+    supports-api-version) supports_api_version;;
+    repo-install) repo_install;;
+    file-install) file_install;;
+    remove) remove;;
+    list-installed) list_installed;;
+    list-updates) list_updates;;
+    list-updates-local) list_updates_local;;
+    get-package-data) get_package_data;;
+    *) fatal "Invalid operation";;
+esac
diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/slackpkg b/policies/lib/tree/10_ncf_internals/modules/packages/slackpkg
new file mode 100755
index 00000000000..4aa5554bcea
--- /dev/null
+++ b/policies/lib/tree/10_ncf_internals/modules/packages/slackpkg
@@ -0,0 +1,188 @@
+#!/bin/sh
+
+CMD="$1"
+
+DEFARCH=$(uname -m)
+REPO=''
+INPUTLIST=''
+
+fatal() {
+    echo "ErrorMessage=$*"
+    exit 1
+}
+
+parse_line() {
+    INPUTP="$1"
+
+    KEYWORD=$(echo "${INPUTP}" | sed -e 's/=.*$//')
+    VALUE=$(echo "${INPUTP}" | sed -e 's/^.*=//')
+
+    unset INPUTP
+}
+
+read_options() {
+    while read -r INPUT ; do
+        if [ -n "${INPUT}" ] ; then
+            parse_line "${INPUT}"
+            case "${KEYWORD}" in
+                File|Name)
+                    VER=''
+                    ARCH=''
+                    if [ "${KEYWORD}" = 'File' ] ; then
+                        PKG="${VALUE}"
+                    else
+                        NAM="${VALUE}"
+                    fi
+                    ;;
+                Version)
+                    VER="${VALUE}"
+                    ;;
+                Architecture)
+                    ARCH=${VALUE:=${DEFARCH}}
+                    ;;
+                options*)
+                    if [ -n "${repository}" ] ; then
+                        REPO="-r ${repository}"
+                    fi
+                    continue
+                    ;;
+                PackageType*)
+                    TYP="${VALUE}"
+                    ;;
+                *)
+                    fatal "Invalid input: '${KEYWORD}'."
+                    ;;
+            esac
+            if [ -n "${KEYWORD}" ] && [ -n "${VALUE}" ] ; then
+                if [ -n "${INPUTLIST}" ] ; then
+                    INPUTLIST="${INPUTLIST} ${KEYWORD}=${VALUE}"
+                else
+                    INPUTLIST="${KEYWORD}=${VALUE}"
+                fi
+            fi
+        fi
+    done
+}
+
+process_inputs() {
+    CMD="$1" ; shift
+    OPTIONS="$*"
+
+    for OPT in ${OPTIONS} ; do
+        # parse_line sets KEYWORD and VALUE directly, so call it in the
+        # current shell rather than via command substitution.
+        parse_line "${OPT}"
+        case "${KEYWORD}" in
+            File*)
+                if [ "${CMD}" = 'file-install' ] ; then
+                    PKG="${PKG} ${VALUE}"
+                else
+                    fatal "Bad Input: '${OPT}' (should be Name=...)."
+                fi
+                VER=''
+                ;;
+            Name*)
+                if [ "${CMD}" != 'file-install' ] ; then
+                    PKG="${PKG} ${VALUE}"
+                else
+                    fatal "Bad Input: '${OPT}' (should be File=...)"
+                fi
+                VER=''
+                ;;
+            Version*)
+                PKG="${PKG}-${VALUE}"
+                ;;
+            Architecture*)
+                if [ -n "${VALUE}" ] &&
+                   [ "${VALUE}" != "${DEFARCH}" ] ; then
+                    fatal "Bad Input: Architecture ${VALUE} does not match ${DEFARCH}."
+                fi
+                ;;
+            PackageType*)
+                TYP="${VALUE}"
+                ;;
+            *)
+                fatal "Invalid input: '${INPUT}'."
+                ;;
+        esac
+    done
+}
+
+get_package_data() {
+    if [ -n "${NAM}" ] && [ -z "${PKG}" ] ; then
+        PKG="${NAM}"
+    fi
+    if [ -n "${PKG}" ] ; then
+        case "${PKG}" in
+            /*|./*) # File Path. It's a file
+                TYP='file'
+                if [ ! -e "${PKG}" ] ; then
+                    fatal "No such file: '${PKG}'."
+                fi
+                ;;
+            http://*|ftp://*|file://*) # URL, it's a file.
+ TYP='file' + ;; + *) + TYP='repo' + ;; + esac + echo "PackageType=${TYP}" + if [ "${TYP}" = 'repo' ] ; then + if [ -z "`echo ${PKGVER} | grep -E '^[0-9]'`" ] ; then + if [ "${PKGNAM}" != "${PKGVER}" ] ; then + PKGNAM="${PKGNAM}-${PKGVER}" + fi + PKGVER='' + fi + # Returning version for PackageType=repo is not advised + # https://docs.cfengine.com/latest/reference-standard-library-package_modules.html + # if [ -n "${PKGVER}" ] ; then + # echo "Version=${PKGVER}" + # fi + echo "Name="${PKG} + else + echo "Name="$(echo ${PKG} |sed -e 's/-[0-9].*$//' -e 's/[^/]+\(.*$\)/\1/' -e 's/.*\///') + echo ${PKG} |sed -e 's/.*\///' -e 's/.*-\([_a-z0-9.?]\+\)-\(.*\)-.*/Version=\1\'$'\nArchitecture=\\2/' + fi + fi +} + +if [ "${CMD}" = 'supports-api-version' ] ; then + echo '1' + exit 0 +else + read_options +fi + +case "${CMD}" in + get-package-data) + get_package_data + ;; + list-installed) + ls -1 /var/log/packages/ |sed -e 's/\([^ ]\+\)-\([_a-zA-Z0-9.?]\+\)-\(.*\)-/Name=\1\'$'\nVersion=\\2\\nArchitecture=\\3/' + ;; + list-updates-local|list-updates) + slackpkg -dialog=off -batch=on upgrade-all | sed -r 's!^(.+)-(.+)-(i[0-9]86|noarch|x86_64|x86|fw|npmjs)-(.*)$!Name=\1\nVersion=\2\nArchitecture=\3!g' | grep -vE "Total\ package|Do\ you\ wish\ to|Looking\ for|Checking\ local|^$" + ;; + repo-install|file-install|remove) + process_inputs "${CMD}" "${INPUTLIST}" + case "${CMD}" in + repo-install) + slackpkg -dialog=off -batch=on -default_answer=yes install ${PKG} 2>/dev/null >/dev/null + ;; + file-install) + installpkg ${PKG} 2>/dev/null >/dev/null + ;; + remove) + slackpkg -dialog=off -batch=on -default_answer=yes remove ${PKG} 2>/dev/null >/dev/null + ;; + esac + RC=$? + if [ ${RC} -ne 0 ] ; then + fatal "Package manager error" + exit $RC + fi + ;; + *) + fatal "Command '${CMD}' is either not valid or not implemented." + ;; +esac diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/snap b/policies/lib/tree/10_ncf_internals/modules/packages/snap new file mode 100755 index 00000000000..1bc3ad2663f --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/snap @@ -0,0 +1,86 @@ +#!/bin/bash -e +get_package_data() { + name="${INPUT_File?File must be given to get-package-data}" + echo PackageType=repo + echo Name=$name +} + +list_installed() { + # Example pkg output: + # Name Version Rev Developer Notes + # core 16-2.30 3748 canonical core + # hello-world 6.3 27 canonical - + # + # After rewrite: + # Name=core + # Version=16-2.30 + # Architecture=none + snap list | sed 1d | awk ' +{ + printf("Name=%s\n",$1) + printf("Version=%s\n",$2) + printf("Architecture=none\n") +}' +} + +repo_install() { + name="${INPUT_Name?Name must be given to repo-install}" + # TODO: investigate channel, revision flags + snap install "$name" >&2 +} + + +list_updates() { + # By default snaps are updated daily, at the time of this writing, there is no + # way to disable the auto-update, but it can be delayed. + + # TODO: Get example output showing updates from `snap refresh --list` + + true +} + +remove() { + name="${INPUT_Name?Name must be given to remove}" + snap remove "$name" >&2 +} + +main() { + command=$1 + + # Output maybe contain backslashes, and we don't want those to end up escaping + # something so we use use -r with read. + while read -r line; do + # Note that line is a variable assignment, e.g. 
+ # INPUT_File=syncthing + declare INPUT_$line + done + + + case $command in + supports-api-version) + echo 1 + ;; + get-package-data) + get_package_data + ;; + list-installed) + list_installed + ;; + repo-install) + repo_install + ;; + list-updates) + list_updates + ;; + list-updates-local) + list_updates + ;; + remove) + remove + ;; + *) + echo "ErrorMessage=Invalid operation" + esac +} + +main $1 diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/yum b/policies/lib/tree/10_ncf_internals/modules/packages/yum new file mode 100755 index 00000000000..b4e26e80f18 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/yum @@ -0,0 +1,447 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python then python2 +if command -v python3 >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +import sys +import os +import subprocess +import re + + +rpm_cmd = os.environ.get('CFENGINE_TEST_RPM_CMD', "/bin/rpm") +rpm_quiet_option = ["--quiet"] +rpm_output_format = "Name=%{name}\nVersion=%|EPOCH?{%{epoch}:}:{}|%{version}-%{release}\nArchitecture=%{arch}\n" + +yum_cmd = os.environ.get('CFENGINE_TEST_YUM_CMD', "/usr/bin/yum") +yum_options = ["--quiet", "-y"] + +NULLFILE = open(os.devnull, 'w') + + +redirection_is_broken_cached = -1 + +def redirection_is_broken(): + # Older versions of Python have a bug where it is impossible to redirect + # stderr using subprocess, and any attempt at redirecting *anything*, not + # necessarily stderr, will result in it being closed instead. This is very + # bad, because RPM may then open its RPM database on file descriptor 2 + # (stderr), and will cause it to output error messages directly into the + # database file. Fortunately "stdout=subprocess.PIPE" doesn't have the bug, + # and that's good, because it would have been much more tricky to solve. + global redirection_is_broken_cached + if redirection_is_broken_cached == -1: + cmd_line = [sys.executable, sys.argv[0], "internal-test-stderr"] + if subprocess.call(cmd_line, stdout=sys.stderr) == 0: + redirection_is_broken_cached = 0 + else: + redirection_is_broken_cached = 1 + + return redirection_is_broken_cached + + +def subprocess_Popen(cmd, stdout=None, stderr=None): + if not redirection_is_broken() or (stdout is None and stderr is None) or stdout == subprocess.PIPE or stderr == subprocess.PIPE: + return subprocess.Popen(cmd, stdout=stdout, stderr=stderr) + + old_stdout_fd = -1 + old_stderr_fd = -1 + + if stdout is not None: + old_stdout_fd = os.dup(1) + os.dup2(stdout.fileno(), 1) + + if stderr is not None: + old_stderr_fd = os.dup(2) + os.dup2(stderr.fileno(), 2) + + result = subprocess.Popen(cmd) + + if old_stdout_fd >= 0: + os.dup2(old_stdout_fd, 1) + os.close(old_stdout_fd) + + if old_stderr_fd >= 0: + os.dup2(old_stderr_fd, 2) + os.close(old_stderr_fd) + + return result + + +def subprocess_call(cmd, stdout=None, stderr=None): + process = subprocess_Popen(cmd, stdout, stderr) + return process.wait() + + +def get_package_data(): + pkg_string = "" + for line in sys.stdin: + if line.startswith("File="): + pkg_string = line.split("=", 1)[1].rstrip() + # Don't break, we need to exhaust stdin. + + if not pkg_string: + return 1 + + if pkg_string.startswith("/"): + # Absolute file. 
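+        # Example exchange (hypothetical path, for illustration only):
+        #   stdin : File=/tmp/hello-2.10-1.x86_64.rpm
+        #   stdout: PackageType=file
+        #           Name=hello
+        #           Version=2.10-1
+        #           Architecture=x86_64
+        # Name/Version/Architecture come from the rpm query below.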
+ sys.stdout.write("PackageType=file\n") + sys.stdout.flush() + return subprocess_call([rpm_cmd, "--qf", rpm_output_format, "-qp", pkg_string]) + elif re.search("[:,]", pkg_string): + # Contains an illegal symbol. + sys.stdout.write(line + "ErrorMessage: Package string with illegal format\n") + return 1 + else: + sys.stdout.write("PackageType=repo\n") + sys.stdout.write("Name=" + pkg_string + "\n") + return 0 + + +def list_installed(): + # Ignore everything. + sys.stdin.readlines() + + return subprocess_call([rpm_cmd, "-qa", "--qf", rpm_output_format]) + + +def list_updates(online): + global yum_options + for line in sys.stdin: + line = line.strip() + if line.startswith("options="): + option = line[len("options="):] + if option.startswith("-"): + yum_options.append(option) + elif option.startswith("enablerepo=") or option.startswith("disablerepo="): + yum_options.append("--" + option) + + online_flag = [] + if not online: + online_flag = ["-C"] + + process = subprocess_Popen([yum_cmd] + yum_options + online_flag + ["check-update"], stdout=subprocess.PIPE) + (stdoutdata, _) = process.communicate() + # analyze return code from `yum check-update`: + # 0 means no updates + # 1 means there was an error + # 100 means that there are available updates + if process.returncode == 1 and not online: + # If we get an error when listing local updates, try again using the + # online method, so that the cache is generated + process = subprocess_Popen([yum_cmd] + yum_options + ["check-update"], stdout=subprocess.PIPE) + (stdoutdata, _) = process.communicate() + if process.returncode != 100: + # either there were no updates or error happened + # Nothing to do for us here anyway + return process.returncode + lastline = "" + for line in stdoutdata.decode("utf-8").splitlines(): + # Combine multiline entries into one line. A line without at least three + # space separated fields gets combined with the next line, if that line + # starts with a space. + if lastline and (len(line) == 0 or not line[0].isspace()): + # Line does not start with a space. No combination. + lastline = "" + + line = lastline + line + match = re.match(r"^\S+\s+\S+\s+\S+", line) + if match is None: + # Keep line + lastline = line + continue + + lastline = "" + match = re.match(r"^(?P\S+)\.(?P[^.\s]+)\s+(?P\S+)\s+\S+\s*$", line) + if match is not None: + sys.stdout.write("Name=" + match.group("name") + "\n") + sys.stdout.write("Version=" + match.group("version") + "\n") + sys.stdout.write("Architecture=" + match.group("arch") + "\n") + + return 0 + + +# Returns a pair: +# List 1: Contains arguments for a single command line. +# List 2: Contains arguments for multiple command lines (see comments in +# repo_install()). +def one_package_argument(name, arch, version, is_yum_install): + args = [] + archs = [] + exists = False + + if arch: + archs.append(arch) + + if is_yum_install: + process = subprocess_Popen([rpm_cmd, "--qf", "%{arch}\n", + "-q", name], stdout=subprocess.PIPE) + existing_archs = [line.decode("utf-8").rstrip() for line in process.stdout] + process.wait() + if process.returncode == 0 and existing_archs: + exists = True + if not arch: + # Here we have no specified architecture and we are + # installing. If we have existing versions, operate + # on those, instead of the platform default. + archs += existing_archs + + version_suffix = "" + if version: + version_suffix = "-" + version + + if archs: + args += [name + version_suffix + "." 
+ arch for arch in archs] + else: + args.append(name + version_suffix) + + if exists and version: + return [], args + else: + return args, [] + + +# Returns a pair: +# List 1: Contains arguments for a single command line. +# List 2: Contains arguments for multiple command lines (see comments in +# repo_install()). This is a list of lists, where the logic is: +# list +# | +---- package1:amd64 -+ +# +- sublist ---+ +--- Do these together +# | +---- package1:i386 -+ +# | +# | +# | +---- package2:amd64 -+ +# +- sublist ---+ +--- And these together +# +---- package2:i386 -+ +def package_arguments_builder(is_yum_install): + name = "" + version = "" + arch = "" + single_cmd_args = [] # List of arguments + multi_cmd_args = [] # List of lists of arguments + old_name = "" + for line in sys.stdin: + line = line.strip() + if line.startswith("options="): + option = line[len("options="):] + if option.startswith("-"): + yum_options.append(option) + elif option.startswith("enablerepo=") or option.startswith("disablerepo="): + yum_options.append("--" + option) + if line.startswith("Name="): + if name: + # Each new "Name=" triggers a new entry. + single_list, multi_list = one_package_argument(name, arch, version, is_yum_install) + single_cmd_args += single_list + if name == old_name: + # Packages that differ only by architecture should be + # processed together + multi_cmd_args[-1] += multi_list + elif multi_list: + # Otherwise we process them individually. + multi_cmd_args += [multi_list] + + version = "" + arch = "" + + old_name = name + name = line.split("=", 1)[1].rstrip() + + elif line.startswith("Version="): + version = line.split("=", 1)[1].rstrip() + + elif line.startswith("Architecture="): + arch = line.split("=", 1)[1].rstrip() + + if name: + single_list, multi_list = one_package_argument(name, arch, version, is_yum_install) + single_cmd_args += single_list + if name == old_name: + # Packages that differ only by architecture should be + # processed together + multi_cmd_args[-1] += multi_list + elif multi_list: + # Otherwise we process them individually. + multi_cmd_args += [multi_list] + + return single_cmd_args, multi_cmd_args + + +def repo_install(): + # Due to how yum works we need to split repo installs into several + # components. + # + # 1. Installation of fresh packages is easy, we add all of them on one + # command line. + # 2. Upgrade of existing packages where no version has been specified is + # also easy, we add that to the same command line. + # 3. Up/downgrade of existing packages where version is specified is + # tricky, for several reasons: + # a) There is no one yum command that will do both, "install" or + # "upgrade" will only upgrade, and "downgrade" will only downgrade. + # b) There is no way rpm or yum will tell you which version is higher + # than the other, and we know from experience with the old package + # promise implementation that we don't want to try to do such a + # comparison ourselves. + # c) yum has no dry-run mode, so we cannot tell in advance which + # operation will succeed. + # d) yum will not even tell you whether operation succeeded when you + # run it for real + # + # So here's what we need to do. We start by querying each package to find + # out whether that exact version is installed. If it fulfills 1. or 2. we + # add it to that single command line. + # + # If we end up at 3. we need to split the work and do each package + # separately. We do: + # + # 1. Try to upgrade using "yum upgrade". + # 2. 
Query the package again, see if it is the right version now. + # 3. If not, try to downgrade using "yum downgrade". + # 4. Query the package again, see if it is the right version now. + # 5. Final safeguard, try installing using "yum install". This may happen + # in case we have one architecture already, but we are installing a + # second one. In this case only install will work. + # 6. (No need to check again, CFEngine will do the final check) + # + # This is considerably more expensive than what we do for apt, but it's the + # only way to cover all bases. In apt it will be one apt call for any number + # of packages, with yum it will in the worst case be: + # 1 + 5 * number_of_packages + # although a more common case will probably be: + # 1 + 2 * number_of_packages + # since it's unlikely that people will do a whole lot of downgrades + # simultaneously. + + ret = 0 + single_cmd_args, multi_cmd_args = package_arguments_builder(True) + + if single_cmd_args: + cmd_line = [yum_cmd] + yum_options + ["install"] + cmd_line.extend(single_cmd_args) + + ret = subprocess_call(cmd_line, stdout=NULLFILE) + + if multi_cmd_args: + for block in multi_cmd_args: + # Try to upgrade. + cmd_line = [yum_cmd] + yum_options + ["upgrade"] + block + subprocess_call(cmd_line, stdout=NULLFILE) + + # See if it succeeded. + success = True + for item in block: + cmd_line = [rpm_cmd] + rpm_quiet_option + ["-q", item] + if subprocess_call(cmd_line, stdout=NULLFILE) != 0: + success = False + break + + if success: + continue + + # Try to downgrade. + cmd_line = [yum_cmd] + yum_options + ["downgrade"] + block + subprocess_call(cmd_line, stdout=NULLFILE) + + # See if it succeeded. + success = True + for item in block: + cmd_line = [rpm_cmd] + rpm_quiet_option + ["-q", item] + if subprocess_call(cmd_line, stdout=NULLFILE) != 0: + success = False + break + + if success: + continue + + # Try to plain install. + cmd_line = [yum_cmd] + yum_options + ["install"] + block + subprocess_call(cmd_line, stdout=NULLFILE) + + # No final check. CFEngine will figure out that it's missing + # if it failed. + + # ret == 0 doesn't mean we succeeded with everything, but it's expensive to + # check, so let CFEngine do that. + return ret + + +def remove(): + cmd_line = [yum_cmd] + yum_options + ["remove"] + + # package_arguments_builder will always return empty second element in case + # of removals, so just drop it. | + # V + args = package_arguments_builder(False)[0] + + if args: + return subprocess_call(cmd_line + args, stdout=NULLFILE) + return 0 + + +def file_install(): + cmd_line = [yum_cmd] + yum_options + ["install"] + found = False + for line in sys.stdin: + if line.startswith("File="): + found = True + cmd_line.append(line.split("=", 1)[1].rstrip()) + + if not found: + return 0 + + return subprocess_call(cmd_line, stdout=NULLFILE) + + +def main(): + if len(sys.argv) < 2: + sys.stderr.write("Need to provide argument\n") + return 2 + + if sys.argv[1] == "internal-test-stderr": + # This will cause an exception if stderr is closed. 
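+        # os.fstat(2) probes file descriptor 2 itself; if the caller closed
+        # stderr, it raises OSError and we report the broken redirection.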
+ try: + os.fstat(2) + except OSError: + return 1 + return 0 + + elif sys.argv[1] == "supports-api-version": + sys.stdout.write("1\n") + return 0 + + elif sys.argv[1] == "get-package-data": + return get_package_data() + + elif sys.argv[1] == "list-installed": + return list_installed() + + elif sys.argv[1] == "list-updates": + return list_updates(True) + + elif sys.argv[1] == "list-updates-local": + return list_updates(False) + + elif sys.argv[1] == "repo-install": + return repo_install() + + elif sys.argv[1] == "remove": + return remove() + + elif sys.argv[1] == "file-install": + return file_install() + + else: + sys.stderr.write("Invalid operation\n") + return 2 + +sys.exit(main()) diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/zypper b/policies/lib/tree/10_ncf_internals/modules/packages/zypper new file mode 100755 index 00000000000..47d363695bf --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/zypper @@ -0,0 +1,465 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python +if command -v python3 >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +##################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +# This script is based on the CFEngine's masterfiles yum script: +# https://github.com/cfengine/masterfiles/blob/master/modules/packages/yum + +# Licensed under: +# MIT Public License +# Copyright 2021 Northern.tech AS + +import sys +import os +import subprocess +import re +from distutils.version import StrictVersion + + +rpm_cmd = os.environ.get('CFENGINE_TEST_RPM_CMD', "/bin/rpm") +rpm_quiet_option = ["--quiet"] +rpm_output_format = "Name=%{name}\nVersion=%|EPOCH?{%{epoch}:}:{}|%{version}-%{release}\nArchitecture=%{arch}\n" + +zypper_cmd = os.environ.get('CFENGINE_TEST_ZYPPER_CMD', "/usr/bin/zypper") +zypper_options = ["--quiet", "-n"] + +NULLFILE = open(os.devnull, 'w') + +redirection_is_broken_cached = -1 +zypper_supports_oldpackage_cached = -1 + +def zypper_supports_oldpackage(): + # Suse zypper "--oldpackage" option is only supported greater that 1.6.169 + global zypper_supports_oldpackage_cached + if zypper_supports_oldpackage_cached == -1: + zypper_version = subprocess.Popen([zypper_cmd, "-n", "--version"], stdout=subprocess.PIPE).communicate()[0] + zypper_version = zypper_version.decode().split(" ", 1)[1].rstrip() + if StrictVersion(zypper_version) >= StrictVersion("1.6.169"): + zypper_supports_oldpackage_cached = 1 + else: + zypper_supports_oldpackage_cached = 0 + return zypper_supports_oldpackage_cached + +def redirection_is_broken(): + # Older versions of Python have a bug where it is impossible to redirect + # stderr using subprocess, and any attempt at redirecting *anything*, not + # necessarily stderr, will result in it being closed instead. This is very + # bad, because RPM may then open its RPM database on file descriptor 2 + # (stderr), and will cause it to output error messages directly into the + # database file. Fortunately "stdout=subprocess.PIPE" doesn't have the bug, + # and that's good, because it would have been much more tricky to solve. 
+ global redirection_is_broken_cached + if redirection_is_broken_cached == -1: + cmd_line = [sys.argv[0], "internal-test-stderr"] + if subprocess.call(cmd_line, stdout=sys.stderr) == 0: + redirection_is_broken_cached = 0 + else: + redirection_is_broken_cached = 1 + + return redirection_is_broken_cached + + +def subprocess_Popen(cmd, stdout=None, stderr=None): + if not redirection_is_broken() or (stdout is None and stderr is None) or stdout == subprocess.PIPE or stderr == subprocess.PIPE: + return subprocess.Popen(cmd, stdout=stdout, stderr=stderr) + + old_stdout_fd = -1 + old_stderr_fd = -1 + + if stdout is not None: + old_stdout_fd = os.dup(1) + os.dup2(stdout.fileno(), 1) + + if stderr is not None: + old_stderr_fd = os.dup(2) + os.dup2(stderr.fileno(), 2) + + result = subprocess.Popen(cmd) + + if old_stdout_fd >= 0: + os.dup2(old_stdout_fd, 1) + os.close(old_stdout_fd) + + if old_stderr_fd >= 0: + os.dup2(old_stderr_fd, 2) + os.close(old_stderr_fd) + + return result + + +# If subprocess.PIPE is passed as stderr, it will re-write it in an ErrorMessage +def subprocess_call(cmd, stdout=None, stderr=None): + process = subprocess_Popen(cmd, stdout, stderr) + outs, errs = process.communicate() + if stderr == subprocess.PIPE: + lines = [line for line in errs.decode().splitlines()] + if len(lines): + printed_error = "ErrorMessage=" + " ".join(lines) + sys.stdout.write(printed_error) + sys.stdout.flush() + return process.returncode + + + +def get_package_data(): + pkg_string = "" + for line in sys.stdin: + if line.startswith("File="): + pkg_string = line.split("=", 1)[1].rstrip() + # Don't break, we need to exhaust stdin. + + if not pkg_string: + return 1 + + if pkg_string.startswith("/"): + # Absolute file. + sys.stdout.write("PackageType=file\n") + sys.stdout.flush() + subprocess_call([rpm_cmd, "--qf", rpm_output_format, "-qp", pkg_string], stderr=subprocess.PIPE) + return 0 + elif re.search("[:,]", pkg_string): + # Contains an illegal symbol. + sys.stdout.write(line + "ErrorMessage: Package string with illegal format\n") + return 1 + else: + sys.stdout.write("PackageType=repo\n") + sys.stdout.write("Name=" + pkg_string + "\n") + return 0 + + +def list_installed(): + # Ignore everything. + sys.stdin.readlines() + + subprocess_call([rpm_cmd, "-qa", "--qf", rpm_output_format]) + return 0 + +def list_updates(online): + # Ignore everything. + sys.stdin.readlines() + + online_flag = [] + if not online: + online_flag = ["--no-refresh"] + + process = subprocess_Popen([zypper_cmd] + zypper_options + online_flag + ["list-updates"], stdout=subprocess.PIPE) + + for line in (line.decode() for line in process.stdout): + +# Zypper's output looks like: +# +# S | Repository | Name | Current Version | Available Version | Arch +# --+-------------------+--------------+------------------------------------+------------------------------------------------+------- +# v | Rudder repository | rudder-agent | 1398866025:3.2.6.release-1.SLES.11 | 1398866025:3.2.7.rc1.git201609190419-1.SLES.11 | x86_64 +# +# Which gives: +# +# v | Rudder repository | rudder-agent | 1398866025:3.2.6.release-1.SLES.11 | 1398866025:3.2.7.rc1.git201609190419-1.SLES.11 | x86_64 +# may contain package name old version, ignore it version available architecture +# special chars +# v\s+\|[^\|]+\ |\s+(?P\S+)\s+\|\s+\S+\s+\ |\s+(?P\S+)\s+\ |\s+(?P\S+)\s*$ + +# The first char will always be "v" which means there is a new version available on search outputs. 
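+# For the sample row above, the loop below would emit (illustrative only):
+#   Name=rudder-agent
+#   Version=1398866025:3.2.7.rc1.git201609190419-1.SLES.11
+#   Architecture=x86_64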
+ + match = re.match(r"v\s+\|[^\|]+\|\s+(?P\S+)\s+\|\s+\S+\s+\|\s+(?P\S+)\s+\|\s+(?P\S+)\s*$", line) + if match is not None: + sys.stdout.write("Name=" + match.group("name") + "\n") + sys.stdout.write("Version=" + match.group("version") + "\n") + sys.stdout.write("Architecture=" + match.group("arch") + "\n") + + return 0 + + +# Returns a pair: +# List 1: Contains arguments for a single command line. +# List 2: Contains arguments for multiple command lines (see comments in +# repo_install()). +def one_package_argument(name, arch, version, is_zypper_install): + args = [] + archs = [] + exists = False + + if arch: + archs.append(arch) + + if is_zypper_install: + process = subprocess_Popen([rpm_cmd, "--qf", "%{arch}\n", + "-q", name], stdout=subprocess.PIPE) + existing_archs = [line.decode().rstrip() for line in process.stdout] + process.wait() + if process.returncode == 0 and existing_archs: + exists = True + if not arch: + # Here we have no specified architecture and we are + # installing. If we have existing versions, operate + # on those, instead of the platform default. + archs += existing_archs + + version_suffix = "" + if version: + version_suffix = "=" + version + + if archs: + args += [name + "." + arch + version_suffix for arch in archs] + else: + args.append(name + version_suffix) + + if exists and version: + return [], args + else: + return args, [] + + +# Returns a pair: +# List 1: Contains arguments for a single command line. +# List 2: Contains arguments for multiple command lines (see comments in +# repo_install()). This is a list of lists, where the logic is: +# list +# | +---- package1:amd64 -+ +# +- sublist ---+ +--- Do these together +# | +---- package1:i386 -+ +# | +# | +# | +---- package2:amd64 -+ +# +- sublist ---+ +--- And these together +# +---- package2:i386 -+ +def package_arguments_builder(is_zypper_install): + name = "" + version = "" + arch = "" + single_cmd_args = [] # List of arguments + multi_cmd_args = [] # List of lists of arguments + old_name = "" + for line in sys.stdin: + if line.startswith("Name="): + if name: + # Each new "Name=" triggers a new entry. + single_list, multi_list = one_package_argument(name, arch, version, is_zypper_install) + single_cmd_args += single_list + if name == old_name: + # Packages that differ only by architecture should be + # processed together + multi_cmd_args[-1] += multi_list + elif multi_list: + # Otherwise we process them individually. + multi_cmd_args += [multi_list] + + version = "" + arch = "" + + old_name = name + name = line.split("=", 1)[1].rstrip() + + elif line.startswith("Version="): + version = line.split("=", 1)[1].rstrip() + + elif line.startswith("Architecture="): + arch = line.split("=", 1)[1].rstrip() + + if name: + single_list, multi_list = one_package_argument(name, arch, version, is_zypper_install) + single_cmd_args += single_list + if name == old_name: + # Packages that differ only by architecture should be + # processed together + multi_cmd_args[-1] += multi_list + elif multi_list: + # Otherwise we process them individually. + multi_cmd_args += [multi_list] + + return single_cmd_args, multi_cmd_args + + +def repo_install(): + # Due to how zypper works we need to split repo installs into several + # components. + # + # 1. Installation of fresh packages is easy, we add all of them on one + # command line. + # 2. Upgrade of existing packages where no version has been specified is + # also easy, we add that to the same command line. + # 3. 
Up/downgrade of existing packages where version is specified is + # tricky, for several reasons: + # a) There is no one zypper command that will do both, "install" or + # "upgrade" will only upgrade, and "downgrade" will only downgrade. + # b) There is no way rpm or zypper will tell you which version is higher + # than the other, and we know from experience with the old package + # promise implementation that we don't want to try to do such a + # comparison ourselves. + # c) zypper has no dry-run mode, so we cannot tell in advance which + # operation will succeed. + # d) zypper will not even tell you whether operation succeeded when you + # run it for real + # + # So here's what we need to do. We start by querying each package to find + # out whether that exact version is installed. If it fulfills 1. or 2. we + # add it to that single command line. + # + # If we end up at 3. we need to split the work and do each package + # separately. We do: + # + # 1. Try to upgrade using "zypper upgrade". + # 2. Query the package again, see if it is the right version now. + # 3. If not, try to downgrade using "zypper downgrade". + # 4. Query the package again, see if it is the right version now. + # 5. Final safeguard, try installing using "zypper install". This may happen + # in case we have one architecture already, but we are installing a + # second one. In this case only install will work. + # 6. (No need to check again, CFEngine will do the final check) + # + # This is considerably more expensive than what we do for apt, but it's the + # only way to cover all bases. In apt it will be one apt call for any number + # of packages, with zypper it will in the worst case be: + # 1 + 5 * number_of_packages + # although a more common case will probably be: + # 1 + 2 * number_of_packages + # since it's unlikely that people will do a whole lot of downgrades + # simultaneously. + + single_cmd_args, multi_cmd_args = package_arguments_builder(True) + + if single_cmd_args: + + cmd_line = [zypper_cmd] + zypper_options + ["install"] + + if zypper_supports_oldpackage(): + cmd_line += ["--oldpackage"] + + cmd_line.extend(single_cmd_args) + + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + + if multi_cmd_args: + for block in multi_cmd_args: + # Try to upgrade. + cmd_line = [zypper_cmd] + zypper_options + ["update"] + block + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + + # See if it succeeded. + success = True + for item in block: + cmd_line = [rpm_cmd] + rpm_quiet_option + ["-q", item] + if subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) != 0: + success = False + break + + if success: + continue + + # Try to plain install. + + cmd_line = [zypper_cmd] + zypper_options + ["install"] + + if zypper_supports_oldpackage(): + cmd_line += ["--oldpackage"] + + cmd_line += block + + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + + # No final check. CFEngine will figure out that it's missing + # if it failed. + + # return 0 doesn't mean we succeeded with everything, but it's expensive to + # check, so let CFEngine do that. + return 0 + + +def remove(): + cmd_line = [zypper_cmd] + zypper_options + ["remove"] + + # package_arguments_builder will always return empty second element in case + # of removals, so just drop it. 
| + # V + args = package_arguments_builder(False)[0] + + if args: + return subprocess_call(cmd_line + args, stdout=NULLFILE, stderr=subprocess.PIPE) + return 0 + + +def file_install(): + cmd_line = [rpm_cmd] + rpm_quiet_option + ["--force", "-U"] + found = False + for line in sys.stdin: + if line.startswith("File="): + found = True + cmd_line.append(line.split("=", 1)[1].rstrip()) + + if not found: + return 0 + + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + return 0 + + +def main(): + if len(sys.argv) < 2: + sys.stderr.write("Need to provide argument\n") + return 2 + + if sys.argv[1] == "internal-test-stderr": + # This will cause an exception if stderr is closed. + try: + os.fstat(2) + except OSError: + return 1 + return 0 + + elif sys.argv[1] == "supports-api-version": + sys.stdout.write("1\n") + return 0 + + elif sys.argv[1] == "get-package-data": + return get_package_data() + + elif sys.argv[1] == "list-installed": + return list_installed() + + elif sys.argv[1] == "list-updates": + return list_updates(True) + + elif sys.argv[1] == "list-updates-local": + return list_updates(False) + + elif sys.argv[1] == "repo-install": + return repo_install() + + elif sys.argv[1] == "remove": + return remove() + + elif sys.argv[1] == "file-install": + return file_install() + + else: + sys.stderr.write("Invalid operation\n") + return 2 + +sys.exit(main()) diff --git a/policies/lib/tree/10_ncf_internals/modules/packages/zypper_pattern b/policies/lib/tree/10_ncf_internals/modules/packages/zypper_pattern new file mode 100755 index 00000000000..8ecfca96ce2 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/packages/zypper_pattern @@ -0,0 +1,342 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python +if command -v python3 >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +##################################################################################### +# Copyright 2018 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### + +# This script is based on the CFEngine's masterfiles yum script: +# https://github.com/cfengine/masterfiles/blob/master/modules/packages/yum + +# Licensed under: +# MIT Public License +# Copyright (C) 2012-2014 CFEngine AS + +import sys +import os +import subprocess +import re + + +rpm_cmd = os.environ.get('CFENGINE_TEST_RPM_CMD', "/bin/rpm") +rpm_quiet_option = ["--quiet"] +# The pattern name can not be retrieved from a rpm simple query since its name may differ from the package name... 
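+# A pattern rpm instead advertises its pattern name through its Provides,
+# e.g. "pattern() = base" for a hypothetical patterns-base package, which is
+# why get_package_data() below greps `rpm -qp --provides`.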
+rpm_output_format = "Version=%{version}-%{release}\nArchitecture=%{arch}\n" + +zypper_cmd = os.environ.get('CFENGINE_TEST_ZYPPER_CMD', "/usr/bin/zypper") +zypper_options = ["--quiet", "-n"] + +NULLFILE = open(os.devnull, 'w') + + +redirection_is_broken_cached = -1 + +def redirection_is_broken(): + # Older versions of Python have a bug where it is impossible to redirect + # stderr using subprocess, and any attempt at redirecting *anything*, not + # necessarily stderr, will result in it being closed instead. This is very + # bad, because RPM may then open its RPM database on file descriptor 2 + # (stderr), and will cause it to output error messages directly into the + # database file. Fortunately "stdout=subprocess.PIPE" doesn't have the bug, + # and that's good, because it would have been much more tricky to solve. + global redirection_is_broken_cached + if redirection_is_broken_cached == -1: + cmd_line = [sys.argv[0], "internal-test-stderr"] + if subprocess.call(cmd_line, stdout=sys.stderr) == 0: + redirection_is_broken_cached = 0 + else: + redirection_is_broken_cached = 1 + + return redirection_is_broken_cached + +def subprocess_Popen(cmd, stdout=None, stderr=None): + if not redirection_is_broken() or (stdout is None and stderr is None) or stdout == subprocess.PIPE or stderr == subprocess.PIPE: + return subprocess.Popen(cmd, stdout=stdout, stderr=stderr) + + old_stdout_fd = -1 + old_stderr_fd = -1 + + if stdout is not None: + old_stdout_fd = os.dup(1) + os.dup2(stdout.fileno(), 1) + + if stderr is not None: + old_stderr_fd = os.dup(2) + os.dup2(stderr.fileno(), 2) + + result = subprocess.Popen(cmd) + + if old_stdout_fd >= 0: + os.dup2(old_stdout_fd, 1) + os.close(old_stdout_fd) + + if old_stderr_fd >= 0: + os.dup2(old_stderr_fd, 2) + os.close(old_stderr_fd) + + return result + + +def subprocess_call(cmd, stdout=None, stderr=None): + process = subprocess_Popen(cmd, stdout, stderr) + outs, errs = process.communicate() + if stderr == subprocess.PIPE: + lines = [line for line in errs.decode("utf-8").splitlines()] + if len(lines): + printed_error = "ErrorMessage=" + " ".join(lines) + sys.stdout.write(printed_error) + sys.stdout.flush() + return process.returncode + +# When retrieving data from a rpm file, it will only works with quite new +# pattern building as describe in https://doc.opensuse.org/projects/libzypp/HEAD/zypp-pattern-packages.html +# Older formats, xml based will fail. +def get_package_data(): + pkg_string = "" + for line in sys.stdin: + if line.startswith("File="): + pkg_string = line.split("=", 1)[1].rstrip() + # Don't break, we need to exhaust stdin. + + if not pkg_string: + return 1 + + if pkg_string.startswith("/"): + # Absolute file. + sys.stdout.write("PackageType=file\n") + sys.stdout.flush() + + rpmProcess = subprocess_Popen([rpm_cmd, "-qp", "--provides", pkg_string], stdout=subprocess.PIPE, stderr=NULLFILE) + for line in rpmProcess.stdout: + name = re.match(r"pattern\(\)\s+=+\s+(?P[\S]+)[\s\S]+", line.decode("utf-8")) + if name is not None: + sys.stdout.write("Name=" + name.group("name").rstrip("\n") + "\n") + sys.stdout.flush() + return subprocess_call([rpm_cmd, "--qf", rpm_output_format, "-qp", pkg_string], stderr=subprocess.PIPE) + sys.stdout.write("File=" + pkg_string + "\nErrorMessage: Package pattern name not found\n") + return 1 + + elif re.search("[:,]", pkg_string): + # Contains an illegal symbol. 
+ sys.stdout.write(line + "ErrorMessage: Package string with illegal format\n") + return 1 + else: + sys.stdout.write("PackageType=repo\n") + sys.stdout.write("Name=" + pkg_string + "\n") + return 0 + + +def list_installed(): + # Ignore everything. + sys.stdin.readlines() + + patterns = list_installed_patterns() + if patterns == "": + sys.stdout.write("") + else: + patterns_array = list(set(patterns.split(' '))) + # We can not put the options there since the quiet one truncate the output here + command = [zypper_cmd] + ["-n", "info", "-t", "pattern"] + patterns_array + + process = subprocess_Popen(command , stdout=subprocess.PIPE, stderr=NULLFILE) + for line in process.stdout: + name = re.match(r"Name\s*: (?P\S+)", line.decode("utf-8")) + version = None + arch = re.match(r"Arch\s*: (?P\S+)", line.decode("utf-8")) + if name is not None: + sys.stdout.write("Name=" + name.group("name").rstrip("\n") + "\n") + + # The version showed in zypper info is the latest available version + zypperProcess = subprocess_Popen([zypper_cmd] + ["patterns", "--installed-only"], stdout=subprocess.PIPE, stderr=NULLFILE) + grepProcess = subprocess.Popen(["grep", name.group("name")], stdin=zypperProcess.stdout, stdout=subprocess.PIPE) + cutProcess = subprocess.Popen(["awk", "{print $5}"], stdin=grepProcess.stdout, stdout=subprocess.PIPE, bufsize=1) + + zypperProcess.wait() + grepProcess.wait() + zypperProcess.stdout.close() + grepProcess.stdout.close() + cutProcess.wait() + + version = cutProcess.stdout.readline() + cutProcess.stdout.close() + if version is not None: + sys.stdout.write("Version=" + version.decode("utf-8")) + elif arch is not None: + sys.stdout.write("Architecture=" + arch.group("arch").rstrip("\n") + "\n") + process.stdout.close() + return 0 + +def list_installed_patterns(): +# Return a one line space separated list of the patterns installed +# Zypper's output looks like: +# S | Name | Version | Repository | Dependency +# --+---------+---------+------------+----------- +# i | 32bit | 12-64.3 | @System | +# + command = [zypper_cmd] + zypper_options + ["-t", "patterns", "--installed-only"] + process = subprocess_Popen(command, stdout=subprocess.PIPE) + patterns = "" + for line in process.stdout: + match = re.match(r"i\+?\s+\|\s+(?P\S+)\s+\|.*$", line.decode("utf-8")) + if match is not None: + patterns += match.group("name") + " " + process.stdout.close() + return patterns.strip() + + +# Local update support has not been tested. +def list_updates(online): + # Assume that the output for packages and patterns are the same.... + # Ignore everything. + sys.stdin.readlines() + + online_flag = [] + if not online: + online_flag = ["--no-refresh"] + + process = subprocess_Popen([zypper_cmd] + zypper_options + online_flag + ["list-updates", "-t", "pattern"], stdout=subprocess.PIPE) + lastline = "" + for line in process.stdout: + +# Zypper's output looks like: +# +# S | Repository | Name | Available Version | Arch +# --+------------+------------------+-------------------+------- +# v | local | rudder_test_repo | 3-64.3 | x86_64 +# +# Which gives: +# +# v | local | rudder_test_repo | 3-64.3 | x86_64 +# may contain package name version available architecture +# special chars +# v\s+\|[^\|]+ \|\s+(?P\S+)\s+ \|\s+(?P\S+)\s+ \|\s+(?P\S+)\s*$ +# +# The first char will always be "v" which means there is a new version available on search outputs. 
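+# Given the sample row above, the parsing below would produce (illustrative
+# only): Name=rudder_test_repo, Version=3-64.3, Architecture=x86_64.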
+ + match = re.match(r"v\s+\|[^\|]+\|\s+(?P\S+)\s+\|\s+(?P\S+)\s+\|\s+(?P\S+)\s*$", line.decode("utf-8")) + if match is not None: + sys.stdout.write("Name=" + match.group("name") + "\n") + sys.stdout.write("Version=" + match.group("version") + "\n") + sys.stdout.write("Architecture=" + match.group("arch") + "\n") + + return 0 + +# Parse the stdin and build a list of complete package names +# Format: package_base_name<.architecture><{=//<=/>=}version}> +# Only "=" operator is supported +def construct_complete_package_names(): + complete_names = [] + pattern = "" + version = "" + architecture = "" + for line in sys.stdin: + flag=line.split("=", 1)[0].rstrip() + if (flag == 'Name'): + version = "" + architecture = "" + pattern = line.split("=", 1)[1].rstrip() + if (flag == 'Version'): + #The only operator supported here is "=" + version = "=" + line.split("=", 1)[1].rstrip() + if (flag == 'Architecture'): + architecture = "." + line.split("=", 1)[1].rstrip() + if (flag == 'Options'): + pass + complete_names.append(pattern + architecture + version) + return complete_names + + +def remove(): + cmd_line = [zypper_cmd] + zypper_options + ["remove", "-t", "pattern"] + cmd_line += construct_complete_package_names() + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + return 0 + + +def file_install(): + cmd_line = [zypper_cmd] + zypper_options + ["in", "-t", "pattern"] + found = False + for line in sys.stdin: + if line.startswith("File="): + found = True + cmd_line.append(line.split("=", 1)[1].rstrip()) + + if not found: + return 0 + + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + return 0 + +def repo_install(): + cmd_line = [zypper_cmd] + zypper_options + ["in", "-t", "pattern"] + cmd_line += construct_complete_package_names() + subprocess_call(cmd_line, stdout=NULLFILE, stderr=subprocess.PIPE) + return 0 + +def main(): + if len(sys.argv) < 2: + sys.stderr.write("Need to provide argument\n") + return 2 + + if sys.argv[1] == "internal-test-stderr": + # This will cause an exception if stderr is closed. 
+ try: + os.fstat(2) + except OSError: + return 1 + return 0 + + elif sys.argv[1] == "supports-api-version": + sys.stdout.write("1\n") + return 0 + + elif sys.argv[1] == "get-package-data": + return get_package_data() + + elif sys.argv[1] == "list-installed": + return list_installed() + + elif sys.argv[1] == "list-updates": + return list_updates(True) + + elif sys.argv[1] == "list-updates-local": + return list_updates(False) + + elif sys.argv[1] == "repo-install": + return repo_install() + + elif sys.argv[1] == "remove": + return remove() + + elif sys.argv[1] == "file-install": + return file_install() + + else: + sys.stderr.write("Invalid operation\n") + return 2 + +sys.exit(main()) diff --git a/policies/lib/tree/10_ncf_internals/modules/promises/getParent.py b/policies/lib/tree/10_ncf_internals/modules/promises/getParent.py new file mode 100755 index 00000000000..4357346bfd5 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/promises/getParent.py @@ -0,0 +1,46 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python +if command -v python3 >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +import os +import sys +import glob + +def get_parent(path): + return os.path.dirname(os.path.realpath(os.path.normpath(path))) + +def get_parents(path, parent_dirs): + if (os.path.isfile(path)): + current_dir = get_parent(path) + else: + current_dir = os.path.realpath(os.path.normpath(path)) + + while current_dir not in parent_dirs and current_dir != '/': + parent_dirs.add(current_dir) + current_dir = get_parent(current_dir) + return parent_dirs + +def print_result(directories): + output = "@parentDirectories={ " + for i in directories: + output += "\"{0}\",".format(i) + print(output[:-1] + "}") + +def exec_module(user_input): + parent_dirs = set() + targets = glob.glob(user_input) + for i in targets: + parents_dirs = get_parents(i, parent_dirs) + return sorted(parent_dirs) + +if __name__ == '__main__': + print_result(exec_module(sys.argv[1])) diff --git a/policies/lib/tree/10_ncf_internals/modules/templates/datastate.json.tpl b/policies/lib/tree/10_ncf_internals/modules/templates/datastate.json.tpl new file mode 100644 index 00000000000..52afca3e9e0 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/templates/datastate.json.tpl @@ -0,0 +1 @@ +{{%-top-}} diff --git a/policies/lib/tree/10_ncf_internals/modules/templates/jinja2-templating.py b/policies/lib/tree/10_ncf_internals/modules/templates/jinja2-templating.py new file mode 100755 index 00000000000..ce9ee6a2393 --- /dev/null +++ b/policies/lib/tree/10_ncf_internals/modules/templates/jinja2-templating.py @@ -0,0 +1,133 @@ +#!/bin/sh +# vim: syntax=python +''':' +# First try to run this script with python3, else run with python +if command -v python3 >/dev/null 2>/dev/null && python3 -c "import jinja2" >/dev/null 2>/dev/null; then + exec python3 "$0" "$@" +elif command -v python >/dev/null 2>/dev/null && python2 -c "import jinja2" >/dev/null 2>/dev/null; then + exec python "$0" "$@" +else + exec python2 "$0" "$@" +fi +''' + +##################################################################################### +# Copyright 2016 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License 
as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +##################################################################################### + +# Script for jinja2 templating. +# Needs package python-jinja2 present on the system. +# It will be copied into ${workdir}/modules by ncf initialization process. +# Can be extended with extra filters and tests with script call jinja2_custom.py +# loaded from /var/rudder/ncf/local/10_ncf_internals/modules/extensions/ +# Source of this script need to be /var/rudder/configuration-repository/ncf/10_ncf_internals/modules/extensions on the Rudder Server + +import sys +import os +from optparse import OptionParser + +import jinja2 +from jinja2 import Environment, FileSystemLoader, StrictUndefined + +try: + import simplejson as json +except ImportError: + import json + +PY3 = sys.version_info > (3,) + +def render(opts, args): + if len(args) == 1: + data = sys.stdin.read() + else: + path = os.path.join(os.getcwd(), os.path.expanduser(args[1])) + if PY3: + data_file = open(path, encoding='utf-8') + else: + data_file = open(path) + data = data_file.read() + data_file.close() + + template_path = os.path.abspath(args[0]) + + try: + data = json.loads(data) + except ValueError as err: + sys.stderr.write(str(err)) + sys.exit(1) + + # keep_trailing_newline appeared in jinja 2.7, see http://jinja.pocoo.org/docs/dev/api/ + # we add a case for this as it can be really important in configuration management context + if [int(x) for x in jinja2.__version__.split(".")[0:2]] >= [2, 7]: + env = Environment( + loader=FileSystemLoader(os.path.dirname(template_path)), + keep_trailing_newline=True + ) + else: + env = Environment( + loader=FileSystemLoader(os.path.dirname(template_path)), + ) + + if opts.strict: + env.undefined = StrictUndefined + + # Register custom filters + sys.path.append(os.path.join(os.path.dirname(__file__), "..", "extensions")) + # importlib was introduced in 3.4 and pkgutil deprecated in 3.12 in favor of it + try: + import importlib.util + custom_filters = importlib.util.find_spec("jinja2_custom") is not None + except: + import pkgutil + custom_filters = pkgutil.find_loader('jinja2_custom') is not None + + if custom_filters: + import jinja2_custom # pylint: disable=import-error + if hasattr(jinja2_custom, 'FILTERS'): + from jinja2_custom import FILTERS as CUSTOM_FILTERS # pylint: disable=import-error + env.filters.update(CUSTOM_FILTERS) + if hasattr(jinja2_custom, 'TESTS'): + from jinja2_custom import TESTS as CUSTOM_TESTS # pylint: disable=import-error + env.tests.update(CUSTOM_TESTS) + sys.path.pop() + + if PY3: + output = env.get_template(os.path.basename(template_path)).render(data) + else: + output = env.get_template(os.path.basename(template_path)).render(data).encode("utf-8") + + sys.stdout.write(output) + +def main(): + parser = OptionParser( + usage="usage: %prog [options] [data_file]", + ) + parser.add_option( + '--strict', + help='fail when using undefined variables in the template', + dest='strict', action='store_true') + opts, args = parser.parse_args() + + if len(args) not in [1, 2]: + parser.print_help() + sys.exit(1) + + render(opts, args) + sys.exit(0) + +if __name__ == 
'__main__': + main() diff --git a/policies/lib/tree/20_cfe_basics/README.md b/policies/lib/tree/20_cfe_basics/README.md new file mode 100644 index 00000000000..d5d0eafe6ef --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/README.md @@ -0,0 +1,17 @@ +## 20_cfe_basics + +This directory contains libraries with utility bodies that can be reused. Most notably, it contains the cfengine standard library. + +The `cfengine` folder contains the CFEngine stdlib from 3.21.1. + +The changes made are: + +* Removing some files we do not use and break ncf policies: stdlib.cf, autorun.cf +* Removing some files used to manage CFEngine hubs: cfe_internal_hub.cf, cfengine_enterprise_hub_ha.cf +* Changing all body action in common.cf to take the dry_run classes into account +* Add back `_not_repaired` classes in classes_generic (https://tracker.mender.io/browse/CFE-1843) +* Rename package_present and package_absent in packages.cf to `_legacy` to avoid conflict with the generic methods +* Use cache expire from `ncf_def` instead of `def` bundle in `packages.cf` +* Set `common_knowledge.list_update_ifelapsed` in `packages.cf` to `${node.properties[rudder][packages][updates_cache_expire]}` +* Don't use CFEngine's python path workarounds in package bodies +* Removing file used to manage repository: vcs.cf diff --git a/policies/lib/tree/20_cfe_basics/abort.cf b/policies/lib/tree/20_cfe_basics/abort.cf new file mode 100644 index 00000000000..254c2459d4e --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/abort.cf @@ -0,0 +1,74 @@ +##################################################################################### +# Copyright 2020 Normation SAS +##################################################################################### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +##################################################################################### + +# @name Abort +# @description Standard agent abort method +# +# @parameter reason The reason to abort the agent +# @parameter message The human readable message to display +# +# As this ends with an abort, we define no result conditions +# + +bundle agent _abort(reason, message) +{ + classes: + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + pass2:: + # Wait for pass2 to be able to call the handlers before aborting + "abort_agent_run" expression => "any"; + + methods: + pass1:: + # Calling the appropriate bundle(s) + "wrapper for ${configuration.enabled_abort_handlers}" + usebundle => ${configuration.enabled_abort_handlers}("${reason}", "${message}"), + action => immediate_ignore_dry_run, + comment => "Call the ${method} bundle with arguments ${reason}, ${message}"; +} + +# @name Abort default +# @description Standard default agent abort handler +# +# @parameter reason The reason to abort the agent +# @parameter message The human readable message to display +# +# As this ends with an abort, we define no result conditions +# + +bundle agent _abort_default(reason, message) +{ + reports: + "${configuration.fatal} Aborting agent run because of ${reason}: ${message}" + action => immediate_ignore_dry_run; +} + +# @name Abort rudder +# @description Special handler when aborting from ncf +# +# @parameter reason The reason to abort the agent +# @parameter message The human readable message to display +# +bundle agent abort_rudder(reason, message) +{ + methods: + "abort log" usebundle => rudder_common_report("Common", "log_info", "${system_common.directiveId}", "Abort run", "${reason}", "${message}"); + "end run" usebundle => endExecution; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/README.md b/policies/lib/tree/20_cfe_basics/cfengine/README.md new file mode 100644 index 00000000000..a5e9083305f --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/README.md @@ -0,0 +1,55 @@ +This directory contains the CFEngine Standard Library, previously +referred to as the Common Open Promise Body Library. + +Layout of this `lib` directory: + +* `lib/*.cf`: Re-unified library compatible with 3.7+ + - *autorun.cf*: This file contains bundles that support automatic activation + of bundles based on tags as well as automatically adding policy files found + in services/autorun to inputs. + - *bundles.cf*: This file contains bundles that are generically useful. For + example manage cron entries, recursively deleting directories, checking if + a url is repsonding, and merging multiple data containers together. + - *cfe_internal.cf*: This file defines bodies and bundles that are related to + general CFEngine management. For example purging old log files. + - *cfe_internal_hub.cf*: This file defines bodies and bundles that are + releated to CFEngine Enterprise Hub management. + - *cfengine_enterprise_hub_ha.cf*: This file defines bodies and bundles that + are related to managing High Availability on CFEngine Enterprise Hubs. + - *commands.cf*: This file contains bodies and bundles that are useful when + running commands. For example suppressing command output, or controlling + which user or group should be executing the command. + - *common.cf*: This file contains bodies and bundles that are useful across + the board. 
For example bodies that help to define classes based on promise + outcomes, bodies to control logging for specific promsies, and bodies to help + control how frequently promises get activated. + - *databases.cf*: This file contains bodies and bundles useful when managing + databases like Postgres and the Windows Registry. + - *edit_xml.cf*: This file contains bodies and bundles useful when managing + xml files. + - *examples.cf*: This file contains examples of other useful bundles like + activating a bundle based on probability. + - *feature.cf*: This file defines a bundle to help manage classes to identify + contexts that should be set or not set. It can be useful for turning + certain aspects of policy on or off for a given amount of time. + - *files.cf*: This file defines bodies and bundles that are useful for + managing files. + - *guest_environments.cf*: This file defines bodies and bundles useful for + manageing guest environments (Virtual Machines). + - *monitor.cf*: This file defines bodies and bundles useful when measuring + values with cf-monitord (Enterprise only) + - *packages.cf*: This file defines bodies and bundles releated to + package management. + - *paths.cf*: This file defines paths to well known binaries for + various platforms. + - *processes.cf*: This file defines bodies and bundles useful for + managing processes. + - *services.cf*: This file defines service methods for use with + services type promises. + - *stdlib.cf*: This file includes the commonly used library files + - *storage.cf*: This file defines storage related bodies and bundles + for working with mounts and volumes. + - *users.cf*: This file defines bodies and bundles related to local + user management + - *vcs.cf*: This file defines bodies and bundles useful for + interacting with version control systems. diff --git a/policies/lib/tree/20_cfe_basics/cfengine/bundles.cf b/policies/lib/tree/20_cfe_basics/cfengine/bundles.cf new file mode 100644 index 00000000000..54044b106ab --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/bundles.cf @@ -0,0 +1,500 @@ +# Bundles + +bundle common bundles_common +# @brief Enumerate policy files used by this policy file for inclusion to inputs +{ + vars: + "inputs" slist => { "$(this.promise_dirname)/paths.cf", + "$(this.promise_dirname)/files.cf", + "$(this.promise_dirname)/commands.cf" }; +} + +body file control +# @brief Include policy files used by this policy file as part of inputs +{ + inputs => { @(bundles_common.inputs) }; +} + +################################################### +# agent bundles +################################################### + +bundle agent cronjob(commands,user,hours,mins) +# @brief Defines a cron job for `user` +# +# Adds a line to crontab, if necessary. +# +# @param commands The commands that should be run +# @param user The owner of crontab +# @param hours The hours at which the job should run +# @param mins The minutes at which the job should run +# +# **Example:** +# +# ```cf3 +# methods: +# "cron" usebundle => cronjob("/bin/ls","mark","*","5,10"); +# ``` +{ + vars: + suse|sles:: + "crontab" string => "/var/spool/cron/tabs"; + redhat|fedora:: + "crontab" string => "/var/spool/cron"; + freebsd:: + "crontab" string => "/var/cron/tabs"; + !(suse|sles|redhat|fedora|freebsd):: + "crontab" string => "/var/spool/cron/crontabs"; + + any:: + # We escape the user supplied values so that we can search to see if the + # entry already exists with slightly different spacing. 
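+ # Illustration, using the values from the doc example above:
+ # cronjob("/bin/ls","mark","*","5,10") appends the line "5,10 * * * * /bin/ls"
+ # to mark's crontab. escape() backslash-escapes regex metacharacters (e.g. "*"
+ # becomes "\*") so the regline() idempotence check below matches the literal
+ # crontab entry instead of treating it as a pattern.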
+ "e_mins" string => escape("$(mins)"); + "e_hours" string => escape("$(hours)"); + "e_commands" string => escape("$(commands)"); + + classes: + !windows:: + + # We tolerate existing entries that differ only in whitespace and avoid + # entering duplicate entries. + + "present_with_potentially_different_spacing" + expression => regline( "^$(e_mins)\s+$(e_hours)\s+\*\s+\*\s+\*\s+$(e_commands)", "$(crontab)/$(user)"); + + files: + + !windows.!present_with_potentially_different_spacing:: + "$(crontab)/$(user)" + + comment => "A user's regular batch jobs are added to this file", + create => "true", + edit_line => append_if_no_line("$(mins) $(hours) * * * $(commands)"), + perms => mo("600","$(user)"), + classes => if_repaired("changed_crontab"); + + processes: + + changed_crontab:: + "cron" + comment => "Most crons need to be huped after file changes", + signals => { "hup" }; + +} + +bundle agent rm_rf(name) +# @brief recursively remove `name` to any depth, including base +# @depends rm_rf_depth +# @param name the file or directory name +# +# This bundle will remove `name` to any depth, including `name` itself. +# +# **Example:** +# +# ```cf3 +# methods: +# "bye" usebundle => rm_rf("/var/tmp/oldstuff"); +# ``` +{ + methods: + "rm" usebundle => rm_rf_depth($(name),"inf"); + +} + +bundle agent rm_rf_depth(name,depth) +# @brief recursively remove `name` to depth `depth`, including base +# @depends recurse_with_base tidy all +# @param name the file or directory name +# @param depth how far to descend +# +# This bundle will remove `name` to depth `depth`, including `name` itself. +# +# **Example:** +# +# ```cf3 +# methods: +# "bye" usebundle => rm_rf_depth("/var/tmp/oldstuff", "100"); +# ``` +{ + classes: + "isdir" expression => isdir($(name)); + files: + isdir:: + "$(name)" + file_select => all, + depth_search => recurse_with_base($(depth)), + delete => tidy; + + "$(name)/." 
+ delete => tidy; + + !isdir:: + "$(name)" delete => tidy; +} + +bundle agent fileinfo(f) +# @brief provide access to file stat fields from the bundle caller and report +# file stat info for file "f" if "verbose_mode" class is defined +# @param f file or files to stat +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# vars: +# "files" slist => { "/tmp/example1", "/tmp/example2" }; +# +# files: +# "$(files)" +# create => "true", +# classes => if_ok("verbose_mode"), +# comment => "verbose_mode is defined because the fileinfo bundle restricts the report of the file info to verbose mode"; +# +# "/tmp/example3" +# create => "true", +# classes => if_ok("verbose_mode"), +# comment => "verbose_mode is defined because the fileinfo bundle restricts the report of the file info to verbose mode"; +# +# +# methods: +# "fileinfo" usebundle => fileinfo( @(files) ); +# "fileinfo" usebundle => fileinfo( "/tmp/example3" ); +# +# reports: +# "$(this.bundle): $(files): $(fileinfo.fields) = '$(fileinfo.stat[$(files)][$(fileinfo.fields)])'"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][size])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][gid])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][uid])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][ino])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][nlink])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][ctime])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][atime])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][mtime])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][mode])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][modeoct])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][permstr])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][permoct])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][type])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][devno])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][dev_minor])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][dev_major])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][basename])"; +# "$(this.bundle): $(fileinfo.stat[/tmp/example3][dirname])"; +# } +# ``` +{ + vars: + "fields" slist => splitstring("size,gid,uid,ino,nlink,ctime,atime,mtime,mode,modeoct,permstr,permoct,type,devno,dev_minor,dev_major,basename,dirname,linktarget,linktarget_shallow", ",", 999); + + "stat[$(f)][$(fields)]" string => filestat($(f), $(fields)); + + reports: + verbose_mode:: + "$(this.bundle): file $(f) has $(fields) = $(stat[$(f)][$(fields)])"; +} + +bundle agent logrotate(log_files, max_size, rotate_levels) +# @brief rotate specified "log_files" larger than "max_size". 
Keep +# "rotate_levels" versions of the files before overwriting the oldest one +# @depends rotate +# @depends bigger_than +# @param log_files single file or list of files to evaluate for rotation +# @param max_size minimum size in bytes that the file will grow to before being rotated +# @param rotate_levels number of rotations to keep before overwriting the oldest one +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# vars: +# "logdirs" slist => { "/var/log/syslog", "/var/log/maillog"}; +# +# methods: +# "logrotate" usebundle => logrotate( @(logdirs), "1M", "2" ); +# "logrotate" usebundle => logrotate( "/var/log/mylog, "1", "5" ); +# "logrotate" usebundle => logrotate( "/var/log/alog, "500k", "7" ); +# } +# ``` +{ + files: + "$(log_files)" + comment => "Rotate file if above specified size", + rename => rotate("$(rotate_levels)"), + file_select => bigger_than("$(max_size)"), + if => fileexists( $(log_files) ); +} + +bundle agent prunedir(dir, max_days) +# @brief delete plain files inside "dir" older than "max_days" (not recursively). +# @depends tidy +# @depends recurse +# @depends filetype_older_than +# @param dir directory to examine for files +# @param max_days maximum number of days old a files mtime is allowed to before deletion +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# vars: +# "dirs" slist => { "/tmp/logs", "/tmp/logs2" }; +# +# methods: +# "prunedir" usebundle => prunedir( @(dirs), "1" ); +# } +# ``` +{ + files: + "$(dir)" + comment => "Delete plain files inside directory older than max_days", + delete => tidy, + file_select => filetype_older_than("plain", "$(max_days)"), + depth_search => recurse("1"); +} + +bundle agent prunetree(dir, depth, max_days) +# @brief Delete files and directories inside "dir" up to "depth" older than "max_days". +# @depends delete tidy +# @depends depth_search recurse_with_base +# @depends file_select days_old +# @param dir directory to examine for files +# @param depth How many levels to descend +# @param max_days maximum number of days old a files mtime is allowed to before deletion +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# vars: +# "dirs" slist => { "/tmp/logs", "/tmp/logs2" }; +# +# methods: +# "prunetree" usebundle => prunetree( @(dirs), inf, "1" ); +# } +# ``` +{ + files: + "$(dir)" + comment => "Delete files and directories under $(dir) up to $(depth) + depth older than $(max_days)", + + delete => tidy, + file_select => days_old( $(max_days) ), + depth_search => recurse_with_base( $(depth) ); + +} + +bundle agent url_ping(host, method, port, uri) +# @brief ping HOST:PORT/URI using METHOD +# @param host the host name +# @param method the HTTP method (HEAD or GET) +# @param port the port number, e.g. 80 +# @param uri the URI, e.g. /path/to/resource +# +# This bundle will send a simple HTTP request and read 20 bytes back, +# then compare them to `200 OK.*` (ignoring leading spaces). +# +# If the data matches, the global class "url_ok_HOST" will be set, where +# HOST is the canonified host name, i.e. `canonify($(host))` +# +# **Example:** +# +# ```cf3 +# methods: +# "check" usebundle => url_ping("cfengine.com", "HEAD", "80", "/bill/was/here"); +# reports: +# url_ok_cfengine_com:: +# "CFEngine's web site is up"; +# url_not_ok_cfengine_com:: +# "CFEngine's web site *may* be down. 
Or you're offline."; +# ``` +{ + vars: + "url_check" string => readtcp($(host), + $(port), + "$(method) $(uri) HTTP/1.1$(const.r)$(const.n)Host:$(host)$(const.r)$(const.n)$(const.r)$(const.n)", + 20); + + "chost" string => canonify($(host)); + + classes: + "url_ok_$(chost)" + scope => "namespace", + expression => regcmp("[^\n]*200 OK.*\n.*", + $(url_check)); + + "url_not_ok_$(chost)" + scope => "namespace", + not => regcmp("[^\n]*200 OK.*\n.*", + $(url_check)); + + reports: + verbose_mode:: + "$(this.bundle): $(method) $(host):$(port)/$(uri) got 200 OK" + if => "url_ok_$(chost)"; + "$(this.bundle): $(method) $(host):$(port)/$(uri) did *not* get 200 OK" + if => "url_not_ok_$(chost)"; +} + +bundle agent cmerge(name, varlist) +# @brief bundle to merge many data containers into one +# @param name the variable name to create +# @param varlist a list of variable names (**MUST** be a list) +# +# The result will be in `cmerge.$(name)`. You can also use +# `cmerge.$(name)_str` for a string version of the merged containers. +# +# The name is variable so you can call this bundle for more than one +# merge. +# +# If you merge a key-value map into an array or vice versa, the map +# always wins. So this example will result in a key-value map even +# though `cmerge.$(name)` starts as an array. +# +# **Example:** +# +# ```cf3 +# bundle agent run +# { +# vars: +# # the "mymerge" tag is user-defined +# "a" data => parsejson('{ "mark": "b" }'), meta => { "mymerge" }; +# "b" data => parsejson('{ "volker": "h" }'), meta => { "mymerge" }; +# +# # you can list them explicitly: "default:run.a" through "default:run.d" +# "todo" slist => variablesmatching(".*", "mymerge"); +# +# # you can use cmerge.all_str instead of accessing the merged data directly +# "merged_str" string => format("%S", "cmerge.all"); +# +# methods: +# "go" usebundle => cmerge("all", @(todo)); # merge a, b into container cmerge.all +# +# reports: +# "merged = $(cmerge.all_str)"; # will print a map with keys "mark" and "volker" +# } +# ``` +{ + vars: + "$(name)" data => parsejson('[]'), policy => "free"; + "$(name)" data => mergedata($(name), $(varlist)), policy => "free"; # iterates! + "$(name)_str" string => format("%S", $(name)), policy => "free"; +} + +bundle agent collect_vars(name, tag, flatten) +# @brief bundle to collect tagged variables into a data container +# @param name the variable name to create inside `collect_vars` +# @param tag the tag regex string to match e.g. "beta,gamma=.*" +# @param flatten to flatten variable values, set to "any" or "true" or "1" +# +# The result will be a map in `collect.$(name)`. You can also use +# `cmerge.$(name)_str` for a string version of the merged containers +# (if it fits in a CFEngine string). +# +# The name is variable so you can call this bundle for more than one +# collection. +# +# Every found variable will be a key in the map, unless you specify +# `flatten`, in which case they'll be flattened into a top-level array +# of data. +# +# The `flatten` parameter can be "any" or "true" or "1" to be true. 
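+#
+# For instance, collecting the two tagged containers from the example below
+# without `flatten` yields a map keyed by the fully-qualified variable names,
+# while passing "true" merges their contents into one flat map (compare the
+# `merged` and `flattened` reports in the example).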
+# +# **Example:** +# +# ```cf3 +# body common control +# { +# inputs => { "$(sys.libdir)/stdlib.cf" }; +# } +# +# bundle agent main +# { +# vars: +# # the "mymerge" tag is user-defined +# "a" data => parsejson('{ "mark": "burgess" }'), meta => { "mymerge" }; +# "b" data => parsejson('{ "volker": "hilsheimer" }'), meta => { "mymerge" }; +# +# methods: +# # merge a, b into container collect_vars.all +# "go" usebundle => collect_vars("all", "mymerge", ""); +# # flatten a, b into container collect_vars.flattened +# "go_flatten" usebundle => collect_vars("flattened", "mymerge", "true"); +# +# reports: +# # merged = {"default:main.a":{"mark":"burgess"},"default:main.b":{"volker":"hilsheimer"}} +# "merged = $(collect_vars.all_str)"; +# # flattened = {"mark":"burgess","volker":"hilsheimer"} +# "flattened = $(collect_vars.flattened_str)"; +# } +# ``` +{ + classes: + "flatten" expression => strcmp($(flatten), "any"); + "flatten" expression => strcmp($(flatten), "1"); + "flatten" expression => strcmp($(flatten), "true"); + + vars: + "todo_$(name)" slist => variablesmatching(".*", $(tag)); + + !flatten:: + "$(name)" + data => parsejson('{}'), + policy => "free"; + + # this iterates! + "$(name)" + data => mergedata($(name), '{ "$(todo_$(name))": $(todo_$(name)) }'), + policy => "free"; + + flatten:: + "$(name)" + data => parsejson('[]'), + policy => "free"; + + # this iterates! + "$(name)" + data => mergedata($(name), "$(todo_$(name))"), + policy => "free"; + + any:: + "$(name)_str" + string => format("%S", $(name)), + policy => "free"; +} + +bundle agent run_ifdefined(namespace, mybundle) +# @brief bundle to maybe run another bundle dynamically +# @param namespace the namespace, usually `$(this.namespace)` +# @param mybundle the bundle to maybe run +# +# This bundle simply is a way to run another bundle only if it's defined. +# +# **Example:** +# +# ```cf3 +# bundle agent run +# { +# methods: +# # does nothing if bundle "runthis" is not defined +# "go" usebundle => run_ifdefined($(this.namespace), runthis); +# } +# ``` +{ + vars: + "bundlesfound" slist => bundlesmatching("^$(namespace):$(mybundle)$"); + "count" int => length(bundlesfound); + + methods: + "any" + usebundle => $(bundlesfound), + if => strcmp(1, $(count)); + + reports: + verbose_mode:: + "$(this.bundle): found matching bundles $(bundlesfound) for namespace '$(namespace)' and bundle '$(mybundle)'"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/cfe_internal.cf b/policies/lib/tree/20_cfe_basics/cfengine/cfe_internal.cf new file mode 100644 index 00000000000..c1fb685a726 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/cfe_internal.cf @@ -0,0 +1,84 @@ +bundle common cfe_internal_common +# @brief Select parts of the standard library that are dependant +{ + vars: + "inputs" slist => { "$(this.promise_dirname)/common.cf", + "$(this.promise_dirname)/commands.cf"}; +} + +body file control +# @brief Include necessary parts of stdlib +{ + inputs => { @(cfe_internal_common.inputs) }; +} + +bundle agent cfe_internal_cleanup_agent_reports +# @brief Cleanup accumulated agent reports +{ + vars: + any:: + # To avoid unnecessary work, we only findfiles if there is not already a + # variable defined. 
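+ # (Each slist below therefore carries `unless => isvariable( $(this.promiser) )`,
+ # so the comparatively expensive findfiles() calls only run on the first pass.)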
+ "diff_files" + slist => findfiles("$(sys.workdir)/state/diff/*.diff"), + unless => isvariable( $(this.promiser) ); + + "promise_log_files" + slist => findfiles("$(sys.workdir)/state/promise_log/*.csv"), + unless => isvariable( $(this.promiser) ); + + "previous_state_files" -> { "ENT-3161" } + slist => findfiles("$(sys.workdir)/state/previous_state/*.cache"), + unless => isvariable( $(this.promiser) ), + comment => "The files in this directory record the state at the end of + the previous agent run. They are used in concert with the + promise logs to derive delta reports."; + + "untracked_files" -> { "ENT-3161" } + slist => findfiles("$(sys.workdir)/state/untracked/*.idx"), + unless => isvariable( $(this.promiser) ), + comment => "The files in this directory are used in support of the + report_data_select filters. This is a record of all promises + that should not be collected"; + + "files" + slist => { @(diff_files), + @(promise_log_files), + @(previous_state_files), + @(untracked_files) }; + + "reports_size[$(files)]" + int => filesize("$(files)"), + unless => isvariable( $(this.promiser) ); + + "tmpmap" + slist => maparray("$(this.v)", reports_size); + + # We need to make sure that we have files before summing or errors are + # produced in the log + have_files:: + "total_report_size" real => sum(tmpmap); + + classes: + "cfe_internal_purge_reports" + expression => isgreaterthan("$(total_report_size)","$(def.max_client_history_size)"), + comment => "Determine if the current sum of reports exceeds the max desired"; + + "have_files" + expression => isgreaterthan(length(tmpmap), 0); + + files: + cfe_internal_purge_reports:: + "$(files)" + delete => tidy, + handle => "cf_cleanup_agent_reports_$(files)"; + + reports: + DEBUG|DEBUG_cfe_internal_cleanup_agent_reports:: + "DEBUG $(this.bundle): Size of '$(files)' = '$(reports_size[$(files)])'"; + "DEBUG $(this.bundle): Size of all reports = '$(total_report_size)'"; + "DEBUG $(this.bundle): Purge threshold = '$(def.max_client_history_size)'"; + "DEBUG $(this.bundle): Client history purge triggered" + if => "cfe_internal_purge_reports"; + +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/commands.cf b/policies/lib/tree/20_cfe_basics/cfengine/commands.cf new file mode 100644 index 00000000000..479cb0e40c9 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/commands.cf @@ -0,0 +1,309 @@ +# Commands bodies + +bundle agent daemonize(command) +# @brief Run a command as a daemon. I.e., fully detaches from Cfengine. +# @param command The command to run detached +# Note: There will be no output from the command reported by cf-agent. 
This +# bundle has no effect on windows +# +# **Example:** +# ```cf3 +# methods: +# "Launch Daemon" +# usebundle => daemonize("/bin/sleep 30"); +# ``` +{ + commands: + !windows:: + "exec 1>&-; exec 2>&-; $(command) &" + contain => in_shell; + + reports: + "windows.(DEBUG|DEBUG_$(this.bundle))":: + "DEBUG $(this.bundle): This bundle does not support Windows"; +} + +##------------------------------------------------------- +## contain +##------------------------------------------------------- + +body contain powershell +# @brief Run command with powershell (windows only) +# +# **Example:** +# +# ```cf3 +# commands: +# windows:: +# 'schtasks /DELETE /TN "$(_taskname)" /F' +# contain => powershell; +# ``` +# +# **History:** +# +# * Introduced in 3.17.0 +{ + useshell => "powershell"; +} + +body contain silent +# @brief suppress command output +{ + no_output => "true"; +} + +## + +body contain in_dir(dir) +# @brief run command after switching to directory "dir" +# @param dir directory to change into +# +# **Example:** +# +# ```cf3 +# commands: +# "/bin/pwd" +# contain => in_dir("/tmp"); +# ``` +{ + chdir => "$(dir)"; +} + +## + +body contain in_dir_shell(dir) +# @brief run command after switching to directory "dir" with full shell +# @param dir directory to change into +# +# **Example:** +# +# ```cf3 +# commands: +# "/bin/pwd | /bin/cat" +# contain => in_dir_shell("/tmp"); +# ``` +{ + chdir => "$(dir)"; + useshell => "true"; # canonical "useshell" but this is backwards-compatible +} + +## + +body contain silent_in_dir(dir) +# @brief run command after switching to directory and suppress output +# @param dir directory to change into +# +# **Example:** +# +# ```cf3 +# "/bin/pwd" +# contain => silent_in_dir("/tmp"); +# ``` +{ + chdir => "$(dir)"; + no_output => "true"; +} + +## + +body contain in_shell +# @brief run command in shell +# +# **Example:** +# +# ```cf3 +# commands: +# "/bin/pwd | /bin/cat" +# contain => in_shell; +# ``` +{ + useshell => "true"; # canonical "useshell" but this is backwards-compatible +} + +## + +body contain in_shell_bg +# @brief deprecated +# This bundle previously had an invalid background attribute that was caught by +# parser strictness enhancements. Backgrounding is handeled by the body action +# background attribute. 
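+#
+# **Example:**
+#
+# A sketch of the replacement pattern (command path and timings are
+# illustrative; the `bg` action body is defined in common.cf of this library):
+#
+# ```cf3
+# commands:
+# "/usr/local/bin/long_running_task"
+# contain => in_shell,
+# action => bg("60", "30");
+# ```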
+{ + useshell => "true"; # canonical "useshell" but this is backwards-compatible +} + +## + +body contain in_shell_and_silent +# @brief run command in shell and suppress output +# +# **Example:** +# +# ```cf3 +# commands: +# "/bin/pwd | /bin/cat" +# contain => in_shell_and_silent, +# comment => "Silently run command in shell"; +# ``` +{ + useshell => "true"; # canonical "useshell" but this is backwards-compatible + no_output => "true"; +} + +## + +body contain in_dir_shell_and_silent(dir) +# @brief run command in shell after switching to 'dir' and suppress output +# @param dir directory to change into +# +# **Example:** +# +# ```cf3 +# commands: +# "/bin/pwd | /bin/cat" +# contain => in_dir_shell_and_silent("/tmp"), +# comment => "Silently run command in shell"; +# ``` + +{ + useshell => "true"; # canonical "useshell" but this is backwards-compatible + no_output => "true"; + chdir => "$(dir)"; +} + +## + +body contain setuid(owner) +# @brief run command as specified user +# @param owner username or uid to run command as +# +# **Example:** +# +# ```cf3 +# commands: +# "/usr/bin/id" +# contain => setuid("apache"); +# "/usr/bin/id" +# contain => setuid("503"); +# ``` +{ + exec_owner => "$(owner)"; +} + +## + +body contain setuid_sh(owner) +# @brief run command as specified user in shell +# @param owner username or uid to run command as +# +# **Example:** +# +# ```cf3 +# commands: +# "/usr/bin/id | /bin/cat" +# contain => setuid("apache"); +# "/usr/bin/id | /bin/cat" +# contain => setuid("503"); +# ``` +{ + exec_owner => "$(owner)"; + useshell => "true"; # canonical "useshell" but this is backwards-compatible +} + +## + +body contain setuidgid_dir(owner,group,dir) +# @brief run command as specified owner and group in shell +# @param owner username or uid to run command as +# @param group groupname or gid to run command as +# @param dir directory to run command from +{ + exec_owner => "$(owner)"; + exec_group => "$(group)"; + chdir => "$(dir)"; +} + +## + +body contain setuidgid_sh(owner,group) +# @brief run command as specified owner and group in shell +# @param owner username or uid to run command as +# @param group groupname or gid to run command as +{ + exec_owner => "$(owner)"; + exec_group => "$(group)"; + useshell => "true"; # canonical "useshell" but this is backwards-compatible +} + +## + +body contain jail(owner,jail_root,dir) +# @brief run command as specified user in specified directory of jail +# @param owner username or uid to run command as +# @param jail_root path that will be the root directory for the process +# @param dir directory to change to before running command (must be within 'jail_root') +{ + exec_owner => "$(owner)"; + useshell => "true"; # canonical "useshell" but this is backwards-compatible + chdir => "$(dir)"; + chroot => "$(jail_root)"; +} + +## + +body contain setuid_umask(owner, umask) +# @brief run command as specified user with umask +# +# +# | Valid Values | Umask | Octal (files) | Symbolic (files) | Octal (dirs) | Symbolic (dirs) | +# |--------------|-------|-------|-------------|-------|-------------| +# | `0` | `000` | `666` | `(rw-rw-rw-)` | `777` | `(rwxrwxrwx)` | +# | `002` | `002` | `664` | `(rw-rw-r--)` | `775` | `(rwxrwxr-x)` | +# | `22`, `022` | `022` | `644` | `(rw-r--r--)` | `755` | `(rwxr-xr-x)` | +# | `27`, `027` | `027` | `640` | `(rw-r-----)` | `750` | `(rwxr-x---)` | +# | `77`, `077` | `077` | `600` | `(rw-------)` | `700` | `(rwx------)` | +# | `72`, `072` | `072` | `604` | `(rw----r--)` | `705` | `(rwx---r-x)` | +# +# @param owner 
username or uid to run command as +# @param umask controls permissions of created files and directories +# +# **Example:** +# +# ```cf3 +# commands: +# "/usr/bin/git pull" +# contain => setuid_umask("git", "022"); +# ``` +{ + exec_owner => "$(owner)"; + umask => "$(umask)"; +} + +body contain setuid_gid_umask(uid, gid, umask) +# @brief run command as specified user with umask +# +# +# | Valid Values | Umask | Octal (files) | Symbolic (files) | Octal (dirs) | Symbolic (dirs) | +# |--------------|-------|-------|-------------|-------|-------------| +# | `0` | `000` | `666` | `(rw-rw-rw-)` | `777` | `(rwxrwxrwx)` | +# | `002` | `002` | `664` | `(rw-rw-r--)` | `775` | `(rwxrwxr-x)` | +# | `22`, `022` | `022` | `644` | `(rw-r--r--)` | `755` | `(rwxr-xr-x)` | +# | `27`, `027` | `027` | `640` | `(rw-r-----)` | `750` | `(rwxr-x---)` | +# | `77`, `077` | `077` | `600` | `(rw-------)` | `700` | `(rwx------)` | +# | `72`, `072` | `072` | `604` | `(rw----r--)` | `705` | `(rwx---r-x)` | +# +# @param uid username or uid to run command as +# @param gid group name or gid to run command as +# @param umask controls permissions of created files and directories +# +# **Example:** +# +# ```cf3 +# commands: +# "/usr/bin/git pull" +# contain => setuid_gid_umask("git", "minions", "022"); +# ``` +{ + exec_owner => "$(uid)"; + exec_group => "$(uid)"; + umask => "$(umask)"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/common.cf b/policies/lib/tree/20_cfe_basics/cfengine/common.cf new file mode 100644 index 00000000000..f6740fcdc27 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/common.cf @@ -0,0 +1,458 @@ +# Common bodies + +##------------------------------------------------------- +## action +##------------------------------------------------------- + +body action if_elapsed(x) +# @brief Evaluate the promise every `x` minutes +# @param x The time in minutes between promise evaluations +{ + ifelapsed => "$(x)"; + expireafter => "$(x)"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action if_elapsed_day +# @brief Evalute the promise once every 24 hours +{ + ifelapsed => "1440"; # 60 x 24 + expireafter => "1400"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action measure_performance(x) +# @brief Measure repairs of the promiser every `x` minutes +# +# Repair-attempts are cancelled after `x` minutes. +# +# @param x The time in minutes between promise evaluations. +{ + measurement_class => "Detect changes in $(this.promiser)"; + ifelapsed => "$(x)"; + expireafter => "$(x)"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action measure_promise_time(identifier) +# @brief Performance will be measured and recorded under identifier +# +# @param identifier Measurement name. +{ + measurement_class => "$(identifier)"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action warn_only +# @brief Warn once an hour if the promise needs to be repaired +# +# The promise does not get repaired. 
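+#
+# **Example:**
+#
+# A minimal sketch (the path is illustrative; `mo` is defined in files.cf of
+# this library): warn about permission drift on a file without repairing it,
+# at most once an hour:
+#
+# ```cf3
+# files:
+# "/etc/motd"
+# perms => mo("644", "root"),
+# action => warn_only;
+# ```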
+{ + action_policy => "warn"; + ifelapsed => "60"; +} + +## + +body action bg(elapsed,expire) +# @brief Evaluate the promise in the background every `elapsed` minutes, for at most `expire` minutes +# @param elapsed The time in minutes between promise evaluations +# @param expire The time in minutes after which a repair-attempt gets cancelled +{ + ifelapsed => "$(elapsed)"; + expireafter => "$(expire)"; + background => "true"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action ifwin_bg +# @brief Evaluate the promise in the background when running on Windows +{ + windows:: + background => "true"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action immediate +# @brief Evaluate the promise at every `cf-agent` execution. +{ + ifelapsed => "0"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action policy(p) +# @brief Set the `action_policy` to `p` +# @param p The action policy +{ + action_policy => "$(p)"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action log_repaired(log,message) +# @brief Log `message` to a file `log`=[/file|stdout] +# @param log The log file for repaired messages +# @param message The log message +{ + log_string => "$(sys.date), $(message)"; + log_repaired => "$(log)"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +### + +body action log_verbose +# @brief Sets the `log_level` attribute to "verbose" +{ + log_level => "verbose"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +## + +body action sample_rate(x) +# @brief Evaluate the promise every `x` minutes, +# A repair-attempt is cancelled after 10 minutes +# @param x The time in minutes between promise evaluation +{ + ifelapsed => "$(x)"; + expireafter => "10"; + + dry_run|global_dry_run:: + action_policy => "warn"; +} + +##------------------------------------------------------- +## classes +##------------------------------------------------------- + +body classes if_repaired(x) +# @brief Define class `x` if the promise has been repaired +# @param x The name of the class +{ + promise_repaired => { "$(x)" }; +} + +## + +body classes if_else(yes,no) +# @brief Define the classes `yes` or `no` depending on promise outcome +# @param yes The name of the class that should be defined if the promise is kept or repaired +# @param no The name of the class that should be defined if the promise could not be repaired +{ + promise_kept => { "$(yes)" }; + promise_repaired => { "$(yes)" }; + repair_failed => { "$(no)" }; + repair_denied => { "$(no)" }; + repair_timeout => { "$(no)" }; +} + +## + +body classes cf2_if_else(yes,no) +# @brief Define the classes `yes` or `no`, depending on promise outcome +# +# A version of `if_else` that matches CFEngine2 semantics. Neither class is set if the promise +# does not require any repair. +# +# @param yes The name of the class that should be defined if the promise is repaired +# @param no The name of the class that should be defined if the promise could not be repaired +{ + promise_repaired => { "$(yes)" }; + repair_failed => { "$(no)" }; + repair_denied => { "$(no)" }; + repair_timeout => { "$(no)" }; +} + +## + +body classes if_notkept(x) +# @brief Define the class `x` if the promise is not kept and cannot be repaired. 
+# @param x The name of the class that should be defined +{ + repair_failed => { "$(x)" }; + repair_denied => { "$(x)" }; + repair_timeout => { "$(x)" }; +} + +## + +body classes if_ok(x) +# @brief Define the class `x` if the promise is kept or could be repaired +# @param x The name of the class that should be defined +{ + promise_repaired => { "$(x)" }; + promise_kept => { "$(x)" }; +} + +## + +body classes if_ok_cancel(x) +# @brief Cancel the class `x` if the promise is kept or repaired +# @param x The name of the class that should be cancelled +{ + cancel_repaired => { "$(x)" }; + cancel_kept => { "$(x)" }; +} + +## + +body classes cmd_repair(code,cl) +# @brief Define the class `cl` if an external command in a `commands`, `file` or `packages` +# promise is executed with return code `code` +# @param code The return codes that indicate a successful repair +# @param cl The name of the class that should be defined +# +# **See also:** `repaired_returncodes` +{ + repaired_returncodes => { "$(code)" }; + promise_repaired => { "$(cl)" }; +} + +body classes classes_generic(x) +# @brief Define `x` prefixed/suffixed with promise outcome +# @param x The unique part of the classes to be defined +{ + promise_repaired => { "promise_repaired_$(x)", "$(x)_repaired", "$(x)_ok", "$(x)_reached", "$(x)_not_kept" }; + repair_failed => { "repair_failed_$(x)", "$(x)_failed", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + repair_denied => { "repair_denied_$(x)", "$(x)_denied", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + repair_timeout => { "repair_timeout_$(x)", "$(x)_timeout", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + promise_kept => { "promise_kept_$(x)", "$(x)_kept", "$(x)_ok", "$(x)_not_repaired", "$(x)_reached" }; +} + +body classes results(scope, class_prefix) +# @brief Define classes prefixed with `class_prefix` and suffixed with +# appropriate outcomes: _kept, _repaired, _not_kept, _error, _failed, +# _denied, _timeout, _reached +# +# @param scope The scope in which the class should be defined (`bundle` or `namespace`) +# @param class_prefix The prefix for the classes defined +# +# This body can be applied to any promise and sets global +# (`namespace`) or local (`bundle`) classes based on its outcome. For +# instance, with `class_prefix` set to `abc`: +# +# * if the promise is to change a file's owner to `nick` and the file +# was already owned by `nick`, the classes `abc_reached` and +# `abc_kept` will be set. +# +# * if the promise is to change a file's owner to `nick` and the file +# was owned by `adam` and the change succeeded, the classes +# `abc_reached` and `abc_repaired` will be set. +# +# This body is a simpler, more consistent version of the body +# `scoped_classes_generic`, which see. The key difference is that +# fewer classes are defined, and only for outcomes that we can know. +# For example this body does not define "OK/not OK" outcome classes, +# since a promise can be both kept and failed at the same time. +# +# It's important to understand that promises may do multiple things, +# so a promise is not simply "OK" or "not OK." The best way to +# understand what will happen when your specific promises get this +# body is to test it in all the possible combinations. +# +# **Suffix Notes:** +# +# * `_reached` indicates the promise was tried. Any outcome will result +# in a class with this suffix being defined. 
+# +# * `_kept` indicates some aspect of the promise was kept +# +# * `_repaired` indicates some aspect of the promise was repaired +# +# * `_not_kept` indicates some aspect of the promise was not kept. +# error, failed, denied and timeout outcomes will result in a class +# with this suffix being defined +# +# * `_error` indicates the promise repair encountered an error +# +# * `_failed` indicates the promise failed +# +# * `_denied` indicates the promise repair was denied +# +# * `_timeout` indicates the promise timed out +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# commands: +# "/bin/true" +# classes => results("bundle", "my_class_prefix"); +# +# reports: +# my_class_prefix_kept:: +# "My promise was kept"; +# +# my_class_prefix_repaired:: +# "My promise was repaired"; +# } +# ``` +# +# **See also:** `scope`, `scoped_classes_generic`, `classes_generic` +{ + scope => "$(scope)"; + + promise_kept => { "$(class_prefix)_reached", + "$(class_prefix)_kept" }; + + promise_repaired => { "$(class_prefix)_reached", + "$(class_prefix)_repaired" }; + + repair_failed => { "$(class_prefix)_reached", + "$(class_prefix)_error", + "$(class_prefix)_not_kept", + "$(class_prefix)_failed" }; + + repair_denied => { "$(class_prefix)_reached", + "$(class_prefix)_error", + "$(class_prefix)_not_kept", + "$(class_prefix)_denied" }; + + repair_timeout => { "$(class_prefix)_reached", + "$(class_prefix)_error", + "$(class_prefix)_not_kept", + "$(class_prefix)_timeout" }; +} + +@if minimum_version(3.8) +body classes diff_results(scope, x) +# @brief Define `x` prefixed/suffixed with promise outcome with command return codes adjusted to align with `diff`. +# @param scope The scope the class should be defined with [bundle|namespace]. +# @param x The unique part of the classes to be defined. +# +# From man diff: +# Exit status is 0 if inputs are the same, 1 if +# different, 2 if trouble. +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# commands: +# "/usr/bin/diff" +# args => "/tmp/file1 /tmp/file2", +# classes => diff_results("diff"); +# +# vars: +# "c" slist => classesmatching("diff_.*"); +# +# reports: +# "Found class '$(c)'"; +# "Files Differ!" +# if => "diff_failed|diff_error|diff_not_kept"; +# "Files are the same." 
+# if => "diff_kept"; +# } +# ``` +{ + inherit_from => results( $(scope), $(x) ); + kept_returncodes => { "0" }; + failed_returncodes => { "1","2" }; +} +@endif + +body classes scoped_classes_generic(scope, x) +# @brief Define `x` prefixed/suffixed with promise outcome +# **See also:** `scope` +# +# @param scope The scope in which the class should be defined +# @param x The unique part of the classes to be defined +{ + scope => "$(scope)"; + promise_repaired => { "promise_repaired_$(x)", "$(x)_repaired", "$(x)_ok", "$(x)_reached" }; + repair_failed => { "repair_failed_$(x)", "$(x)_failed", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + repair_denied => { "repair_denied_$(x)", "$(x)_denied", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + repair_timeout => { "repair_timeout_$(x)", "$(x)_timeout", "$(x)_not_ok", "$(x)_error", "$(x)_not_kept", "$(x)_not_repaired", "$(x)_reached" }; + promise_kept => { "promise_kept_$(x)", "$(x)_kept", "$(x)_ok", "$(x)_not_repaired", "$(x)_reached" }; +} + +# special body for update/*.cf compatibility +body classes u_kept_successful_command +# @brief Set command to "kept" instead of "repaired" if it returns 0 +{ + kept_returncodes => { "0" }; +} + +##------------------------------------------------------- +## Persistent classes +##------------------------------------------------------- + +body classes state_repaired(x) +# @brief Define `x` for 10 minutes if the promise was repaired +# @param x The name of the class that should be defined +{ + promise_repaired => { "$(x)" }; + persist_time => "10"; + scope => "namespace"; +} + +## + +body classes enumerate(x) +# @brief Define `x` for 15 minutes if the promise is either kept or repaired +# This is used by commercial editions to count instances of jobs in a cluster +# @param x The unique part of the class that should be defined +# The class defined is prefixed with `mXC_` +{ + promise_repaired => { "mXC_$(x)" }; + promise_kept => { "mXC_$(x)" }; + persist_time => "15"; + scope => "namespace"; +} + +## + +body classes always(x) +# @brief Define class `x` no matter what the outcome of the promise is +# @param x The name of the class to be defined +{ + promise_repaired => { "$(x)" }; + promise_kept => { "$(x)" }; + repair_failed => { "$(x)" }; + repair_denied => { "$(x)" }; + repair_timeout => { "$(x)" }; +} + +body classes kept_successful_command +# @brief Set command to "kept" instead of "repaired" if it returns 0 +{ + kept_returncodes => { "0" }; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/databases.cf b/policies/lib/tree/20_cfe_basics/cfengine/databases.cf new file mode 100644 index 00000000000..1a7f1b4ada0 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/databases.cf @@ -0,0 +1,31 @@ +# Databases bodies + +body database_server local_mysql(username, password) +# @brief Defines a MySQL server running on localhost +# @param username The username for the server connection +# @param password The password for the server connection +# +# **See also:** `db_server_owner`, `db_server_password` +{ + db_server_owner => "$(username)"; + db_server_password => "$(password)"; + db_server_host => "localhost"; + db_server_type => "mysql"; + db_server_connection_db => "mysql"; +} + +## + +body database_server local_postgresql(username, password) +# @brief Defines a PostgreSQL server running on localhost +# @param username The username for the server connection +# @param password The password for the server connection +# +# 
**See also:** `db_server_owner`, `db_server_password` +{ + db_server_owner => "$(username)"; + db_server_password => "$(password)"; + db_server_host => "localhost"; + db_server_type => "postgres"; + db_server_connection_db => "postgres"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/edit_xml.cf b/policies/lib/tree/20_cfe_basics/cfengine/edit_xml.cf new file mode 100644 index 00000000000..80b024305b1 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/edit_xml.cf @@ -0,0 +1,85 @@ +# edit_xml bundles + +bundle edit_xml xml_insert_tree_nopath(treestring) +# @brief Insert XML tree with no path +# +# This `edit_xml` bundle inserts the given XML tree. Use with an +# empty XML document. +# +# @param treestring The XML tree, as a string +# +# **Example:** +# +# ```cf3 +# files: +# "/newfile" edit_xml => xml_insert_tree_nopath('y'); +# ``` +{ + insert_tree: + '$(treestring)'; +} + +bundle edit_xml xml_insert_tree(treestring, xpath) +# @brief Insert XML tree at the given XPath +# +# This `edit_xml` bundle inserts the given XML tree at a specific +# XPath. Uses `insert_tree`. +# +# @param treestring The XML tree, as a string +# @param xpath A valid XPath string +# +# **Example:** +# +# ```cf3 +# files: +# "/file.xml" edit_xml => xml_insert_tree('y', '/a/b/c'); +# ``` +{ + insert_tree: + '$(treestring)' select_xpath => "$(xpath)"; +} + +bundle edit_xml xml_set_value(value, xpath) +# @brief Sets or replaces a value in XML at the given XPath +# +# This `edit_xml` bundle sets or replaces the value at a specific +# XPath with the given value. Uses `set_text`. +# +# @param value The new value +# @param xpath A valid XPath string +# +# **Example:** +# +# ```cf3 +# files: +# "/file.xml" edit_xml => xml_set_value('hello', '/a/b/c'); +# ``` +{ + set_text: + "$(value)" + select_xpath => "$(xpath)"; +} + +bundle edit_xml xml_set_attribute(attr, value, xpath) +# @brief Sets or replaces an attribute in XML at the given XPath +# +# This `edit_xml` bundle sets or replaces an attribute at a specific +# XPath with the given value. Uses `set_attribute`. +# +# @param attr The attribute name +# @param value The new attribute value +# @param xpath A valid XPath string +# +# **Example:** +# +# ```cf3 +# files: +# "/file.xml" edit_xml => xml_set_attribute('parameter', 'ha', '/a/b/c'); +# ``` +{ + set_attribute: + "$(attr)" + attribute_value => "$(value)", + select_xpath => "$(xpath)"; + +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/event.cf b/policies/lib/tree/20_cfe_basics/cfengine/event.cf new file mode 100644 index 00000000000..b14632404f6 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/event.cf @@ -0,0 +1,158 @@ +# Event handling bundles and bodies + +@if minimum_version(3.9) + +bundle agent event_register(prefix, type, name, persistence, metadata) +# @brief Register a `event_$(prefix)_$(type)_$(name)` class with meta=`metadata` +# @param prefix The prefix (usually the issuing bundle name) +# @param type The event type +# @param name The event name +# @param persistence the time, in minutes, the class should persist on disk (unless collected) +# @param metadata A slist with the event metadata +# +# This bundle creates a class that conforms to the ad-hoc event protocol defined +# herein. 
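+#
+# For example (names are illustrative), calling
+# `event_register("web", "restart", "apache", "1440", @(meta))` defines the
+# namespace class `event_web_restart_apache`, persisted for one day (1440
+# minutes) and tagged with the entries of `meta`.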
+# +# **See also:** `event_handle` +{ + vars: + "e" string => "event_$(prefix)_$(type)_$(name)"; + "metadata_string" string => format("%S", metadata); + + classes: + "$(e)" scope => "namespace", + persistence => $(persistence), + meta => { "event", "prefix=$(prefix)", "type=$(type)", "name=$(name)", @(metadata) }; + + reports: + inform_mode:: + "$(this.bundle): creating event $(e) persistent for $(persistence) minutes with metadata $(metadata_string)"; +} + +body classes event_cancel_events(events) +# @brief Cancel any `events` +# @param events A slist of events to cancel +{ + cancel_notkept => { @(events) }; + cancel_kept => { @(events) }; + cancel_repaired => { @(events) }; +} + +bundle agent event_handle(prefix, type) +# @brief Handle all the events matching `prefix` and `type` through delegation +# @param prefix A prefix for the event, can be `.*` for all +# @param type A type for the event, can be `.*` for all +# +# This bundle looks for all the event classes matching `prefix` and `type`, then +# for all the bundles that have declared they can handle that prefix and type, +# and then passes the corresponding event classes to each bundle. +# +# **See also:** `event_register` +{ + vars: + "events_prefix" slist => classesmatching("event_.*", "prefix=$(prefix)"); + "events_type" slist => classesmatching("event_.*", "type=$(type)"); + "events" slist => intersection(events_prefix, events_type); + "events_string" string => format("%S", events); + + "handlers_prefix" slist => bundlesmatching("default:event_.*", format("event_prefix=(%s|ALL)", escape($(prefix)))); + "handlers_type" slist => bundlesmatching("default:event_.*", format("event_type=(%s|ALL)", escape($(type)))); + "handlers" slist => intersection(handlers_prefix, handlers_type); + "handlers_string" string => format("%S", handlers); + + methods: + "" usebundle => $(handlers)(@(events)), + classes => event_cancel_events(@(events)); + + reports: + inform_mode:: + "$(this.bundle): with prefix $(prefix) and type $(type) found events $(events_string)"; + "$(this.bundle): with prefix $(prefix) and type $(type) found handlers $(handlers_string)"; +} + +bundle agent event_debug_handler(events) +# @brief Debug all the events matching the meta tags `event_prefix` and `event_type` +# @param events The list of events, passed from `event_handle` +# +# This is an event handler that just prints out all the events it finds. To be +# registered as a handler, it must have the `meta tags` indicated below. +# +# **See also:** `event_handle`, `event_register` +{ + meta: + "tags" slist => { "event_handler", "event_prefix=.*", "event_type=.*" }; + + vars: + "events_string" string => format("%S", events); + "tags_string" string => format("%S", "$(this.bundle)_meta.tags"); + + reports: + inform_mode:: + "$(this.bundle): with tags $(tags_string) got events $(events_string)"; +} + +bundle agent event_install_handler(events) +# @brief Handle all the install events matching the meta tags `event_prefix` and `event_type` +# @param events The list of events, passed from `event_handle` +# +# This is an event handler that just prints out all the install events it finds. +# To be registered as a handler, it must have the `meta tags` indicated below. +# The subtlety in `event_prefix=ALL` is that we want to match only +# `event_handle(ANYTHING, "install")` but not `event_handle(".*", ANYTHING)`. If +# you're confused, just remember: debug handlers use `event_prefix=.*` and +# everything else uses `event_prefix=ALL`. 
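+#
+# Concretely, `event_handle` filters handler bundles with the tag regex
+# `event_prefix=($(prefix)|ALL)` (see its `bundlesmatching` calls above), so a
+# handler tagged `event_prefix=ALL` matches any concrete prefix, whereas one
+# tagged `event_prefix=.*` is only selected when the caller literally passes ".*".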
+# +# **See also:** `event_handle`, `event_register` +{ + meta: + "tags" slist => { "event_handler", "event_prefix=ALL", "event_type=install" }; + + vars: + "events_string" string => format("%S", events); + "tags_string" string => format("%S", "$(this.bundle)_meta.tags"); + + reports: + inform_mode:: + "$(this.bundle): with tags $(tags_string) got events $(events_string)"; +} + +bundle agent event_usage_example +# @brief Simple demo of event_register and event_handle usage +# +# You can run it like this: `cf-agent -K ./event.cf -b event_usage_example` +# Or for extra debugging, you can run it like this: `cf-agent -KI ./event.cf -b event_usage_example` +# +# **See also:** `event_handle`, `event_register` +# +# Expected output with `-KI`: +# +# ``` +# R: event_register: creating event event_event_usage_example_restart_apache persistent for 1440 minutes with metadata { } +# R: event_register: creating event event_event_usage_example_install_php persistent for 2880 minutes with metadata { } +# R: event_install_handler: with tags { "event_handler", "event_prefix=ALL", "event_type=install" } got events { "event_event_usage_example_install_php" } +# R: event_handle: with prefix event_usage_example and type install found events { "event_event_usage_example_install_php" } +# R: event_handle: with prefix event_usage_example and type install found handlers { "default:event_install_handler" } +# R: event_debug_handler: with tags { "event_handler", "event_prefix=.*", "event_type=.*" } got events { "event_event_usage_example_restart_apache", "event_event_usage_example_install_php" } +# R: event_handle: with prefix .* and type .* found events { "event_event_usage_example_restart_apache", "event_event_usage_example_install_php" } +# R: event_handle: with prefix .* and type .* found handlers { "default:event_debug_handler" } +# ``` +{ + vars: + "empty" slist => {}; + + methods: + # register a restart event named "apache" with persistence = 1 day + "" usebundle => event_register($(this.bundle), "restart", "apache", 1440, @(empty)); # 1 day + # register an install event named "php" with persistence = 2 days + "" usebundle => event_register($(this.bundle), "install", "php", 2880, @(empty)); + + # the following can be run immediately, or up to 2 days later to collect + # the install event above + "" usebundle => event_handle($(this.bundle), "install"); + + # the following can be run immediately, or up to 1 day later to collect + # the restart event above + "" usebundle => event_handle(".*", ".*"); +} + +@endif diff --git a/policies/lib/tree/20_cfe_basics/cfengine/examples.cf b/policies/lib/tree/20_cfe_basics/cfengine/examples.cf new file mode 100644 index 00000000000..43eaf86629c --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/examples.cf @@ -0,0 +1,73 @@ +bundle agent probabilistic_usebundle(probability, bundlename) +# @brief activate named bundle probabilistically +# @param probability probability that the named bundle will be activated during +# a given agent execution +# @param bundlename the bundle to activate based on the probability +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# methods: +# "Toss Coin" +# usebundle => probabilistic_usebundle("50", "heads"), +# comment => "Call bundle heads ~ 50% of the time"; +# +# "Trick Coin" +# usebundle => probabilistic_usebundle("75", "heads"), +# comment => "Call bundle heads ~ 75% of the time"; +# } +# ``` +{ + classes: + "fifty_fifty" + expression => strcmp("$(probability)", "50"), + comment => "We have to special case 50 because of 
the way dist classes + work you would always get 50 defined"; + "not_fifty_fifty" expression => "!fifty_fifty"; + "have_remainder" expression => isvariable("remainder"); + + fifty_fifty.have_remainder:: + "activate_bundle" + dist => { "$(probability)000", "$(remainder)"}; + + not_fifty_fifty.have_remainder:: + "activate_bundle" + dist => { "$(probability)", "$(remainder)"}; + + vars: + fifty_fifty:: + "remainder" + string => format("%d", eval("((100 - $(probability)) * 1000) +1", "math", "infix")); + + not_fifty_fifty:: + "remainder" + string => format("%d", eval("100 - $(probability)", "math", "infix")); + + methods: + fifty_fifty:: + "Activate bundle probabilistically" + handle => "probabilistic_usebundle_methods_special_case_fifty_fifty_activate_bundle", + usebundle => $(bundlename), + if => "activate_bundle_$(probability)000", + comment => "Activate $(bundlename) $(probability)%ish of the time"; + + not_fifty_fifty:: + "Activate bundle probabilistically" + handle => "probabilistic_usebundle_methods_activate_bundle", + usebundle => $(bundlename), + if => "activate_bundle_$(probability)", + comment => "Activate $(bundlename) $(probability)% of the time"; + + + reports: + DEBUG.fifty_fifty:: + "$(this.bundle) Special case for 50/50"; + + "$(this.bundle) activate_bundle_$(probability)000" + if => "activate_bundle_$(probability)000"; + + "$(this.bundle) activate_bundle_$(probability)001" + if => "activate_bundle_$(probability)001"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/feature.cf b/policies/lib/tree/20_cfe_basics/cfengine/feature.cf new file mode 100644 index 00000000000..4aa4744c631 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/feature.cf @@ -0,0 +1,87 @@ +bundle agent feature +# @brief Finds feature_set_X and feature_unset_X classes and sets/unsets X persistently +# +# Finds all classes named `feature_unset_X` and clear class X. +# +# Finds all classes named `feature_set_DURATION_X` and sets class X +# persistently for DURATION. DURATION can be any digits followed by +# `k`, `m`, or `g`. +# +# In inform mode (`-I`) it will report what it does. +# +# **Example:** +# Set class `xyz` for 10 minutes, class `qpr` for 100 minutes, and +# `ijk` for 90m minutes. Unset class `abc`. 
+# `cf-agent -I -f ./feature.cf -b feature -Dfeature_set_10_xyz,feature_set_100_qpr,feature_set_90m_ijk,feature_unset_abc` +{ + classes: + "parsed_$(on)" expression => regextract("feature_set_([0-9]+[kmgKMG]?)_(.*)", + $(on), + "extract_$(on)"); + + "parsed_$(off)" expression => regextract("feature_unset_(.*)", + $(off), + "extract_$(off)"); + + "$(extract_$(on)[2])" expression => "parsed_$(on)", + persistence => "$(extract_$(on)[1])"; + + vars: + "on" slist => classesmatching("feature_set_.*"); + "off" slist => classesmatching("feature_unset_.*"); + + "_$(off)" string => "off", classes => feature_cancel("$(extract_$(off)[1])"); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): $(on) => SET class '$(extract_$(on)[2]) for '$(extract_$(on)[1])'" + if => "parsed_$(on)"; + + "DEBUG $(this.bundle): $(off) => UNSET class '$(extract_$(off)[1])'" + if => "parsed_$(off)"; + + "DEBUG $(this.bundle): have $(extract_$(on)[2])" if => "$(extract_$(on)[2])"; + "DEBUG $(this.bundle): have no $(extract_$(on)[2])" if => "!$(extract_$(on)[2])"; + + "DEBUG $(this.bundle): have $(extract_$(off)[1])" if => "$(extract_$(off)[1])"; + "DEBUG $(this.bundle): have no $(extract_$(off)[1])" if => "!$(extract_$(off)[1])"; +} + +bundle agent feature_test +# @brief Finds feature_set_X and feature_unset_X classes and reports X +# +# Note that this bundle is intended to be used exactly like `feature` +# and just show what's defined or undefined. +# +# **Example:** +# Check classes `xyz`, `qpr`, `ijk`, and `abc`. +# `cf-agent -I -f ./feature.cf -b feature_test -Dfeature_set_10_xyz,feature_set_100_qpr,feature_set_90m_ijk,feature_unset_abc` +{ + classes: + "parsed_$(on)" expression => regextract("feature_set_([0-9]+[kmgKMG]?)_(.*)", + $(on), + "extract_$(on)"); + + "parsed_$(off)" expression => regextract("feature_unset_(.*)", + $(off), + "extract_$(off)"); + + vars: + "on" slist => classesmatching("feature_set_.*"); + "off" slist => classesmatching("feature_unset_.*"); + + reports: + "$(this.bundle): have $(extract_$(on)[2])" if => "$(extract_$(on)[2])"; + "$(this.bundle): have no $(extract_$(on)[2])" if => "!$(extract_$(on)[2])"; + + "$(this.bundle): have $(extract_$(off)[1])" if => "$(extract_$(off)[1])"; + "$(this.bundle): have no $(extract_$(off)[1])" if => "!$(extract_$(off)[1])"; +} + +body classes feature_cancel(x) +# @brief Undefine class `x` when promise is kept or repaired +# Used internally by bundle `feature` +{ + cancel_kept => { "$(x)" }; + cancel_repaired => { "$(x)" }; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/files.cf b/policies/lib/tree/20_cfe_basics/cfengine/files.cf new file mode 100644 index 00000000000..3a18c61a811 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/files.cf @@ -0,0 +1,2518 @@ +# Files bodies + +bundle common files_common +# @brief Enumerate policy files used by this policy file for inclusion to inputs +{ + vars: + "inputs" slist => { "$(this.promise_dirname)/common.cf" }; +} + +body file control +# @brief Include policy files used by this policy file as part of inputs +{ + inputs => { @(files_common.inputs) }; +} + +################################################### +# edit_line bundles +################################################### + +bundle edit_line insert_before_if_no_line(before, string) +# @brief Insert `string` before `before` if `string` is not found in the file +# @param before The regular expression matching the line which `string` will be +# inserted before +# @param string The string to be prepended +# +{ + insert_lines: + 
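+      # The promiser that follows is the literal line to add; the location body
+      # (before($(before))) places it above the line matching the `before` regex,
+      # and insert_lines only adds it when it is not already present in the file.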
"$(string)" + location => before($(before)), + comment => "Prepend a line to the file if it doesn't already exist"; +} + +## + +bundle edit_line insert_file(templatefile) +# @brief Reads the lines from `templatefile` and inserts those into the +# file being edited. +# @param templatefile The name of the file from which to import lines. +{ + insert_lines: + + "$(templatefile)" + comment => "Insert the template file into the file being edited", + insert_type => "file"; +} + +## + +bundle edit_line lines_present(lines) +# @brief Ensure `lines` are present in the file. Lines that do not exist are appended to the file +# @param lines List or string that should be present in the file +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# vars: +# "nameservers" slist => { "8.8.8.8", "8.8.4.4" }; +# +# files: +# "/etc/resolv.conf" edit_line => lines_present( @(nameservers) ); +# "/etc/ssh/sshd_config" edit_line => lines_present( "PermitRootLogin no" ); +# } +# ``` +{ + insert_lines: + + "$(lines)" + comment => "Append lines if they don't exist"; +} + +bundle edit_line insert_lines(lines) +# @brief Alias for `lines_present` +# @param lines List or string that should be present in the file +{ + insert_lines: + + "$(lines)" + comment => "Append lines if they don't exist"; +} + +bundle edit_line append_if_no_line(lines) +# @brief Alias for `lines_present` +# @param lines List or string that should be present in the file +{ + insert_lines: + + "$(lines)" + comment => "Append lines if they don't exist"; +} + +bundle edit_line append_if_no_lines(lines) +# @brief Alias for `lines_present` +# @param lines List or string that should be present in the file +{ + insert_lines: + + "$(lines)" + comment => "Append lines if they don't exist"; +} + +## + +bundle edit_line comment_lines_matching(regex,comment) +# @brief Comment lines in the file that matching an [anchored] regex +# @param regex Anchored regex that the entire line needs to match +# @param comment A string that is prepended to matching lines +{ + replace_patterns: + + "^($(regex))$" + + replace_with => comment("$(comment)"), + comment => "Search and replace string"; +} + +## + +bundle edit_line contains_literal_string(string) +# @brief Ensure the literal string is present in the promised file +# @description If the string is not found in the file it is inserted according +# to CFEngine defaults. +# @param string The string (potentially multiline) to ensure exists in the +# promised file. 
+{ + + insert_lines: + "$(string)" + insert_type => "preserve_block", + expand_scalars => "false", + whitespace_policy => { "exact_match" }; +} + +## + +bundle edit_line uncomment_lines_matching(regex,comment) +# @brief Uncomment lines of the file where the regex matches +# the entire text after the comment string +# @param regex The regex that lines need to match after `comment` +# @param comment The prefix of the line that is removed +{ + replace_patterns: + + "^$(comment)\s?($(regex))$" + + replace_with => uncomment, + comment => "Uncomment lines matching a regular expression"; +} + +## + +bundle edit_line comment_lines_containing(regex,comment) +# @brief Comment lines of the file matching a regex +# @param regex A regex that a part of the line needs to match +# @param comment A string that is prepended to matching lines +{ + replace_patterns: + + "^((?!$(comment)).*$(regex).*)$" + + replace_with => comment("$(comment)"), + comment => "Comment out lines in a file"; +} + +## + +bundle edit_line uncomment_lines_containing(regex,comment) +# @brief Uncomment lines of the file where the regex matches +# parts of the text after the comment string +# @param regex The regex that lines need to match after `comment` +# @param comment The prefix of the line that is removed +{ + replace_patterns: + + "^$(comment)\s?(.*$(regex).*)$" + + replace_with => uncomment, + comment => "Uncomment a line containing a fragment"; +} + +## + +bundle edit_line delete_lines_matching(regex) +# @brief Delete lines matching a regular expression +# @param regex The regular expression that the lines need to match +{ + delete_lines: + + "$(regex)" + + comment => "Delete lines matching regular expressions"; +} + +## + +bundle edit_line warn_lines_matching(regex) +# @brief Warn about lines matching a regular expression +# @param regex The regular expression that the lines need to match +{ + delete_lines: + + "$(regex)" + + comment => "Warn about lines in a file", + action => warn_only; +} + +## + +bundle edit_line prepend_if_no_line(string) +# @brief Prepend `string` if it doesn't exist in the file +# @param string The string to be prepended +# +# **See also:** [`insert_lines`][insert_lines] in +# [`edit_line`][bundle edit_line] +{ + insert_lines: + "$(string)" + location => start, + comment => "Prepend a line to the file if it doesn't already exist"; +} + +## + +bundle edit_line replace_line_end(start,end) +# @brief Give lines starting with `start` the ending given in `end` +# +# Whitespaces will be left unmodified. For example, +# `replace_line_end("ftp", "2121/tcp")` would replace +# +# `"ftp 21/tcp"` +# +# with +# +# `"ftp 2121/tcp"` +# +# @param start The string lines have to start with +# @param end The string lines should end with +{ + field_edits: + + "\s*$(start)\s.*" + comment => "Replace lines with $(this.start) and $(this.end)", + edit_field => line("(^|\s)$(start)\s*", "2", "$(end)","set"); +} + +bundle edit_line replace_uncommented_substrings( _comment, _find, _replace ) +# @brief Replace all occurrences of `_find` with `_replace` on lines that do not follow a `_comment` +# @param _comment Sequence of characters, each indicating the start of a comment. 
+# @param _find String matching substring to replace +# @param _replace String to substitute `_find` with +# +# **Example:** +# +# ```cf3 +# bundle agent example_replace_uncommented_substrings +# { +# files: +# "/tmp/file.txt" +# edit_line => replace_uncommented_substrings( "#", "ME", "YOU"); +# } +# ``` +# +# **Notes:** +# +# * Only single character comments are supported as `_comment` is used in the PCRE character group (`[^...]`). +# * `-` in `_comment` is interpreted as a range unless it's used as the first or last character. For example, setting `_comment` to `0-9` means any digit starts a comment. +# +# **History:** +# +# * Introduced 3.17.0, 3.15.3 +{ + vars: + "_reg_match_uncommented_lines_containing_find" + string => "^([^$(_comment)]*)\Q$(_find)\E(.*$)"; + + replace_patterns: + "$(_reg_match_uncommented_lines_containing_find)" + replace_with => text_between_match1_and_match2( $(_replace) ); +} + +## + +bundle edit_line append_to_line_end(start,end) +# @brief Append `end` to any lines beginning with `start` +# +# `end` will be appended to all lines starting with `start` and not +# already ending with `end`. Whitespaces will be left unmodified. +# +# For example, `append_to_line_end("kernel", "vga=791")` would replace +# `kernel /boot/vmlinuz root=/dev/sda7` +# +# with +# +# `kernel /boot/vmlinuz root=/dev/sda7 vga=791` +# +# **WARNING**: Be careful not to have multiple promises matching the same line, which would result in the line growing indefinitely. +# +# @param start pattern to match lines of interest +# @param end string to append to matched lines +# +# **Example:** +# +# ```cf3 +# files: +# "/tmp/boot-options" edit_line => append_to_line_end("kernel", "vga=791"); +# ``` +# +{ + field_edits: + + "\s*$(start)\s.*" + comment => "Append lines with $(this.start) and $(this.end)", + edit_field => line("(^|\s)$(start)\s*", "2", "$(end)","append"); +} + +## + +bundle edit_line regex_replace(find,replace) +# @brief Find exactly a regular expression and replace exactly the match with a string. +# You can think of this like a PCRE powered sed. +# @param find The regular expression +# @param replace The replacement string +{ + replace_patterns: + + "$(find)" + replace_with => value("$(replace)"), + comment => "Search and replace string"; +} + +## + +bundle edit_line resolvconf(search,list) +# @brief Adds search domains and name servers to the system +# resolver configuration. +# +# Use this bundle to modify `resolv.conf`. Existing entries for +# `search` and `nameserver` are replaced. +# +# @param search The search domains with space +# @param list An slist of nameserver addresses +{ + delete_lines: + + "search.*" comment => "Reset search lines from resolver"; + "nameserver.*" comment => "Reset nameservers in resolver"; + + insert_lines: + + "search $(search)" comment => "Add search domains to resolver"; + "nameserver $(list)" comment => "Add name servers to resolver"; +} + +## + +bundle edit_line resolvconf_o(search,list,options) +# @brief Adds search domains, name servers and options to the system +# resolver configuration. +# +# Use this bundle to modify `resolv.conf`. Existing entries for +# `search`, `nameserver` and `options` are replaced. 
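+#
+# For example, a sketch like the following (nameserver addresses, domains and
+# options are illustrative) would rewrite `/etc/resolv.conf`:
+#
+# ```cf3
+# bundle agent example
+# {
+#   vars:
+#     "nameservers" slist => { "10.0.0.53", "10.0.0.54" };
+#     "opts"        slist => { "timeout:2", "rotate" };
+#
+#   files:
+#     "/etc/resolv.conf"
+#       edit_line => resolvconf_o( "example.com corp.example.com",
+#                                  @(nameservers), @(opts) );
+# }
+# ```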
+# +# @param search The search domains with space +# @param list An slist of nameserver addresses +# @param options is an slist of variables to modify the resolver + +{ + delete_lines: + + "search.*" comment => "Reset search lines from resolver"; + "nameserver.*" comment => "Reset nameservers in resolver"; + "options.*" comment => "Reset options in resolver"; + + insert_lines: + + "search $(search)" comment => "Add search domains to resolver"; + "nameserver $(list)" comment => "Add name servers to resolver"; + "options $(options)" comment => "Add options to resolver"; +} + +## + +bundle edit_line manage_variable_values_ini(tab, sectionName) +# @brief Sets the RHS of configuration items in the file of the form +# `LHS=RHS` +# +# If the line is commented out with `#`, it gets uncommented first. +# Adds a new line if none exists. +# Removes any variable value pairs not defined for the ini section. +# +# @param tab An associative array containing `tab[sectionName][LHS]="RHS"`. +# The value is not changed when the `RHS` is "dontchange" +# @param sectionName The section in the file within which values should be +# modified +# +# **See also:** `set_variable_values_ini()` +{ + vars: + "index" slist => getindices("$(tab)[$(sectionName)]"); + + # Be careful if the index string contains funny chars + "cindex[$(index)]" string => canonify("$(index)"); + + classes: + "edit_$(cindex[$(index)])" not => strcmp("$($(tab)[$(sectionName)][$(index)])","dontchange"), + comment => "Create conditions to make changes"; + + field_edits: + + # If the line is there, but commented out, first uncomment it + "#+\s*$(index)\s*=.*" + select_region => INI_section(escape("$(manage_variable_values_ini.sectionName)")), + edit_field => col("=","1","$(index)","set"), + ifvarclass => "edit_$(cindex[$(index)])"; + + # match a line starting like the key something + "$(index)\s*=.*" + edit_field => col("=","2","$($(tab)[$(sectionName)][$(index)])","set"), + select_region => INI_section(escape("$(manage_variable_values_ini.sectionName)")), + classes => results("bundle", "manage_variable_values_ini_not_$(cindex[$(index)])"), + ifvarclass => "edit_$(cindex[$(index)])"; + + delete_lines: + ".*" + select_region => INI_section(escape("$(manage_variable_values_ini.sectionName)")), + comment => "Remove all entries in the region so there are no extra entries"; + + insert_lines: + "[$(sectionName)]" + location => start, + comment => "Insert lines"; + + "$(index)=$($(tab)[$(sectionName)][$(index)])" + select_region => INI_section(escape("$(manage_variable_values_ini.sectionName)")), + ifvarclass => "!(manage_variable_values_ini_not_$(cindex[$(index)])_kept|manage_variable_values_ini_not_$(cindex[$(index)])_repaired).edit_$(cindex[$(index)])"; + +} + +## + +bundle edit_line set_variable_values_ini(tab, sectionName) +# @brief Sets the RHS of configuration items in the file of the form +# `LHS=RHS` +# +# If the line is commented out with `#`, it gets uncommented first. +# Adds a new line if none exists. +# +# It does support = in value +# +# @param tab An associative array containing `tab[sectionName][LHS]="RHS"`. 
+# The value is not changed when the `RHS` is "dontchange" +# @param sectionName The section in the file within which values should be +# modified +# +# **See also:** `manage_variable_values_ini()` +{ + vars: + "index" slist => getindices("$(tab)[$(sectionName)]"); + + # Be careful if the index string contains funny chars + "cindex[$(index)]" string => canonify("$(index)"); + + # Escape the value (had a problem with special characters and regex's) + "ctab[$(index)]" string => escape("$($(tab)[$(sectionName)][$(index)])"); + + + classes: + "edit_$(cindex[$(index)])" not => strcmp("$($(tab)[$(sectionName)][$(index)])","dontchange"), + comment => "Create conditions to make changes"; + "pass2" expression => "pass1"; + "pass1" expression => "any"; + + insert_lines: + "[$(sectionName)]" + location => start, + comment => "Insert lines"; + + pass2:: + "$(index)=$($(tab)[$(sectionName)][$(index)])" + select_region => INI_section(escape("$(set_variable_values_ini.sectionName)")), + ifvarclass => "edit_$(cindex[$(index)])"; + + + replace_patterns: + # If the line is commented out, uncomment and replace with + # the correct value + "^\s*#\s*($(index)\s*=\s*.*)$" + comment => "If we find a commented entry we uncomment it", + select_region => INI_section(escape("$(set_variable_values_ini.sectionName)")), + replace_with => value("$(index)=$($(tab)[$(sectionName)][$(index)])"), + ifvarclass => "edit_$(cindex[$(index)]).!set_variable_values_ini_not_$(cindex[$(index)])_reached"; + + # If the line is there with the wrong value, replace with + # the correct value + "^\s*($(index)\s*=\s*(?!$(ctab[$(index)])$).*)$" + comment => "Correct the value $(index)", + replace_with => value("$(index)=$($(tab)[$(sectionName)][$(index)])"), + select_region => INI_section(escape("$(set_variable_values_ini.sectionName)")), + classes => results("bundle", "set_variable_values_ini_not_$(cindex[$(index)])"), + ifvarclass => "edit_$(cindex[$(index)])"; + +} + +bundle edit_line insert_ini_section(name, config) +# @brief Inserts a INI section with content +# +# ``` +# # given an array "barray" +# files: +# "myfile.ini" edit_line => insert_ini_section("foo", "barray"); +# ``` +# +# Inserts a section in an INI file with the given configuration +# key-values from the array `config`. +# +# @param name the name of the INI section +# @param config The fully-qualified name of an associative array containing `v[LHS]="rhs"` +{ + vars: + # TODO: refactor once 3.7.x is EOL + "indices" slist => getindices($(config)); + "k" slist => sort("indices", lex); + + insert_lines: + "[$(name)]" + location => start, + comment => "Insert an ini section with values if not present"; + + "$(k)=$($(config)[$(k)])" + location => after("[$(name)]"); +} + + +bundle edit_line set_quoted_values(v) +# @brief Sets the RHS of variables in shell-like files of the form: +# +# ``` +# LHS="RHS" +# ``` +# +# Adds a new line if no LHS exists, and replaces RHS values if one does exist. +# If the line is commented out with #, it gets uncommented first. 
+# +# @param v The fully-qualified name of an associative array containing `v[LHS]="rhs"` +# +# **Example:** +# +# ```cf3 +# vars: +# "stuff[lhs-1]" string => "rhs1"; +# "stuff[lhs-2]" string => "rhs2"; +# +# files: +# "myfile" +# edit_line => set_quoted_values(stuff) +# ``` +# +# **See also:** `set_variable_values()` +{ + meta: + "tags" + slist => + { + "deprecated=3.6.0", + "deprecation-reason=Generic reimplementation", + "replaced-by=set_line_based" + }; + + vars: + "index" slist => getindices("$(v)"); + # Be careful if the index string contains funny chars + + "cindex[$(index)]" string => canonify("$(index)"); + + field_edits: + # If the line is there, but commented out, first uncomment it + "#+\s*$(index)\s*=.*" + edit_field => col("=","1","$(index)","set"); + + # match a line starting like the key = something + "\s*$(index)\s*=.*" + edit_field => col("=","2",'"$($(v)[$(index)])"',"set"), + classes => results("bundle", "$(cindex[$(index)])_in_file"), + comment => "Match a line starting like key = something"; + + insert_lines: + '$(index)="$($(v)[$(index)])"' + comment => "Insert a variable definition", + if => "!($(cindex[$(index)])_in_file_kept|$(cindex[$(index)])_in_file_repaired)"; +} + +## + +bundle edit_line set_variable_values(v) +# @brief Sets the RHS of variables in files of the form: +# +# ``` +# LHS=RHS +# ``` +# +# Adds a new line if no LHS exists, and replaces RHS values if one does exist. +# If the line is commented out with #, it gets uncommented first. +# +# @param v The fully-qualified name of an associative array containing `v[LHS]="rhs"` +# +# **Example:** +# +# ```cf3 +# vars: +# "stuff[lhs-1]" string => "rhs1"; +# "stuff[lhs-2]" string => "rhs2"; +# +# files: +# "myfile" +# edit_line => set_variable_values(stuff) +# ``` +# +# **See also:** `set_quoted_values()` +{ + meta: + "tags" + slist => + { + "deprecated=3.6.0", + "deprecation-reason=Generic reimplementation", + "replaced-by=set_line_based" + }; + + vars: + + "index" slist => getindices("$(v)"); + + # Be careful if the index string contains funny chars + + "cindex[$(index)]" string => canonify("$(index)"); + "cv" string => canonify("$(v)"); + + field_edits: + + # match a line starting like the key = something + + "\s*$(index)\s*=.*" + + edit_field => col("\s*$(index)\s*=","2","$($(v)[$(index)])","set"), + classes => results("bundle", "$(cv)_$(cindex[$(index)])_in_file"), + comment => "Match a line starting like key = something"; + + insert_lines: + + "$(index)=$($(v)[$(index)])" + + comment => "Insert a variable definition", + if => "!($(cv)_$(cindex[$(index)])_in_file_kept|$(cv)_$(cindex[$(index)])_in_file_repaired)"; +} + +bundle edit_line set_config_values(v) +# @brief Sets the RHS of configuration items in the file of the form: +# +# ``` +# LHS RHS +# ``` +# +# If the line is commented out with `#`, it gets uncommented first. +# +# Adds a new line if none exists. +# +# @param v The fully-qualified name of an associative array containing `v[LHS]="rhs"` +{ + meta: + "tags" + slist => + { + "deprecated=3.6.0", + "deprecation-reason=Generic reimplementation", + "replaced-by=set_line_based" + }; + + vars: + "index" slist => getindices("$(v)"); + + # Be careful if the index string contains funny chars + "cindex[$(index)]" string => canonify("$(index)"); + + # Escape the value (had a problem with special characters and regex's) + "ev[$(index)]" string => escape("$($(v)[$(index)])"); + + # Do we have more than one line commented out? 
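+      # (Used by the classes and insert_lines below: a single commented-out
+      # occurrence gets uncommented in place, while multiple occurrences are
+      # left alone and the new line is inserted next to them instead.)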
+ "index_comment_matches_$(cindex[$(index)])" + int => countlinesmatching("^\s*#\s*($(index)\s+.*|$(index))$","$(edit.filename)"); + + + classes: + # Check to see if this line exists + "line_exists_$(cindex[$(index)])" + expression => regline("^\s*($(index)\s.*|$(index))$","$(edit.filename)"), + scope => "bundle"; + + # if there's more than one comment, just add new (don't know who to use) + "multiple_comments_$(cindex[$(index)])" + expression => isgreaterthan("$(index_comment_matches_$(cindex[$(index)]))","1"), + scope => "bundle"; + + replace_patterns: + # If the line is commented out, uncomment and replace with + # the correct value + "^\s*#\s*($(index)\s+.*|$(index))$" + comment => "If we find a single commented entry we can uncomment it to + keep the settings near any inline documentation. If there + are multiple comments, then we don't try to replace them and + instead will later append the new value after the first + commented occurrence of $(index).", + handle => "set_config_values_replace_commented_line", + replace_with => value("$(index) $($(v)[$(index)])"), + if => "!line_exists_$(cindex[$(index)]).!replace_attempted_$(cindex[$(index)])_reached.!multiple_comments_$(cindex[$(index)])", + classes => results("bundle", "uncommented_$(cindex[$(index)])"); + + # If the line is there with the wrong value, replace with + # the correct value + "^\s*($(index)\s+(?!$(ev[$(index)])$).*|$(index))$" + comment => "Correct the value $(index)", + replace_with => value("$(index) $($(v)[$(index)])"), + classes => results("bundle", "replace_attempted_$(cindex[$(index)])"); + + insert_lines: + # If the line doesn't exist, or there is more than one occurrence + # of the LHS commented out, insert a new line and try to place it + # after the commented LHS (keep new line with old comments) + "$(index) $($(v)[$(index)])" + comment => "Insert the value, marker exists $(index)", + location => after("^\s*#\s*($(index)\s+.*|$(index))$"), + if => "replace_attempted_$(cindex[$(index)])_reached.multiple_comments_$(cindex[$(index)])"; + + # If the line doesn't exist and there are no occurrences + # of the LHS commented out, insert a new line at the eof + "$(index) $($(v)[$(index)])" + comment => "Insert the value, marker doesn't exist $(index)", + if => "replace_attempted_$(cindex[$(index)])_reached.!multiple_comments_$(cindex[$(index)])"; + +} + +bundle edit_line set_line_based(v, sep, bp, kp, cp) +# @brief Sets the RHS of configuration items in the file of the form: +# +# ``` +# LHS$(sep)RHS +# ``` +# +# Example usage for `x=y` lines (e.g. rsyncd.conf): +# +# ```cf3 +# "myfile" +# edit_line => set_line_based("test.config", "=", "\s*=\s*", ".*", "\s*#\s*"); +# ``` +# +# Example usage for `x y` lines (e.g. sshd_config): +# +# ```cf3 +# "myfile" +# edit_line => set_line_based("test.config", " ", "\s+", ".*", "\s*#\s*"); +# ``` +# +# If the line is commented out with `$(cp)`, it gets uncommented first. +# +# Adds a new line if none exists or if more than one commented-out +# possible matches exist. +# +# +# **Note:** If the data structure being used for the first parameter is in the current bundle, you can use `$(this.bundle).variable`. +# +# Originally `set_config_values` by Ed King. +# +# @param v The fully-qualified name (`bundlename.variable`) of an associative array containing `v[LHS]="rhs"` +# @param sep The separator to insert, e.g. ` ` for space-separated +# @param bp The key-value separation regex, e.g. 
`\s+` for space-separated +# @param kp The keys to select from v, use `.*` for all +# @param cp The comment pattern from line-start, e.g. `\s*#\s*` +{ + meta: + "tags" + slist => + { + "replaces=set_config_values", + "replaces=set_config_values_matching", + "replaces=set_variable_values", + "replaces=set_quoted_values", + "replaces=maintain_key_values", + }; + + vars: + "vkeys" slist => getindices("$(v)"); + "i" slist => grep($(kp), vkeys); + + # Be careful if the index string contains funny chars + "ci[$(i)]" string => canonify("$(i)"); + + # Escape the value (had a problem with special characters and regex's) + "ev[$(i)]" string => escape("$($(v)[$(i)])"); + + # Do we have more than one line commented out? + "comment_matches_$(ci[$(i)])" + int => countlinesmatching("^$(cp)($(i)$(bp).*|$(i))$", + $(edit.filename)); + + + classes: + # 3.21.0 and greater know about a file being emptied before editing and + # skip this check since it does not make sense. +@if minimum_version(3.21) + # Check to see if this line exists + "exists_$(ci[$(i)])" + expression => regline("^\s*($(i)$(bp).*|$(i))$", + $(edit.filename)), + unless => strcmp( "true", $(edit.empty_before_use) ); +@endif + +@if minimum_version(3.18) + !(cfengine_3_18_0|cfengine_3_18_1|cfengine_3_18_2):: + "exists_$(ci[$(i)])" + expression => regline("^\s*($(i)$(bp).*|$(i))$", + $(edit.filename)), + unless => strcmp( "true", $(edit.empty_before_use) ); +@endif + + (cfengine_3_15|cfengine_3_16|cfengine_3_17|cfengine_3_18_0|cfengine_3_18_1|cfengine_3_18_2|cfengine_3_19|cfengine_3_20):: + # Version 3.15.0 does not know about the before_version macro, so we keep the same behavior + # TODO Remove after 3.21 is no longer supported. (3.15.0 was supported when 3.21 was released) + # Check to see if this line exists + "exists_$(ci[$(i)])" + expression => regline("^\s*($(i)$(bp).*|$(i))$", + $(edit.filename)); + + any:: + + # if there's more than one comment, just add new (don't know who to use) + "multiple_comments_$(ci[$(i)])" + expression => isgreaterthan("$(comment_matches_$(ci[$(i)]))", + "1"); + + + replace_patterns: + # If the line is commented out, uncomment and replace with + # the correct value + "^$(cp)($(i)$(bp).*|$(i))$" + comment => "Uncommented the value '$(i)'", + replace_with => value("$(i)$(sep)$($(v)[$(i)])"), + if => "!exists_$(ci[$(i)]).!replace_attempted_$(ci[$(i)])_reached.!multiple_comments_$(ci[$(i)])", + classes => results("bundle", "uncommented_$(ci[$(i)])"); + + # If the line is there with the wrong value, replace with + # the correct value + "^\s*($(i)$(bp)(?!$(ev[$(i)])$).*|$(i))$" + comment => "Correct the value '$(i)'", + replace_with => value("$(i)$(sep)$($(v)[$(i)])"), + classes => results("bundle", "replace_attempted_$(ci[$(i)])"); + + insert_lines: + # If the line doesn't exist, or there is more than one occurrence + # of the LHS commented out, insert a new line and try to place it + # after the commented LHS (keep new line with old comments) + "$(i)$(sep)$($(v)[$(i)])" + comment => "Insert the value, marker '$(i)' exists", + location => after("^$(cp)($(i)$(bp).*|$(i))$"), + if => "replace_attempted_$(ci[$(i)])_reached.multiple_comments_$(ci[$(i)])"; + + # If the line doesn't exist and there are no occurrences + # of the LHS commented out, insert a new line at the eof + "$(i)$(sep)$($(v)[$(i)])" + comment => "Insert the value, marker '$(i)' doesn't exist", + if => "replace_attempted_$(ci[$(i)])_reached.!multiple_comments_$(ci[$(i)]).!exists_$(ci[$(i)])"; + + reports: + verbose_mode|EXTRA:: + "$(this.bundle): Line 
for '$(i)' exists" if => "exists_$(ci[$(i)])"; + "$(this.bundle): Line for '$(i)' does not exist" if => "!exists_$(ci[$(i)])"; +} + +bundle edit_line set_config_values_matching(v,pat) +# @brief Sets the RHS of configuration items in the file of the form +# +# ``` +# LHS RHS +# ``` +# +# If the line is commented out with `#`, it gets uncommented first. +# Adds a new line if none exists. +# +# @param v the fully-qualified name of an associative array containing v[LHS]="rhs" +# @param pat Only elements of `v` that match the regex `pat` are use +{ + meta: + "tags" + slist => + { + "deprecated=3.6.0", + "deprecation-reason=Generic reimplementation", + "replaced-by=set_line_based" + }; + + vars: + "allparams" slist => getindices("$(v)"); + "index" slist => grep("$(pat)", "allparams"); + + # Be careful if the index string contains funny chars + "cindex[$(index)]" string => canonify("$(index)"); + + replace_patterns: + # If the line is there, maybe commented out, uncomment and replace with + # the correct value + "^\s*($(index)\s+(?!$($(v)[$(index)])).*|# ?$(index)\s+.*)$" + comment => "Correct the value", + replace_with => value("$(index) $($(v)[$(index)])"), + classes => results("bundle", "replace_attempted_$(cindex[$(index)])"); + + insert_lines: + "$(index) $($(v)[$(index)])" + if => "replace_attempted_$(cindex[$(index)])_reached"; + +} + +## + +bundle edit_line maintain_key_values(v,sep) +# @brief Sets the RHS of configuration items with an giving separator +# +# Contributed by David Lee +{ + meta: + "tags" + slist => + { + "deprecated=3.6.0", + "deprecation-reason=Generic reimplementation", + "replaced-by=set_line_based" + }; + + vars: + "index" slist => getindices("$(v)"); + # Be careful if the index string contains funny chars + "cindex[$(index)]" string => canonify("$(index)"); + # Matching pattern for line (basically key-and-separator) + "keypat[$(index)]" string => "\s*$(index)\s*$(sep)\s*"; + + # Values may contain regexps. Escape them for replace_pattern matching. + "ve[$(index)]" string => escape("$($(v)[$(index)])"); + + classes: + "$(cindex[$(index)])_key_in_file" + comment => "Dynamic Class created if patterns matching", + expression => regline("^$(keypat[$(index)]).*", "$(edit.filename)"); + + replace_patterns: + # For convergence need to use negative lookahead on value: + # "key sep (?!value).*" + "^($(keypat[$(index)]))(?!$(ve[$(index)])$).*" + comment => "Replace definition of $(index)", + replace_with => value("$(match.1)$($(v)[$(index)])"); + + insert_lines: + "$(index)$(sep)$($(v)[$(index)])" + comment => "Insert definition of $(index)", + if => "!$(cindex[$(index)])_key_in_file"; +} + +## + +bundle edit_line append_users_starting(v) +# @brief For adding to `/etc/passwd` or `etc/shadow` +# @param v An array `v[username] string => "line..."` +# +# **Note:** To manage local users with CFEngine 3.6 and later, +# consider making `users` promises instead of modifying system files. +{ + vars: + + "index" slist => getindices("$(v)"); + + classes: + + "add_$(index)" not => userexists("$(index)"), + comment => "Class created if user does not exist"; + + insert_lines: + + "$($(v)[$(index)])" + + comment => "Append users into a password file format", + if => "add_$(index)"; +} + +## + +bundle edit_line append_groups_starting(v) +# @brief For adding groups to `/etc/group` +# @param v An array `v[groupname] string => "line..."` +# +# **Note:** To manage local users with CFEngine 3.6 and later, +# consider making `users` promises instead of modifying system files. 
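+#
+# **Example:** a minimal usage sketch (the group entry below is hypothetical):
+#
+# ```cf3
+# bundle agent example
+# {
+#   vars:
+#     "group[developers]" string => "developers:x:1001:alice,bob";
+#
+#   files:
+#     "/etc/group"
+#       edit_line => append_groups_starting( "$(this.bundle).group" );
+# }
+# ```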
+{ + vars: + + "index" slist => getindices("$(v)"); + + classes: + + "add_$(index)" not => groupexists("$(index)"), + comment => "Class created if group does not exist"; + + insert_lines: + + "$($(v)[$(index)])" + + comment => "Append users into a group file format", + if => "add_$(index)"; + +} + +## + +bundle edit_line set_colon_field(key,field,val) +# @brief Set the value of field number `field` of the line whose +# first field is `key` to the value `val`, in a colon-separated file. +# @param key The value the first field has to match +# @param field The field to be modified +# @param val The new value of `field` +{ + field_edits: + + "$(key):.*" + + comment => "Edit a colon-separated file, using the first field as a key", + edit_field => col(":","$(field)","$(val)","set"); +} + +## + +bundle edit_line set_user_field(user,field,val) +# @brief Set the value of field number "field" in a `:-field` +# formatted file like `/etc/passwd` +# @param user The user to be modified +# @param field The field that should be modified +# @param val The value for `field` +# +# **Note:** To manage local users with CFEngine 3.6 and later, +# consider making `users` promises instead of modifying system files. +{ + field_edits: + + "$(user):.*" + + comment => "Edit a user attribute in the password file", + edit_field => col(":","$(field)","$(val)","set"); +} + +## + +bundle edit_line append_user_field(group,field,allusers) +# @brief For adding users to to a file like `/etc/group` +# at field position `field`, comma separated subfields +# @param group The group to be modified +# @param field The field where users should be added +# @param allusers The list of users to add to `field` +# +# **Note:** To manage local users with CFEngine 3.6 and later, +# consider making `users` promises instead of modifying system files. +{ + vars: + + "val" slist => { @(allusers) }; + + field_edits: + + "$(group):.*" + + comment => "Append users into a password file format", + edit_field => col(":","$(field)","$(val)","alphanum"); +} + +## + +bundle edit_line expand_template(templatefile) +# @brief Read in the named text file and expand `$(var)` inside the file +# @param templatefile The name of the file +{ + insert_lines: + + "$(templatefile)" + + insert_type => "file", + comment => "Expand variables in the template file", + expand_scalars => "true"; +} + +bundle edit_line replace_or_add(pattern,line) +# @brief Replace a pattern in a file with a single line. +# +# If the pattern is not found, add the line to the file. +# +# @param pattern The pattern that should be replaced +# The pattern must match the whole line (it is automatically +# anchored to the start and end of the line) to avoid +# ambiguity. +# @param line The line with which to replace matches of `pattern` +{ + vars: + "cline" string => canonify("$(line)"); + "eline" string => escape("$(line)"); + + replace_patterns: + "^(?!$(eline)$)$(pattern)$" + comment => "Replace a pattern here", + replace_with => value("$(line)"), + classes => results("bundle", "replace_$(cline)"); + + insert_lines: + "$(line)" + if => "replace_$(cline)_reached"; +} + +bundle edit_line converge(marker, lines) +# @brief Converge `lines` marked with `marker` +# +# Any content marked with `marker` is removed, then `lines` are +# inserted. Every `line` should contain `marker`. 
+# +# @param marker The marker (not a regular expression; will be escaped) +# @param lines The lines to insert; all must contain `marker` +# +# **Example:** +# +# ```cf3 +# bundle agent pam_d_su_include +# #@brief Ensure /etc/pam.d/su has includes configured properly +# { +# files: +# ubuntu:: +# "/etc/pam.d/su" +# edit_line => converge( "@include", "@include common-auth +# @include common-account +# @include common-session"); +# } +# ``` +# +# **History:** +# +# * Introduced in 3.6.0 +{ + vars: + "regex" string => escape($(marker)); + + delete_lines: + ".*$(regex).*" comment => "Delete lines matching the marker"; + insert_lines: + "$(lines)" comment => "Insert the given lines"; +} + +bundle edit_line converge_prepend(marker, lines) +# @brief Converge `lines` marked with `marker` to start of content +# +# Any content marked with `marker` is removed, then `lines` are +# inserted at *start* of content. Every `line` should contain `marker`. +# +# @param marker The marker (not a regular expression; will be escaped) +# @param lines The lines to insert; all must contain `marker` +# +# **Example:** +# +# ```cf3 +# bundle agent pam_d_su_session +# #@brief Ensure /etc/pam.d/su has session configured properly +# { +# files: +# ubuntu:: +# "/etc/pam.d/su" +# edit_line => converge_prepend( "session", "session required pam_env.so readenv=1 envfile=/etc/default/locale +# session optional pam_mail.so nopen +# session required pam_limits.so" ); +# } +# ``` +# +# **History:** +# +# * Introduced in 3.17.0, 3.15.3, 3.12.6 +{ + vars: + "regex" string => escape($(marker)); + + delete_lines: + ".*$(regex).*" comment => "Delete lines matching the marker"; + insert_lines: + "$(lines)" location => start, comment => "Insert the given lines"; +} + + +bundle edit_line fstab_option_editor(method, mount, option) +# @brief Add or remove `/etc/fstab` options for a mount +# +# This bundle edits the options field of a mount. The `method` is a +# `field_operation` which can be `append`, `prepend`, `set`, `delete`, +# or `alphanum`. The option is OS-specific. +# +# @param method `field_operation` to apply +# @param mount the mount point +# @param option the option to add or remove +# +# **Example:** +# +# ```cf3 +# files: +# "/etc/fstab" edit_line => fstab_option_editor("delete", "/", "acl"); +# "/etc/fstab" edit_line => fstab_option_editor("append", "/", "acl"); +# ``` +{ + field_edits: + "(?!#)\S+\s+$(mount)\s.+" + edit_field => fstab_options($(option), $(method)); +} + +##------------------------------------------------------- +## editing bodies +##------------------------------------------------------- + +body edit_field fstab_options(newval, method) +# @brief Edit the options field in a fstab format +# @param newval the new option +# @param method `field_operation` to apply +# +# This body edits the options field in the fstab file format. The +# `method` is a `field_operation` which can be `append`, `prepend`, +# `set`, `delete`, or `alphanum`. The `newval` option is OS-specific. 
+# +# **Example:** +# +# ```cf3 +# # from the `fstab_options_editor` +# field_edits: +# "(?!#)\S+\s+$(mount)\s.+" +# edit_field => fstab_options($(option), $(method)); +# ``` +{ + field_separator => "\s+"; + select_field => "4"; + value_separator => ","; + field_value => "$(newval)"; + field_operation => "$(method)"; +} + +body edit_field quoted_var(newval,method) +# @brief Edit the quoted value of the matching line +# @param newval The new value +# @param method The method by which to edit the field +{ + field_separator => "\""; + select_field => "2"; + value_separator => " "; + field_value => "$(newval)"; + field_operation => "$(method)"; + extend_fields => "false"; + allow_blank_fields => "true"; +} + +## + +body edit_field col(split,col,newval,method) +# @brief Edit tabluar data with comma-separated sub-values +# @param split The separator that defines columns +# @param col The (1-based) index of the value to change +# @param newval The new value +# @param method The method by which to edit the field +{ + field_separator => "$(split)"; + select_field => "$(col)"; + value_separator => ","; + field_value => "$(newval)"; + field_operation => "$(method)"; + extend_fields => "true"; + allow_blank_fields => "true"; +} + +## + +body edit_field line(split,col,newval,method) +# @brief Edit tabular data with space-separated sub-values +# @param split The separator that defines columns +# @param col The (1-based) index of the value to change +# @param newval The new value +# @param method The method by which to edit the field +{ + field_separator => "$(split)"; + select_field => "$(col)"; + value_separator => " "; + field_value => "$(newval)"; + field_operation => "$(method)"; + extend_fields => "true"; + allow_blank_fields => "true"; +} + +## + +body replace_with text_between_match1_and_match2( _text ) +# @brief Replace matched line with substituted string +# @param _text String to substitute between first and second match +{ + replace_value => "$(match.1)$(_text)$(match.2)"; + occurrences => "all"; +} + +body replace_with value(x) +# @brief Replace matching lines +# @param x The replacement string +{ + replace_value => "$(x)"; + occurrences => "all"; +} + +## + +body select_region INI_section(x) +# @brief Restrict the `edit_line` promise to the lines in section `[x]` +# @param x The name of the section in an INI-like configuration file +{ + select_start => "\[$(x)\]\s*"; + select_end => "\[.*\]\s*"; + +@if minimum_version(3.10) + select_end_match_eof => "true"; +@endif +} + +##------------------------------------------------------- +## edit_defaults +##------------------------------------------------------- + +body edit_defaults std_defs +# @brief Standard definitions for `edit_defaults` +# Don't empty the file before editing starts and don't make a backup. 
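+#
+# **Example:** a minimal usage sketch (the file and line below are illustrative):
+#
+# ```cf3
+# files:
+#   "/etc/ssh/sshd_config"
+#     edit_line => lines_present( "PermitRootLogin no" ),
+#     edit_defaults => std_defs;
+# ```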
+{ + empty_file_before_editing => "false"; + edit_backup => "false"; + #max_file_size => "300000"; +} + +## + +body edit_defaults empty +# @brief Empty the file before editing +# +# No backup is made +{ + empty_file_before_editing => "true"; + edit_backup => "false"; + #max_file_size => "300000"; +} + +## + +body edit_defaults no_backup +# @brief Don't make a backup of the file before editing +{ + edit_backup => "false"; +} + +## + +body edit_defaults backup_timestamp +# @brief Make a timestamped backup of the file before editing +{ + empty_file_before_editing => "false"; + edit_backup => "timestamp"; + #max_file_size => "300000"; +} + +##------------------------------------------------------- +## location +##------------------------------------------------------- + +body location start +# @brief Editing occurs before the matched line +{ + before_after => "before"; +} + +## + +body location after(str) +# @brief Editing occurs after the line matching `str` +# @param str Regular expression matching the file line location +{ + before_after => "after"; + select_line_matching => "$(str)"; +} + +## + +body location before(str) +# @brief Editing occurs before the line matching `str` +# @param str Regular expression matching the file line location +{ + before_after => "before"; + select_line_matching => "$(str)"; +} + +##------------------------------------------------------- +## replace_with +##------------------------------------------------------- + +## + +body replace_with comment(c) +# @brief Comment all lines matching the pattern by preprending `c` +# @param c The prefix that comments out lines +{ + replace_value => "$(c) $(match.1)"; + occurrences => "all"; +} + +## + +body replace_with uncomment +# @brief Uncomment all lines matching the pattern by removing +# anything outside the matching string +{ + replace_value => "$(match.1)"; + occurrences => "all"; +} + +##------------------------------------------------------- +## copy_from +##------------------------------------------------------- + +body copy_from secure_cp(from,server) +# @brief Download a file from a remote server over an encrypted channel +# +# Only copy the file if it is different from the local copy, and verify +# that the copy is correct. +# +# @param from The location of the file on the remote server +# @param server The hostname or IP of the server from which to download +{ + source => "$(from)"; + servers => { "$(server)" }; + compare => "digest"; + encrypt => "true"; + verify => "true"; +} + +## + +body copy_from remote_cp(from,server) +# @brief Download a file from a remote server. +# +# @param from The location of the file on the remote server +# @param server The hostname or IP of the server from which to download +{ + servers => { "$(server)" }; + source => "$(from)"; + compare => "mtime"; +} + +## + +body copy_from remote_dcp(from,server) +# @brief Download a file from a remote server if it is different from the local copy. +# +# @param from The location of the file on the remote server +# @param server The hostname or IP of the server from which to download +# +# **See Also:** `local_dcp()` +{ + servers => { "$(server)" }; + source => "$(from)"; + compare => "digest"; +} + +## + +body copy_from local_cp(from) +# @brief Copy a file if the modification time or creation time of the source +# file is newer (the default comparison mechanism). +# @param from The path to the source file. 
+# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# files: +# "/tmp/file.bak" +# copy_from => local_cp("/tmp/file"); +# } +# ``` +# +# **See Also:** `local_dcp()` +{ + source => "$(from)"; +} + +## + +body copy_from local_dcp(from) +# @brief Copy a local file if the hash on the source file differs. +# @param from The path to the source file. +# +# **Example:** +# +# ```cf3 +# bundle agent example +# { +# files: +# "/tmp/file.bak" +# copy_from => local_dcp("/tmp/file"); +# } +# ``` +# +# **See Also:** `local_cp()`, `remote_dcp()` +{ + source => "$(from)"; + compare => "digest"; +} + +## + +body copy_from perms_cp(from) +# @brief Copy a local file and preserve file permissions on the local copy. +# +# @param from The path to the source file. +{ + source => "$(from)"; + preserve => "true"; +} + +body copy_from perms_dcp(from) +# @brief Copy a local file if it is different from the existing copy and +# preserve file permissions on the local copy. +# +# @param from The path to the source file. +{ + source => "$(from)"; + preserve => "true"; + compare => "digest"; +} + +body copy_from backup_local_cp(from) +# @brief Copy a local file and keep a backup of old versions. +# +# @param from The path to the source file. +{ + source => "$(from)"; + copy_backup => "timestamp"; +} + +## + +body copy_from seed_cp(from) +# @brief Copy a local file if the file does not already exist, i.e. seed the +# placement +# @param from The path to the source file. +# +# **Example:** +# +# ```cf3 +# bundle agent home_dir_init +# { +# files: +# "/home/mark.burgess/." +# copy_from => seed_cp("/etc/skel"), +# depth_search => recurse(inf), +# file_select => all, +# comment => "We want to be sure that the home directory has files that are +# present in the skeleton."; +# } +# ``` +{ + source => "$(from)"; + compare => "exists"; +} + +## + +body copy_from sync_cp(from,server) +# @brief Synchronize a file with a remote server. +# +# * If the file does not exist on the remote server then it should be purged. +# * Allow types to change (directories to files and vice versa). +# * The mode of the remote file should be preserved. +# * Files are compared using the default comparison (mtime or ctime). +# +# @param from The location of the file on the remote server +# @param server The hostname or IP of the server from which to download +# +# **Example**: +# +# ```cf3 +# files: +# "/tmp/masterfiles/." +# copy_from => sync_cp( "/var/cfengine/masterfiles", $(sys.policy_server) ), +# depth_search => recurse(inf), +# file_select => all, +# comment => "Mirror masterfiles from the hub to a temporary directory"; +# ``` +# +# **See Also:** `dir_sync()`, `copyfrom_sync()` +{ + servers => { "$(server)" }; + source => "$(from)"; + purge => "true"; + preserve => "true"; + type_check => "false"; +} + +## + +body copy_from no_backup_cp(from) +# @brief Copy a local file and don't make any backup of the previous version +# +# @param from The path to the source file. +{ + source => "$(from)"; + copy_backup => "false"; +} + +## + +body copy_from no_backup_dcp(from) +# @brief Copy a local file if contents have changed, and don't make any backup +# of the previous version +# +# @param from The path to the source file. 
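+#
+# **Example:** a minimal usage sketch (paths are hypothetical):
+#
+# ```cf3
+# files:
+#   "/etc/app.conf"
+#     copy_from => no_backup_dcp( "/var/cfengine/templates/app.conf" );
+# ```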
+{ + source => "$(from)"; + copy_backup => "false"; + compare => "digest"; +} + +## + +body copy_from no_backup_rcp(from,server) +# @brief Download a file if it's newer than the local copy, and don't make any +# backup of the previous version +# +# @param from The location of the file on the remote server +# @param server The hostname or IP of the server from which to download +{ + servers => { "$(server)" }; + source => "$(from)"; + compare => "mtime"; + copy_backup => "false"; +} + +##------------------------------------------------------- +## link_from +##------------------------------------------------------- + +body link_from ln_s(x) +# @brief Create a symbolink link to `x` +# The link is created even if the source of the link does not exist. +# @param x The source of the link +{ + link_type => "symlink"; + source => "$(x)"; + when_no_source => "force"; +} + +## + +body link_from linkchildren(tofile) +# @brief Create a symbolink link to `tofile` +# If the promiser is a directory, children are linked to the source, unless +# entries with identical names already exist. +# The link is created even if the source of the link does not exist. +# +# @param tofile The source of the link +{ + source => "$(tofile)"; + link_type => "symlink"; + when_no_source => "force"; + link_children => "true"; + when_linking_children => "if_no_such_file"; # "override_file"; +} + +body link_from linkfrom(source, type) +# @brief Make any kind of link to a file +# @param source link to this +# @param type the link's type (`symlink` or `hardlink`) +{ + source => $(source); + link_type => $(type); +} + +##------------------------------------------------------- +## perms +##------------------------------------------------------- + +body perms m(mode) +# @brief Set the file mode +# @param mode The new mode +{ + mode => "$(mode)"; + rxdirs => "true"; +} + +## + +body perms mo(mode,user) +# @brief Set the file's mode and owners +# @param mode The new mode +# @param user The username of the new owner +{ + owners => { "$(user)" }; + mode => "$(mode)"; + rxdirs => "true"; +} + +## + +body perms mog(mode,user,group) +# @brief Set the file's mode, owner and group +# @param mode The new mode +# @param user The username of the new owner +# @param group The group name +{ + owners => { "$(user)" }; + groups => { "$(group)" }; + mode => "$(mode)"; + rxdirs => "true"; +} + +## + +body perms og(u,g) +# @brief Set the file's owner and group +# @param u The username of the new owner +# @param g The group name +{ + owners => { "$(u)" }; + groups => { "$(g)" }; + rxdirs => "true"; +} + +## + +body perms owner(user) +# @brief Set the file's owner +# @param user The username of the new owner +{ + owners => { "$(user)" }; + rxdirs => "true"; +} + +body perms system_owned(mode) +# @brief Set the file owner and group to the system default +# @param mode the access permission in octal format +# +# **Example:** +# +# ```cf3 +# files: +# "/etc/passwd" perms => system_owned("0644"); +# ``` +{ + mode => "$(mode)"; + owners => { "root" }; + rxdirs => "true"; + +#+begin_ENT-951 +# Remove after 3.20 is not supported + rxdirs => "true"; +@if minimum_version(3.20) + rxdirs => "false"; +@endif +#+end + + freebsd|openbsd|netbsd|darwin:: + groups => { "wheel" }; + + linux:: + groups => { "root" }; + + solaris:: + groups => { "sys" }; + + aix:: + groups => { "system" }; +} + +##------------------------------------------------------- +## ACLS (extended Unix perms) +##------------------------------------------------------- + +body acl access_generic(acl) 
+# @brief Set the `aces` of the access control as specified +# +# Default/inherited ACLs are left unchanged. This body is +# applicable for both files and directories on all platforms. +# +# @param acl The aces to be set +{ + acl_method => "overwrite"; + aces => { "@(acl)" }; + + windows:: + acl_type => "ntfs"; + + !windows:: + acl_type => "posix"; +} + +## + +body acl ntfs(acl) +# @brief Set the `aces` on NTFS file systems, and overwrite +# existing ACLs. +# +# This body requires CFEngine Enterprise. +# +# @param acl The aces to be set +{ + acl_type => "ntfs"; + acl_method => "overwrite"; + aces => { "@(acl)" }; +} + +## + +body acl strict +# @brief Limit file access via ACLs to users with administrator privileges, +# overwriting existing ACLs. +# +# **Note:** May need to take ownership of file/dir to be sure no-one else is +# allowed access. +{ + acl_method => "overwrite"; + + windows:: + aces => { "user:Administrator:rwx" }; + !windows:: + aces => { "user:root:rwx" }; +} + +##------------------------------------------------------- +## depth_search +##------------------------------------------------------- + +body depth_search recurse(d) +# @brief Search files and direcories recursively, up to the specified depth +# Directories on different devices are excluded. +# +# @param d The maximum search depth +{ + depth => "$(d)"; + xdev => "true"; +} + +## + +body depth_search recurse_ignore(d,list) +# @brief Search files and directories recursively, +# but don't recurse into the specified directories +# +# @param d The maximum search depth +# @param list The list of directories to be excluded +{ + depth => "$(d)"; + exclude_dirs => { @(list) }; +} + +## + +body depth_search include_base +# @brief Search files and directories recursively, +# starting from the base directory. +{ + include_basedir => "true"; +} + +body depth_search recurse_with_base(d) +# @brief Search files and directories recursively up to the specified +# depth, starting from the base directory excluding directories on +# other devices. 
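+#
+# For example, a sketch like the following (the path is illustrative) would
+# tidy a directory tree including the top-level directory itself:
+#
+# ```cf3
+# files:
+#   "/var/tmp/scratch"
+#     delete => tidy,
+#     file_select => days_old( "7" ),
+#     depth_search => recurse_with_base( inf );
+# ```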
+# +# @param d The maximum search depth +{ + depth => "$(d)"; + xdev => "true"; + include_basedir => "true"; +} + +##------------------------------------------------------- +## delete +##------------------------------------------------------- + +body delete tidy +# @brief Delete the file and remove empty directories +# and links to directories +{ + dirlinks => "delete"; + rmdirs => "true"; +} + +##------------------------------------------------------- +## rename +##------------------------------------------------------- + +body rename disable +# @brief Disable the file +{ + disable => "true"; +} + +## + +body rename rotate(level) +# @brief Rotate and store up to `level` backups of the file +# @param level The number of backups to store +{ + rotate => "$(level)"; +} + +## + +body rename to(file) +# @brief Rename the file to `file` +# @param file The new name of the file +{ + newname => "$(file)"; +} + +##------------------------------------------------------- +## file_select +##------------------------------------------------------- + +body file_select name_age(name,days) +# @brief Select files that have a matching `name` and have not been modified for at least `days` +# @param name A regex that matches the file name +# @param days Number of days +{ + leaf_name => { "$(name)" }; + mtime => irange(0,ago(0,0,"$(days)",0,0,0)); + file_result => "mtime.leaf_name"; +} + +## + +body file_select days_old(days) +# @brief Select files that have not been modified for at least `days` +# @param days Number of days +{ + mtime => irange(0,ago(0,0,"$(days)",0,0,0)); + file_result => "mtime"; +} + +## + +body file_select size_range(from,to) +# @brief Select files that have a size within the specified range +# @param from The lower bound of the allowed file size +# @param to The upper bound of the allowed file size +{ + search_size => irange("$(from)","$(to)"); + file_result => "size"; +} + +## + +body file_select bigger_than(size) +# @brief Select files that are above a given size +# @param size The number of bytes files have +{ + search_size => irange("0","$(size)"); + file_result => "!size"; +} + +## + +body file_select exclude(name) +# @brief Select all files except those that match `name` +# @param name A regular expression +{ + leaf_name => { "$(name)"}; + file_result => "!leaf_name"; +} + +## + +body file_select not_dir +# @brief Select all files that are not directories +{ + file_types => { "dir" }; + file_result => "!file_types"; +} + +body file_select plain +# @brief Select plain, regular files +{ + file_types => { "plain" }; + file_result => "file_types"; +} + +body file_select dirs +# @brief Select directories +{ + file_types => { "dir" }; + file_result => "file_types"; +} + +## + +body file_select by_name(names) +# @brief Select files that match `names` +# @param names A regular expression +{ + leaf_name => { @(names)}; + file_result => "leaf_name"; +} + +## + +body file_select ex_list(names) +# @brief Select all files except those that match `names` +# @param names A list of regular expressions +{ + leaf_name => { @(names) }; + file_result => "!leaf_name"; +} + +## + +body file_select all +# @brief Select all file system entries +{ + leaf_name => { ".*" }; + file_result => "leaf_name"; +} + +## + +body file_select older_than(years, months, days, hours, minutes, seconds) +# @brief Select files older than the date-time specified +# @param years Number of years +# @param months Number of months +# @param days Number of days +# @param hours Number of hours +# @param minutes Number of minutes +# 
@param seconds Number of seconds +# +# Generic older_than selection body, aimed to have a common definition handy +# for every case possible. +{ + mtime => irange(0,ago("$(years)","$(months)","$(days)","$(hours)","$(minutes)","$(seconds)")); + file_result => "mtime"; +} + +## + +body file_select filetype_older_than(filetype, days) +# @brief Select files of specified type older than specified number of days +# +# @param filetype File type to select +# @param days Number of days +# +# This body only takes a single filetype, see `filetypes_older_than()` +# if you want to select more than one type of file. +{ + file_types => { "$(filetype)" }; + mtime => irange(0,ago(0,0,"$(days)",0,0,0)); + file_result => "file_types.mtime"; +} + +## + +body file_select filetypes_older_than(filetypes, days) +# @brief Select files of specified types older than specified number of days +# +# This body only takes a list of filetypes +# +# @param filetypes A list of file types +# @param days Number of days +# +# **See also:** `filetype_older_than()` +{ + file_types => { @(filetypes) }; + mtime => irange(0,ago(0,0,"$(days)",0,0,0)); + file_result => "file_types.mtime"; +} + +body file_select symlinked_to(target) +# @brief Select symlinks that point to $(target) +# @param target The file the symlink should point to in order to be selected +{ + file_types => { "symlink" }; + issymlinkto => { "$(target)" }; + file_result => "issymlinkto"; +} + +##------------------------------------------------------- +## changes +##------------------------------------------------------- + +body changes detect_all_change +# @brief Detect all file changes using the best hash method +# +# This is fierce, and will cost disk cycles +# +{ + hash => "best"; + report_changes => "all"; + update_hashes => "yes"; +} + +## + +body changes detect_all_change_using(hash) +# @brief Detect all file changes using a given hash method +# +# Detect all changes using a configurable hashing algorithm +# for times when you care about both content and file stats e.g. mtime +# +# @param hash supported hashing algorithm (md5, sha1, sha224, sha256, sha384, sha512, best) +{ + hash => "$(hash)"; + report_changes => "all"; + update_hashes => "yes"; +} + +## + +body changes detect_content +# @brief Detect file content changes using md5 +# +# This is a cheaper alternative +{ + hash => "md5"; + report_changes => "content"; + update_hashes => "yes"; +} + +## + +body changes detect_content_using(hash) +# @brief Detect file content changes using a given hash algorithm. +# +# For times when you only care about content, not file stats e.g. 
mtime +# @param hash - supported hashing algorithm (md5, sha1, sha224, sha256, sha384, +# sha512, best) +{ + hash => "$(hash)"; + report_changes => "content"; + update_hashes => "yes"; +} + +## + +body changes noupdate +# @brief Detect content changes in (small) files that should never change +{ + hash => "sha256"; + report_changes => "content"; + update_hashes => "no"; +} + +## + +body changes diff +# @brief Detect file content changes using sha256 +# and report the diff to CFEngine Enterprise +{ + hash => "sha256"; + report_changes => "content"; + report_diffs => "true"; + update_hashes => "yes"; +} + +## + +body changes all_changes +# @brief Detect all file changes using sha256 +# and report the diff to CFEngine Enterprise +{ + hash => "sha256"; + report_changes => "all"; + report_diffs => "true"; + update_hashes => "yes"; +} + +## + +body changes diff_noupdate +# @brief Detect content changes in (small) files +# and report the diff to CFEngine Enterprise +{ + hash => "sha256"; + report_changes => "content"; + report_diffs => "true"; + update_hashes => "no"; +} + +# template bundles + +bundle agent file_mustache(mustache_file, json_file, target_file) +# @brief Make a file from a Mustache template and a JSON file +# @param mustache_file the file with the Mustache template +# @param json_file a file with JSON data +# @param target_file the target file to write +# +# **Example:** +# +# ```cf3 +# methods: +# "m" usebundle => file_mustache("x.mustache", "y.json", "z.txt"); +# ``` +{ + files: + "$(target_file)" + create => "true", + edit_template => $(mustache_file), + template_data => readjson($(json_file), "100k"), + template_method => "mustache"; +} + +bundle agent file_mustache_jsonstring(mustache_file, json_string, target_file) +# @brief Make a file from a Mustache template and a JSON string +# @param mustache_file the file with the Mustache template +# @param json_string a string with JSON data +# @param target_file the target file to write +# +# **Example:** +# +# ```cf3 +# methods: +# "m" usebundle => file_mustache_jsonstring("x.mustache", '{ "x": "y" }', "z.txt"); +# ``` +{ + files: + "$(target_file)" + create => "true", + edit_template => $(mustache_file), + template_data => parsejson($(json_string)), + template_method => "mustache"; +} + +bundle agent file_tidy(file) +# @brief Remove a file +# @param file to remove +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_tidy("/tmp/z.txt"); +# ``` +{ + files: + "$(file)" delete => tidy; + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): deleting $(file) with delete => tidy"; +} + +bundle agent dir_sync(from, to) +# @brief Synchronize a directory entire, deleting unknown files +# @param from source directory +# @param to destination directory +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => dir_sync("/tmp", "/var/tmp"); +# ``` +{ + files: + "$(to)/." 
+ create => "true", + depth_search => recurse("inf"), + copy_from => copyfrom_sync($(from)); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): copying directory $(from) to $(to)"; +} + +bundle agent file_copy(from, to) +# @brief Copy a file +# @param from source file +# @param to destination file +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_copy("/tmp/z.txt", "/var/tmp/y.txt"); +# ``` +{ + files: + "$(to)" + copy_from => copyfrom_sync($(from)); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): copying file $(from) to $(to)"; +} + +body copy_from copyfrom_sync(f) +# @brief Copy a directory or file with digest checksums, preserving attributes and purging leftovers +# @param f the file or directory +{ + source => "$(f)"; + purge => "true"; + preserve => "true"; + type_check => "false"; + compare => "digest"; +} + +bundle agent file_make(file, str) +# @brief Make a file from a string +# @param file target +# @param str the string data +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_make("/tmp/z.txt", "Some text +# and some more text here"); +# ``` +{ + vars: + "len" int => string_length($(str)); + summarize:: + "summary" string => format("%s...%s", + string_head($(str), 18), + string_tail($(str), 18)); + classes: + "summarize" expression => isgreaterthan($(len), 40); + + files: + "$(file)" + create => "true", + edit_line => insert_lines($(str)), + edit_defaults => empty; + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): creating $(file) with contents '$(str)'" + if => "!summarize"; + + "DEBUG $(this.bundle): creating $(file) with contents '$(summary)'" + if => "summarize"; +} + +bundle agent file_make_mog(file, str, mode, owner, group) +# @brief Make a file from a string with mode, owner, group +# @param file target +# @param str the string data +# @param mode the file permissions in octal +# @param owner the file owner as a name or UID +# @param group the file group as a name or GID +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_make_mog("/tmp/z.txt", "Some text +# and some more text here", "0644", "root", "root"); +# ``` +{ + vars: + "len" int => string_length($(str)); + summarize:: + "summary" string => format("%s...%s", + string_head($(str), 18), + string_tail($(str), 18)); + classes: + "summarize" expression => isgreaterthan($(len), 40); + + files: + "$(file)" + create => "true", + edit_line => insert_lines($(str)), + perms => mog($(mode), $(owner), $(group)), + edit_defaults => empty; + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): creating $(file) with contents '$(str)', mode '$(mode)', owner '$(owner)' and group '$(group)'" + if => "!summarize"; + + "DEBUG $(this.bundle): creating $(file) with contents '$(summary)', mode '$(mode)', owner '$(owner)' and group '$(group)'" + if => "summarize"; +} + +bundle agent file_make_mustache(file, template, data) +# @brief Make a file from a mustache template +# @param file Target file to render +# @param template Path to mustache template +# @param data Data container to use +# +# **Example:** +# +# ```cf3 +# vars: +# "state" data => datastate(); +# +# methods: +# "" usebundle => file_make_mustache( "/tmp/z.txt", "/tmp/z.mustache", @(state) ); +# ``` +{ + files: + "$(file)" + create => "true", + edit_template => "$(template)", + template_method => "mustache", + template_data => @(data); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): rendering $(file) with template 
'$(template)'"; +} + +bundle agent file_make_mustache_with_perms(file, template, data, mode, owner, group) +# @brief Make a file from a mustache template +# @param file Target file to render +# @param template Path to mustache template +# @param data Data container to use +# @param mode File permissions +# @param owner Target file owner +# @param group Target file group +# +# **Example:** +# +# ```cf3 +# vars: +# "state" data => datastate(); +# +# methods: +# "" usebundle => file_make_mustache( "/tmp/z.txt", "/tmp/z.mustache", @(state), +# 600, "root", "root" ); +# ``` +{ + files: + "$(file)" + create => "true", + edit_template => "$(template)", + template_method => "mustache", + perms => mog( $(mode), $(owner), $(group) ), + template_data => @(data); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): rendering $(file) with template '$(template)'"; +} + +bundle agent file_empty(file) +# @brief Make an empty file +# @param file target +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_empty("/tmp/z.txt"); +# ``` +{ + files: + "$(file)" + create => "true", + edit_defaults => empty; + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): creating empty $(file) with 0 size"; +} + +bundle agent file_hardlink(target, link) +# @brief Make a hard link to a file +# @param target of link +# @param link the hard link's location +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_hardlink("/tmp/z.txt", "/tmp/z.link"); +# ``` +{ + files: + "$(link)" + move_obstructions => "true", + link_from => linkfrom($(target), "hardlink"); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): $(link) will be a hard link to $(target)"; +} + +bundle agent file_link(target, link) +# @brief Make a symlink to a file +# @param target of symlink +# @param link the symlink's location +# +# **Example:** +# +# ```cf3 +# methods: +# "" usebundle => file_link("/tmp/z.txt", "/tmp/z.link"); +# ``` +{ + files: + "$(link)" + move_obstructions => "true", + link_from => linkfrom($(target), "symlink"); + + reports: + "DEBUG|DEBUG_$(this.bundle)":: + "DEBUG $(this.bundle): $(link) will be a symlink to $(target)"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/guest_environments.cf b/policies/lib/tree/20_cfe_basics/cfengine/guest_environments.cf new file mode 100644 index 00000000000..7f1d1df52c8 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/guest_environments.cf @@ -0,0 +1,59 @@ +# Guest environments bodies + +body environment_resources kvm(name, arch, cpu_count, mem_kb, disk_file) +# @brief An `environment_resources` body for a KVM virtual machine. +# +# The `env_spec` attribute is set to a KVM XML specification. 
+#
+# @param name The name of the virtual machine
+# @param arch The architecture
+# @param cpu_count The number of CPUs the virtual machine should have
+# @param mem_kb The amount of RAM in kilobytes
+# @param disk_file The file on the host system for the virtual machine's hard drive
+#
+# **Example:**
+#
+# ```cf3
+# bundle agent manage_vm
+# {
+#   guest_environments:
+#     am_vm_host::
+#       "db_server"
+#         environment_host => atlas,
+#         environment_type => "kvm",
+#         environment_state => "create",
+#         environment_resources => kvm("PSQL1", "x86_64", "4", "4096", "/var/lib/libvirt/images/psql1.iso");
+# }
+# ```
+{
+  env_spec =>
+  "<domain type='kvm'>
+    <name>$(name)</name>
+    <memory>$(mem_kb)</memory>
+    <currentMemory>$(mem_kb)</currentMemory>
+    <vcpu>$(cpu_count)</vcpu>
+    <os>
+      <type arch='$(arch)'>hvm</type>
+    </os>
+    <on_poweroff>destroy</on_poweroff>
+    <on_reboot>restart</on_reboot>
+    <on_crash>restart</on_crash>
+    <devices>
+      <emulator>/usr/bin/kvm</emulator>
+      <disk type='file' device='disk'>
+        <source file='$(disk_file)'/>
+        <target dev='vda' bus='virtio'/>
+      </disk>
+      <interface type='network'>
+        <source network='default'/>
+      </interface>
+    </devices>
+  </domain>
+  ";
+}
diff --git a/policies/lib/tree/20_cfe_basics/cfengine/monitor.cf b/policies/lib/tree/20_cfe_basics/cfengine/monitor.cf
new file mode 100644
index 00000000000..76dafd03111
--- /dev/null
+++ b/policies/lib/tree/20_cfe_basics/cfengine/monitor.cf
@@ -0,0 +1,67 @@
+# Monitor bodies
+
+body match_value scan_log(line)
+# @brief Selects lines matching `line` in a growing file
+# @param line Regular expression for matching lines.
+#
+# **See also:** `select_line_matching`, `track_growing_file`
+{
+      select_line_matching => "$(line)";
+      track_growing_file => "true";
+}
+
+##
+
+body match_value scan_changing_file(line)
+# @brief Selects lines matching `line` in a changing file
+# @param line Regular expression for matching lines.
+#
+# **See also:** `select_line_matching`, `track_growing_file`
+{
+      select_line_matching => "$(line)";
+      track_growing_file => "false";
+}
+
+##
+
+body match_value single_value(regex)
+# @brief Extract lines matching `regex` as values
+# @param regex Regular expression matching lines and values
+#
+# **See also:** `select_line_matching`, `extraction_regex`
+{
+      select_line_matching => "$(regex)";
+      extraction_regex => "($(regex))";
+}
+
+##
+
+body match_value line_match_value(line_match, extract_regex)
+# @brief Find lines matching line_match and extract a value matching extract_regex
+# @param line_match Regular expression matching line where value is found
+# @param extract_regex Regular expression matching value to extract
+#
+# **See also:** `select_line_matching`, `extraction_regex`
+#
+# **Example:**
+#
+# ```cf3
+# bundle monitor example
+# {
+#   vars:
+#       "regex_vsz" string => "root\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9.]+\s+[0-9.]+\s+([0-9]+).*";
+#   measurements:
+#       "/var/cfengine/state/cf_procs"
+#         handle => "cf_serverd_vsz",
+#         comment => "Tracking the memory consumption of a process can help us identify possible memory leaks",
+#         stream_type => "file",
+#         data_type => "int",
+#         history_type => "weekly",
+#         units => "kB",
+#         match_value => line_match_value(".*cf-serverd.*", "$(regex_vsz)");
+# }
+# ```
+{
+      select_line_matching => "$(line_match)";
+      extraction_regex => "$(extract_regex)";
+}
diff --git a/policies/lib/tree/20_cfe_basics/cfengine/packages.cf b/policies/lib/tree/20_cfe_basics/cfengine/packages.cf
new file mode 100644
index 00000000000..198faf7b821
--- /dev/null
+++ b/policies/lib/tree/20_cfe_basics/cfengine/packages.cf
@@ -0,0 +1,2249 @@
+# Packages bodies
+
+bundle common package_module_knowledge
+# @brief common package_module_knowledge bundle
+#
+# This common bundle defines which package modules are the defaults on different
+# platforms.
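+#
+# As a rough sketch of how this is typically consumed (the `body common
+# control` below is illustrative and not part of this file):
+#
+# ```cf3
+# body common control
+# {
+#       package_module => $(package_module_knowledge.platform_default);
+#       package_inventory => { $(package_module_knowledge.platform_default) };
+# }
+# ```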
+{ + vars: + + # Package inventory refresh + "query_installed_ifelapsed" string => "$(ncf_def.package_module_query_installed_ifelapsed)"; + "query_updates_ifelapsed" string => "$(ncf_def.package_module_query_updates_ifelapsed)"; + + debian:: + "platform_default" string => "apt_get"; + + freebsd:: + "platform_default" string => "pkg"; + + redhat|amazon_linux:: + "platform_default" string => "yum"; + + suse|sles|opensuse:: + "platform_default" string => "zypper"; + + aix:: + "platform_default" string => "nimclient"; + + slackware:: + "platform_default" string => "slackpkg"; + + solaris:: + "platform_default" string => "ips"; + + # CFEngine 3.12.2+ and 3.14+ have new package module on Windows + windows.cfengine_3_12.!(cfengine_3_12_0|cfengine_3_12_1):: + "platform_default" string => "msiexec"; +@if minimum_version(3.14) + windows:: + "platform_default" string => "msiexec"; +@endif + + alpinelinux:: + "platform_default" string => "apk"; + + termux:: + "platform_default" string => "apt_get"; +} + +body package_module apk +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; +} + +body package_module apt_get +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module zypper +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module nimclient +# @brief Define details used when interfacing with nimclient package +# module +# +# **Example:** +# +# ```cf3 +# bundle agent example_nimclient +# { +# packages: +# "expect.base" +# policy => "present", +# options => { "lpp_source=lppaix71034" }, +# package_module => nimclient; +# } +# ``` +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + # This would likey be customized based on your infrastructure specifics + # you may for example want to default the lpp_source based on something + # like `oslevel -s` output. + #default_options => {}; +} +body package_module pkgsrc +# @brief Define details used when interfacing with the pkgsrc package +# module. 
+# +# **Example:** +# ```cf3 +# bundle agent main +# { +# packages: +# "vim" +# policy => "present", +# package_module => pkgsrc; +# } +# ``` +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; +} + +body package_module yum +# @brief Define details used when interfacing with yum +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module slackpkg +# @brief Define details used when interfacing with slackpkg +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module pkg +# @brief Define details used when interfacing with pkg +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module snap +# @brief Define details used when interfacing with snapcraft +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; + #default_options => {}; +} + +body package_module freebsd_ports +# @brief Define details used when interfacing with the freebsd ports package +# module. +# +# **Note:** Ports are expected to be setup prior to trying to use the packages +# promise. You may need to ensure that `portsnap extract` has been run, e.g. +# fileexists("/usr/ports/Mk/bsd.port.mk") +# +# **Example:** +# ```cf3 +# bundle agent main +# { +# packages: +# freebsd:: +# "vim" +# policy => "present", +# package_module => freebsd_ports; +# } +# ``` +{ + query_installed_ifelapsed => "$(package_module_knowledge.query_installed_ifelapsed)"; + query_updates_ifelapsed => "$(package_module_knowledge.query_updates_ifelapsed)"; +} + +bundle common packages_common +# @brief Define inputs required for this policy file +{ + vars: + "inputs" slist => { + "$(this.promise_dirname)/paths.cf", + "$(this.promise_dirname)/files.cf", + "$(this.promise_dirname)/common.cf" + }; +} + +body file control +# @brief include policy needed by this file +{ + inputs => { @(packages_common.inputs) }; +} + +##-------------------------------------------------------------- +## Packages promises +##-------------------------------------------------------------- + +bundle common common_knowledge +# @brief common packages knowledge bundle +# +# This common bundle defines general things about platforms. +{ + vars: + "list_update_ifelapsed" string => "${node.properties[rudder][packages][updates_cache_expire]}"; +} + +bundle common debian_knowledge +# @depends paths +# @brief common Debian knowledge bundle +# +# This common bundle has useful information about Debian. 
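+#
+# For illustration only (the package name is made up), the composed command
+# strings can be reused directly in a `commands` promise:
+#
+# ```cf3
+# commands:
+#     debian::
+#       "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install htop";
+# ```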
+{ + vars: + # Debian default package architecture, see https://wiki.debian.org/Multiarch/Tuples + "default_arch" string => ifelse("x86_64", "amd64", + "i386", "i386", + $(sys.arch)); + + "apt_prefix" string => "/usr/bin/env DEBIAN_FRONTEND=noninteractive LC_ALL=C PATH=/bin:/sbin/:/usr/bin:/usr/sbin"; + "call_dpkg" string => "$(apt_prefix) $(paths.path[dpkg])"; + "call_apt_get" string => "$(apt_prefix) $(paths.path[apt_get])"; + "call_aptitude" string => "$(apt_prefix) $(paths.path[aptitude])"; + "dpkg_options" string => "-o Dpkg::Options::=--force-confold -o Dpkg::Options::=--force-confdef"; + + "dpkg_compare_equal" string => "$(call_dpkg) --compare-versions '$(v1)' eq '$(v2)'"; + "dpkg_compare_less" string => "$(call_dpkg) --compare-versions '$(v1)' lt '$(v2)'"; + + "list_name_regex" string => "^.i\s+([^\s:]+).*"; + "list_version_regex" string => "^.i\s+[^\s]+\s+([^\s]+).*"; + + "patch_name_regex" string => "^Inst\s+(\S+)\s+.*"; + "patch_version_regex" string => "^Inst\s+\S+\s+\[\S+\]\s+\((\S+)\s+.*"; +} + +bundle common rpm_knowledge +# @depends paths +# @brief common RPM knowledge bundle +# +# This common bundle has useful information about platforms using RPM +{ + vars: + "call_rpm" string => "$(paths.rpm)"; + + "rpm_output_format" string => "i | repos | %{name} | %{version}-%{release} | %{arch}\n"; + "rpm_name_regex" string => "[^|]+\|[^|]+\|\s+([^\s|]+).*"; + "rpm_version_regex" string => "[^|]+\|[^|]+\|[^|]+\|\s+([^\s|]+).*"; + "rpm_arch_regex" string => "[^|]+\|[^|]+\|[^|]+\|[^|]+\|\s+([^\s]+).*"; + + "rpm2_output_format" string => "%{name} %{version}-%{release} %{arch}\n"; + "rpm2_name_regex" string => "^(\S+?)\s\S+?\s\S+$"; + "rpm2_version_regex" string => "^\S+?\s(\S+?)\s\S+$"; + "rpm2_arch_regex" string => "^\S+?\s\S+?\s(\S+)$"; + + "rpm3_output_format" string => "%{name} %{arch} %{version}-%{release}\n"; + "rpm3_name_regex" string => "(\S+).*"; + "rpm3_version_regex" string => "\S+\s+\S+\s+(\S+).*"; + "rpm3_arch_regex" string => "\S+\s+(\S+).*"; +} + +bundle common redhat_no_locking_knowledge { + vars: + # Option was introduced in rhel 6.1 + (amazon_linux|redhat|centos).!(redhat_3|redhat_4|redhat_5|redhat_6_0|centos_3|centos_4|centos_5|centos_6_0):: + "no_locking_option" string => "--setopt=exit_on_lock=True"; + redhat_3|redhat_4|redhat_5|redhat_6_0|centos_3|centos_4|centos_5|centos_6_0:: + "no_locking_option" string => ""; +} + +bundle common redhat_knowledge +# @depends paths +# @brief common Red Hat knowledge bundle +# +# This common bundle has useful information about Red Hat and its +# derivatives +{ + vars: + # Red Hat default package architecture + "default_arch" string => $(sys.arch); + + "call_yum" string => "$(paths.path[yum])"; + "call_rpmvercmp" string => "$(sys.bindir)/rpmvercmp"; + + # on RHEL 3/4, Yum doesn't know how to be --quiet + "yum_options" string => ifelse("centos_4|redhat_4|centos_3|redhat_3", "", + "--quiet ${redhat_no_locking_knowledge.no_locking_option}"); + "yum_offline_options" string => "$(yum_options) -C"; + + "rpm_compare_equal" string => "$(call_rpmvercmp) '$(v1)' eq '$(v2)'"; + "rpm_compare_less" string => "$(call_rpmvercmp) '$(v1)' lt '$(v2)'"; + # yum check-update prints a lot of extra useless lines, but the format of + # the actual package lines is: + # + # . + # + # We try to match that format as closely as possible, so we reject + # possibly interspersed error messages. 
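+      #
+      # For illustration, an accepted line looks roughly like
+      # "name.arch  version  repo", e.g. (made-up package and repo):
+      #
+      #   bash.x86_64    4.4.20-4.el8    baseos
+      #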
+ "patch_name_regex" string => "^(\S+)\.[^\s.]+\s+\S+\s+\S+\s*$"; + "patch_version_regex" string => "^\S+\.[^\s.]+\s+(\S+)\s+\S+\s*$"; + "patch_arch_regex" string => "^\S+\.([^\s.]+)\s+\S+\s+\S+\s*$"; + + # Combine multiline entries into one line. A line without at least three + # fields gets combined with the next line, if that line starts with a + # space. + "check_update_postproc" string => "| $(paths.sed) -r -n -e ' + :begin; + /\S+\s+\S+\s+\S+/!{ # Check for valid line. + N; # If not, read in the next line and append it. + /\n /!{ # Check whether that line started with a space. + h; # If not, copy buffer to clipboard. + s/\n[^\n]*$//; # Erase last line. + p; # Print current buffer. + x; # Restore from clipboard. + s/^.*\n//; # Erase everything but last line. + }; + s/\n / /; # Combine lines by removing newline. + bbegin; # Jump back to begin. + }; + p; # Print current buffer.'"; +} + +bundle common suse_knowledge +# @depends paths +# @brief common SUSE knowledge bundle +{ + vars: + # SUSE default package architecture + "default_arch" string => $(sys.arch); + + "call_zypper" string => "$(paths.zypper)"; +} + +bundle common darwin_knowledge +# @depends paths +# @brief common Darwin / Mac OS X knowledge bundle +# +# This common bundle has useful information about Darwin / Mac OS X. +{ + vars: + "call_brew" string => "$(paths.path[brew])"; + "call_sudo" string => "$(paths.path[sudo])"; + + # used with brew list --versions format '%{name} %{version}\n' + + "brew_name_regex" string => "([\S]+)\s[\S]+"; + "brew_version_regex" string => "[\S]+\s([\S]+)"; +} + +bundle common npm_knowledge +# @depends paths +# @brief Node.js `npm' knowledge bundle +# +# This common bundle has useful information about the Node.js `npm' package manager. +{ + vars: + "call_npm" string => "$(paths.path[npm])"; + + "npm_list_name_regex" string => "^[^ /]+ ([\w\d-._~]+)@[\d.]+"; + "npm_list_version_regex" string => "^[^ /]+ [\w\d-._~]+@([\d.]+)"; + "npm_installed_regex" string => "^[^ /]+ ([\w\d-._~]+@[\d.]+)"; +} + +bundle common pip_knowledge +# @depends paths +# @brief Python `pip' knowledge bundle +# +# This common bundle has useful information about the Python `pip' package manager. +{ + vars: + "call_pip" string => "$(paths.path[pip])"; + + "pip_list_name_regex" string => "^([[:alnum:]-_]+)\s\([\d.]+\)"; + "pip_list_version_regex" string => "^[[:alnum:]-_]+\s\(([\d.]+)\)"; + "pip_installed_regex" string => "^([[:alnum:]-_]+\s\([\d.]+\))"; +} + +bundle common solaris_knowledge +# @depends paths +# @brief Solaris knowledge bundle +# +# This common bundle has useful information about the Solaris packages. +{ + vars: + "call_pkgadd" string => "$(paths.path[pkgadd])"; + "call_pkgrm" string => "$(paths.path[pkgrm])"; + "call_pkginfo" string => "$(paths.path[pkginfo])"; + + "admin_nocheck" string => "mail= +instance=unique +partial=nocheck +runlevel=nocheck +idepend=nocheck +rdepend=nocheck +space=nocheck +setuid=nocheck +conflict=nocheck +action=nocheck +networktimeout=60 +networkretries=3 +authentication=quit +keystore=/var/sadm/security +proxy= +basedir=default"; + +} + +body package_method pip(flags) +# @depends common_knowledge pip_knowledge +# @brief Python `pip' package management +# +# `pip' is a package manager for Python +# http://www.pip-installer.org/en/latest/ +# +# Available commands : add, delete, (add)update, verify +# +# @param flags The command line parameter passed to `pip` +# +# Note: "update" command performs recursive upgrade (of dependencies) by +# default. 
Set $flags to "--no-deps" to perform non-recursive upgrade. +# http://www.pip-installer.org/en/latest/cookbook.html#non-recursive-upgrades +# +# **Example:** +# +# ```cf3 +# packages: +# "Django" package_method => pip(""), package_policy => "add"; +# "django-registration" package_method => pip(""), package_policy => "delete"; +# "requests" package_method => pip(""), package_policy => "verify"; +# +# ``` +# +# **Note:** "Django" with a capital 'D' in the example above. +# Explicitly match the name of the package, capitalization does count! +# +# ```console +# $ pip search django | grep -E "^Django\s+-" +# Django - A high-level Python Web framework [..output trimmed..] +# ``` +{ + package_changes => "individual"; + + package_noverify_regex => ""; + + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "$(pip_knowledge.pip_list_name_regex)"; + package_list_version_regex => "$(pip_knowledge.pip_list_version_regex)"; + package_installed_regex => "$(pip_knowledge.pip_installed_regex)"; + + package_name_convention => "$(name)"; + package_delete_convention => "$(name)"; + + package_list_command => "$(paths.path[pip]) list $(flags)"; + package_verify_command => "$(paths.path[pip]) show $(flags)"; + package_add_command => "$(paths.path[pip]) install $(flags)"; + package_delete_command => "$(paths.path[pip]) uninstall --yes $(flags)"; + package_update_command => "$(paths.path[pip]) install --upgrade $(flags)"; +} + +body package_method npm(dir) +# @depends common_knowledge npm_knowledge +# @brief Node.js `npm' local-mode package management +# +# `npm' is a package manager for Node.js +# https://npmjs.org/package/npm +# +# Available commands : add, delete, (add)update, verify +# +# For the difference between local and global install see here: +# https://npmjs.org/doc/cli/npm-install.html +# +# @param dir The prefix path to ./node_modules/ +# +# **Example:** +# +# ```cf3 +# vars: +# "dirs" slist => { "/root/myproject", "/home/somedev/someproject" }; +# +# packages: +# "express" package_method => npm("$(dirs)"), package_policy => "add"; +# "redis" package_method => npm("$(dirs)"), package_policy => "delete"; +# ``` +{ + package_changes => "individual"; + + package_noverify_regex => ""; + + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "$(npm_knowledge.npm_list_name_regex)"; + package_list_version_regex => "$(npm_knowledge.npm_list_version_regex)"; + package_installed_regex => "$(npm_knowledge.npm_installed_regex)"; + + package_name_convention => "$(name)"; + package_delete_convention => "$(name)"; + + package_list_command => "$(npm_knowledge.call_npm) list --prefix $(dir)"; + package_verify_command => "$(npm_knowledge.call_npm) list --prefix $(dir)"; + package_add_command => "$(npm_knowledge.call_npm) install --prefix $(dir)"; + package_delete_command => "$(npm_knowledge.call_npm) remove --prefix $(dir)"; + package_update_command => "$(npm_knowledge.call_npm) update --prefix $(dir)"; +} + +body package_method npm_g +# @depends common_knowledge npm_knowledge +# @brief Node.js `npm' global-mode package management +# +# `npm' is a package manager for Node.js +# https://npmjs.org/package/npm +# +# Available commands : add, delete, (add)update, verify +# +# For the difference between global and local install see here: +# https://npmjs.org/doc/cli/npm-install.html +# +# **Example:** +# +# ```cf3 +# packages: +# "express" package_method => npm_g, package_policy => "add"; +# "redis" 
package_method => npm_g, package_policy => "delete"; +# ``` +{ + package_changes => "individual"; + + package_noverify_regex => ""; + + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "$(npm_knowledge.npm_list_name_regex)"; + package_list_version_regex => "$(npm_knowledge.npm_list_version_regex)"; + package_installed_regex => "$(npm_knowledge.npm_installed_regex)"; + + package_name_convention => "$(name)"; + package_delete_convention => "$(name)"; + + package_list_command => "$(npm_knowledge.call_npm) list --global"; + package_verify_command => "$(npm_knowledge.call_npm) list --global"; + package_add_command => "$(npm_knowledge.call_npm) install --global"; + package_delete_command => "$(npm_knowledge.call_npm) remove --global"; + package_update_command => "$(npm_knowledge.call_npm) update --global"; +} + +body package_method brew(user) +# @depends common_knowledge darwin_knowledge +# @brief Darwin/Mac OS X + Homebrew installation method +# +# Homebrew is a package manager for OS X -- http://brew.sh +# +# Available commands : add, delete, (add)update (with package_version). +# +# @param user The user under which to run the commands +# +# Homebrew expects a regular (non-root) user to install packages. +# https://github.com/mxcl/homebrew/wiki/FAQ#why-does-homebrew-say-sudo-is-bad +# As CFEngine doesn't give the possibility to run package_add_command +# with a different user, this body uses sudo -u. +# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => brew("adminuser"), package_policy => "add"; +# "uppackage" package_method => brew("adminuser"), package_policy => "update", package_version => "3.5.2"; +# ``` +{ + + package_changes => "bulk"; + package_add_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) install"; + package_delete_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) uninstall"; + package_delete_convention => "$(name)"; + package_name_convention => "$(name)"; + + # Homebrew can list only installed packages along versions. + # for a complete list of packages, we could use `brew search`, but there's no easy + # way to determine the version or wether it's installed. + package_installed_regex => ".*"; + package_list_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) list --versions"; + package_list_name_regex => "$(darwin_knowledge.brew_name_regex)"; + package_list_version_regex => "$(darwin_knowledge.brew_version_regex)"; + package_list_update_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) update"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # brew list [package] will print the installed files and return 1 if not found. + package_verify_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) list"; + package_noverify_returncode => "1"; + + # remember to specify the package version + package_update_command => "$(darwin_knowledge.call_sudo) -u $(user) $(darwin_knowledge.call_brew) upgrade"; + +} + +body package_method apt +# @depends common_knowledge debian_knowledge +# @brief APT installation package method +# +# This package method interacts with the APT package manager through `aptitude`. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => apt, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + package_name_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; + + have_aptitude:: + package_add_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_list_update_command => "$(debian_knowledge.call_aptitude) update"; + package_delete_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes -q remove"; + package_update_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_patch_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_verify_command => "$(debian_knowledge.call_aptitude) show"; + package_noverify_regex => "(State: not installed|E: Unable to locate package .*)"; + + package_patch_list_command => "$(debian_knowledge.call_aptitude) --assume-yes --simulate --verbose full-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + !have_aptitude:: + package_add_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_delete_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes -q remove"; + package_update_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_patch_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_verify_command => "$(debian_knowledge.call_dpkg) -s"; + package_noverify_returncode => "1"; + + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + +} + +# Ignore aptitude because: +# 1) aptitude will remove "unneeded" packages unexpectly +# 2) aptitude return codes are useless +# 3) aptitude is a high level interface +# 4) aptitude provides little benefit +# 5) have_aptitude is a hard class and thus cannot be unset +body package_method apt_get +# @depends common_knowledge debian_knowledge +# @brief APT installation package method +# +# This package method interacts with the APT package manager through `apt-get`. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => apt_get, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + package_name_convention => "$(name)=$(version)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # Target a specific release, such as backports + package_add_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_delete_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes -q remove"; + package_update_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_patch_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_verify_command => "$(debian_knowledge.call_dpkg) -s"; + package_noverify_returncode => "1"; + + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; +} + +body package_method apt_get_permissive +# @depends common_knowledge debian_knowledge +# @brief APT permissive (just by name) package method +# +# This package method interacts with the APT package manager through +# `apt-get`. +# +# Normally you have to specify the package version, and it defaults to +# `*`, which then triggers the bug of installing `xyz-abc` when you ask for `xyz`. +# +# This "permissive" body sets +# +# package_name_convention => "$(name)"; +# +# which is permissive in the sense of not requiring the version. 
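+#
+# Because `package_name_convention` here is `$(name)=$(version)`, promises
+# using this body normally pin a version explicitly (a sketch; the package
+# name and version below are made up):
+#
+# ```cf3
+# packages:
+#     "nginx"
+#       package_policy => "add",
+#       package_version => "1.18.0-6ubuntu14.4",
+#       package_method => apt_get;
+# ```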
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => apt_get_permissive, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + package_name_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # Target a specific release, such as backports + package_add_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_delete_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes -q remove"; + package_update_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_patch_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_verify_command => "$(debian_knowledge.call_dpkg) -s"; + package_noverify_returncode => "1"; + + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; +} + +body package_method apt_get_release(release) +# @depends common_knowledge debian_knowledge +# @brief APT installation package method +# @param release specific release to use +# +# This package method interacts with the APT package manager through `apt-get` but sets a specific target release. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => apt_get_release("xyz"), package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + package_name_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # Target a specific release, such as backports + package_add_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes --target-release $(release) install"; + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_delete_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes -q remove"; + package_update_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes --target-release $(release) install"; + package_patch_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes --target-release $(release) install"; + package_verify_command => "$(debian_knowledge.call_dpkg) -s"; + package_noverify_returncode => "1"; + + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; + +} + +## + +body package_method dpkg_version(repo) +# @depends common_knowledge debian_knowledge +# @brief dpkg installation package method +# @param repo specific repo to use +# +# This package method interacts with `dpkg`. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => dpkg_version("xyz"), package_policy => "add"; +# ``` +{ + package_changes => "individual"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + + package_file_repositories => { "$(repo)" }; + + debian.x86_64:: + package_name_convention => "$(name)_$(version)_amd64.deb"; + + debian.i686:: + package_name_convention => "$(name)_$(version)_i386.deb"; + + have_aptitude:: + package_patch_list_command => "$(debian_knowledge.call_aptitude) --assume-yes --simulate --verbose full-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + !have_aptitude:: + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + debian:: + package_add_command => "$(debian_knowledge.call_dpkg) --install"; + package_delete_command => "$(debian_knowledge.call_dpkg) --purge"; + package_update_command => "$(debian_knowledge.call_dpkg) --install"; + package_patch_command => "$(debian_knowledge.call_dpkg) --install"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; +} + +## + +body package_method rpm_version(repo) +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief RPM direct installation method +# @param repo the specific repository for `package_file_repositories` +# +# This package method interacts with the RPM package manager for a specific repo. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => rpm_version("myrepo"), package_policy => "add"; +# ``` +{ + package_changes => "individual"; + + package_list_command => "$(rpm_knowledge.call_rpm) -qa --queryformat \"$(rpm_knowledge.rpm_output_format)\""; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "$(rpm_knowledge.rpm_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm_arch_regex)"; + + package_installed_regex => "i.*"; + + package_file_repositories => { "$(repo)" }; + + package_name_convention => "$(name)-$(version).$(arch).rpm"; + + package_add_command => "$(rpm_knowledge.call_rpm) -ivh "; + package_update_command => "$(rpm_knowledge.call_rpm) -Uvh "; + package_patch_command => "$(rpm_knowledge.call_rpm) -Uvh "; + package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps"; + package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_regex => ".*[^\s].*"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; +} + +## + +body package_method windows_feature +# @brief Method for managing Windows features +{ + package_changes => "individual"; + + package_name_convention => "$(name)"; + package_delete_convention => "$(name)"; + + package_installed_regex => ".*"; + package_list_name_regex => "(.*)"; + package_list_version_regex => "(.*)"; # FIXME: the listing does not give version, so takes name for version too now + + package_add_command => "$(sys.winsysdir)\\WindowsPowerShell\\v1.0\\powershell.exe -Command \"Import-Module ServerManager; Add-WindowsFeature -Name\""; + package_delete_command => "$(sys.winsysdir)\\WindowsPowerShell\\v1.0\\powershell.exe -Command \"Import-Module ServerManager; Remove-WindowsFeature -confirm:$false -Name\""; + package_list_command => "$(sys.winsysdir)\\WindowsPowerShell\\v1.0\\powershell.exe -Command \"Import-Module ServerManager; Get-WindowsFeature | where {$_.installed -eq $True} |foreach {$_.Name}\""; +} + +## + +body package_method msi_implicit(repo) +# @brief Windows MSI method +# @param repo The package file repository +# +# Uses the whole file name as promiser, e.g. "7-Zip-4.50-x86_64.msi". +# The name, version and arch is then deduced from the promiser. +# +# **See also:** `msi_explicit()` +{ + package_changes => "individual"; + package_file_repositories => { "$(repo)" }; + + package_installed_regex => ".*"; + + package_name_convention => "$(name)-$(version)-$(arch).msi"; + package_delete_convention => "$(firstrepo)$(name)-$(version)-$(arch).msi"; + + package_name_regex => "^(\S+)-(\d+\.?)+"; + package_version_regex => "^\S+-((\d+\.?)+)"; + package_arch_regex => "^\S+-[\d\.]+-(.*).msi"; + + package_add_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /i"; + package_update_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /i"; + package_delete_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /x"; +} + +## + +body package_method msi_explicit(repo) +# @brief Windows MSI method +# @param repo The package file repository +# +# Uses software name as promiser, e.g. "7-Zip", and explicitly +# specify any `package_version` and `package_arch`. 
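+#
+# A usage sketch (the repository path, version and architecture below are
+# made up):
+#
+# ```cf3
+# packages:
+#     windows::
+#       "7-Zip"
+#         package_policy => "add",
+#         package_version => "4.50",
+#         package_architectures => { "x86_64" },
+#         package_method => msi_explicit("c:\\packages\\");
+# ```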
+# +# **See also:** `msi_implicit()` +{ + package_changes => "individual"; + package_file_repositories => { "$(repo)" }; + + package_installed_regex => ".*"; + + package_name_convention => "$(name)-$(version)-$(arch).msi"; + package_delete_convention => "$(firstrepo)$(name)-$(version)-$(arch).msi"; + + package_add_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /i"; + package_update_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /i"; + package_delete_command => "\"$(sys.winsysdir)\msiexec.exe\" /qn /x"; +} + +## + +body package_method yum +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief Yum+RPM installation method +# +# This package method interacts with the Yum and RPM package managers. +# It is a copy of `yum_rpm()`, which was contributed by Trond Hasle +# Amundsen. The old `yum` package method has been removed. +# +# This is an efficient package method for RPM-based systems - uses `rpm` +# instead of `yum` to list installed packages. +# +# It will use `rpm -e` to remove packages. Please note that if several packages +# with the same name but varying versions or architectures are installed, +# `rpm -e` will return an error and not delete any of them. +# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => yum, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm3_output_format)'"; + package_patch_list_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_offline_options) check-update $(redhat_knowledge.check_update_postproc)"; + + package_list_name_regex => "$(rpm_knowledge.rpm3_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm3_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm3_arch_regex)"; + + package_installed_regex => ".*"; + package_name_convention => "$(name)-$(version).$(arch)"; + + # just give the package name to rpm to delete, otherwise it gets "name.*" (from package_name_convention above) + package_delete_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_patch_name_regex => "$(redhat_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(redhat_knowledge.patch_version_regex)"; + package_patch_arch_regex => "$(redhat_knowledge.patch_arch_regex)"; + + package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y install"; + package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_patch_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps"; + package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_returncode => "1"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; +} + +## + +body package_method yum_rpm +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief Yum+RPM installation method +# +# This package method interacts with the Yum and RPM package managers. 
+# +# Contributed by Trond Hasle Amundsen +# +# This is an efficient package method for RPM-based systems - uses `rpm` +# instead of `yum` to list installed packages. +# +# It will use `rpm -e` to remove packages. Please note that if several packages +# with the same name but varying versions or architectures are installed, +# `rpm -e` will return an error and not delete any of them. +# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => yum_rpm, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm3_output_format)'"; + package_patch_list_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_offline_options) check-update $(redhat_knowledge.check_update_postproc)"; + + package_list_name_regex => "$(rpm_knowledge.rpm3_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm3_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm3_arch_regex)"; + + package_installed_regex => ".*"; + package_name_convention => "$(name)-$(version).$(arch)"; + + # just give the package name to rpm to delete, otherwise it gets "name.*" (from package_name_convention above) + package_delete_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_patch_name_regex => "$(redhat_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(redhat_knowledge.patch_version_regex)"; + package_patch_arch_regex => "$(redhat_knowledge.patch_arch_regex)"; + + package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y install"; + package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_patch_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps"; + package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_returncode => "1"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; +} + +body package_method yum_rpm_permissive +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief Yum+RPM permissive (just by name) package method +# +# This package method interacts with the Yum and RPM package managers. +# +# Copy of yum_rpm which was contributed by Trond Hasle Amundsen +# +# This is an efficient package method for RPM-based systems - uses +# `rpm` instead of `yum` to list installed packages. It can't delete +# packages and can't take a target version or architecture, so only +# the "add" and "addupdate" methods should be used. +# +# Normally you have to specify the package version, and it defaults to +# `*`, which then triggers the bug of installing `xyz-abc` when you ask for `xyz`. +# +# This "permissive" body sets +# +# package_name_convention => "$(name)"; +# +# which is permissive in the sense of not requiring the version. 
+# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => yum_rpm_permissive, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm3_output_format)'"; + package_patch_list_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_offline_options) check-update $(redhat_knowledge.check_update_postproc)"; + + package_list_name_regex => "$(rpm_knowledge.rpm3_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm3_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm3_arch_regex)"; + + package_installed_regex => ".*"; + package_name_convention => "$(name)"; + + # not needed, same as package_name_convention above + package_delete_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_patch_name_regex => "$(redhat_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(redhat_knowledge.patch_version_regex)"; + package_patch_arch_regex => "$(redhat_knowledge.patch_arch_regex)"; + + package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y install"; + package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_patch_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps"; + package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_returncode => "1"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; +} + +## + +body package_method yum_rpm_enable_repo(repoid) +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief Yum+RPM repo-specific installation method +# @param repoid the repository name as in `yum --enablerepo=???` +# +# This package method interacts with the RPM package manager for a specific repo. +# +# Based on `yum_rpm()` with addition to enable a repository for the install. +# +# Sometimes repositories are configured but disabled by default. For example +# this pacakge_method could be used when installing a package that exists in +# the EPEL, which normally you do not want to install packages from. 
+#
+# **Example:**
+#
+# ```cf3
+# packages:
+#     "mypackage" package_method => yum_rpm_enable_repo("myrepo"), package_policy => "add";
+# ```
+{
+      package_changes => "bulk";
+      package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm2_output_format)'";
+      package_patch_list_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_offline_options) check-update $(redhat_knowledge.check_update_postproc)";
+
+      package_list_name_regex => "$(rpm_knowledge.rpm2_name_regex)";
+      package_list_version_regex => "$(rpm_knowledge.rpm2_version_regex)";
+      package_list_arch_regex => "$(rpm_knowledge.rpm2_arch_regex)";
+
+      package_installed_regex => ".*";
+      package_name_convention => "$(name)";
+
+      # set it to "0" to avoid caching of list during upgrade
+      package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)";
+      package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)";
+
+      package_patch_name_regex => "$(redhat_knowledge.patch_name_regex)";
+      package_patch_version_regex => "$(redhat_knowledge.patch_version_regex)";
+      package_patch_arch_regex => "$(redhat_knowledge.patch_arch_regex)";
+
+      package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) --enablerepo=$(repoid) -y install";
+      package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) --enablerepo=$(repoid) -y update";
+      package_patch_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update";
+      package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps --allmatches";
+      package_verify_command => "$(rpm_knowledge.call_rpm) -V";
+      package_noverify_returncode => "1";
+      package_version_less_command => "$(redhat_knowledge.rpm_compare_less)";
+      package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)";
+}
+
+##
+
+body package_method yum_group
+# @depends common_knowledge redhat_knowledge
+# @brief Yum package-group installation method
+#
+# Makes it possible to use Yum's "groups of packages" feature
+# (`yum groupinstall`, `yum groupremove`).
+#
+# Groups must be specified by their groupids, available through `yum
+# grouplist -v` (between parentheses). For example, below
+# `network-tools` is the groupid.
+# +# ```console +# $ yum grouplist -v|grep Networking|head -n 1 +# Networking Tools (network-tools) +# ``` +# +# **Example:** +# +# ```cf3 +# Policies examples: +# +# -Install "web-server" group: +# ---------------------------- +# +# packages: +# "web-server" +# package_policy => "add", +# package_method => yum_group; +# +# -Remove "debugging" and "php" groups: +# ------------------------------------- +# +# vars: +# "groups" slist => { "debugging", "php" }; +# +# packages: +# "$(groups)" +# package_policy => "delete", +# package_method => yum_group; +# ``` +{ + package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) groupinstall -y"; + package_changes => "bulk"; + package_delete_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) groupremove -y"; + package_delete_convention => "$(name)"; + package_installed_regex => "^i.*"; + + # Generate a dpkg -l like listing, "i" means installed, "a" available, and a dummy version 1 + package_list_command => + "$(redhat_knowledge.call_yum) grouplist -v|awk '$0 ~ /^Done$/ {next} {sub(/.*\(/, \"\");sub(/\).*/, \"\")} /Available/ {h=\"a\";next} /Installed/ {h=\"i\";next} h==\"i\" || h==\"a\" {print h\" \"$0\" 1\"}'"; + + package_list_name_regex => "a|i ([^\s]+) 1"; + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + package_list_version_regex => "(1)"; + package_name_convention => "$(name)"; + package_name_regex => "(.*)"; + package_noverify_returncode => "0"; + package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) groupupdate"; + + # grep -x to only get full line matching + package_verify_command => "$(redhat_knowledge.call_yum) grouplist -v|awk '$0 ~ /^Done$/ {next} {sub(/.*\(/, \"\");sub(/\).*/, \"\")} /Available/ {h=\"a\";next} /Installed/ {h=\"i\";next} h==\"i\"|grep -qx"; +} + +## + +body package_method rpm_filebased(path) +# @depends common_knowledge rpm_knowledge redhat_knowledge +# @brief install packages from local filesystem-based RPM repository. +# @param path the path to the local package repository +# +# Contributed by Aleksey Tsalolikhin. Written on 29-Feb-2012. +# Based on `yum_rpm()` body by Trond Hasle Amundsen. +# +# **Example:** +# +# ```cf3 +# packages: +# "epel-release" +# package_policy => "add", +# package_version => "5-4", +# package_architectures => { "noarch" }, +# package_method => rpm_filebased("/repo/RPMs"); +# ``` +{ + package_file_repositories => { "$(path)" }; + # the above is an addition to Trond's yum_rpm body + + package_add_command => "$(rpm_knowledge.call_rpm) -ihv "; + # The above is a change from Trond's yum_rpm body, this makes the commands rpm only. + # The reason I changed the install command from yum to rpm is yum will be default + # refuse to install the epel-release RPM as it does not have the EPEL GPG key, + # but rpm goes ahead and installs the epel-release RPM and the EPEL GPG key. + + package_name_convention => "$(name)-$(version).$(arch).rpm"; + # The above is a change from Tron's yum_rpm body. 
When package_file_repositories is in play, + # package_name_convention has to match the file name, not the package name, per the + # CFEngine 3 Reference Manual + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # The rest is unchanged from Trond's yum_rpm body + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm2_output_format)'"; + + package_list_name_regex => "$(rpm_knowledge.rpm2_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm2_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm2_arch_regex)"; + + package_installed_regex => ".*"; + + package_delete_command => "$(rpm_knowledge.call_rpm) -e --allmatches"; + package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_returncode => "1"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; +} + +## + +body package_method ips +# @depends paths +# @depends common_knowledge +# @brief Image Package System method, used by OpenSolaris based systems (Solaris 11, Illumos, etc) +# +# A note about Solaris 11.1 versioning format: +# +# ``` +# $ pkg list -v --no-refresh zsh +# FMRI IFO +# pkg://solaris/shell/zsh@4.3.17,5.11-0.175.1.0.0.24.0:20120904T174236Z i-- +# name--------- |<----->| |/________________________\| +# version---------------- |\ /| +# ``` +# +# Notice that the publisher and timestamp aren't used. And that the package +# version then must have the commas replaced by underscores. 
+# +# Thus, +# 4.3.17,5.11-0.175.1.0.0.24.0 +# Becomes: +# 4.3.17_5.11-0.175.1.0.0.24.0 +# +# Therefore, a properly formatted package promise looks like this: +# +# ```cf3 +# "shell/zsh" +# package_policy => "addupdate", +# package_method => ips, +# package_select => ">=", +# package_version => "4.3.17_5.11-0.175.1.0.0.24.0"; +# ``` +{ + package_changes => "bulk"; + package_list_command => "$(paths.path[pkg]) list -v --no-refresh"; + package_list_name_regex => "pkg://.+?(?<=/)([^\s]+)@.*$"; + package_list_version_regex => "[^\s]+@([^\s]+):.*"; + package_installed_regex => ".*(i..)"; # all reported are installed + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(paths.path[pkg]) refresh --full"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_add_command => "$(paths.path[pkg]) install --accept "; + package_delete_command => "$(paths.path[pkg]) uninstall"; + package_update_command => "$(paths.path[pkg]) install --accept"; + package_patch_command => "$(paths.path[pkg]) install --accept"; + package_verify_command => "$(paths.path[pkg]) list -a -v --no-refresh"; + package_noverify_regex => "(.*---|pkg list: no packages matching .* installed)"; +} + +## + +body package_method smartos +# @depends common_knowledge +# @brief pkgin method for SmartOS (solaris 10 fork by Joyent) +{ + package_changes => "bulk"; + package_list_command => "/opt/local/bin/pkgin list"; + package_list_name_regex => "([^\s]+)\-[0-9][^\s;]+.*[\s;]"; + package_list_version_regex => "[^\s]+\-([0-9][^\s;]+).*[\s;]"; + + package_installed_regex => ".*"; # all reported are installed + + package_list_update_command => "/opt/local/bin/pkgin -y update"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_add_command => "/opt/local/bin/pkgin -y install"; + + package_delete_command => "/opt/local/bin/pkgin -y remove"; + + # pkgin update doesn't do what you think it does. pkgin install against and + # already installed package will upgrade it however. + + package_update_command => "/opt/local/bin/pkgin -y install"; +} + +body package_method smartos_pkg_add(repo) +# @brief SmartOS pkg_add installation package method +# +# This package method interacts with SmartOS pkg_add to install from local +# or remote repositories. It is slightly different than the FreeBSD pkg_add. 
+# +# This example installs "perl5" from a remote repository: +# +# ```cf3 +# ---------------------------- +# packages: +# "perl5" +# package_policy => "add", +# package_method => smartos_pkg_add("http://pkg.example.com/packages/"); +# +# ``` +{ + package_changes => "individual"; + package_list_command => "/opt/local/sbin/pkg_info"; + package_list_name_regex => "([^\s]+)\-[0-9]+.*\s"; + package_list_version_regex => "[^\s]+\-([0-9][^\s]+)\s"; + + package_installed_regex => ".*"; # all reported are installed + + package_add_command => "/usr/bin/env PKG_PATH=$(repo) /opt/local/sbin/pkg_add"; + + package_delete_command => "/opt/local/sbin/pkg_delete"; + package_update_command => "/usr/bin/env PKG_PATH=$(repo) /opt/local/sbin/pkg_add"; +} + +body package_method opencsw +# @depends common_knowledge +# @brief OpenCSW (Solaris software packages) method +{ + package_changes => "bulk"; + package_list_command => "/opt/csw/bin/pkgutil -c"; + package_list_name_regex => "CSW(.*?)\s.*"; + package_list_version_regex => ".*?\s+(.*),.*"; + + package_installed_regex => ".*"; # all reported are installed + + package_list_update_command => "/opt/csw/bin/pkgutil -U"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_add_command => "/opt/csw/bin/pkgutil -yi"; + + package_delete_command => "/opt/csw/bin/pkgutil -yr"; + package_update_command => "/opt/csw/bin/pkgutil -yu"; +} + +body package_method solaris(pkgname, spoolfile, adminfile) +# @depends paths +# @brief Package method for old Solaris package system +# +# @param pkgname Not used +# @param spoolfile The spool file, located in `/tmp` +# @param adminfile The admin file, located in `/tmp` +# +# The older solaris package system is poorly designed, with too many different +# names to track. See the example in tests/units/unit_package_solaris.cf +# to see how to use this. +{ + package_changes => "individual"; + package_list_command => "$(solaris_knowledge.call_pkginfo) -l"; + package_multiline_start => "\s*PKGINST:\s+[^\s]+.*"; + package_list_name_regex => "\s*PKGINST:\s+([^\s]+).*"; + package_list_version_regex => "\s*VERSION:\s+([^\s]+).*"; + package_list_arch_regex => "\s*ARCH:\s+([^\s]+)"; + package_installed_regex => "\s*STATUS:\s*(completely|partially)\s+installed.*"; + package_name_convention => "$(name)"; + package_add_command => "$(solaris_knowledge.call_pkgadd) -n -a /tmp/$(adminfile) -d /tmp/$(spoolfile)"; + package_delete_command => "$(solaris_knowledge.call_pkgrm) -n -a /tmp/$(adminfile)"; +} + +## + +body package_method solaris_install(adminfile) +# @depends paths +# @brief Package method for old Solaris package system +# +# @param adminfile The admin file created by `create_solaris_admin_file` +{ + package_changes => "individual"; + package_list_command => "$(solaris_knowledge.call_pkginfo) -l"; + package_multiline_start => "\s*PKGINST:\s+[^\s]+.*"; + package_list_name_regex => "\s*PKGINST:\s+([^\s]+).*"; + package_list_version_regex => "\s*VERSION:\s+([^\s]+).*"; + package_list_arch_regex => "\s*ARCH:\s+([^\s]+)"; + package_installed_regex => "\s*STATUS:\s*(completely|partially)\s+installed.*"; + package_name_convention => "$(name)"; + package_add_command => "$(solaris_knowledge.call_pkgadd) -n -a $(adminfile)"; + package_delete_command => "$(solaris_knowledge.call_pkgrm) -n -a $(adminfile)"; +} + +## + +bundle edit_line create_solaris_admin_file +# @brief The following bundle is part of a package setup for solaris +# +# See unit examples. 
+{ + insert_lines: + + "$(solaris_knowledge.admin_nocheck)" + comment => "Insert contents of Solaris admin file (automatically install packages)"; +} + +## + +body package_method freebsd +# @depends common_knowledge +# @brief FreeBSD pkg_add installation package method +# +# This package method interacts with FreeBSD pkg_add to install from remote +# repositories. +# +# **Example:** +# NOTE: Do not use this method on pkgng systems! It will appear to operate +# normally but is highly likely to break your package system. +# +# This example installs "perl5" from a non-default repository: +# +# ```cf3 +# ---------------------------- +# +# vars: +# environment => { "PACKAGESITE=http://repo.example.com/private/8_STABLE/" }; +# packages: +# "perl5" +# package_policy => "add", +# package_method => freebsd; +# +# ``` +{ + package_changes => "individual"; + + # Could use rpm for this + package_list_command => "/usr/sbin/pkg info"; + + # Remember to escape special characters like | + + package_list_name_regex => "([^\s]+)-.*"; + package_list_version_regex => "[^\s]+-([^\s]+).*"; + + package_name_regex => "([^\s]+)-.*"; + package_version_regex => "[^\s]+-([^\s]+).*"; + + package_installed_regex => ".*"; + + package_name_convention => "$(name)-$(version)"; + + package_add_command => "/usr/sbin/pkg install -y"; + package_delete_command => "/usr/sbin/pkg delete -y"; +} + +body package_method freebsd_portmaster +# @depends common_knowledge +# @brief FreeBSD portmaster package installation method +# +# This package method interacts with portmaster to build and install packages. +# +# Note that you must use the complete package name as it appears in +# /usr/ports/*/name, such as 'perl5.14' rather than 'perl5'. +# Repositories are hard-coded to /usr/ports; alternate locations are +# unsupported at this time. +# This method supports both pkg_* and pkgng systems. 
+# +# **Example:** +# +# ```cf3 +# +# packages: +# "perl5.14" +# package_policy => "add", +# package_method => freebsd_portmaster; +# +# ``` +{ + package_changes => "individual"; + + package_list_command => "/usr/sbin/pkg info"; + + package_list_name_regex => "([^\s]+)-.*"; + package_list_version_regex => "[^\s]+-([^\s]+).*"; + + package_installed_regex => ".*"; + + package_name_convention => "$(name)"; + package_delete_convention => "$(name)-$(version)"; + + package_file_repositories => { + "/usr/ports/accessibility/", + "/usr/port/arabic/", + "/usr/ports/archivers/", + "/usr/ports/astro/", + "/usr/ports/audio/", + "/usr/ports/benchmarks/", + "/usr/ports/biology/", + "/usr/ports/cad/", + "/usr/ports/chinese/", + "/usr/ports/comms/", + "/usr/ports/converters/", + "/usr/ports/databases/", + "/usr/ports/deskutils/", + "/usr/ports/devel/", + "/usr/ports/dns/", + "/usr/ports/editors/", + "/usr/ports/emulators/", + "/usr/ports/finance/", + "/usr/ports/french/", + "/usr/ports/ftp/", + "/usr/ports/games/", + "/usr/ports/german/", + "/usr/ports/graphics/", + "/usr/ports/hebrew/", + "/usr/ports/hungarian/", + "/usr/ports/irc/", + "/usr/ports/japanese/", + "/usr/ports/java/", + "/usr/ports/korean/", + "/usr/ports/lang/", + "/usr/ports/mail/", + "/usr/ports/math/", + "/usr/ports/mbone/", + "/usr/ports/misc/", + "/usr/ports/multimedia/", + "/usr/ports/net/", + "/usr/ports/net-im/", + "/usr/ports/net-mgmt/", + "/usr/ports/net-p2p/", + "/usr/ports/news/", + "/usr/ports/packages/", + "/usr/ports/palm/", + "/usr/ports/polish/", + "/usr/ports/ports-mgmt/", + "/usr/ports/portuguese/", + "/usr/ports/print/", + "/usr/ports/russian/", + "/usr/ports/science/", + "/usr/ports/security/", + "/usr/ports/shells/", + "/usr/ports/sysutils/", + "/usr/ports/textproc/", + "/usr/ports/ukrainian/", + "/usr/ports/vietnamese/", + "/usr/ports/www/", + "/usr/ports/x11/", + "/usr/ports/x11-clocks/", + "/usr/ports/x11-drivers/", + "/usr/ports/x11-fm/", + "/usr/ports/x11-fonts/", + "/usr/ports/x11-servers/", + "/usr/ports/x11-themes/", + "/usr/ports/x11-toolkits/", + "/usr/ports/x11-wm/", + }; + + package_add_command => "/usr/local/sbin/portmaster -D -G --no-confirm"; + package_update_command => "/usr/local/sbin/portmaster -D -G --no-confirm"; + package_delete_command => "/usr/local/sbin/portmaster --no-confirm -e"; +} + +## + +body package_method alpinelinux +# @brief Alpine Linux apk package installation method +# +# This package method interacts with apk to manage packages. +# +# **Example:** +# +# ```cf3 +# +# packages: +# "vim" +# package_policy => "add", +# package_method => alpinelinux; +# +# ``` +{ + package_changes => "bulk"; + package_list_command => "/sbin/apk info -v"; + package_list_name_regex => "([^\s]+)-.*"; + package_list_version_regex => "[^\s]+-([^\s]+).*"; + package_name_regex => ".*"; + package_installed_regex => ".*"; + package_name_convention => "$(name)"; + package_add_command => "/sbin/apk add"; + package_delete_command => "/sbin/apk del"; +} + +## + +body package_method emerge +# @depends common_knowledge +# @brief Gentoo emerge package installation method +# +# This package method interacts with emerge to build and install packages. 
+# +# **Example:** +# +# ```cf3 +# +# packages: +# "zsh" +# package_policy => "add", +# package_method => emerge; +# +# ``` +{ + package_changes => "individual"; + package_list_command => "/bin/sh -c '/bin/ls -d /var/db/pkg/*/* | cut -c 13-'"; + package_list_name_regex => ".*/([^\s]+)-\d.*"; + package_list_version_regex => ".*/[^\s]+-(\d.*)"; + package_installed_regex => ".*"; # all reported are installed + package_name_convention => "$(name)"; + package_list_update_command => "/bin/true"; # I prefer manual syncing + #package_list_update_command => "/usr/bin/emerge --sync"; # if you like automatic + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_add_command => "/usr/bin/emerge -q --quiet-build"; + package_delete_command => "/usr/bin/emerge --depclean"; + package_update_command => "/usr/bin/emerge --update"; + package_patch_command => "/usr/bin/emerge --update"; + package_verify_command => "/usr/bin/emerge -s"; + package_noverify_regex => ".*(Not Installed|Applications found : 0).*"; +} + +## + +body package_method pacman +# @depends common_knowledge +# @brief Arch Linux pacman package management method +{ + package_changes => "bulk"; + + package_list_command => "/usr/bin/pacman -Q"; + package_verify_command => "/usr/bin/pacman -Q"; + package_noverify_regex => "error:\b.*\bwas not found"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_list_name_regex => "(.*)\s+.*"; + package_list_version_regex => ".*\s+(.*)"; + package_installed_regex => ".*"; + + package_name_convention => "$(name)"; + package_add_command => "/usr/bin/pacman -S --noconfirm --noprogressbar --needed"; + package_delete_command => "/usr/bin/pacman -Rs --noconfirm"; + package_update_command => "/usr/bin/pacman -S --noconfirm --noprogressbar --needed"; +} + +body package_method zypper +# @depends paths +# @depends common_knowledge rpm_knowledge suse_knowledge +# @brief SUSE installation method +# +# This package method interacts with the SUSE Zypper package manager +# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => zypper, package_policy => "add"; +# ``` +{ + package_changes => "bulk"; + + package_list_command => "$(paths.path[rpm]) -qa --queryformat \"$(rpm_knowledge.rpm_output_format)\""; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(suse_knowledge.call_zypper) list-updates"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_patch_list_command => "$(suse_knowledge.call_zypper) patches"; + package_installed_regex => "i.*"; + package_list_name_regex => "$(rpm_knowledge.rpm_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm_arch_regex)"; + + package_patch_installed_regex => ".*Installed.*|.*Not Applicable.*"; + package_patch_name_regex => "[^|]+\|\s+([^\s]+).*"; + package_patch_version_regex => "[^|]+\|[^|]+\|\s+([^\s]+).*"; + + package_name_convention => "$(name)"; + package_add_command => "$(suse_knowledge.call_zypper) --non-interactive install"; + package_delete_command => "$(suse_knowledge.call_zypper) --non-interactive remove --force-resolution"; + package_update_command => "$(suse_knowledge.call_zypper) --non-interactive update"; + package_patch_command => "$(suse_knowledge.call_zypper) --non-interactive patch$"; # $ means no args + package_verify_command => 
"$(suse_knowledge.call_zypper) --non-interactive verify$"; +} + +body package_method generic +# @depends paths common_knowledge debian_knowledge rpm_knowledge redhat_knowledge +# @brief Generic installation package method +# +# This package method attempts to handle all platforms. +# +# The Redhat section is a verbatim insertion of `yum_rpm()`, which was +# contributed by Trond Hasle Amundsen. +# +# **Example:** +# +# ```cf3 +# packages: +# "mypackage" package_method => generic, package_policy => "add"; +# ``` +{ + suse|sles:: + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --queryformat \"$(rpm_knowledge.rpm_output_format)\""; + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(suse_knowledge.call_zypper) list-updates"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + package_patch_list_command => "$(suse_knowledge.call_zypper) patches"; + package_installed_regex => "i.*"; + package_list_name_regex => "$(rpm_knowledge.rpm_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm_arch_regex)"; + package_patch_installed_regex => ".*Installed.*|.*Not Applicable.*"; + package_patch_name_regex => "[^|]+\|\s+([^\s]+).*"; + package_patch_version_regex => "[^|]+\|[^|]+\|\s+([^\s]+).*"; + package_name_convention => "$(name)"; + package_add_command => "$(suse_knowledge.call_zypper) --non-interactive install"; + package_delete_command => "$(suse_knowledge.call_zypper) --non-interactive remove --force-resolution"; + package_update_command => "$(suse_knowledge.call_zypper) --non-interactive update"; + package_patch_command => "$(suse_knowledge.call_zypper) --non-interactive patch$"; # $ means no args + package_verify_command => "$(suse_knowledge.call_zypper) --non-interactive verify$"; + + redhat:: + package_changes => "bulk"; + package_list_command => "$(rpm_knowledge.call_rpm) -qa --qf '$(rpm_knowledge.rpm3_output_format)'"; + package_patch_list_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_offline_options) check-update $(redhat_knowledge.check_update_postproc)"; + + package_list_name_regex => "$(rpm_knowledge.rpm3_name_regex)"; + package_list_version_regex => "$(rpm_knowledge.rpm3_version_regex)"; + package_list_arch_regex => "$(rpm_knowledge.rpm3_arch_regex)"; + + package_installed_regex => ".*"; + package_name_convention => "$(name)-$(version).$(arch)"; + + # just give the package name to rpm to delete, otherwise it gets "name.*" (from package_name_convention above) + package_delete_convention => "$(name)"; + + # set it to "0" to avoid caching of list during upgrade + package_list_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) check-update $(redhat_knowledge.check_update_postproc)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_patch_name_regex => "$(redhat_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(redhat_knowledge.patch_version_regex)"; + package_patch_arch_regex => "$(redhat_knowledge.patch_arch_regex)"; + + package_add_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y install"; + package_update_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_patch_command => "$(redhat_knowledge.call_yum) $(redhat_knowledge.yum_options) -y update"; + package_delete_command => "$(rpm_knowledge.call_rpm) -e --nodeps"; + 
package_verify_command => "$(rpm_knowledge.call_rpm) -V"; + package_noverify_returncode => "1"; + package_version_less_command => "$(redhat_knowledge.rpm_compare_less)"; + package_version_equal_command => "$(redhat_knowledge.rpm_compare_equal)"; + + debian:: + package_changes => "bulk"; + package_list_command => "$(debian_knowledge.call_dpkg) -l"; + package_list_name_regex => "$(debian_knowledge.list_name_regex)"; + package_list_version_regex => "$(debian_knowledge.list_version_regex)"; + package_installed_regex => ".i.*"; # packages that have been uninstalled may be listed + package_name_convention => "$(name)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + # make correct version comparisons + package_version_less_command => "$(debian_knowledge.dpkg_compare_less)"; + package_version_equal_command => "$(debian_knowledge.dpkg_compare_equal)"; + + debian.have_aptitude:: + package_add_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_list_update_command => "$(debian_knowledge.call_aptitude) update"; + package_delete_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes remove"; + package_update_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_patch_command => "$(debian_knowledge.call_aptitude) $(debian_knowledge.dpkg_options) --assume-yes install"; + package_verify_command => "$(debian_knowledge.call_aptitude) show"; + package_noverify_regex => "(State: not installed|E: Unable to locate package .*)"; + + package_patch_list_command => "$(debian_knowledge.call_aptitude) --assume-yes --simulate --verbose full-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + debian.!have_aptitude:: + package_add_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_list_update_command => "$(debian_knowledge.call_apt_get) update"; + package_delete_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes remove"; + package_update_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_patch_command => "$(debian_knowledge.call_apt_get) $(debian_knowledge.dpkg_options) --yes install"; + package_verify_command => "$(debian_knowledge.call_dpkg) -s"; + package_noverify_returncode => "1"; + + package_patch_list_command => "$(debian_knowledge.call_apt_get) --just-print dist-upgrade"; + package_patch_name_regex => "$(debian_knowledge.patch_name_regex)"; + package_patch_version_regex => "$(debian_knowledge.patch_version_regex)"; + + freebsd:: + package_changes => "individual"; + package_list_command => "/usr/sbin/pkg info"; + package_list_name_regex => "([^\s]+)-.*"; + package_list_version_regex => "[^\s]+-([^\s]+).*"; + package_name_regex => "([^\s]+)-.*"; + package_version_regex => "[^\s]+-([^\s]+).*"; + package_installed_regex => ".*"; + package_name_convention => "$(name)-$(version)"; + package_add_command => "/usr/sbin/pkg install -y"; + package_delete_command => "/usr/sbin/pkg delete"; + + alpinelinux:: + package_changes => "bulk"; + package_list_command => "/sbin/apk info -v"; + package_list_name_regex => "([^\s]+)-.*"; + package_list_version_regex => "[^\s]+-([^\s]+).*"; + package_name_regex => ".*"; + package_installed_regex => ".*"; + package_name_convention => "$(name)"; + 
package_add_command => "/sbin/apk add"; + package_delete_command => "/sbin/apk del"; + + gentoo:: + package_changes => "individual"; + package_list_command => "/bin/sh -c '/bin/ls -d /var/db/pkg/*/* | cut -c 13-'"; + package_list_name_regex => "([^/]+/(?:(?!-\d).)+)-\d.*"; + package_list_version_regex => "[^/]+/(?:(?!-\d).)+-(\d.*)"; + package_installed_regex => ".*"; # all reported are installed + package_name_convention => "$(name)"; + package_list_update_command => "/bin/true"; # I prefer manual syncing + #package_list_update_command => "/usr/bin/emerge --sync"; # if you like automatic + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + + package_add_command => "/usr/bin/emerge -q --quiet-build"; + package_delete_command => "/usr/bin/emerge --depclean"; + package_update_command => "/usr/bin/emerge --update"; + package_patch_command => "/usr/bin/emerge --update"; + package_verify_command => "/usr/bin/emerge -s"; + package_noverify_regex => ".*(Not Installed|Applications found : 0).*"; + + archlinux:: + package_changes => "bulk"; + package_list_command => "/usr/bin/pacman -Q"; + package_verify_command => "/usr/bin/pacman -Q"; + package_noverify_regex => "error:\b.*\bwas not found"; + package_list_name_regex => "(.*)\s+.*"; + package_list_version_regex => ".*\s+(.*)"; + package_installed_regex => ".*"; + package_name_convention => "$(name)"; + package_list_update_ifelapsed => "$(common_knowledge.list_update_ifelapsed)"; + package_add_command => "/usr/bin/pacman -S --noconfirm --noprogressbar --needed"; + package_delete_command => "/usr/bin/pacman -Rs --noconfirm"; + package_update_command => "/usr/bin/pacman -S --noconfirm --noprogressbar --needed"; +} + +## Useful bundles ## + +bundle agent package_absent_legacy(package) +# @brief Ensure package is absent +# @param package the packages to remove +# +# This package method will remove `package`, using +# `package_ensure`. +# +# **Example:** +# +# ```cf3 +# methods: +# "nozip" usebundle => package_absent("zip"); +# ``` +{ + packages: + debian:: + "$(package)" + package_policy => "delete", + package_method => apt_get_permissive; + + redhat:: + "$(package)" + package_policy => "delete", + package_method => yum_rpm_permissive; + + suse|sles:: + "$(package)" + package_policy => "delete", + package_method => zypper; + + !debian.!redhat.!(suse|sles):: + "$(package)" + package_policy => "delete", + package_method => generic; +} + +bundle agent package_present_legacy(package) +# @brief Ensure package is present +# @param package the packages to install +# +# This package method will install `package`. On Debian, it will use +# `apt_get_permissive`. On Red Hat, `yum_rpm_permissive`. Otherwise, +# `generic`. +# +# **Example:** +# +# ```cf3 +# methods: +# "pleasezip" usebundle => package_present("zip"); +# ``` +{ + packages: + debian:: + "$(package)" + package_policy => "add", + package_method => apt_get_permissive; + + redhat:: + "$(package)" + package_policy => "add", + package_method => yum_rpm_permissive; + + suse|sles:: + "$(package)" + package_policy => "add", + package_method => zypper; + + !debian.!redhat.!(suse|sles):: + "$(package)" + package_policy => "add", + package_method => generic; +} + +bundle agent package_latest(package) +# @brief Ensure package is present and updated +# @param package the package to add/update +# +# This package method will install `package` or update it to the +# latest version. On Debian, it will use `apt_get_permissive`. On Red +# Hat, `yum_rpm_permissive`. Otherwise, `generic`. 
+# +# **Example:** +# +# ```cf3 +# methods: +# "latestzip" usebundle => package_latest("zip"); +# ``` +{ + packages: + debian:: + "$(package)" + package_policy => "addupdate", + package_version => "999999999:9999999999", + package_method => apt_get_permissive; + + redhat:: + "$(package)" + package_policy => "addupdate", + package_version => "999999999", + package_method => yum_rpm_permissive; + + suse|sles:: + "$(package)" + package_policy => "addupdate", + package_version => "999999999", + package_method => zypper; + + !debian.!redhat.!(suse|sles):: + "$(package)" + package_policy => "addupdate", + package_method => generic; +} + +bundle agent package_specific_present(packageorfile, package_version, package_arch) +# @depends package_specific +# @brief Ensure package is present +# @param packageorfile the package or full filename to add +# @param package_version the `package_version` desired +# @param package_arch a string determining the `package_architectures` desired +# +# This package method will add `packageorfile` as a package or file, +# using `package_specific`. +# +# **Example:** +# +# ```cf3 +# methods: +# "addfilezip" +# usebundle => package_specific_present("/mydir/zip", +# "3.0-7", +# $(debian_knowledge.default_arch)); +# ``` +{ + methods: + "ensure" usebundle => package_specific($(packageorfile), + "add", + $(package_version), + $(package_arch)); +} + +bundle agent package_specific_absent(packageorfile, package_version, package_arch) +# @depends package_specific +# @brief Ensure package is absent +# @param packageorfile the package or full filename to delete +# @param package_version the `package_version` desired +# @param package_arch a string determining the `package_architectures` desired +# +# This package method will remove `packageorfile` as a package or file, +# using `package_specific`. +# +# **Example:** +# +# ```cf3 +# methods: +# "addfilezip" +# usebundle => package_specific_absent("/mydir/zip", +# "3.0-7", +# $(debian_knowledge.default_arch)); +# ``` +{ + methods: + "ensure" usebundle => package_specific($(packageorfile), + "delete", + $(package_version), + $(package_arch)); +} + +bundle agent package_specific_latest(packageorfile, package_version, package_arch) +# @depends package_specific +# @brief Ensure package is added or updated +# @param packageorfile the package or full filename to add or update +# @param package_version the `package_version` desired +# @param package_arch a string determining the `package_architectures` desired +# +# This package method will add or update `packageorfile` as a package +# or file, using `package_specific`. 
+# +# **Example:** +# +# ```cf3 +# methods: +# "latestfilezip" +# usebundle => package_specific_latest("/mydir/zip", +# "3.0-7", +# $(debian_knowledge.default_arch)); +# "latestzip" +# usebundle => package_specific_latest("/mydir/zip", +# "3.0-7", +# $(debian_knowledge.default_arch)); +# ``` +{ + methods: + "ensure" usebundle => package_specific($(packageorfile), + "addupdate", + $(package_version), + $(package_arch)); +} + +bundle agent package_specific(package_name, desired, package_version, package_arch) +# @depends apt_get yum_rpm generic dpkg_version rpm_version zypper +# @brief Ensure `package_name` has the `desired` state +# @param package_name the packages to ensure (can be files) +# @param desired the desired `package_policy`, add or delete or addupdate +# @param package_version the desired `package_version` +# @param package_arch the desired package architecture +# +# This package method will manage `packages` with `package_policy` set +# to `desired`, using `package_version`, and `package_arch`. +# +# If `package_name` is **not** a file name: on Debian, it will use +# `apt_get`. On Red Hat, `yum_rpm`. Otherwise, `generic`. +# +# If `package_name` **is** a file name, it will use `dpkg_version` or +# `rpm_version` from the file's directory. +# +# For convenience on systems where `sys.arch` is not correct, you can +# use `debian_knowledge.default_arch` and +# `redhat_knowledge.default_arch`. +# +# Solaris is only supported with pkgadd. Patches welcome. +# +# **Example:** +# +# ```cf3 +# methods: +# "ensure" usebundle => package_specific("zsh", "add", "1.2.3", "amd64"); +# "ensure" usebundle => package_specific("/mydir/package.deb", "add", "9.8.7", "amd64"); +# "ensure" usebundle => package_specific("tcsh", "delete", "2.3.4", "x86_64"); +# ``` +{ + classes: + "filebased" expression => fileexists($(package_name)); + "solaris_pkgadd" and => { "solaris", "_stdlib_path_exists_pkgadd" }; + + vars: + "solaris_adminfile" string => "/tmp/cfe-adminfile"; + + filebased:: + "package_basename" string => lastnode($(package_name), "/"); + "dir" string => dirname($(package_name)); + + methods: + solaris_pkgadd.filebased:: + "" usebundle => file_make($(solaris_adminfile), + $(solaris_knowledge.admin_nocheck)), + classes => scoped_classes_generic("bundle", "solaris_adminfile"); + + packages: + + debian.!filebased:: + + "$(package_name)" + package_policy => $(desired), + package_select => '>=', # see verify_packages.c + package_version => $(package_version), + package_architectures => { $(package_arch) }, + package_method => apt_get; + + debian.filebased:: + + "$(package_basename)" + package_policy => $(desired), + package_select => '>=', + package_version => $(package_version), + package_architectures => { $(package_arch) }, + package_method => dpkg_version($(dir)); + + redhat.!filebased:: + + "$(package_name)" + package_policy => $(desired), + package_select => '>=', # see verify_packages.c + package_version => $(package_version), + package_architectures => { $(package_arch) }, + package_method => yum_rpm; + + suse|sles:: + + "$(package_name)" + package_policy => $(desired), + package_select => '>=', # see verify_packages.c + package_version => $(package_version), + package_architectures => { $(package_arch) }, + package_method => zypper; + + (redhat|aix).filebased:: + + "$(package_basename)" + package_policy => $(desired), + package_select => '>=', + package_version => $(package_version), + package_architectures => { $(package_arch) }, + package_method => rpm_version($(dir)); + + 
solaris_adminfile_ok:: + + "$(package_name)" + package_policy => $(desired), + package_select => '>=', + package_version => $(package_version), + package_method => solaris_install($(solaris_admin_file)); + + !filebased.!debian.!redhat.!(suse|sles):: + + "$(package_name)" + package_policy => $(desired), + package_method => generic; + + reports: + "(DEBUG|DEBUG_$(this.bundle)).filebased.!(suse|sles).!debian.!redhat.!aix.!solaris_pkgadd":: + "DEBUG $(this.bundle): sorry, can't do file-based installs on $(sys.os)"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/paths.cf b/policies/lib/tree/20_cfe_basics/cfengine/paths.cf new file mode 100644 index 00000000000..390a2c81f24 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/paths.cf @@ -0,0 +1,704 @@ +# Paths bundle (used by other bodies) + +bundle common paths +# @brief Defines an array `path` with common paths to standard binaries and +# directories as well as classes for defined and existing paths. +# +# If the current platform knows that binary XYZ should be present, +# `_stdlib_has_path_XYZ` is defined. Furthermore, if XYZ is actually present +# (i.e. the binary exists) in the expected location, `_stdlib_path_exists_XYZ` is +# defined. +# +# **Example:** +# +# ```cf3 +# bundle agent no_carriage_returns(filename) +# { +# commands: +# _stdlib_path_exists_sed:: +# "$(paths.sed)" -> { "CFE-3216" } +# args => "-i 's/^M//' $(filename)", +# comment => "Preferred reference style"; +# +# "$(paths[sed])" +# args => "-i 's/^M//' $(filename)", +# comment => "Alternate array reference style"; +# } +# ``` +# +# The paths bundle can be extended with custom paths by tagging *classic array* variables with `paths.cf`. +# +# **Example:** +# +# ```cf3 +# bundle agent extended_paths_example +# { +# meta: +# "tags" slist => { "autorun" }; +# +# vars: +# # NOTE: the key will be canonified when it's pulled in to the paths bundle. +# "path[orange]" string => "/bin/true", meta => { "paths.cf" }; +# "path[true-blue]" string => "/bin/true", meta => { "paths.cf" }; +# "foo[bar]" string => "/bin/true", meta => { "paths.cf" }; +# +# this_context_isnt_defined_so_no_path:: +# "path[red]" string => "/bin/true", meta => { "paths.cf" }; +# +# reports: +# _stdlib_path_exists_orange:: +# "path paths.orange == $(paths.orange)"; +# "path paths.path[orange] == $(paths.path[orange])"; +# +# _stdlib_path_exists_bar:: +# "path paths.bar == $(paths.bar)"; +# "path paths.path[bar] == $(paths.path[bar])"; +# +# _stdlib_path_exists_true_blue:: +# "path paths.true_blue == $(paths.true_blue)"; +# "path paths.path[true_blue] == $(paths.path[true_blue])"; +# +# _stdlib_path_exists_red:: +# "path paths.red == $(paths.red)"; +# "path paths.path[red] == $(paths.path[red])"; +# +# !_stdlib_path_exists_red:: +# "path paths.red was not found"; +# "path paths.path[red] was not found"; +# } +# ``` +# +# Additionally several path entries are present to aid in policy sharing between +# unix systems and Android Termux environments. +# +# **Example:** +# +# ```cf3 +# bundle agent track_sshd_config +# { +# files: +# "$(paths.etc_path)/sshd/sshd_config" +# changes => detect_all_change; +# } +# ``` +# +# In case of termux, `paths.etc_path` will be `/data/data/com.termux/files/usr/etc`. 
+# +# **History:** +# +# - Ability to extend paths by tagging classic array variables added 3.17.0 (works with binary version 3.11.0 and greater) +{ + vars: + + # + # Common full pathname of commands for OS + # + + enterprise.(am_policy_hub|policy_server):: + "path[git]" + string => "$(sys.workdir)/bin/git", + comment => "CFEngine Enterprise Hub ships with its own git which is used internally"; + + !(enterprise.(am_policy_hub|policy_server)):: + "path[git]" string => "/usr/bin/git"; + + !(freebsd|darwin|smartos):: + "path[npm]" string => "/usr/bin/npm"; + "path[pip]" string => "/usr/bin/pip"; + "path[virtualenv]" string => "/usr/bin/virtualenv"; + + !(freebsd|darwin):: + "path[getfacl]" string => "/usr/bin/getfacl"; + "path[setfacl]" string => "/usr/bin/setfacl"; + + freebsd|darwin:: + "path[npm]" string => "/usr/local/bin/npm"; + "path[pip]" string => "/usr/local/bin/pip"; + "path[virtualenv]" string => "/usr/local/bin/virtualenv"; + "path[automount]" string => "/usr/sbin/automount"; + + _have_bin_env:: + "path[env]" string => "/bin/env"; + !_have_bin_env:: + "path[env]" string => "/usr/bin/env"; + + _have_bin_systemctl:: + "path[systemctl]" string => "/bin/systemctl"; + !_have_bin_systemctl:: + "path[systemctl]" string => "/usr/bin/systemctl"; + + _have_bin_journalctl:: + "path[journalctl]" string => "/bin/journalctl"; + !_have_bin_journalctl:: + "path[journalctl]" string => "/usr/bin/journalctl"; + + _have_bin_timedatectl:: + "path[timedatectl]" string => "/bin/timedatectl"; + !_have_bin_timedatectl:: + "path[timedatectl]" string => "/usr/bin/timedatectl"; + + linux:: + "path[date]" string => "/usr/bin/date"; + "path[lsattr]" string => "/usr/bin/lsattr"; + "path[lsmod]" string => "/sbin/lsmod"; + "path[tar]" string => "/bin/tar"; + "path[true]" string => "/bin/true"; + "path[false]" string => "/bin/false"; + "path[pgrep]" string => "/usr/bin/pgrep"; + "path[getent]" string => "/usr/bin/getent"; + "path[mailx]" string => "/usr/bin/mailx"; + "path[prelink]" string => "/usr/sbin/prelink"; + "path[ssh]" string => "/usr/bin/ssh"; + + aix:: + + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[crontabs]" string => "/var/spool/cron/crontabs"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/usr/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[echo]" string => "/usr/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[find]" string => "/usr/bin/find"; + "path[grep]" string => "/usr/bin/grep"; + "path[ls]" string => "/usr/bin/ls"; + "path[netstat]" string => "/usr/bin/netstat"; + "path[oslevel]" string => "/usr/bin/oslevel"; + "path[ping]" string => "/usr/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[tr]" string => "/usr/bin/tr"; + "path[yum]" string => "/usr/bin/yum"; + + archlinux:: + + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/usr/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/usr/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[dmidecode]" string => 
"/usr/bin/dmidecode"; + "path[echo]" string => "/usr/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[ethtool]" string => "/usr/bin/ethtool"; + "path[find]" string => "/usr/bin/find"; + "path[free]" string => "/usr/bin/free"; + "path[grep]" string => "/usr/bin/grep"; + "path[hostname]" string => "/usr/bin/hostname"; + "path[init]" string => "/usr/bin/init"; + "path[iptables]" string => "/usr/bin/iptables"; + "path[iptables_save]" string => "/usr/bin/iptables-save"; + "path[iptables_restore]" string => "/usr/bin/iptables-restore"; + "path[ls]" string => "/usr/bin/ls"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[netstat]" string => "/usr/bin/netstat"; + "path[ping]" string => "/usr/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[top]" string => "/usr/bin/top"; + "path[tr]" string => "/usr/bin/tr"; + # + "path[pacman]" string => "/usr/bin/pacman"; + "path[pamac]" string => "/usr/bin/pamac"; + "path[yaourt]" string => "/usr/bin/yaourt"; + "path[useradd]" string => "/usr/bin/useradd"; + "path[userdel]" string => "/usr/bin/userdel"; + "path[usermod]" string => "/usr/bin/usermod"; + "path[groupadd]" string => "/usr/bin/groupadd"; + "path[groupdel]" string => "/usr/bin/groupdel"; + "path[groupmod]" string => "/usr/bin/groupmod"; + "path[ip]" string => "/usr/bin/ip"; + "path[ifconfig]" string => "/usr/bin/ifconfig"; + "path[journalctl]" string => "/usr/bin/journalctl"; + "path[netctl]" string => "/usr/bin/netctl"; + + coreos:: + + "path[awk]" string => "/usr/bin/awk"; + "path[cat]" string => "/usr/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/usr/bin/cut"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[echo]" string => "/usr/bin/echo"; + "path[ip]" string => "/usr/bin/ip"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[netstat]" string => "/usr/bin/netstat"; + "path[ping]" string => "/usr/bin/ping"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[wget]" string => "/usr/bin/wget"; + + freebsd|netbsd|openbsd:: + + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[crontabs]" string => "/var/cron/tabs"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[echo]" string => "/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[find]" string => "/usr/bin/find"; + "path[grep]" string => "/usr/bin/grep"; + "path[ls]" string => "/bin/ls"; + "path[netstat]" string => "/usr/bin/netstat"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[tr]" string => "/usr/bin/tr"; + + freebsd.!(freebsd_9_3|freebsd_10|freebsd_11)|netbsd|openbsd:: + + "path[ping]" string => "/usr/bin/ping"; + + freebsd_9_3|freebsd_10|freebsd_11:: + + "path[ping]" string => "/sbin/ping"; + + freebsd|netbsd:: + + "path[cksum]" string => "/usr/bin/cksum"; + "path[realpath]" string => "/bin/realpath"; + + freebsd:: + + "path[bhyvectl]" string => 
"/usr/sbin/bhyvectl"; + "path[getfacl]" string => "/bin/getfacl"; + "path[setfacl]" string => "/bin/setfacl"; + "path[dtrace]" string => "/usr/sbin/dtrace"; + "path[service]" string => "/usr/sbin/service"; + "path[zpool]" string => "/sbin/zpool"; + "path[zfs]" string => "/sbin/zfs"; + + openbsd:: + + "path[cksum]" string => "/bin/cksum"; + + smartos:: + "path[npm]" string => "/opt/local/bin/npm"; + "path[pip]" string => "/opt/local/bin/pip"; + + solaris:: + + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/usr/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[crontabs]" string => "/var/spool/cron/crontabs"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/sbin/dig"; + "path[echo]" string => "/usr/bin/echo"; + "path[netstat]" string => "/usr/bin/netstat"; + "path[ping]" string => "/usr/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[wget]" string => "/usr/bin/wget"; + # + "path[svcs]" string => "/usr/bin/svcs"; + "path[svcadm]" string => "/usr/sbin/svcadm"; + "path[svccfg]" string => "/usr/sbin/svccfg"; + "path[svcprop]" string => "/usr/bin/svcprop"; + "path[netadm]" string => "/usr/sbin/netadm"; + "path[dladm]" string => "/usr/sbin/dladm"; + "path[ipadm]" string => "/usr/sbin/ipadm"; + "path[pkg]" string => "/usr/bin/pkg"; + "path[pkginfo]" string => "/usr/bin/pkginfo"; + "path[pkgadd]" string => "/usr/sbin/pkgadd"; + "path[pkgrm]" string => "/usr/sbin/pkgrm"; + "path[zoneadm]" string => "/usr/sbin/zoneadm"; + "path[zonecfg]" string => "/usr/sbin/zonecfg"; + + solaris.(mpf_stdlib_use_posix_utils.!disable_mpf_stdlib_use_posix_utils):: + "path[awk]" string => "/usr/xpg4/bin/awk"; + "path[df]" string => "/usr/xpg4/bin/df"; + "path[egrep]" string => "/usr/xpg4/bin/egrep"; + "path[find]" string => "/usr/xpg4/bin/find"; + "path[grep]" string => "/usr/xpg4/bin/grep"; + "path[ls]" string => "/usr/xpg4/bin/ls"; + "path[sed]" string => "/usr/xpg4/bin/sed"; + "path[sort]" string => "/usr/xpg4/bin/sort"; + "path[tr]" string => "/usr/xpg4/bin/tr"; + + solaris.!(mpf_stdlib_use_posix_utils.!disable_mpf_stdlib_use_posix_utils):: + "path[awk]" string => "/usr/bin/awk"; + "path[df]" string => "/usr/bin/df"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[find]" string => "/usr/bin/find"; + "path[grep]" string => "/usr/bin/grep"; + "path[ls]" string => "/usr/bin/ls"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[tr]" string => "/usr/bin/tr"; + + redhat:: + + "path[awk]" string => "/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[createrepo]" string => "/usr/bin/createrepo"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[crontabs]" string => "/var/spool/cron"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[domainname]" string => "/bin/domainname"; + "path[echo]" string => "/bin/echo"; + "path[egrep]" string => "/bin/egrep"; + "path[ethtool]" string => "/usr/sbin/ethtool"; + "path[find]" string => "/usr/bin/find"; + "path[free]" string => "/usr/bin/free"; + "path[getenforce]" string => 
"/usr/sbin/getenforce"; + "path[grep]" string => "/bin/grep"; + "path[hostname]" string => "/bin/hostname"; + "path[init]" string => "/sbin/init"; + "path[iptables]" string => "/sbin/iptables"; + "path[iptables_save]" string => "/sbin/iptables-save"; + "path[ls]" string => "/bin/ls"; + "path[lsof]" string => "/usr/sbin/lsof"; + "path[netstat]" string => "/bin/netstat"; + "path[nologin]" string => "/sbin/nologin"; + "path[ping]" string => "/usr/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[restorecon]" string => "/sbin/restorecon"; + "path[sed]" string => "/bin/sed"; + "path[semanage]" string => "/usr/sbin/semanage"; + "path[sort]" string => "/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[tr]" string => "/usr/bin/tr"; + "path[wc]" string => "/usr/bin/wc"; + "path[wget]" string => "/usr/bin/wget"; + "path[realpath]" string => "/usr/bin/realpath"; + + # + "path[chkconfig]" string => "/sbin/chkconfig"; + "path[groupadd]" string => "/usr/sbin/groupadd"; + "path[groupdel]" string => "/usr/sbin/groupdel"; + "path[ifconfig]" string => "/sbin/ifconfig"; + "path[ip]" string => "/sbin/ip"; + "path[rpm]" string => "/bin/rpm"; + "path[service]" string => "/sbin/service"; + "path[svc]" string => "/sbin/service"; + "path[useradd]" string => "/usr/sbin/useradd"; + "path[userdel]" string => "/usr/sbin/userdel"; + "path[usermod]" string => "/usr/sbin/usermod"; + "path[yum]" string => "/usr/bin/yum"; + + darwin:: + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[createrepo]" string => "/usr/bin/createrepo"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[crontabs]" string => "/usr/lib/cron/tabs"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[domainname]" string => "/bin/domainname"; + "path[dscl]" string => "/usr/bin/dscl"; + "path[echo]" string => "/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[find]" string => "/usr/bin/find"; + "path[grep]" string => "/usr/bin/grep"; + "path[hostname]" string => "/bin/hostname"; + "path[ls]" string => "/bin/ls"; + "path[lsof]" string => "/usr/sbin/lsof"; + "path[netstat]" string => "/usr/sbin/netstat"; + "path[ping]" string => "/sbin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[test]" string => "/bin/test"; + "path[tr]" string => "/usr/bin/tr"; + + # + "path[brew]" string => "/usr/local/bin/brew"; + "path[sudo]" string => "/usr/bin/sudo"; + + debian:: + + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[chkconfig]" string => "/sbin/chkconfig"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[createrepo]" string => "/usr/bin/createrepo"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[crontabs]" string => "/var/spool/cron/crontabs"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[dmidecode]" string => "/usr/sbin/dmidecode"; + "path[domainname]" string => "/bin/domainname"; + "path[echo]" string => 
"/bin/echo"; + "path[egrep]" string => "/bin/egrep"; + "path[ethtool]" string => "/sbin/ethtool"; + "path[find]" string => "/usr/bin/find"; + "path[free]" string => "/usr/bin/free"; + "path[getenforce]" string => "/usr/sbin/getenforce"; + "path[grep]" string => "/bin/grep"; + "path[hostname]" string => "/bin/hostname"; + "path[init]" string => "/sbin/init"; + "path[iptables]" string => "/sbin/iptables"; + "path[iptables_save]" string => "/sbin/iptables-save"; + "path[ls]" string => "/bin/ls"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[netstat]" string => "/bin/netstat"; + "path[nologin]" string => "/usr/sbin/nologin"; + "path[ping]" string => "/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[restorecon]" string => "/sbin/restorecon"; + "path[sed]" string => "/bin/sed"; + "path[semanage]" string => "/usr/sbin/semanage"; + "path[sort]" string => "/usr/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[tr]" string => "/usr/bin/tr"; + "path[wc]" string => "/usr/bin/wc"; + "path[wget]" string => "/usr/bin/wget"; + "path[realpath]" string => "/usr/bin/realpath"; + + # + "path[apt_cache]" string => "/usr/bin/apt-cache"; + "path[apt_config]" string => "/usr/bin/apt-config"; + "path[apt_get]" string => "/usr/bin/apt-get"; + "path[apt_key]" string => "/usr/bin/apt-key"; + "path[aptitude]" string => "/usr/bin/aptitude"; + "path[dpkg]" string => "/usr/bin/dpkg"; + "path[dpkg_divert]" string => "/usr/bin/dpkg-divert"; + "path[groupadd]" string => "/usr/sbin/groupadd"; + "path[groupdel]" string => "/usr/sbin/groupdel"; + "path[groupmod]" string => "/usr/sbin/groupmod"; + "path[ifconfig]" string => "/sbin/ifconfig"; + "path[ip]" string => "/sbin/ip"; + "path[service]" string => "/usr/sbin/service"; + "path[svc]" string => "/usr/sbin/service"; + "path[update_alternatives]" string => "/usr/bin/update-alternatives"; + "path[update_rc_d]" string => "/usr/sbin/update-rc.d"; + "path[useradd]" string => "/usr/sbin/useradd"; + "path[userdel]" string => "/usr/sbin/userdel"; + "path[usermod]" string => "/usr/sbin/usermod"; + + archlinux||darwin:: + + "path[sysctl]" string => "/usr/bin/sysctl"; + + !(archlinux||darwin):: + + "path[sysctl]" string => "/sbin/sysctl"; + + !(suse|sles):: + "path[logger]" string => "/usr/bin/logger"; + + opensuse:: + "path[ls]" string => "/usr/bin/ls"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[awk]" string => "/usr/bin/awk"; + "path[cat]" string => "/usr/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/usr/bin/cut"; + "path[df]" string => "/usr/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[dmidecode]" string => "/usr/sbin/dmidecode"; + "path[echo]" string => "/usr/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[ethtool]" string => "/usr/sbin/ethtool"; + "path[find]" string => "/usr/bin/find"; + "path[free]" string => "/usr/bin/free"; + "path[grep]" string => "/usr/bin/grep"; + "path[hostname]" string => "/usr/bin/hostname"; + "path[init]" string => "/sbin/init"; + "path[iptables]" string => "/usr/sbin/iptables"; + "path[ls]" string => "/usr/bin/ls"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[nologin]" string => "/sbin/nologin"; + "path[ping]" string => "/usr/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/usr/bin/sed"; + "path[sort]" 
string => "/usr/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[tr]" string => "/usr/bin/tr"; + "path[logger]" string => "/usr/bin/logger"; + "path[wget]" string => "/usr/bin/wget"; + "path[chkconfig]" string => "/sbin/chkconfig"; + "path[groupadd]" string => "/usr/sbin/groupadd"; + "path[groupdel]" string => "/usr/sbin/groupdel"; + "path[groupmod]" string => "/usr/sbin/groupmod"; + "path[ip]" string => "/sbin/ip"; + "path[rpm]" string => "/usr/bin/rpm"; + "path[service]" string => "/sbin/service"; + "path[useradd]" string => "/usr/sbin/useradd"; + "path[userdel]" string => "/usr/sbin/userdel"; + "path[usermod]" string => "/usr/sbin/usermod"; + "path[zypper]" string => "/usr/bin/zypper"; + + suse|sles:: + + "path[awk]" string => "/usr/bin/awk"; + "path[bc]" string => "/usr/bin/bc"; + "path[cat]" string => "/bin/cat"; + "path[cksum]" string => "/usr/bin/cksum"; + "path[createrepo]" string => "/usr/bin/createrepo"; + "path[crontab]" string => "/usr/bin/crontab"; + "path[crontabs]" string => "/var/spool/cron/tabs"; + "path[curl]" string => "/usr/bin/curl"; + "path[cut]" string => "/usr/bin/cut"; + "path[dc]" string => "/usr/bin/dc"; + "path[df]" string => "/bin/df"; + "path[diff]" string => "/usr/bin/diff"; + "path[dig]" string => "/usr/bin/dig"; + "path[dmidecode]" string => "/usr/sbin/dmidecode"; + "path[domainname]" string => "/bin/domainname"; + "path[echo]" string => "/bin/echo"; + "path[egrep]" string => "/usr/bin/egrep"; + "path[ethtool]" string => "/usr/sbin/ethtool"; + "path[find]" string => "/usr/bin/find"; + "path[free]" string => "/usr/bin/free"; + "path[grep]" string => "/usr/bin/grep"; + "path[hostname]" string => "/bin/hostname"; + "path[init]" string => "/sbin/init"; + "path[iptables]" string => "/usr/sbin/iptables"; + "path[iptables_save]" string => "/usr/sbin/iptables-save"; + "path[ls]" string => "/bin/ls"; + "path[lsof]" string => "/usr/bin/lsof"; + "path[netstat]" string => "/bin/netstat"; + "path[nologin]" string => "/sbin/nologin"; + "path[ping]" string => "/bin/ping"; + "path[perl]" string => "/usr/bin/perl"; + "path[printf]" string => "/usr/bin/printf"; + "path[sed]" string => "/bin/sed"; + "path[sort]" string => "/usr/bin/sort"; + "path[test]" string => "/usr/bin/test"; + "path[tr]" string => "/usr/bin/tr"; + "path[logger]" string => "/bin/logger"; + "path[wget]" string => "/usr/bin/wget"; + + # + "path[chkconfig]" string => "/sbin/chkconfig"; + "path[groupadd]" string => "/usr/sbin/groupadd"; + "path[groupdel]" string => "/usr/sbin/groupdel"; + "path[groupmod]" string => "/usr/sbin/groupmod"; + "path[ifconfig]" string => "/sbin/ifconfig"; + "path[ip]" string => "/sbin/ip"; + "path[rpm]" string => "/bin/rpm"; + "path[service]" string => "/sbin/service"; + "path[useradd]" string => "/usr/sbin/useradd"; + "path[userdel]" string => "/usr/sbin/userdel"; + "path[usermod]" string => "/usr/sbin/usermod"; + "path[zypper]" string => "/usr/bin/zypper"; + + linux|solaris:: + + "path[shadow]" string => "/etc/shadow"; + + freebsd|openbsd|netbsd|darwin:: + + "path[shadow]" string => "/etc/master.passwd"; + + "path[mailx]" string => "/usr/bin/mailx"; + + aix:: + + "path[shadow]" string => "/etc/security/passwd"; + + termux:: + "path[tar]" string => "/usr/bin/tar"; + "path[true]" string => "/usr/bin/true"; + "path[false]" string => "/usr/bin/false"; + "path[cat]" string => "/usr/bin/cat"; + "path[sysctl]" string => "/usr/bin/sysctl"; + "path[env]" string => "/usr/bin/env"; + + # now, mangle the values by prepending the TERMUX_PREFIX + "files_path" string => 
"/data/data/com.termux/files"; + "etc_path" string => "$(files_path)/usr/etc"; + "tmp_path" string => "$(files_path)/usr/tmp"; + "bin_path" string => "$(files_path)/usr/bin"; + "var_path" string => "$(files_path)/usr/var"; + "tmp_paths" slist => getindices("path"); + "tmp_path[$(tmp_paths)]" string => "$(files_path)$(path[$(tmp_paths)])"; + "path[$(tmp_paths)]" string => "$(tmp_path[$(tmp_paths)])"; + + !(termux|windows):: + # reasonable defaults for unix systems to allow for writing + # more portable paths between termux and other systems + "etc_path" string => "/etc"; + "tmp_path" string => "/tmp"; + "bin_path" string => "/bin"; + "var_path" string => "/var"; + + any:: +@if minimum_version(3.11.0) + # Pull in variables tagged with `paths.cf` + "_extended_path_data" -> { "CFE-3426" } + data => variablesmatching_as_data( ".*", "paths.cf" ); + "_i" -> { "CFE-3426" } + slist => getindices( _extended_path_data ); + "path[$(with)]" -> { "CFE-3426" } + string => "$(_extended_path_data[$(_i)])", + with => canonify( regex_replace( $(_i), ".*\[(.*)\]", "$1", "") ); +@endif + + "all_paths" slist => getindices("path"); + "$(all_paths)" string => "$(path[$(all_paths)])"; + + classes: + "_have_bin_env" expression => fileexists("/bin/env"); + "_have_bin_systemctl" expression => fileexists("/bin/systemctl"); + "_have_bin_timedatectl" expression => fileexists("/bin/timedatectl"); + "_have_bin_journalctl" expression => fileexists("/bin/journalctl"); + "_have_python" or => { + returnszero("command -v python >/dev/null 2>/dev/null", "useshell"), + returnszero("command -v python3 >/dev/null 2>/dev/null", "useshell"), + returnszero("command -v python2 >/dev/null 2>/dev/null", "useshell") + }; + + "_stdlib_has_path_$(all_paths)" + expression => isvariable("$(all_paths)"), + comment => "It's useful to know if a given path is defined"; + + "_stdlib_path_exists_$(all_paths)" + expression => fileexists("$(path[$(all_paths)])"), + comment => "It's useful to know if $(all_paths) exists on the filesystem as defined"; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/processes.cf b/policies/lib/tree/20_cfe_basics/cfengine/processes.cf new file mode 100644 index 00000000000..1c16e9713a3 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/processes.cf @@ -0,0 +1,84 @@ +# Processes bodies + +body process_select exclude_procs(x) +# @brief Select all processes excluding those matching `x` +# @param x Regular expression matching the command/cmd field +# of the processes that should be excluded +{ + command => "$(x)"; + process_result => "!command"; +} + +## + +body process_select days_older_than(d) +# @brief Select all processes that are older than `d` days +# @param d Days that processes need to be old to be selected +{ + stime_range => irange(ago(0,0,"$(d)",0,0,0),now); + process_result => "!stime"; +} + +## + +body process_select by_owner(u) +# @brief Select processes owned by user `u` +# @param u The name of the user +# +# Matches processes against the given username and the given username's uid +# in case only uid is visible in process list. 
+{ + process_owner => { "$(u)", canonify(getuid("$(u)")) }; + process_result => "process_owner"; +} + +body process_select by_pid(pid) +# @brief Select a process matching the given PID +# @param pid PID of the process to be matched +{ + pid => irange("$(pid)","$(pid)"); + process_result => "pid"; +} + +## + +body process_count any_count(cl) +# @brief Define class `cl` if the process is running +# @param cl Name of the class to be defined +{ + match_range => "0,0"; + out_of_range_define => { "$(cl)" }; +} + +## + +body process_count check_range(name,lower,upper) +# @brief Define a class if the number of processes is not +# within the specified range. +# @param name The name part of the class `$(name)_out_of_range` +# @param lower The lower bound of the range +# @param upper The upper bound of the range +{ + match_range => irange("$(lower)","$(upper)"); + out_of_range_define => { "$(name)_out_of_range" }; +} + +bundle agent process_kill(name) +# @brief Kill a process by name (can be a regular expression) +# @param name the regular expression or string +# +# **Example:** +# +# ```cf3 +# methods: +# "kill" usebundle => process_kill("badprocess"); +# ``` +{ + processes: + !windows:: + # Signals are presented as an ordered list to the process. + "$(name)" signals => { "term", "kill" }; + windows:: + # On Windows, only the kill signal is supported, which terminates the process. + "$(name)" signals => { "kill" }; +} diff --git a/policies/lib/tree/20_cfe_basics/cfengine/reports.cf b/policies/lib/tree/20_cfe_basics/cfengine/reports.cf new file mode 100644 index 00000000000..c386d70fcf9 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/reports.cf @@ -0,0 +1,47 @@ +body printfile cat(file) +# @brief Report the contents of a file +# @param file The full path of the file to report +{ + file_to_print => "$(file)"; + number_of_lines => "inf"; +} + +body printfile head(file) +# @brief Report the first 10 lines of a file +# @param file The full path of the file to report +{ + file_to_print => "$(file)"; + # GNU head defaults to 10 + number_of_lines => "10"; +} + +body printfile head_n(file, n) +# @brief Report the first `n` lines of a file +# @param file The full path of the file to report +# @param n The number of lines to report +{ + file_to_print => "$(file)"; + number_of_lines => "$(n)"; +} + +@if minimum_version(3.18) +body printfile tail(file) +# @brief Report the last 10 lines of a file +# @param file The full path of the file to report +{ + file_to_print => "$(file)"; + # GNU tail defaults to 10 + number_of_lines => "-10"; +} +@endif + +@if minimum_version(3.18) +body printfile tail_n(file, n) +# @brief Report the last `n` lines of a file +# @param file The full path of the file to report +# @param n The number of lines to report +{ + file_to_print => "$(file)"; + number_of_lines => "-$(n)"; +} +@endif diff --git a/policies/lib/tree/20_cfe_basics/cfengine/services.cf b/policies/lib/tree/20_cfe_basics/cfengine/services.cf new file mode 100644 index 00000000000..d3f1b2ced53 --- /dev/null +++ b/policies/lib/tree/20_cfe_basics/cfengine/services.cf @@ -0,0 +1,1092 @@ +# Services bodies + +bundle common services_common +# @brief Enumerate policy files used by this policy file for inclusion to inputs +{ + vars: + "inputs" slist => { "$(this.promise_dirname)/common.cf", + "$(this.promise_dirname)/paths.cf" }; +} + +body file control +# @brief Include policy files used by this policy file as part of inputs +{ + inputs => { @(services_common.inputs) }; +} + 
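For context on the process bodies introduced above, here is a minimal usage sketch (editorial illustration only, not part of this changeset) showing how the `process_select` and `process_count` bodies could be combined in a `processes` promise; the bundle name, process pattern, user and count range are placeholder assumptions.

```cf3
# Illustrative sketch only: the names "example_httpd_watch", "httpd",
# "www-data" and the 1-10 range are placeholders, not part of this diff.
bundle agent example_httpd_watch
{
  processes:
    # Count "httpd" processes owned by www-data; check_range() defines
    # "httpd_out_of_range" when the count falls outside 1..10.
    "httpd"
      process_select => by_owner("www-data"),
      process_count  => check_range("httpd", "1", "10");

  reports:
    httpd_out_of_range::
      "httpd process count is outside the expected 1-10 range";
}
```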
+##------------------------------------------------------- +## service promises +##------------------------------------------------------- + +body service_method bootstart +# @brief Start the service and all its dependencies at boot time +# +# **See also:** `service_autostart_policy`, `service_dependence_chain` +{ + service_autostart_policy => "boot_time"; + service_dependence_chain => "start_parent_services"; + windows:: + service_type => "windows"; +} + +## + +body service_method force_deps +# @brief Start all dependendencies when this service starts, and stop all +# dependent services when this service stops. +# +# The service does not get automatically started. +# +# **See also:** `service_autostart_policy`, `service_dependence_chain` +{ + service_dependence_chain => "all_related"; + windows:: + service_type => "windows"; +} + +body service_method standard_services +# @brief Default services_method for when you wan't to call it explicitly +# +# By default this service_method is *not* used. The call for standard_services +# is within the core and not here. In case you use a promise like: +# +# ```cf3 +# services: +# "ssh" +# service_policy => "start"; +# ``` +# +# Then this method is skipped and CFEngine calls standard_services bundle +# directly. This is here as a helper in case you wan't to be explicit +# with your service promise and point it to standard_services (for readability, +# documentation, etc). +# +# Do note that any options defined in this method does not apply to service +# promises without explicit template_method call for standard_services. +# +# **Example:** +# +# ```cf3 +# services: +# "ssh" +# service_policy => "start", +# service_method => standard_services; +# ``` +{ + service_bundle => default:standard_services( $(this.promiser), $(this.service_policy) ); +} + +body service_method systemd_services +# @brief systemd service method +# +# **Example:** +# +# ```cf3 +# services: +# "ssh" +# service_policy => "enabled", +# service_method => systemd_services; +# ``` +{ + service_bundle => default:systemd_services( $(this.promiser), $(this.service_policy) ); +} + +bundle agent standard_services(service,state) +# @brief Standard services bundle, used by CFEngine by default +# @author CFEngine AS +# @param service Name of service to control +# @param state The desired state for that service: "start", "restart", "reload", "stop", or "disable". "enable", "enabled", and "disabled" are also able to be used when systemd is detected. "active" and "inactive" states are supported for systemd managed hosts and can be used for controlling the services currently running state, but make no promises about the service state on boot. +# +# This bundle is used by CFEngine if you don't specify a services +# handler explicitly, and will work with systemd or chkconfig or other +# non-sysvinit service managers. It will try to automate service +# discovery, unlike `classic_services` which requires known service +# names. If it can't do the automatic management, it will pass control +# to `classic_services`. +# +# This bundle receives the service name and the desired service state, +# then does the needful to reach the desired state. +# +# If you're running systemd, systemctl will be used via `systemd_services`. +# +# Else, if chkconfig is present, it will be used. +# +# Else, if the service command is available, if will be used. +# +# Else, if the svcadm command is available, if will be used. Note you +# have to supply the full SMF service identifier. 
+# +# Else, control is passed to `classic_services`. +# +# Note you do **not** have to call this bundle from `services` +# promises. You can simply make a `methods` call to it. That would +# enable you to use systemd states like `try-restart` for instance. +# +# **Example:** +# +# ```cf3 +# services: +# "sshd" service_policy => "start"; # uses `standard_services` +# +# methods: +# "" usebundle => standard_services("sshd", "start"); # direct +# ``` +# +# Alternatively, since services promises are an abstraction around bundles, the service state can be promised via a methods type promise. +# +# ```cf3 +# +# methods: +# "SSHD should be running" usebundle => standard_services("sshd", "start"); +# ``` +{ + vars: + "c_service" string => canonify("$(service)"); + + freebsd:: + "init" string => ifelse(fileexists("/usr/local/etc/rc.d/$(service)"), + "/usr/local/etc/rc.d/$(service)", + "/etc/rc.d/$(service)"); + + !freebsd:: + "init" string => "/etc/init.d/$(service)"; + + start|restart|reload:: + "chkconfig_mode" string => "on"; + "svcadm_mode" string => "enable"; + + stop|disable:: + "chkconfig_mode" string => "off"; + "svcadm_mode" string => "disable"; + + classes: + # define a class named after the desired state + "$(state)" expression => "any"; + "non_disabling" or => { "start", "stop", "restart", "reload" }; + + "chkconfig" expression => "!systemd._stdlib_path_exists_chkconfig"; + "sysvservice" expression => "!systemd.!chkconfig._stdlib_path_exists_service"; + "smf" expression => "!systemd.!chkconfig.!sysvservice._stdlib_path_exists_svcadm"; + "fallback" expression => "!systemd.!chkconfig.!sysvservice.!smf"; + + "have_init" expression => fileexists($(init)); + + chkconfig.have_init.freebsd:: + "running" -> { "CFE-3513" } + expression => returnszero("$(init) onestatus > /dev/null", "useshell"); + + chkconfig.have_init.!freebsd:: + "running" expression => returnszero("$(init) status > /dev/null", "useshell"); + + sysvservice.have_init:: + "running" expression => returnszero("$(paths.service) $(service) status > /dev/null", "useshell"); + + chkconfig.SuSE:: + "onboot" + expression => returnszero("$(paths.chkconfig) $(service) | $(paths.grep) 'on$' >/dev/null", "useshell"), + comment => "SuSE chkconfig outputs current state to stdout rather than as an exit code"; + + chkconfig.!SuSE:: + "onboot" + expression => returnszero("$(paths.chkconfig) $(service)", "noshell"), + comment => "We need to know if the service is configured to start at boot or not"; + + # We redirect stderr and stdout to dev null so that we do not create noise in the logs + "chkconfig_$(c_service)_unregistered" + not => returnszero("$(paths.chkconfig) --list $(service) &> /dev/null", "useshell"), + comment => "We need to know if the service is registered with chkconfig + so that we can perform other chkconfig operations, if the + service is not registered it must be added. 
Note we do not + automatically try to add the service at this time."; + + commands: + + chkconfig.stop.onboot:: + # Only chkconfig-disable the service if it's currently set to start on boot + "$(paths.chkconfig) $(service) $(chkconfig_mode)" + classes => kept_successful_command, + contain => silent; + + chkconfig.start.!onboot:: + # Only chkconfig-enable the service if it's not already set to start on boot, and if it's a registered chkconfig service + "$(paths.chkconfig) $(service) $(chkconfig_mode)" + if => "!chkconfig_$(c_service)_unregistered", + classes => kept_successful_command, + contain => silent; + + chkconfig.have_init.(((start|restart).!running)|((stop|restart|reload).running)).non_disabling:: + "$(init) $(state)" + contain => silent; + + sysvservice.start.!running:: + "$(paths.service) $(service) start" + handle => "standard_services_sysvservice_not_running_start", + classes => kept_successful_command, + comment => "If the service should be running and it is not + currently running then we should issue the standard service + command to start the service."; + + sysvservice.restart:: + "$(paths.service) $(service) restart" + handle => "standard_services_sysvservice_restart", + classes => kept_successful_command, + comment => "If the service should be restarted we issue the + standard service command to restart or reload the service. + There is no restriction based on the service's current state as + restart can start a service that was not already + running."; + + sysvservice.reload.running:: + "$(paths.service) $(service) reload" + handle => "standard_services_sysvservice_reload", + classes => kept_successful_command, + comment => "If the service should be reloaded we issue the + standard service command to reload the service. + It is restricted to when the service is running as a reload + should not start services that are not already running. 
This + may not be triggered as service state parameters are limited + and translated to the closest meaning. + + sysvservice.((stop|disable).running):: + "$(paths.service) $(service) stop" + handle => "standard_services_sysvservice_stop", + classes => kept_successful_command, + comment => "If the service should be stopped or disabled and it is + currently running then we should issue the standard service + command to stop the service."; + + smf:: + "$(paths.svcadm) $(svcadm_mode) $(service)" + classes => kept_successful_command; + + methods: + fallback:: + "classic" usebundle => classic_services($(service), $(state)); + + systemd:: + "systemd" + usebundle => systemd_services( $(service), $(state) ); + + reports: + verbose_mode.systemd:: + "$(this.bundle): using systemd layer to $(state) $(service)"; + verbose_mode.systemd.!service_loaded:: + "$(this.bundle): Service $(service) unit file is not loaded; doing nothing"; + verbose_mode.chkconfig:: + "$(this.bundle): using chkconfig layer to $(state) $(service) (chkconfig mode $(chkconfig_mode))" + if => "!chkconfig_$(c_service)_unregistered.((start.!onboot)|(stop.onboot))"; + verbose_mode.chkconfig:: + "$(this.bundle): skipping chkconfig layer to $(state) $(service) because $(service) is not registered with chkconfig (chkconfig --list $(service))" + if => "chkconfig_$(c_service)_unregistered"; + verbose_mode.sysvservice:: + "$(this.bundle): using System V service / Upstart layer to $(state) $(service)"; + verbose_mode.smf:: + "$(this.bundle): using Solaris SMF to $(state) $(service) (svcadm mode $(svcadm_mode))"; + verbose_mode.fallback:: + "$(this.bundle): falling back to classic_services to $(state) $(service)"; + +} + +body action fresh_systemd_state +# @brief An 'action' body ensuring the state information for a systemd service is always fresh +# +# This 'action' body disables caching for functions, in particular the +# execresult*() family of functions. +# +# Although it's now the same as the 'immediate' action body, this may change in +# the future. +{ + cfengine:: + # ^Needed for versions 3.15.x and older that did not + # support empty bodies. When 3.15.x is no longer + # supported, this can be removed. + +@if minimum_version(3.18.1) + # Beginning with 3.18.1, ifelapsed being set to 0 results in bypassing of + # function caching. In versions prior to 3.18, if an action body with + # ifelapsed set to 0 was used in a vars type promise a warning was + # emitted. This is guarded to suppress that warning in older versions + # where this setting would not change the behavior. + + ifelapsed => "0"; +@endif + +} + +bundle agent systemd_services(service,state) +# @brief Manage systemd service state +# @author Bryan Burke +# @param service specific service to control +# @param state The desired state for that service: "active", "inactive", "restart", "reload", "enabled", "disabled", "start", and "stop" are specifically understood states. Any other custom state will be passed through to systemctl. +# +# **State descriptions:** +# +# * active - Service should be running, no promise about state on boot made. +# * inactive - Service should not be running, no promise about state on boot made. +# * restart - Service should be restarted, no promise about state on boot made. +# * reload - Service should be reloaded, no promise about state on boot made. +# * enabled - Service should be enabled, no promise about state on boot made. +# * disabled - Service should be disabled, no promise about state on boot made. 
+# * start - Service should be running, service should be started on boot (active + enabled). +# * stop - Service should not be running, service should not be started on boot (inactive + disabled). +# +# **Example:** +# +# ```cf3 +# services: +# # Uses `standard_services`, dynamic decision about init subsystem +# "sshd" +# service_policy => "enabled"; +# +# # Explicitly use `systemd_services` +# "sshd" +# service_policy => "running", +# service_method => systemd_services; +# ``` +# +# Alternatively, since services promises are an abstraction around bundles, the service state can be promised via a methods type promise. +# +# ```cf3 +# +# methods: +# "SSHD should be running" usebundle => systemd_services("sshd", "enabled"); +# ``` +{ + vars: + systemd:: + "call_systemctl" + string => "$(paths.systemctl) --no-ask-password --global --system"; + + "systemd_properties" + string => "-pLoadState,CanStop,UnitFileState,ActiveState,LoadState,CanStart,CanReload"; + + "systemd_service_info" + slist => string_split(execresult("$(call_systemctl) $(systemd_properties) show $(service)", + "noshell"), "\n", "10"), + action => fresh_systemd_state; # Ensure this info is always fresh and not cached [CFE-3753] + + classes: + systemd:: + "service_enabled" expression => reglist(@(systemd_service_info), "UnitFileState=enabled"); + "service_enabled" -> { "CFE-2923" } + expression => returnszero( "$(call_systemctl) is-enabled $(service) > /dev/null 2>&1", useshell); + "service_active" -> { "CFE-3238" } + expression => reglist(@(systemd_service_info), "ActiveState=(active|activating)"); + "service_loaded" expression => reglist(@(systemd_service_info), "LoadState=loaded"); + "service_notfound" expression => reglist(@(systemd_service_info), "LoadState=not-found"); + + "can_stop_service" expression => reglist(@(systemd_service_info), "CanStop=yes"); + "can_start_service" expression => reglist(@(systemd_service_info), "CanStart=yes"); + "can_reload_service" expression => reglist(@(systemd_service_info), "CanReload=yes"); + + "request_start" expression => strcmp("start", "$(state)"); + "request_stop" expression => strcmp("stop", "$(state)"); + "request_reload" expression => strcmp("reload", "$(state)"); + "request_restart" expression => strcmp("restart", "$(state)"); + "request_disable" expression => strcmp("disable", "$(state)"); + "request_disabled" expression => strcmp("disabled", "$(state)"); + "request_enable" expression => strcmp("enable", "$(state)"); + "request_enabled" expression => strcmp("enabled", "$(state)"); + "request_active" expression => strcmp("active", "$(state)"); + "request_inactive" expression => strcmp("inactive", "$(state)"); + + "action_custom" expression => "!(request_start|request_stop|request_reload|request_restart|request_disable|request_disabled|request_enable|request_enabled|request_active|request_inactive)"; + "action_start" expression => "(request_start|request_active).!service_active.can_start_service"; + "action_stop" expression => "(request_stop|request_inactive).service_active.can_stop_service"; + "action_reload" expression => "request_reload.service_active.can_reload_service"; + "action_restart" or => { + "request_restart", + + # Possibly undesirable... if a reload is + # requested, and the service "can't" be + # reloaded, then we restart it instead. 
+ "request_reload.!can_reload_service.service_active", + }; + + # Starting a service implicitly enables it + "action_enable" expression => "(request_start|request_enable|request_enabled).!service_enabled"; + + # Respectively, stopping it implicitly disables it + "action_disable" expression => "(request_disable|request_disabled|request_stop).service_enabled"; + + commands: + systemd.service_loaded:: # note this class is defined in `inventory/linux.cf` + # conveniently, systemd states map to `services` states, except + # for `enable` + + "$(call_systemctl) -q start $(service)" + if => "action_start"; + + "$(call_systemctl) -q stop $(service)" + if => "action_stop"; + + "$(call_systemctl) -q reload $(service)" + if => "action_reload"; + + "$(call_systemctl) -q restart $(service)" + if => "action_restart"; + + "$(call_systemctl) -q enable $(service)" + if => "action_enable"; + + "$(call_systemctl) -q disable $(service)" + if => "action_disable"; + + # Custom action for any of the non-standard systemd actions such a + # status, try-restart, isolate, et al. + "$(call_systemctl) $(state) $(service)" + if => "action_custom"; + + reports: + systemd.service_notfound.(start|restart|reload).(inform_mode|verbose_mode):: + "$(this.bundle): Could not find service: $(service)"; +} + +bundle agent classic_services(service,state) +# @brief Classic services bundle +# @author CFEngine AS +# @author Tero Kantonen +# @param service specific service to control +# @param state desired state for that service +# +# This bundle is used by `standard_services` if it doesn't have an +# automatic driver for the current service manager. +# +# It receives the service name and the desired service state, then +# does the needful to reach the desired state. +# +# **Example:** +# +# ```cf3 +# services: +# "ntp" service_policy => "start"; +# "ssh" service_policy => "stop"; +# ``` +# +# There's multiple ways you can add new services to this list. +# Here's few examples: +# +# a) The zeroconf mode; If the new service matches these rules, +# you don't need to add anything to the standard_services: +# +# 1. Your init script basename = `$(service)` +# 2. Your init script argument = `$(state)` +# 3. Your init script lives in `/etc/init.d/` (for non-*bsd), +# or `/etc/rc.d/` (for *bsd) +# 4. Your process regex pattern = `\b$(service)\b` +# 5. You call the init as `/etc/init.d/ @@ -21,7 +23,7 @@
- Rudder + Rudder
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/apiManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/access/apiManagement.html similarity index 81% rename from webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/apiManagement.html rename to webapp/sources/rudder/rudder-web/src/main/webapp/secure/access/apiManagement.html index 7917572ff05..a4ef051e6dc 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/apiManagement.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/access/apiManagement.html @@ -2,9 +2,8 @@
That gives us a JS variable with the servlet - context path named "contextPath" and the API - path for token management named apiPath - var contextPath = "/rudder""; + context path named "contextPath" and the variable + for module status `aclPluginEnabled` and `tenantsPluginEnabled`
@@ -22,10 +21,12 @@ + + +
+ + + diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/databaseManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/databaseManagement.html deleted file mode 100644 index 95c2ddeaef8..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/databaseManagement.html +++ /dev/null @@ -1,177 +0,0 @@ - - - - Rudder - Reports database - - - - -
-
-
-
-

- Reports database -

-
-
-
-
-
-
-
- -
-

Active reports -

-
-
- -
-
- Active reports are stored in an SQL table with full indexing. - Storing large quantities may consume considerable disk space, - and generally slow Rudder down. Active reports are required - to calculate current compliance, and are available for problem - analysis in the "Technical logs" tab of a node's details. -
Note: - When reports are deleted, they are removed from - the active reports table. However, this will not - immediately free up disk space (but new active reports will - not use any extra space). See the Rudder User Documentation - to recover this space. -
-
- - - - - - - - - - - - - -
Newest report:[Here - comes the newest entry]
Oldest report:[Here - comes the oldest entry]
Total disk space used by reports:[here - comes the database size]
-
-
- -
-

Automatic report cleaning

-
-
-
- -
-
- Regular deleting of reports can be set in the - Rudder configuration file (/opt/rudder/etc/rudder-web.properties). -
Enabling regular deleting of old reports is - highly recommended, to avoid Rudder's disk usage growing forever. -
-
-
- Automatic report deleting is [Here - comes the report deleting status]. - All reports older than - [Here comes the automatic report - delete age parameter] days will be deleted regularly. - -
- - - - - - - - - -
Frequency:[Here - comes the cleaning job frequency]
Next run:[Here comes the next run time for the cleaning job]
-
-
- -
-

Manual report cleaning

-
-
-
- -
-
- You can trigger a reports database cleaning in this section. If a - delete process is already in progress, you will have to wait - until the cleaning process is idle. -
-
-
- The delete process is - [Here comes the report archiving progress]. -
- - - - -
-
-
- - all reports older than: - - - - - -
- - - -
-
- -
-
-
-
-
-
-
-
-
-
-
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/maintenance.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/maintenance.html new file mode 100644 index 00000000000..aeaf974d961 --- /dev/null +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/maintenance.html @@ -0,0 +1,69 @@ + + + Maintenance + + +
+
+
+
+
+

+ Maintenance +

+
+
+ +
+
+
+
+
+
+
+
+
+
+
+ + +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/pluginInformation.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/pluginInformation.html new file mode 100644 index 00000000000..5ca7abe73fe --- /dev/null +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/pluginInformation.html @@ -0,0 +1,35 @@ + + + + Plugins + + + + +
+ + + +
\ No newline at end of file diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/policyServerManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/policyServerManagement.html deleted file mode 100644 index 5d57fb9764a..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/policyServerManagement.html +++ /dev/null @@ -1,658 +0,0 @@ - - - - Rudder - General settings - - - - -
-
-
-
-

- General settings -

-
-
-
- - - -
-
-
-
-

Allowed networks

-
-
-

- Configure the networks from which nodes are allowed - to connect to the Rudder policy servers to get their updated - configuration policy. -

-

- You can add as many networks as you want, the expected - format is: NetworkIP/mask, for example - "42.42.0.0/16". -

-
-
-
-
-
-
-
-
-
-
- - - - -
-
- -
-
- -
- -
- - -
- - -
-
-
-
-
-
-
-
- -
-

Security

-
-
-
    -
  • -
    - - -
    -
  • -
- - - -
-
-
-
- -
-

Relay synchronization

-
-

- Configure the method used to synchronize files between Relay servers and the Rudder server. -

-

- The classic method doesn't require any setup and use the standard protocol. However, it does not scale beyond 1000 nodes per relay. -

-

- The rsync method triggers rsync synchronization between each Relay and the Rudder server, for the selected resources (policies and/or shared files). It is more efficient, but you need to manually set up rsync on the Relay servers, and proceed with the SSH key exchange. Note that ressources not selected below won't be synchronized. -

-

- Finally, the manual method disable all synchronization of policies and shared files between the Relay servers and the Rudder server; you will need to manually set up a transfer method. -

-
- -
-
- -
-
-
    -
  • -
    - - -
    -
  • -
  • -
    - - -
    -
  • -
-
- - - -
-
-
- -
- -
-

Default settings for new nodes

-
-
-
-

Configure the default state and policy mode for nodes when they are accepted within Rudder.

-

State may be used to classify nodes, in search and groups, and some states have also an impact on the policies generation for the node. -

-

You may read the node lifecycle documentation for more information.

-
-
-
-
    -
  • -
    - - -
    -
  • -
-
    -
  • -
    - - -
    -
  • -
- - - -
-
-
-
-
- -
- -
-

Modified files backup

-
-
-

Every time Rudder modifies a file (by file editing or copying from a remote source), a backup is created on the agent under /var/rudder/modified-files/.

-
-
-
-
- - -
- - - -
-
-
-
- -
-

Logging

-
-
-
-

All nodes in Rudder send reports via syslog to this Rudder root server. These logs are stored in an SQL database in order to determine compliance information displayed in this web interface. However, it can be useful to also store this information in a plain text log file, for example for statistics or debugging purposes. The option below enables this.

-

- Also, the full output from each agent run is stored in a file under /var/rudder/cfengine-community/outputs/. - These files are automatically removed to save on disk space. You can configure the retention time (Time To Live) they are kept for here. -

-
-
-
- - -
- - - -
-
-
-
- -
-

Audit logs

-
-
-
-

- If enabled, prompt users to enter a message explaining the reason for each configuration change they make.
- These messages will be stored in each Event log and as the commit message for the underlying git repository in - -

-
-
-
    -
  • -
    - - -
    -
  • -
  • -
    - - -
    -
  • -
-
- -
- - - - -
-
- - - -
-
-
-
- -
- -
-

Compliance display

-
-
-
-

In Rules table, we display a graph for each Rule showing its activity (number of repaired reports).

-

Unfortunately, some browsers (especially Firefox) have trouble displaying them and make Rule pages almost unusable.

-

If you experience slow loading of Rules pages, you can disable this feature here.

-
-
-
    -
  • -
    - - -
    -
  • -
-
-
- In directive configuration page, we have the possibility to choose rules for the directive. The rule - are presented in a summary table which look alike the one in rule page. For performance on aesthetic - reason, you may want to hide compliance and recent changes columns on that table. - The column will still be displayed on the rule page. -
-
-
    -
  • -
    - - -
    -
  • -
- - - [messages] - -
-
-
-
- -
-

Script evaluation in directives

-
-
-
- If enabled, all directive fields can contain a JavaScript expression. - These expressions are evaluated during promise generation, and can therefore provide unique values for each node. - Read the script documentation for more information. -
-
-
    -
  • -
    - - -
    -
  • -
- - - -
-
-
-
- -
-

Policy generation system hooks

-
-
-
-

Development Policy Check

-

Rudder can check generated policies syntax after generation to ensure that only syntactically valid policies are deployed on node..

-

- That check is especially useful when you are developing your own generic method or technique without the technique - editor, and you want to catch possible bugs as early as possible. -

-

- This check can add some overhead to policy generation, so if you don't code techniques or generic methods, - you can disable it to make policy generation quicker. -

-
-
-
    -
  • -
    - - -
    -
  • -
- - - [messages] - -
-
-
-
-
-

Trigger immediate update of nodes when their configuration changes

-

When new policies are generated, you have to wait for the next scheduled run of a node to have it apply them.

-

- Depending on your use case, you may prefer to reduce latency between a node configuration change and its application on - the node. For that, Rudder can send an update notification at the end of a policy generation to all nodes whose configuration - has changed. That notification will tell the corresponding nodes to start a run immediately - and thus, not honouring neither - their agent run period nor the splaytime between nodes. This can be a problem on large installation where hundreds of nodes would start a - run concurrently, using a lot of CPU resources and network at the same time. -

-

- So by default, we advice to always limit the notification to a maximum number of nodes. Setting that number - to 0 has the same effect than disabling that hook. -

-

- You can also limit the number of notified nodes to a fraction of the total updated node so that you can - see on a small number of nodes how a change works. This is especially interesting if your schedule period is - long, like 30 min, which let you some time to revert a faulty change. Of course, that help does not replace - a real rollout strategy to deploy policies, and you should always test critical configuration changes on a - dedicated group of nodes before rolling out the change on other production nodes. -

-

- If the number of nodes given by the ratio is bigger than the maximun number of nodes configured in the previous - parameter, then the configured maximun number of node is used. -

-

- Nodes are chosen at random, with the exception of the policy servers that will always be notified first. -

-

- The ratio of nodes to update is given in percent, rounded up, so that 1% of 10 nodes will be 1 node, - and 90% of 8 nodes will be all the 8 nodes. Setting that ration to 0 is the same than disabling that hook, - and setting it to 100 -

-

- The notification use the "trigger remote agent run" feature and you will need to open network port accordingly to - that (see documentation). -

-
-
-
-
    -
  • -
    - - -
    -
  • -
-
- - -
-
- - -
- - - [messages] - -
-
-
-
-
- -
-

Debug information

-
-
-

Launch the debug script (/opt/rudder/bin/rudder-debug-info) to get information about your setup.

-

The provided information are used to troubleshoot Rudder.

-

Data includes various commands about your install (package version ...), your system and useful logs

-
-
- -
- - [error] -
-
-
-
-
- -
-

Usage survey

-
-
-
-

To help the Rudder team continue to improve this software day after day, we are running a survey to collect usage statistics.

-

These statistics are submitted anonymously, and include overall statistics about your instance of Rudder - (number of Rules, Directives, Nodes, etc). No potentially-sensitive data is included - (only stock Rudder-provided techniques are examined, no hostnames, etc). - We highly value your privacy, as we do our own, so we will never share individual submissions (only globally compiled statistics). -

-

- If you want to check the information that is sent, just run /opt/rudder/bin/rudder-metrics-reporting -v on your Rudder server. This won't send any information without your consent. -

-

- This information is very valuable to the development team, as it helps us focus on the features that matter most and better understand what our users care about. - Please consider participating in the survey! -

-
-
-
-
-
-
- - -
-
- -
- -
-
-
-
-
-
-
-
- -
-

Reload groups

-
-
-

Groups in Rudder can be static (fixed list of nodes) or dynamic (the list of nodes is built from a search query).

-

To take into account new nodes and changes to their inventory, dynamic groups must be reloaded regularly.

-

Currently, Rudder will automatically do this reload every 5 minutes (see /opt/rudder/etc/rudder-web.properties).

-
-
- -
- -
-
-
-
-
- -
-

Reload techniques

-
-
-

Techniques in Rudder are read from the filesystem (in /var/rudder/configuration-repository/techniques).

-

To take into account new Techniques and changes, the Technique library must be updated regularly.

-

Currently, Rudder will automatically do this update every 5 minutes (see /opt/rudder/etc/rudder-web.properties).

-
-
- -
-
-
-
-
- -
-
-
-
-
-
- -
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/settings.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/settings.html new file mode 100644 index 00000000000..368d8e38447 --- /dev/null +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/settings.html @@ -0,0 +1,42 @@ + + + Settings + + + +
+
+
+
+
+

+ Settings +

+
+
+ +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/techniqueLibraryManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/techniqueLibraryManagement.html deleted file mode 100644 index 98a1ada658b..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/administration/techniqueLibraryManagement.html +++ /dev/null @@ -1,103 +0,0 @@ - - - -Rudder - Technique library management - - -
- -
-
-
-
Technique library
-
-
- -
- - Only techniques in the user library are available to create new directives. Add new techniques by drag'n'dropping them from left to right. -
-
- Arrange techniques and categories in the active techniques library into the organization that suits you best. -
-
-
-
-
-
- Reference technique library - - Edit techniques - -
-
-
-
-
- - - - -
-
-
-
-
-
- -
-
- -
-
- Active techniques library - -
-
-
-
-
-
-
- - - -
-
-
-
-
-
- -
-
-
- -
- -
- - - - - -
- diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/eventLogs.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/changeLogs.html similarity index 68% rename from webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/eventLogs.html rename to webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/changeLogs.html index 9f532c03679..e1294498f85 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/eventLogs.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/changeLogs.html @@ -1,34 +1,8 @@ - Rudder - Event logs - + Rudder - Change logs + @@ -37,18 +11,18 @@

- Event logs + Change logs

- Get event logs between + Get change logs between and - +
- +
@@ -57,11 +31,11 @@

-
+
-
+
- This div gets the content of the eventDetailPopup injected within + This div gets the content of the changeDetailPopup injected within
@@ -96,6 +70,6 @@

-
+
diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/directiveManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/directiveManagement.html index 338aadfa323..a2aad9d6c74 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/directiveManagement.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/directiveManagement.html @@ -48,16 +48,19 @@

Technique

+
+ +
+
The Directive [Directive] is based on following Technique:
-
[Disabled Technique]

Description

[technique.description] diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/parameterManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/parameterManagement.html index db932f93de1..ca1d82bde56 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/parameterManagement.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/configurationManager/parameterManagement.html @@ -80,7 +80,7 @@

Be careful to enter exactly node.properties (lower case) and your global property name with the same case - (global property keys are case sensitive). Also, remember that their value can be overriden by + (global property keys are case sensitive). Also, remember that their value can be overridden by group and node properties.

@@ -103,7 +103,7 @@

-
+

- \ No newline at end of file + diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/index.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/index.html index 0b410b5c4c0..1127b86780b 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/index.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/index.html @@ -46,7 +46,7 @@
Statistics
  • - + Techniques
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/manageNewNode.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/manageNewNode.html index e86c42877df..c94154ab4fb 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/manageNewNode.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/manageNewNode.html @@ -71,7 +71,7 @@

    -
    +
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/node.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/node.html index 227d2893bcb..ab20dd30ae6 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/node.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/node.html @@ -21,9 +21,8 @@
    -
    +
    -
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/nodes.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/nodes.html index 268137334a8..e6c38ae7a96 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/nodes.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/nodes.html @@ -9,7 +9,7 @@ flex-basis : initial !important; flex: auto !important; } - #nodes tbody tr td .rudder-label.label-sm:not(.label-provider){ + #nodes_tab .dataTable tbody tr td .rudder-label.label-sm:not(.label-provider){ padding: 3px 24px !important; margin-right: 0; font-size: inherit; @@ -26,16 +26,52 @@

    -
    -
    -
    -
    -
    + +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    -
    + diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/searchNodes.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/searchNodes.html deleted file mode 100644 index 4c91aab6dc8..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/nodeManager/searchNodes.html +++ /dev/null @@ -1,61 +0,0 @@ - - - - Rudder - Node search - - - -
    -
    -
    -
    -
    -

    - Node search -

    -
    -
    -

    - Find nodes that match criteria. - Define node groups from your search results. -

    -
    -
    - -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/plugins/pluginInformation.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/plugins/pluginInformation.html deleted file mode 100644 index 4eb98ba7715..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/plugins/pluginInformation.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - Rudder - Plugins Management - - - -
    -
    -
    -
    -
    -

    Plugins

    -
    -
    -

    - Plugins can extend Rudder’s base functionality to add extra features. They can be either open source or - proprietary (this is up to the plugin author). -

    -

    - Learn about available plugins on our website, or - directly - download free plugins. -

    -
    -
    - -
    -
    -
    -
    -

    Plugins are managed by the Rudder package manager.

    -

    - You can consult the list of plugins with the command rudder package list --all
    - To consult the list of plugins already installed you can use the command rudder package list
    - Finally you can install a plugin with the command rudder package install plugin-name
    -

    -

    Rudder package manager comes with more commands, to consult the help use the command rudder package help

    -
    -
    -

    [Here comes the plugins name]

    -

    [Here comes the plugin description]

    -
      -
    • Plugin ID: [Here comes the plugin full id]
    • -
    • Plugin version: [Here comes the plugin version]
    • -
    • Rudder ABI version: [Here comes the rudder version used by - the plugin] - [warning message if patch version of the plugin is not the same as - Rudder] -
    • -
    - -
    - [Here comes optionnal information about plugin - status/license] -
    - -
    - -
    -
    -
    -
    -
    -
    \ No newline at end of file diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/archiveManagement.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/archiveManagement.html deleted file mode 100644 index 64105614157..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/archiveManagement.html +++ /dev/null @@ -1,233 +0,0 @@ - - - - Rudder - Archives - - - - - - -
    -
    -
    -
    -

    - Archives -

    -
    -
    -
    -
    -
    -
    - -
    -

    Global archive

    -
    -
    -
    - -
    - Archive or restore settings (groups, rules, directives and active techniques) from Rudder to backup, restore or transfer items between Rudder instances. -
    - -
    - -
    - - - -
    - [error] -
    -
    -
    -
    - -
    -

    Archive groups

    -
    -
    -
    - -
    -
    Archive or restore groups only.
    -
    -
    -
    - -
    - - - -
    - [error] -
    -
    -
    -
    - -
    -

    Archive rules

    -
    -
    -
    - -
    -
    Archive or restore rules only.
    -
    -
    -
    - -
    - - - -
    - [error] -
    -
    -
    -
    - -
    -

    Archive directives and active techniques

    -
    -
    -
    - -
    -
    Archive or restore directives and active techniques only.
    -
    -
    -
    - -
    - - - -
    - [error] -
    -
    -
    -
    - -
    -

    Archive parameters

    -
    -
    -
    - -
    -
    Archive or restore parameters only.
    -
    -
    -
    - -
    - - - -
    - [error] -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - - - - -
    - diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/healthcheck.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/healthcheck.html deleted file mode 100644 index 01a370badb9..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/healthcheck.html +++ /dev/null @@ -1,45 +0,0 @@ - - -
    - - Health check - - - - -
    -
    -
    -
    -

    - Health check -

    -
    -
    - Get an overview of Rudder server's health -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - - - - diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/index.html b/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/index.html deleted file mode 100644 index 2efe8481e26..00000000000 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/secure/utilities/index.html +++ /dev/null @@ -1,4 +0,0 @@ - - -
    -
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/accept_new_server.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/accept_new_server.html index 8f73a30fa6a..f3db33cb860 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/accept_new_server.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/accept_new_server.html @@ -17,7 +17,7 @@
    Please confirm that you wish to accept the following nodes in Rudder
    -
    +
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/createCloneGroupPopup.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/createCloneGroupPopup.html index c86d57d68ce..2feda40eab7 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/createCloneGroupPopup.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/createCloneGroupPopup.html @@ -10,7 +10,7 @@ -
    +
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/expected_policy_popup.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/expected_policy_popup.html index 39f9ce6c9b5..f667f5d5ab1 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/expected_policy_popup.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/expected_policy_popup.html @@ -37,7 +37,7 @@

    The following Rules will be applied to this node as it j
    All the following grid will be replace by the real grid
    -
    +

    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/refuse_new_server.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/refuse_new_server.html index 1fc0c59b65b..cfcfe10eadf 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/refuse_new_server.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/Popup/refuse_new_server.html @@ -17,7 +17,7 @@
    Please confirm that you wish to refuse the following nodes in Rudder
    -
    +
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/common-layout.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/common-layout.html index 4ba679e20ce..08c34577303 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/common-layout.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/common-layout.html @@ -15,9 +15,6 @@ - - - @@ -30,6 +27,7 @@ + @@ -42,6 +40,8 @@ + + @@ -108,8 +108,8 @@
    - - - - - - - - -
    Col1Col2Col3
    Col1Col2Col3
    Col1Col2Col3
    Col1Col2Col3
    - -
    @@ -98,10 +85,10 @@

    -

    No description defined, click on to edit

    +

    No description defined, click on to edit

    -
    +
    Here comes the longDescription field
    @@ -196,77 +183,4 @@

    Technique version deprecated

    - -
    Directive information
    -
    -
    - Here comes the name field -
    -
    -
    - Here comes the shortDescription field -
    -
    -
    - Here comes the longDescription field -
    -
    - -
    - Here come the priority field -
    -
    -
    - -
    -
    - - -
    -
    -
    Categories
    -
    -
    -
    -
    -
    -
    Rules
    -
    -
    -
    -
    -
    -
    - diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentRuleEditForm.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentRuleEditForm.html index 9ea544a4e96..e0025e50df3 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentRuleEditForm.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentRuleEditForm.html @@ -168,10 +168,10 @@

    Rule

    -

    No description defined, click on to edit

    +

    No description defined, click on to edit

    -
    +
    Here comes the longDescription field
    diff --git a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentTechniqueEditForm.html b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentTechniqueEditForm.html index ac90dfde4a2..dc095d4b769 100644 --- a/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentTechniqueEditForm.html +++ b/webapp/sources/rudder/rudder-web/src/main/webapp/templates-hidden/components/ComponentTechniqueEditForm.html @@ -36,11 +36,9 @@ -
    -
    -
    -
    -
    +
    +
    +
    @@ -89,54 +87,49 @@

    [technique.description]

    -
    -