diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..8ab4cdfe --- /dev/null +++ b/.editorconfig @@ -0,0 +1,7 @@ +root = true + +[tests/golden/*.jsonnet.golden] +generated_code = true +indent_style = space +indent_size = 4 +insert_final_newline = false diff --git a/.envrc b/.envrc index 3550a30f..31565b0e 100644 --- a/.envrc +++ b/.envrc @@ -1 +1,8 @@ +RED='\033[0;31m' +RESET='\033[0m' + use flake + +if ! diff .github/hooks/pre-commit .git/hooks/pre-commit >/dev/null; then +echo -e "${RED}Hooks are updated, read .github/hooks/pre-commit, and then install it with cp .github/hooks/pre-commit .git/hooks/pre-commit${RESET}" +fi diff --git a/.github/hooks/pre-commit b/.github/hooks/pre-commit new file mode 100755 index 00000000..58f3e5c6 --- /dev/null +++ b/.github/hooks/pre-commit @@ -0,0 +1 @@ +cargo xtask lint diff --git a/Cargo.lock b/Cargo.lock index 05039bdd..f21a208a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -90,9 +90,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.83" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "autocfg" @@ -102,9 +102,9 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "base64" -version = "0.21.7" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "beef" @@ -112,15 +112,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" -[[package]] -name = "bincode" -version = "1.3.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - [[package]] name = "bitflags" version = "2.5.0" @@ -194,7 +185,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.61", + "syn 2.0.64", ] [[package]] @@ -257,6 +248,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.10.7" @@ -289,9 +286,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" +checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" [[package]] name = "encode_unicode" @@ -412,9 +409,9 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "insta" -version = "1.38.0" +version = "1.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eab73f58e59ca6526037208f0e98851159ec1633cf17b6cd2e1f2c3fd5d53cc" +checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5" dependencies = [ "console", "lazy_static", @@ -430,9 +427,9 @@ checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" [[package]] name = "itertools" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -477,7 +474,6 @@ version = "0.5.0-pre96" dependencies = [ "annotate-snippets", "anyhow", 
- "bincode", "derivative", "hashbrown 0.14.5", "hi-doc", @@ -511,9 +507,9 @@ dependencies = [ [[package]] name = "jrsonnet-gcmodule" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c11fb98940a7f8b419619e98ccbf2e094671a5fdd0e277f05acd373071186d57" +checksum = "47975473b24b4503acee0d449fd5eda04ae18828f9811828a6cc99abbbbc38c8" dependencies = [ "jrsonnet-gcmodule-derive", "parking_lot", @@ -521,13 +517,13 @@ dependencies = [ [[package]] name = "jrsonnet-gcmodule-derive" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bee774b7ba86fc86ee84482cd6732aa860ae3559f9827c65efd75c51e66ac76" +checksum = "4782d1d76731f5e5bb4bdeff26ed3350f21d662f178ce6dee7b4da810e7a8f72" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.64", ] [[package]] @@ -537,8 +533,6 @@ dependencies = [ "hashbrown 0.14.5", "jrsonnet-gcmodule", "rustc-hash", - "serde", - "structdump", ] [[package]] @@ -547,7 +541,7 @@ version = "0.5.0-pre96" dependencies = [ "proc-macro2", "quote", - "syn 2.0.61", + "syn 2.0.64", ] [[package]] @@ -557,9 +551,7 @@ dependencies = [ "jrsonnet-gcmodule", "jrsonnet-interner", "peg", - "serde", "static_assertions", - "structdump", ] [[package]] @@ -580,7 +572,6 @@ name = "jrsonnet-stdlib" version = "0.5.0-pre96" dependencies = [ "base64", - "bincode", "jrsonnet-evaluator", "jrsonnet-gcmodule", "jrsonnet-macros", @@ -596,7 +587,6 @@ dependencies = [ "sha1", "sha2", "sha3", - "structdump", ] [[package]] @@ -607,6 +597,17 @@ dependencies = [ "peg", ] +[[package]] +name = "json-structural-diff" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25c7940d3c84d2079306c176c7b2b37622b6bc5e43fbd1541b1e4a4e1fd02045" +dependencies = [ + "difflib", + "regex", + "serde_json", +] + [[package]] name = "keccak" version = "0.1.5" @@ -624,9 +625,9 @@ checksum = 
"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.154" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libjsonnet" @@ -634,6 +635,7 @@ version = "0.5.0-pre96" dependencies = [ "jrsonnet-evaluator", "jrsonnet-gcmodule", + "jrsonnet-interner", "jrsonnet-parser", "jrsonnet-stdlib", ] @@ -646,9 +648,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" @@ -681,7 +683,7 @@ dependencies = [ "proc-macro2", "quote", "regex-syntax", - "syn 2.0.61", + "syn 2.0.64", ] [[package]] @@ -779,9 +781,9 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -989,22 +991,22 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.201" +version = "1.0.202" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" +checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" dependencies = [ "serde_derive", ] [[package]] name = 
"serde_derive" -version = "1.0.201" +version = "1.0.202" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" +checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" dependencies = [ "proc-macro2", "quote", - "syn 2.0.61", + "syn 2.0.64", ] [[package]] @@ -1086,28 +1088,6 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "structdump" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0570327507bf281d8a6e6b0d4c082b12cb6bcee27efce755aa5efacd44076c1" -dependencies = [ - "proc-macro2", - "quote", - "structdump-derive", -] - -[[package]] -name = "structdump-derive" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cc0b59cfa11f1bceda09a9a7e37e6a6c3138575fd24ade8aa9af6d09aedf28" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "syn" version = "1.0.109" @@ -1121,9 +1101,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.61" +version = "2.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c993ed8ccba56ae856363b1845da7266a7cb78e1d146c8a32d54b45a8b831fc9" +checksum = "7ad3dee41f36859875573074334c200d1add8e4a87bb37113ebd31d926b7b11f" dependencies = [ "proc-macro2", "quote", @@ -1149,7 +1129,9 @@ dependencies = [ "jrsonnet-evaluator", "jrsonnet-gcmodule", "jrsonnet-stdlib", + "json-structural-diff", "serde", + "serde_json", ] [[package]] @@ -1160,22 +1142,22 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "thiserror" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" 
+checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.61", + "syn 2.0.64", ] [[package]] @@ -1313,6 +1295,7 @@ name = "xtask" version = "0.1.0" dependencies = [ "anyhow", + "clap", "indexmap 2.2.6", "itertools", "proc-macro2", @@ -1347,5 +1330,5 @@ checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.61", + "syn 2.0.64", ] diff --git a/Cargo.toml b/Cargo.toml index 2ace9a6e..de0f47d3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,9 +19,7 @@ jrsonnet-interner = { path = "./crates/jrsonnet-interner", version = "0.5.0-pre9 jrsonnet-stdlib = { path = "./crates/jrsonnet-stdlib", version = "0.5.0-pre96" } jrsonnet-cli = { path = "./crates/jrsonnet-cli", version = "0.5.0-pre96" } jrsonnet-types = { path = "./crates/jrsonnet-types", version = "0.5.0-pre96" } - -jrsonnet-gcmodule = "0.3.6" - +jrsonnet-gcmodule = { version = "0.3.7" } # Diagnostics. # hi-doc is my library, which handles text formatting very well, but isn't polished enough yet # Previous implementation was based on annotate-snippets, which I don't like for many reasons. @@ -44,8 +42,8 @@ serde_json = "1.0.114" serde_yaml_with_quirks = "0.8.24" # Error handling -anyhow = "1.0.80" -thiserror = "1.0" +anyhow = "1.0.83" +thiserror = "1.0.60" # Code formatting dprint-core = "0.65.0" @@ -56,44 +54,41 @@ sha1 = "0.10.6" sha2 = "0.10.8" sha3 = "0.10.8" -# Pre-parsed stdlib serialization. -# TODO: Drop in favor of replacing std.jsonnet with full native implementation, version 2.0 of bincode is bad. 
-bincode = "1.3" - # Source code parsing. # Jrsonnet has two parsers for jsonnet - one is for execution, and another is for better parsing diagnostics/lints/LSP. # First (and fast one) is based on peg, second is based on rowan. -peg = "0.8.2" +peg = "0.8.3" logos = "0.14.0" ungrammar = "1.16.1" -rowan = "0.15" +rowan = "0.15.15" mimallocator = "0.1.3" indoc = "2.0" -insta = "1.35" +insta = "1.39" tempfile = "3.10" pathdiff = "0.2.1" -hashbrown = "0.14.3" +hashbrown = "0.14.5" static_assertions = "1.1" rustc-hash = "1.1" -num-bigint = "0.4.4" +num-bigint = "0.4.5" derivative = "2.2.0" strsim = "0.11.0" -structdump = "0.2.0" proc-macro2 = "1.0" quote = "1.0" syn = "2.0" drop_bomb = "0.1.5" -base64 = "0.21.7" +base64 = "0.22.1" indexmap = "2.2.3" -itertools = "0.12.1" -xshell = "0.2.5" +itertools = "0.13.0" +xshell = "0.2.6" lsp-server = "0.7.6" -lsp-types = "0.95.0" +lsp-types = "0.96.0" + +regex = "1.10" +lru = "0.12.3" -regex = "1.10.3" -lru = "0.12.2" +json-structural-diff = "0.1.0" [workspace.lints.rust] unsafe_op_in_unsafe_fn = "deny" diff --git a/bindings/c/libjsonnet.h b/bindings/c/libjsonnet.h index 093b4550..1ebea79b 100644 --- a/bindings/c/libjsonnet.h +++ b/bindings/c/libjsonnet.h @@ -1,22 +1,9 @@ -/* -Copyright 2015 Google Inc. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - #ifndef LIB_JSONNET_H #define LIB_JSONNET_H #include -/** \file This file is a library interface for evaluating Jsonnet. 
It is written in C++ but exposes +/** \file This file is a library interface for evaluating Jsonnet. It is written in Rust but exposes * a C interface for easier wrapping by other languages. See \see jsonnet_lib_test.c for an example * of using the library. */ @@ -28,10 +15,10 @@ limitations under the License. * * If this isn't the sae as jsonnet_version() then you've got a mismatched binary / header. */ -#define LIB_JSONNET_VERSION "v0.16.0" +#define LIB_JSONNET_VERSION "v0.20.0" /** Return the version string of the Jsonnet interpreter. Conforms to semantic versioning - * http://semver.org/ If this does not match LIB_JSONNET_VERSION then there is a mismatch between + * https://semver.org/ If this does not match LIB_JSONNET_VERSION then there is a mismatch between * header and compiled library. */ const char *jsonnet_version(void); @@ -66,10 +53,12 @@ void jsonnet_string_output(struct JsonnetVm *vm, int v); * process's CWD. This is necessary so that imports from the content of the imported file can * be resolved correctly. Allocate memory with jsonnet_realloc. Only use when *success = 1. * \param success Set this byref param to 1 to indicate success and 0 for failure. - * \returns The content of the imported file, or an error message. + * \param buf Set this byref param to the content of the imported file, or an error message. Allocate memory with jsonnet_realloc. Do not include a null terminator byte. + * \param buflen Set this byref param to the length of the data returned in buf. + * \returns 0 to indicate success and 1 for failure. On success, the content is in *buf. On failure, an error message is in *buf. */ -typedef char *JsonnetImportCallback(void *ctx, const char *base, const char *rel, char **found_here, - int *success); +typedef int JsonnetImportCallback(void *ctx, const char *base, const char *rel, + char **found_here, char **buf, size_t *buflen); /** An opaque type which can only be utilized via the jsonnet_json_* family of functions. 
*/ @@ -82,7 +71,7 @@ const char *jsonnet_json_extract_string(struct JsonnetVm *vm, const struct Jsonn /** If the value is a number, return 1 and store the number in out, otherwise return 0. */ int jsonnet_json_extract_number(struct JsonnetVm *vm, const struct JsonnetJsonValue *v, - double *out); + double *out); /** Return 0 if the value is false, 1 if it is true, and 2 if it is not a bool. */ @@ -117,7 +106,7 @@ struct JsonnetJsonValue *jsonnet_json_make_array(struct JsonnetVm *vm); /** Add v to the end of the array. */ void jsonnet_json_array_append(struct JsonnetVm *vm, struct JsonnetJsonValue *arr, - struct JsonnetJsonValue *v); + struct JsonnetJsonValue *v); /** Make a JsonnetJsonValue representing an object with the given number of fields. * @@ -130,7 +119,7 @@ struct JsonnetJsonValue *jsonnet_json_make_object(struct JsonnetVm *vm); * This replaces any previous binding of the field. */ void jsonnet_json_object_append(struct JsonnetVm *vm, struct JsonnetJsonValue *obj, const char *f, - struct JsonnetJsonValue *v); + struct JsonnetJsonValue *v); /** Clean up a JSON subtree. * @@ -151,8 +140,8 @@ void jsonnet_json_destroy(struct JsonnetVm *vm, struct JsonnetJsonValue *v); * \returns The content of the imported file, or an error message. */ typedef struct JsonnetJsonValue *JsonnetNativeCallback(void *ctx, - const struct JsonnetJsonValue *const *argv, - int *success); + const struct JsonnetJsonValue *const *argv, + int *success); /** Allocate, resize, or free a buffer. This will abort if the memory cannot be allocated. It will * only return NULL if sz was zero. @@ -181,7 +170,7 @@ void jsonnet_import_callback(struct JsonnetVm *vm, JsonnetImportCallback *cb, vo * \param params NULL-terminated array of the names of the params. Must be valid identifiers. 
*/ void jsonnet_native_callback(struct JsonnetVm *vm, const char *name, JsonnetNativeCallback *cb, - void *ctx, const char *const *params); + void *ctx, const char *const *params); /** Bind a Jsonnet external var to the given string. * @@ -236,7 +225,7 @@ char *jsonnet_evaluate_file(struct JsonnetVm *vm, const char *filename, int *err * \returns Either JSON or the error message. */ char *jsonnet_evaluate_snippet(struct JsonnetVm *vm, const char *filename, const char *snippet, - int *error); + int *error); /** Evaluate a file containing Jsonnet code, return a number of named JSON files. * @@ -260,7 +249,7 @@ char *jsonnet_evaluate_file_multi(struct JsonnetVm *vm, const char *filename, in * \returns Either the error, or a sequence of strings separated by \0, terminated with \0\0. */ char *jsonnet_evaluate_snippet_multi(struct JsonnetVm *vm, const char *filename, - const char *snippet, int *error); + const char *snippet, int *error); /** Evaluate a file containing Jsonnet code, return a number of JSON files. * @@ -284,9 +273,46 @@ char *jsonnet_evaluate_file_stream(struct JsonnetVm *vm, const char *filename, i * \returns Either the error, or a sequence of strings separated by \0, terminated with \0\0. */ char *jsonnet_evaluate_snippet_stream(struct JsonnetVm *vm, const char *filename, - const char *snippet, int *error); + const char *snippet, int *error); /** Complement of \see jsonnet_vm_make. */ void jsonnet_destroy(struct JsonnetVm *vm); -#endif // LIB_JSONNET_H +/** Jrsonnet addition. + * + * In jrsonnet, vm state is bound to the thread, because interpreter + * also uses thread_local storage for some things (I.e GC). + * + * It makes it impossible to correctly use those bindings in golang, + * where developer has little control over goroutine scheduler. + * + * To make it work, jrsonnet provides methods to dump and restore thread + * state manually, making it possible to wire it with golang. 
+ */ +struct JrThreadCTX; + +/** Dump current thread state, to be restored with + * jrsonnet_reenter_thread. + */ +struct JrThreadCTX *jrsonnet_exit_thread(); +/** Restore thread state, freeing JrThreadCTX. + */ +void jrsonnet_reenter_thread(struct JrThreadCTX *ctx); + +struct JrThreadId; + +/** Get current thread id (opaque pointer). + */ +struct JrThreadId* jrsonnet_thread_id(); + +/** Compare two thread ids, it is not the same as a == b. + * + * \returns 1 if the same thread, 0 otherwise + */ +int jrsonnet_thread_id_compare(struct JrThreadId *a, struct JrThreadId *b); + +/** Free thread id value. + */ +void jrsonnet_thread_id_free(struct JrThreadId *id); + +#endif // LIB_JSONNET_H diff --git a/bindings/c/libjsonnet_test_file.c b/bindings/c/libjsonnet_test_file.c index 26d174ef..9bb0a561 100644 --- a/bindings/c/libjsonnet_test_file.c +++ b/bindings/c/libjsonnet_test_file.c @@ -1,16 +1,3 @@ -/* -Copyright 2015 Google Inc. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - #include #include diff --git a/bindings/jsonnet/Cargo.toml b/bindings/jsonnet/Cargo.toml index 70c5f767..455ed7ae 100644 --- a/bindings/jsonnet/Cargo.toml +++ b/bindings/jsonnet/Cargo.toml @@ -23,14 +23,21 @@ jrsonnet-evaluator.workspace = true jrsonnet-parser.workspace = true jrsonnet-stdlib.workspace = true jrsonnet-gcmodule.workspace = true +jrsonnet-interner.workspace = true [lib] name = "jsonnet" -crate-type = ["cdylib"] +crate-type = ["cdylib", "staticlib"] [features] +default = ["interop-common", "interop-wasm", "interop-threading"] # Export additional functions for native integration, i.e ability to set custom trace format -interop = [] +interop-common = [] +# Provide ability to statically override callbacks from WASM (by using imports) +interop-wasm = [] +# Provide ability to move jsonnet vm state between threads +interop-threading = [] + experimental = ["exp-preserve-order", "exp-destruct"] exp-preserve-order = ["jrsonnet-evaluator/exp-preserve-order"] exp-destruct = ["jrsonnet-evaluator/exp-destruct"] diff --git a/bindings/jsonnet/src/import.rs b/bindings/jsonnet/src/import.rs index a806f5e6..10473336 100644 --- a/bindings/jsonnet/src/import.rs +++ b/bindings/jsonnet/src/import.rs @@ -15,7 +15,7 @@ use std::{ use jrsonnet_evaluator::{ bail, error::{ErrorKind::*, Result}, - FileImportResolver, ImportResolver, + ImportResolver, }; use jrsonnet_gcmodule::Trace; use jrsonnet_parser::{SourceDirectory, SourceFile, SourcePath}; @@ -64,7 +64,7 @@ impl ImportResolver for CallbackImportResolver { self.ctx, base.as_ptr(), rel.as_ptr(), - &mut (found_here as *const _), + &mut found_here.cast_const(), &mut buf, &mut buf_len, ) @@ -106,6 +106,10 @@ impl ImportResolver for CallbackImportResolver { fn as_any(&self) -> &dyn Any { self } + + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } } /// # Safety @@ -117,11 +121,11 @@ pub unsafe extern "C" fn jsonnet_import_callback( cb: JsonnetImportCallback, ctx: *mut c_void, ) { - 
vm.state.set_import_resolver(CallbackImportResolver { + vm.replace_import_resolver(CallbackImportResolver { cb, ctx, out: RefCell::new(HashMap::new()), - }) + }); } /// # Safety @@ -131,10 +135,5 @@ pub unsafe extern "C" fn jsonnet_import_callback( pub unsafe extern "C" fn jsonnet_jpath_add(vm: &VM, path: *const c_char) { let cstr = unsafe { CStr::from_ptr(path) }; let path = PathBuf::from(cstr.to_str().unwrap()); - let any_resolver = vm.state.import_resolver(); - let resolver = any_resolver - .as_any() - .downcast_ref::() - .expect("jpaths are not compatible with callback imports!"); - resolver.add_jpath(path); + vm.add_jpath(path); } diff --git a/bindings/jsonnet/src/interop.rs b/bindings/jsonnet/src/interop.rs index 6dfcf673..ae05cdd1 100644 --- a/bindings/jsonnet/src/interop.rs +++ b/bindings/jsonnet/src/interop.rs @@ -1,53 +1,154 @@ //! Jrsonnet specific additional binding helpers -use std::{ - ffi::c_void, - os::raw::{c_char, c_int}, -}; +#[cfg(feature = "interop-wasm")] +pub mod wasm { + use std::ffi::{c_char, c_int, c_void}; -use jrsonnet_evaluator::Val; + use jrsonnet_evaluator::Val; -use crate::{import::jsonnet_import_callback, native::jsonnet_native_callback}; + use crate::VM; -extern "C" { - pub fn _jrsonnet_static_import_callback( + extern "C" { + + pub fn _jrsonnet_static_import_callback( + ctx: *mut c_void, + base: *const c_char, + rel: *const c_char, + found_here: *mut *const c_char, + buf: *mut *mut c_char, + buflen: *mut usize, + ) -> c_int; + + #[allow(improper_ctypes)] + pub fn _jrsonnet_static_native_callback( + ctx: *const c_void, + argv: *const *const Val, + success: *mut c_int, + ) -> *mut Val; + } + + #[no_mangle] + #[cfg(feature = "interop-wasm")] + // ctx arg is passed as-is to callback + #[allow(clippy::not_unsafe_ptr_arg_deref)] + pub extern "C" fn jrsonnet_apply_static_import_callback(vm: &VM, ctx: *mut c_void) { + unsafe { crate::import::jsonnet_import_callback(vm, _jrsonnet_static_import_callback, ctx) } + } + + /// # Safety + /// + 
/// `name` and `raw_params` should be correctly initialized + #[no_mangle] + #[cfg(feature = "interop-wasm")] + pub unsafe extern "C" fn jrsonnet_apply_static_native_callback( + vm: &VM, + name: *const c_char, ctx: *mut c_void, - base: *const c_char, - rel: *const c_char, - found_here: *mut *const c_char, - success: &mut c_int, - ) -> *const c_char; - - #[allow(improper_ctypes)] - pub fn _jrsonnet_static_native_callback( - ctx: *const c_void, - argv: *const *const Val, - success: *mut c_int, - ) -> *mut Val; + raw_params: *const *const c_char, + ) { + unsafe { + crate::native::jsonnet_native_callback( + vm, + name, + _jrsonnet_static_native_callback, + ctx, + raw_params, + ); + } + } } -/// # Safety -#[no_mangle] -pub unsafe extern "C" fn jrsonnet_apply_static_import_callback(vm: &VM, ctx: *mut c_void) { - jsonnet_import_callback(vm, _jrsonnet_static_import_callback, ctx) -} +#[cfg(feature = "interop-common")] +mod common { + use jrsonnet_evaluator::trace::{CompactFormat, ExplainingFormat, JsFormat, PathResolver}; + + use crate::VM; -/// # Safety -#[no_mangle] -pub unsafe extern "C" fn jrsonnet_apply_static_native_callback( - vm: &VM, - name: *const c_char, - ctx: *mut c_void, - raw_params: *const *const c_char, -) { - jsonnet_native_callback(vm, name, _jrsonnet_static_native_callback, ctx, raw_params) + #[no_mangle] + pub extern "C" fn jrsonnet_set_trace_format(vm: &mut VM, format: u8) { + match format { + 0 => { + vm.trace_format = Box::new(CompactFormat { + max_trace: 20, + resolver: PathResolver::new_cwd_fallback(), + padding: 4, + }); + } + 1 => vm.trace_format = Box::new(JsFormat { max_trace: 20 }), + 2 => { + vm.trace_format = Box::new(ExplainingFormat { + resolver: PathResolver::new_cwd_fallback(), + max_trace: 20, + }); + } + _ => panic!("unknown trace format"), + } + } } -#[no_mangle] -pub extern "C" fn jrsonnet_set_trace_format(vm: &VM, format: u8) { - use jrsonnet_evaluator::trace::JsFormat; - match format { - 1 => 
vm.set_trace_format(Box::new(JsFormat)), - _ => panic!("unknown trace format"), +#[cfg(feature = "interop-threading")] +mod threading { + use std::{ffi::c_int, thread::ThreadId}; + + pub struct ThreadCTX { + interner: *mut jrsonnet_interner::interop::PoolState, + gc: *mut jrsonnet_gcmodule::interop::GcState, + } + + /// Golang jrsonnet bindings require Jsonnet VM to be movable. + /// Jrsonnet uses `thread_local` in some places, thus making VM + /// immovable by default. By using `jrsonnet_exit_thread` and + /// `jrsonnet_reenter_thread`, you can move `thread_local` state to + /// where it is more convenient to use it. + /// + /// # Safety + /// + /// Current thread GC will be broken after this call, need to call + /// `jrsonnet_reenter_thread` before doing anything. + #[no_mangle] + pub unsafe extern "C" fn jrsonnet_exit_thread() -> *mut ThreadCTX { + Box::into_raw(Box::new(ThreadCTX { + interner: jrsonnet_interner::interop::exit_thread(), + gc: unsafe { jrsonnet_gcmodule::interop::exit_thread() }, + })) + } + + #[no_mangle] + pub extern "C" fn jrsonnet_reenter_thread(mut ctx: Box) { + use std::ptr::null_mut; + assert!( + !ctx.interner.is_null() && !ctx.gc.is_null(), + "reused context?" + ); + unsafe { jrsonnet_interner::interop::reenter_thread(ctx.interner) } + unsafe { jrsonnet_gcmodule::interop::reenter_thread(ctx.gc) } + // Just in case + ctx.interner = null_mut(); + ctx.gc = null_mut(); + } + + // ThreadId is compatible with u64, and there is unstable cast + // method... But until it is stabilized, let's erase its type by + // boxing.
+ pub enum JrThreadId {} + + #[no_mangle] + pub extern "C" fn jrsonnet_thread_id() -> *mut JrThreadId { + Box::into_raw(Box::new(std::thread::current().id())).cast() + } + + #[no_mangle] + pub extern "C" fn jrsonnet_thread_id_compare( + a: *const JrThreadId, + b: *const JrThreadId, + ) -> c_int { + let a: &ThreadId = unsafe { *a.cast() }; + let b: &ThreadId = unsafe { *b.cast() }; + i32::from(*a == *b) + } + + #[no_mangle] + pub unsafe extern "C" fn jrsonnet_thread_id_free(id: *mut JrThreadId) { + let _id: Box = unsafe { Box::from_raw(id.cast()) }; } } diff --git a/bindings/jsonnet/src/lib.rs b/bindings/jsonnet/src/lib.rs index af6e0b08..2767c0c1 100644 --- a/bindings/jsonnet/src/lib.rs +++ b/bindings/jsonnet/src/lib.rs @@ -1,6 +1,5 @@ #![allow(clippy::box_default)] -#[cfg(feature = "interop")] pub mod interop; pub mod import; @@ -12,22 +11,27 @@ pub mod vars_tlas; use std::{ alloc::Layout, + any::Any, borrow::Cow, + cell::RefCell, ffi::{CStr, CString, OsStr}, os::raw::{c_char, c_double, c_int, c_uint}, - path::Path, + path::{Path, PathBuf}, }; use jrsonnet_evaluator::{ apply_tla, bail, function::TlaArg, - gc::GcHashMap, + gc::{GcHashMap, TraceBox}, manifest::{JsonFormat, ManifestFormat, ToStringFormat}, stack::set_stack_depth_limit, tb, trace::{CompactFormat, PathResolver, TraceFormat}, - FileImportResolver, IStr, Result, State, Val, + FileImportResolver, IStr, ImportResolver, Result, State, Val, }; +use jrsonnet_gcmodule::Trace; +use jrsonnet_parser::SourcePath; +use jrsonnet_stdlib::ContextInitializer; /// WASM stub #[cfg(target_arch = "wasm32")] @@ -40,7 +44,7 @@ pub extern "C" fn _start() {} /// then there is a mismatch between header and compiled library. 
#[no_mangle] pub extern "C" fn jsonnet_version() -> &'static [u8; 8] { - b"v0.19.1\0" + b"v0.20.0\0" } unsafe fn parse_path(input: &CStr) -> Cow { @@ -72,23 +76,84 @@ unsafe fn unparse_path(input: &Path) -> Cow { } } +#[derive(Trace)] +struct VMImportResolver { + #[trace(tracking(force))] + inner: RefCell>, +} +impl VMImportResolver { + fn new(value: impl ImportResolver) -> Self { + Self { + inner: RefCell::new(tb!(value)), + } + } +} +impl ImportResolver for VMImportResolver { + fn load_file_contents(&self, resolved: &SourcePath) -> Result> { + self.inner.borrow().load_file_contents(resolved) + } + + fn resolve_from(&self, from: &SourcePath, path: &str) -> Result { + self.inner.borrow().resolve_from(from, path) + } + + fn resolve_from_default(&self, path: &str) -> Result { + self.inner.borrow().resolve_from_default(path) + } + + fn resolve(&self, path: &Path) -> Result { + self.inner.borrow().resolve(path) + } + + fn as_any(&self) -> &dyn Any { + self + } + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } +} + pub struct VM { state: State, manifest_format: Box, trace_format: Box, tla_args: GcHashMap, } +impl VM { + fn replace_import_resolver(&self, resolver: impl ImportResolver) { + *self + .state + .import_resolver() + .as_any() + .downcast_ref::() + .expect("valid resolver ty") + .inner + .borrow_mut() = tb!(resolver); + } + fn add_jpath(&self, path: PathBuf) { + self.state + .import_resolver() + .as_any() + .downcast_ref::() + .expect("valid resolver ty") + .inner + .borrow_mut() + .as_any_mut() + .downcast_mut::() + .expect("jpaths are not compatible with callback imports!") + .add_jpath(path); + } +} /// Creates a new Jsonnet virtual machine. 
#[no_mangle] #[allow(clippy::box_default)] pub extern "C" fn jsonnet_make() -> *mut VM { - let state = State::default(); - state.settings_mut().import_resolver = tb!(FileImportResolver::default()); - state.settings_mut().context_initializer = tb!(jrsonnet_stdlib::ContextInitializer::new( - state.clone(), - PathResolver::new_cwd_fallback(), - )); + let mut state = State::builder(); + state + .import_resolver(VMImportResolver::new(FileImportResolver::default())) + .context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let state = state.build(); Box::into_raw(Box::new(VM { state, manifest_format: Box::new(JsonFormat::default()), @@ -107,7 +172,7 @@ pub extern "C" fn jsonnet_destroy(vm: Box) { /// Set the maximum stack depth. #[no_mangle] pub extern "C" fn jsonnet_max_stack(_vm: &VM, v: c_uint) { - set_stack_depth_limit(v as usize) + set_stack_depth_limit(v as usize); } /// Set the number of objects required before a garbage collection cycle is allowed. @@ -175,7 +240,7 @@ pub extern "C" fn jsonnet_json_destroy(_vm: &VM, v: Box) { #[no_mangle] pub extern "C" fn jsonnet_max_trace(vm: &mut VM, v: c_uint) { if let Some(format) = vm.trace_format.as_any_mut().downcast_mut::() { - format.max_trace = v as usize + format.max_trace = v as usize; } else { panic!("max_trace is not supported by current tracing format") } @@ -183,7 +248,7 @@ pub extern "C" fn jsonnet_max_trace(vm: &mut VM, v: c_uint) { /// Evaluate a file containing Jsonnet code, return a JSON string. /// -/// The returned string should be cleaned up with jsonnet_realloc. +/// The returned string should be cleaned up with `jsonnet_realloc`. /// /// # Safety /// @@ -216,7 +281,7 @@ pub unsafe extern "C" fn jsonnet_evaluate_file( /// Evaluate a string containing Jsonnet code, return a JSON string. /// -/// The returned string should be cleaned up with jsonnet_realloc. +/// The returned string should be cleaned up with `jsonnet_realloc`. 
/// /// # Safety /// @@ -359,7 +424,7 @@ fn stream_to_raw(multi: Vec) -> *const c_char { out.push(0); let v = out.as_ptr(); std::mem::forget(out); - v as *const c_char + v.cast::() } /// # Safety diff --git a/bindings/jsonnet/src/val_extract.rs b/bindings/jsonnet/src/val_extract.rs index 6f798b3d..516b876d 100644 --- a/bindings/jsonnet/src/val_extract.rs +++ b/bindings/jsonnet/src/val_extract.rs @@ -26,7 +26,7 @@ pub extern "C" fn jsonnet_json_extract_string(_vm: &VM, v: &Val) -> *mut c_char pub extern "C" fn jsonnet_json_extract_number(_vm: &VM, v: &Val, out: &mut c_double) -> c_int { match v { Val::Num(n) => { - *out = *n; + *out = n.get(); 1 } _ => 0, diff --git a/bindings/jsonnet/src/val_make.rs b/bindings/jsonnet/src/val_make.rs index c6fec2c6..15789c3c 100644 --- a/bindings/jsonnet/src/val_make.rs +++ b/bindings/jsonnet/src/val_make.rs @@ -5,7 +5,10 @@ use std::{ os::raw::{c_char, c_double, c_int}, }; -use jrsonnet_evaluator::{val::ArrValue, ObjValue, Val}; +use jrsonnet_evaluator::{ + val::{ArrValue, NumValue}, + ObjValue, Val, +}; use crate::VM; @@ -24,7 +27,9 @@ pub unsafe extern "C" fn jsonnet_json_make_string(_vm: &VM, val: *const c_char) /// Convert the given double to a `JsonnetJsonValue`. #[no_mangle] pub extern "C" fn jsonnet_json_make_number(_vm: &VM, v: c_double) -> *mut Val { - Box::into_raw(Box::new(Val::Num(v))) + Box::into_raw(Box::new(Val::Num( + NumValue::new(v).expect("jsonnet numbers are finite"), + ))) } /// Convert the given `bool` (`1` or `0`) to a `JsonnetJsonValue`. 
diff --git a/bindings/jsonnet/src/val_modify.rs b/bindings/jsonnet/src/val_modify.rs index f1f39767..8c35cecc 100644 --- a/bindings/jsonnet/src/val_modify.rs +++ b/bindings/jsonnet/src/val_modify.rs @@ -12,7 +12,7 @@ use crate::VM; /// /// # Safety /// -/// `arr` should be a pointer to array value allocated by make_array, or returned by other library call +/// `arr` should be a pointer to array value allocated by `make_array`, or returned by other library call /// `val` should be a pointer to value allocated using this library #[no_mangle] pub unsafe extern "C" fn jsonnet_json_array_append(_vm: &VM, arr: &mut Val, val: &Val) { diff --git a/bindings/jsonnet/src/vars_tlas.rs b/bindings/jsonnet/src/vars_tlas.rs index abfc9137..39abfc81 100644 --- a/bindings/jsonnet/src/vars_tlas.rs +++ b/bindings/jsonnet/src/vars_tlas.rs @@ -27,7 +27,7 @@ pub unsafe extern "C" fn jsonnet_ext_var(vm: &VM, name: *const c_char, value: *c .add_ext_str( name.to_str().expect("name is not utf-8").into(), value.to_str().expect("value is not utf-8").into(), - ) + ); } /// Binds a Jsonnet external variable to the given code. @@ -51,7 +51,7 @@ pub unsafe extern "C" fn jsonnet_ext_code(vm: &VM, name: *const c_char, code: *c name.to_str().expect("name is not utf-8"), code.to_str().expect("code is not utf-8"), ) - .expect("can't parse ext code") + .expect("can't parse ext code"); } /// Binds a top-level string argument for a top-level parameter. 
diff --git a/cmds/jrsonnet-fmt/src/children.rs b/cmds/jrsonnet-fmt/src/children.rs index 63576c86..2ecd4210 100644 --- a/cmds/jrsonnet-fmt/src/children.rs +++ b/cmds/jrsonnet-fmt/src/children.rs @@ -28,7 +28,7 @@ pub fn trivia_before(node: SyntaxNode, end: Option<&SyntaxElement>) -> ChildTriv TS![, ;].contains(item.kind()), "silently eaten token: {:?}", item.kind() - ) + ); } } out @@ -48,13 +48,13 @@ pub fn trivia_after(node: SyntaxNode, start: Option<&SyntaxElement>) -> ChildTri if let Some(trivia) = item.as_token().cloned().and_then(Trivia::cast) { out.push(Ok(trivia)); } else if CustomError::can_cast(item.kind()) { - out.push(Err(item.to_string())) + out.push(Err(item.to_string())); } else { assert!( TS![, ;].contains(item.kind()), "silently eaten token: {:?}", item.kind() - ) + ); } } out @@ -115,11 +115,7 @@ fn count_newlines_after(tt: &ChildTrivia) -> usize { TriviaKind::Whitespace => { nl_count += t.text().bytes().filter(|b| *b == b'\n').count(); } - TriviaKind::SingleLineHashComment => { - nl_count += 1; - break; - } - TriviaKind::SingleLineSlashComment => { + TriviaKind::SingleLineHashComment | TriviaKind::SingleLineSlashComment => { nl_count += 1; break; } @@ -163,7 +159,7 @@ pub fn children( inline_trivia: Vec::new(), }); if let Some(last_child) = last_child { - out.push(last_child) + out.push(last_child); } had_some = true; started_next = false; @@ -188,7 +184,7 @@ pub fn children( } had_some = true; } else if CustomError::can_cast(item.kind()) { - next.push(Err(item.to_string())) + next.push(Err(item.to_string())); } else if loose { if had_some { break; @@ -199,7 +195,7 @@ pub fn children( TS![, ;].contains(item.kind()), "silently eaten token: {:?}", item.kind() - ) + ); } } diff --git a/cmds/jrsonnet-fmt/src/comments.rs b/cmds/jrsonnet-fmt/src/comments.rs index 744c6624..576f44ec 100644 --- a/cmds/jrsonnet-fmt/src/comments.rs +++ b/cmds/jrsonnet-fmt/src/comments.rs @@ -1,3 +1,5 @@ +use std::string::String; + use dprint_core::formatting::PrintItems; 
use jrsonnet_rowan_parser::{nodes::TriviaKind, AstToken}; @@ -12,6 +14,7 @@ pub enum CommentLocation { EndOfItems, } +#[allow(clippy::too_many_lines, clippy::cognitive_complexity)] pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut PrintItems) { for c in comments { let Ok(c) = c else { @@ -62,14 +65,14 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P } }) .collect::>(); - while lines.last().map(|l| l.is_empty()).unwrap_or(false) { + while lines.last().is_some_and(String::is_empty) { lines.pop(); } if lines.len() == 1 && !doc { if matches!(loc, CommentLocation::ItemInline) { p!(out, str(" ")); } - p!(out, str("/* ") string(lines[0].trim().to_string()) str(" */") nl) + p!(out, str("/* ") string(lines[0].trim().to_string()) str(" */") nl); } else if !lines.is_empty() { fn common_ws_prefix<'a>(a: &'a str, b: &str) -> &'a str { let offset = a @@ -95,7 +98,7 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P } for line in lines .iter_mut() - .skip(if immediate_start { 1 } else { 0 }) + .skip(usize::from(immediate_start)) .filter(|l| !l.is_empty()) { *line = line @@ -127,13 +130,13 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P } line = new_line.to_string(); } - p!(out, string(line.to_string()) nl) + p!(out, string(line.to_string()) nl); } } if doc { p!(out, str(" ")); } - p!(out, str("*/") nl) + p!(out, str("*/") nl); } } // TODO: Keep common padding for multiple continous lines of single-line comments @@ -154,20 +157,20 @@ pub fn format_comments(comments: &ChildTrivia, loc: CommentLocation, out: &mut P // ``` TriviaKind::SingleLineHashComment => { if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")) + p!(out, str(" ")); } p!(out, str("# ") string(c.text().strip_prefix('#').expect("hash comment starts with #").trim().to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl) + p!(out, nl); } } 
TriviaKind::SingleLineSlashComment => { if matches!(loc, CommentLocation::ItemInline) { - p!(out, str(" ")) + p!(out, str(" ")); } p!(out, str("// ") string(c.text().strip_prefix("//").expect("comment starts with //").trim().to_string())); if !matches!(loc, CommentLocation::ItemInline) { - p!(out, nl) + p!(out, nl); } } // Garbage in - garbage out diff --git a/cmds/jrsonnet-fmt/src/main.rs b/cmds/jrsonnet-fmt/src/main.rs index c391ab58..f6099f70 100644 --- a/cmds/jrsonnet-fmt/src/main.rs +++ b/cmds/jrsonnet-fmt/src/main.rs @@ -13,6 +13,7 @@ use dprint_core::formatting::{ condition_helpers::is_multiple_lines, condition_resolvers::true_resolver, ConditionResolverContext, LineNumber, PrintItems, PrintOptions, }; +use hi_doc::Formatting; use jrsonnet_rowan_parser::{ nodes::{ Arg, ArgsDesc, Assertion, BinaryOperator, Bind, CompSpec, Destruct, DestructArrayPart, @@ -155,7 +156,7 @@ where { fn print(&self, out: &mut PrintItems) { if let Some(v) = self { - v.print(out) + v.print(out); } else { p!( out, @@ -163,31 +164,31 @@ where "/*missing {}*/", type_name::

().replace("jrsonnet_rowan_parser::generated::nodes::", "") ),) - ) + ); } } } impl Printable for SyntaxToken { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())) + p!(out, string(self.to_string())); } } impl Printable for Text { fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))) + p!(out, string(format!("{}", self))); } } impl Printable for Number { fn print(&self, out: &mut PrintItems) { - p!(out, string(format!("{}", self))) + p!(out, string(format!("{}", self))); } } impl Printable for Name { fn print(&self, out: &mut PrintItems) { - p!(out, { self.ident_lit() }) + p!(out, { self.ident_lit() }); } } @@ -203,30 +204,30 @@ impl Printable for DestructRest { impl Printable for Destruct { fn print(&self, out: &mut PrintItems) { match self { - Destruct::DestructFull(f) => { - p!(out, { f.name() }) + Self::DestructFull(f) => { + p!(out, { f.name() }); } - Destruct::DestructSkip(_) => p!(out, str("?")), - Destruct::DestructArray(a) => { + Self::DestructSkip(_) => p!(out, str("?")), + Self::DestructArray(a) => { p!(out, str("[") >i nl); for el in a.destruct_array_parts() { match el { DestructArrayPart::DestructArrayElement(e) => { - p!(out, {e.destruct()} str(",") nl) + p!(out, {e.destruct()} str(",") nl); } DestructArrayPart::DestructRest(d) => { - p!(out, {d} str(",") nl) + p!(out, {d} str(",") nl); } } } p!(out, { + Self::DestructObject(o) => { p!(out, str("{") >i nl); for item in o.destruct_object_fields() { p!(out, { item.field() }); if let Some(des) = item.destruct() { - p!(out, str(": ") {des}) + p!(out, str(": ") {des}); } if let Some(def) = item.expr() { p!(out, str(" = ") {def}); @@ -234,7 +235,7 @@ impl Printable for Destruct { p!(out, str(",") nl); } if let Some(rest) = o.destruct_rest() { - p!(out, {rest} nl) + p!(out, {rest} nl); } p!(out, { + Self::FieldNameFixed(f) => { if let Some(id) = f.id() { - p!(out, { id }) + p!(out, { id }); } else if let Some(str) = f.text() { - p!(out, { str }) + p!(out, { 
str }); } else { - p!(out, str("/*missing FieldName*/")) + p!(out, str("/*missing FieldName*/")); } } - FieldName::FieldNameDynamic(d) => { - p!(out, str("[") {d.expr()} str("]")) + Self::FieldNameDynamic(d) => { + p!(out, str("[") {d.expr()} str("]")); } } } @@ -263,13 +264,13 @@ impl Printable for FieldName { impl Printable for Visibility { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.to_string())) + p!(out, string(self.to_string())); } } impl Printable for ObjLocal { fn print(&self, out: &mut PrintItems) { - p!(out, str("local ") {self.bind()}) + p!(out, str("local ") {self.bind()}); } } @@ -277,7 +278,7 @@ impl Printable for Assertion { fn print(&self, out: &mut PrintItems) { p!(out, str("assert ") {self.condition()}); if self.colon_token().is_some() || self.message().is_some() { - p!(out, str(": ") {self.message()}) + p!(out, str(": ") {self.message()}); } } } @@ -288,9 +289,9 @@ impl Printable for ParamsDesc { for param in self.params() { p!(out, { param.destruct() }); if param.assign_token().is_some() || param.expr().is_some() { - p!(out, str(" = ") {param.expr()}) + p!(out, str(" = ") {param.expr()}); } - p!(out, str(",") nl) + p!(out, str(",") nl); } p!(out, { - p!(out, { b.obj_local() }) + p!(out, { b.obj_local() }); } Self::MemberAssertStmt(ass) => { - p!(out, { ass.assertion() }) + p!(out, { ass.assertion() }); } Self::MemberFieldNormal(n) => { - p!(out, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}) + p!(out, {n.field_name()} if(n.plus_token().is_some())({n.plus_token()}) {n.visibility()} str(" ") {n.expr()}); } Self::MemberFieldMethod(m) => { - p!(out, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}) + p!(out, {m.field_name()} {m.params_desc()} {m.visibility()} str(" ") {m.expr()}); } } } @@ -375,7 +376,7 @@ impl Printable for Member { impl Printable for ObjBody { fn print(&self, out: &mut PrintItems) { match self { - ObjBody::ObjBodyComp(l) => { + 
Self::ObjBodyComp(l) => { let (children, mut end_comments) = children_between::( l.syntax().clone(), l.l_brace_token().map(Into::into).as_ref(), @@ -391,14 +392,14 @@ impl Printable for ObjBody { ); let trailing_for_comp = end_comments.extract_trailing(); p!(out, str("{") >i nl); - for mem in children.into_iter() { + for mem in children { if mem.should_start_with_newline { p!(out, nl); } format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); p!(out, {mem.value} str(",")); format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl) + p!(out, nl); } if end_comments.should_start_with_newline { @@ -417,7 +418,7 @@ impl Printable for ObjBody { l.r_brace_token().map(Into::into).as_ref(), Some(trailing_for_comp), ); - for mem in compspecs.into_iter() { + for mem in compspecs { if mem.should_start_with_newline { p!(out, nl); } @@ -432,7 +433,7 @@ impl Printable for ObjBody { p!(out, nl { + Self::ObjBodyMemberList(l) => { let (children, end_comments) = children_between::( l.syntax().clone(), l.l_brace_token().map(Into::into).as_ref(), @@ -451,7 +452,7 @@ impl Printable for ObjBody { format_comments(&mem.before_trivia, CommentLocation::AboveItem, out); p!(out, {mem.value} str(",")); format_comments(&mem.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl) + p!(out, nl); } if end_comments.should_start_with_newline { @@ -465,51 +466,51 @@ impl Printable for ObjBody { } impl Printable for UnaryOperator { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.text().to_string())) + p!(out, string(self.text().to_string())); } } impl Printable for BinaryOperator { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.text().to_string())) + p!(out, string(self.text().to_string())); } } impl Printable for Bind { fn print(&self, out: &mut PrintItems) { match self { - Bind::BindDestruct(d) => { - p!(out, {d.into()} str(" = ") {d.value()}) + Self::BindDestruct(d) => { + p!(out, {d.into()} str(" = ") {d.value()}); } - 
Bind::BindFunction(f) => { - p!(out, {f.name()} {f.params()} str(" = ") {f.value()}) + Self::BindFunction(f) => { + p!(out, {f.name()} {f.params()} str(" = ") {f.value()}); } } } } impl Printable for Literal { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())) + p!(out, string(self.syntax().to_string())); } } impl Printable for ImportKind { fn print(&self, out: &mut PrintItems) { - p!(out, string(self.syntax().to_string())) + p!(out, string(self.syntax().to_string())); } } impl Printable for ForSpec { fn print(&self, out: &mut PrintItems) { - p!(out, str("for ") {self.bind()} str(" in ") {self.expr()}) + p!(out, str("for ") {self.bind()} str(" in ") {self.expr()}); } } impl Printable for IfSpec { fn print(&self, out: &mut PrintItems) { - p!(out, str("if ") {self.expr()}) + p!(out, str("if ") {self.expr()}); } } impl Printable for CompSpec { fn print(&self, out: &mut PrintItems) { match self { - CompSpec::ForSpec(f) => f.print(out), - CompSpec::IfSpec(i) => i.print(out), + Self::ForSpec(f) => f.print(out), + Self::IfSpec(i) => i.print(out), } } } @@ -549,23 +550,23 @@ impl Printable for Expr { impl Printable for Suffix { fn print(&self, out: &mut PrintItems) { match self { - Suffix::SuffixIndex(i) => { + Self::SuffixIndex(i) => { if i.question_mark_token().is_some() { p!(out, str("?")); } p!(out, str(".") {i.index()}); } - Suffix::SuffixIndexExpr(e) => { + Self::SuffixIndexExpr(e) => { if e.question_mark_token().is_some() { p!(out, str(".?")); } - p!(out, str("[") {e.index()} str("]")) + p!(out, str("[") {e.index()} str("]")); } - Suffix::SuffixSlice(d) => { - p!(out, { d.slice_desc() }) + Self::SuffixSlice(d) => { + p!(out, { d.slice_desc() }); } - Suffix::SuffixApply(a) => { - p!(out, { a.args_desc() }) + Self::SuffixApply(a) => { + p!(out, { a.args_desc() }); } } } @@ -573,7 +574,7 @@ impl Printable for Suffix { impl Printable for Stmt { fn print(&self, out: &mut PrintItems) { match self { - Stmt::StmtLocal(l) => { + 
Self::StmtLocal(l) => { let (binds, end_comments) = children_between::( l.syntax().clone(), l.local_kw_token().map(Into::into).as_ref(), @@ -594,18 +595,18 @@ impl Printable for Stmt { format_comments(&bind.before_trivia, CommentLocation::AboveItem, out); p!(out, {bind.value} str(",")); format_comments(&bind.inline_trivia, CommentLocation::ItemInline, out); - p!(out, nl) + p!(out, nl); } if end_comments.should_start_with_newline { - p!(out, nl) + p!(out, nl); } format_comments(&end_comments.trivia, CommentLocation::EndOfItems, out); p!(out, { - p!(out, {a.assertion()} str(";") nl) + Self::StmtAssert(a) => { + p!(out, {a.assertion()} str(";") nl); } } } @@ -614,7 +615,7 @@ impl Printable for ExprBase { fn print(&self, out: &mut PrintItems) { match self { Self::ExprBinary(b) => { - p!(out, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}) + p!(out, {b.lhs_work()} str(" ") {b.binary_operator()} str(" ") {b.rhs_work()}); } Self::ExprUnary(u) => p!(out, {u.unary_operator()} {u.rhs()}), // Self::ExprSlice(s) => { @@ -632,10 +633,10 @@ impl Printable for ExprBase { // pi // } Self::ExprObjExtend(ex) => { - p!(out, {ex.lhs_work()} str(" ") {ex.rhs_work()}) + p!(out, {ex.lhs_work()} str(" ") {ex.rhs_work()}); } Self::ExprParened(p) => { - p!(out, str("(") {p.expr()} str(")")) + p!(out, str("(") {p.expr()} str(")")); } Self::ExprString(s) => p!(out, { s.text() }), Self::ExprNumber(n) => p!(out, { n.number() }), @@ -647,7 +648,7 @@ impl Printable for ExprBase { p!(out, { - p!(out, { obj.obj_body() }) + p!(out, { obj.obj_body() }); } Self::ExprArrayComp(arr) => { p!(out, str("[") {arr.expr()}); @@ -657,7 +658,7 @@ impl Printable for ExprBase { p!(out, str("]")); } Self::ExprImport(v) => { - p!(out, {v.import_kind()} str(" ") {v.text()}) + p!(out, {v.import_kind()} str(" ") {v.text()}); } Self::ExprVar(n) => p!(out, { n.name() }), // Self::ExprLocal(l) => { @@ -665,14 +666,14 @@ impl Printable for ExprBase { Self::ExprIfThenElse(ite) => { p!(out, str("if ") 
{ite.cond()} str(" then ") {ite.then().map(|t| t.expr())}); if ite.else_kw_token().is_some() || ite.else_().is_some() { - p!(out, str(" else ") {ite.else_().map(|t| t.expr())}) + p!(out, str(" else ") {ite.else_().map(|t| t.expr())}); } } Self::ExprFunction(f) => p!(out, str("function") {f.params_desc()} nl {f.expr()}), // Self::ExprAssert(a) => p!(new: {a.assertion()} str("; ") {a.expr()}), Self::ExprError(e) => p!(out, str("error ") {e.expr()}), Self::ExprLiteral(l) => { - p!(out, { l.literal() }) + p!(out, { l.literal() }); } } } @@ -696,7 +697,7 @@ impl Printable for SourceFile { ); format_comments(&before, CommentLocation::AboveItem, out); p!(out, {self.expr()} nl); - format_comments(&after, CommentLocation::EndOfItems, out) + format_comments(&after, CommentLocation::EndOfItems, out); } } @@ -712,7 +713,7 @@ fn format(input: &str, opts: &FormatOptions) -> Option { builder .error(hi_doc::Text::single( format!("{:?}", error.error).chars(), - Default::default(), + Formatting::default(), )) .range( error.range.start().into() @@ -751,6 +752,7 @@ fn format(input: &str, opts: &FormatOptions) -> Option { } #[derive(Parser)] +#[allow(clippy::struct_excessive_bools)] struct Opts { /// Treat input as code, reformat it instead of reading file. 
#[clap(long, short = 'e')] @@ -814,7 +816,7 @@ fn main_result() -> Result<(), Error> { let mut iteration = 0; let mut formatted = input.clone(); - let mut tmp; + let mut convergence_tmp; // https://github.com/dprint/dprint/pull/423 loop { let Some(reformatted) = format( @@ -829,18 +831,16 @@ fn main_result() -> Result<(), Error> { ) else { return Err(Error::Parse); }; - tmp = reformatted.trim().to_owned(); - if formatted == tmp { + convergence_tmp = reformatted.trim().to_owned(); + if formatted == convergence_tmp { break; } - formatted = tmp; + formatted = convergence_tmp; if opts.conv_limit == 0 { break; } iteration += 1; - if iteration > opts.conv_limit { - panic!("formatting not converged"); - } + assert!(iteration <= opts.conv_limit, "formatting not converged"); } formatted.push('\n'); if opts.test && formatted != input { @@ -855,7 +855,7 @@ fn main_result() -> Result<(), Error> { temp.flush()?; temp.persist(&path)?; } else { - print!("{formatted}") + print!("{formatted}"); } Ok(()) } diff --git a/cmds/jrsonnet/Cargo.toml b/cmds/jrsonnet/Cargo.toml index 811f9a8c..2fde9f57 100644 --- a/cmds/jrsonnet/Cargo.toml +++ b/cmds/jrsonnet/Cargo.toml @@ -44,9 +44,6 @@ exp-null-coaelse = [ # --exp-apply exp-apply = [] -# std.thisFile support -legacy-this-file = ["jrsonnet-cli/legacy-this-file"] - nightly = ["jrsonnet-evaluator/nightly"] [dependencies] diff --git a/cmds/jrsonnet/src/main.rs b/cmds/jrsonnet/src/main.rs index 00e506e5..415b9435 100644 --- a/cmds/jrsonnet/src/main.rs +++ b/cmds/jrsonnet/src/main.rs @@ -147,9 +147,8 @@ impl From for Error { } fn main_catch(opts: Opts) -> bool { - let s = State::default(); let trace = opts.trace.trace_format(); - if let Err(e) = main_real(&s, opts) { + if let Err(e) = main_real(opts) { if let Error::Evaluation(e) = e { let mut out = String::new(); trace.write_trace(&mut out, &e).expect("format error"); @@ -162,18 +161,17 @@ fn main_catch(opts: Opts) -> bool { true } -fn main_real(s: &State, opts: Opts) -> Result<(), Error> { 
+fn main_real(opts: Opts) -> Result<(), Error> { let _gc_leak_guard = opts.gc.leak_on_exit(); let _gc_print_stats = opts.gc.stats_printer(); let _stack_depth_override = opts.misc.stack_size_override(); let import_resolver = opts.misc.import_resolver(); - s.set_import_resolver(import_resolver); + let std = opts.std.context_initializer()?; - let std = opts.std.context_initializer(s)?; - if let Some(std) = std { - s.set_context_initializer(std); - } + let mut s = State::builder(); + s.import_resolver(import_resolver).context_initializer(std); + let s = s.build(); let input = opts.input.input.ok_or(Error::MissingInputArgument)?; let val = if opts.input.exec { @@ -188,7 +186,11 @@ fn main_real(s: &State, opts: Opts) -> Result<(), Error> { }; let tla = opts.tla.tla_opts()?; - #[allow(unused_mut)] + #[allow( + // It is not redundant/unused in exp-apply + unused_mut, + clippy::redundant_clone, + )] let mut val = apply_tla(s.clone(), &tla, val)?; #[cfg(feature = "exp-apply")] diff --git a/crates/jrsonnet-cli/Cargo.toml b/crates/jrsonnet-cli/Cargo.toml index b3eb92b9..b2bcfd33 100644 --- a/crates/jrsonnet-cli/Cargo.toml +++ b/crates/jrsonnet-cli/Cargo.toml @@ -26,7 +26,6 @@ exp-null-coaelse = [ exp-regex = [ "jrsonnet-stdlib/exp-regex", ] -legacy-this-file = ["jrsonnet-stdlib/legacy-this-file"] [dependencies] jrsonnet-evaluator = { workspace = true, features = ["explaining-traces"] } diff --git a/crates/jrsonnet-cli/src/manifest.rs b/crates/jrsonnet-cli/src/manifest.rs index 0324f8b5..683420b6 100644 --- a/crates/jrsonnet-cli/src/manifest.rs +++ b/crates/jrsonnet-cli/src/manifest.rs @@ -4,7 +4,7 @@ use clap::{Parser, ValueEnum}; use jrsonnet_evaluator::manifest::{ JsonFormat, ManifestFormat, StringFormat, ToStringFormat, YamlStreamFormat, }; -use jrsonnet_stdlib::{TomlFormat, YamlFormat}; +use jrsonnet_stdlib::{IniFormat, TomlFormat, XmlJsonmlFormat, YamlFormat}; #[derive(Clone, Copy, ValueEnum)] pub enum ManifestFormatName { @@ -13,6 +13,8 @@ pub enum ManifestFormatName { 
Json, Yaml, Toml, + XmlJsonml, + Ini, } #[derive(Parser)] @@ -70,10 +72,15 @@ impl ManifestOpts { #[cfg(feature = "exp-preserve-order")] preserve_order, )), + ManifestFormatName::XmlJsonml => Box::new(XmlJsonmlFormat::cli()), + ManifestFormatName::Ini => Box::new(IniFormat::cli( + #[cfg(feature = "exp-preserve-order")] + preserve_order, + )), } }; if self.yaml_stream { - Box::new(YamlStreamFormat(format)) + Box::new(YamlStreamFormat::cli(format)) } else { format } diff --git a/crates/jrsonnet-cli/src/stdlib.rs b/crates/jrsonnet-cli/src/stdlib.rs index 903b8993..15689e2e 100644 --- a/crates/jrsonnet-cli/src/stdlib.rs +++ b/crates/jrsonnet-cli/src/stdlib.rs @@ -1,7 +1,7 @@ use std::{fs::read_to_string, str::FromStr}; use clap::Parser; -use jrsonnet_evaluator::{trace::PathResolver, Result, State}; +use jrsonnet_evaluator::{trace::PathResolver, Result}; use jrsonnet_stdlib::ContextInitializer; #[derive(Clone)] @@ -104,11 +104,11 @@ pub struct StdOpts { ext_code_file: Vec, } impl StdOpts { - pub fn context_initializer(&self, s: &State) -> Result> { + pub fn context_initializer(&self) -> Result> { if self.no_stdlib { return Ok(None); } - let ctx = ContextInitializer::new(s.clone(), PathResolver::new_cwd_fallback()); + let ctx = ContextInitializer::new(PathResolver::new_cwd_fallback()); for ext in &self.ext_str { ctx.add_ext_str((&ext.name as &str).into(), (&ext.value as &str).into()); } diff --git a/crates/jrsonnet-evaluator/Cargo.toml b/crates/jrsonnet-evaluator/Cargo.toml index 215597bd..4b5a430b 100644 --- a/crates/jrsonnet-evaluator/Cargo.toml +++ b/crates/jrsonnet-evaluator/Cargo.toml @@ -53,8 +53,6 @@ strsim.workspace = true serde.workspace = true anyhow = { workspace = true, optional = true } -# Serialized stdlib -bincode = { workspace = true, optional = true } # Explaining traces annotate-snippets = { workspace = true, optional = true } # Better explaining traces diff --git a/crates/jrsonnet-evaluator/src/arr/mod.rs b/crates/jrsonnet-evaluator/src/arr/mod.rs 
index de583586..aeb225e9 100644 --- a/crates/jrsonnet-evaluator/src/arr/mod.rs +++ b/crates/jrsonnet-evaluator/src/arr/mod.rs @@ -1,4 +1,4 @@ -use std::any::Any; +use std::{any::Any, num::NonZeroU32}; use jrsonnet_gcmodule::{Cc, Trace}; use jrsonnet_interner::IBytes; @@ -11,7 +11,7 @@ pub use spec::{ArrayLike, *}; /// Represents a Jsonnet array value. #[derive(Debug, Clone, Trace)] -// may contrain other ArrValue +// may contain other ArrValue #[trace(tracking(force))] pub struct ArrValue(Cc>); @@ -54,7 +54,12 @@ impl ArrValue { #[must_use] pub fn map(self, mapper: FuncVal) -> Self { - Self::new(MappedArray::new(self, mapper)) + Self::new(>::new(self, mapper)) + } + + #[must_use] + pub fn map_with_index(self, mapper: FuncVal) -> Self { + Self::new(>::new(self, mapper)) } pub fn filter(self, filter: impl Fn(&Val) -> Result) -> Result { @@ -100,27 +105,26 @@ impl ArrValue { } #[must_use] - pub fn slice( - self, - from: Option, - to: Option, - step: Option, - ) -> Option { - let len = self.len(); - let from = from.unwrap_or(0); - let to = to.unwrap_or(len).min(len); - let step = step.unwrap_or(1); - - if from >= to || step == 0 { - return None; + pub fn slice(self, index: Option, end: Option, step: Option) -> Self { + let get_idx = |pos: Option, len: usize, default| match pos { + Some(v) if v < 0 => len.saturating_sub((-v) as usize), + Some(v) => (v as usize).min(len), + None => default, + }; + let index = get_idx(index, self.len(), 0); + let end = get_idx(end, self.len(), self.len()); + let step = step.unwrap_or_else(|| NonZeroU32::new(1).expect("1 != 0")); + + if index >= end { + return Self::empty(); } - Some(Self::new(SliceArray { + Self::new(SliceArray { inner: self, - from: from as u32, - to: to as u32, - step: step as u32, - })) + from: index as u32, + to: end as u32, + step: step.get(), + }) } /// Array length. 
@@ -141,6 +145,9 @@ impl ArrValue { } /// Returns None if get is either non cheap, or out of bounds + /// Note that non-cheap access includes errorable values + /// + /// Prefer it to `get_lazy`, but use `get` when you can. fn get_cheap(&self, index: usize) -> Option { self.0.get_cheap(index) } @@ -161,6 +168,7 @@ impl ArrValue { (0..self.len()).map(|i| self.get_lazy(i).expect("length checked")) } + /// Prefer it over `iter_lazy`, but do not use it where `iter` will do. pub fn iter_cheap(&self) -> Option + '_> { if self.is_cheap() { Some((0..self.len()).map(|i| self.get_cheap(i).expect("length and is_cheap checked"))) diff --git a/crates/jrsonnet-evaluator/src/arr/spec.rs b/crates/jrsonnet-evaluator/src/arr/spec.rs index 49cd02c4..897928a2 100644 --- a/crates/jrsonnet-evaluator/src/arr/spec.rs +++ b/crates/jrsonnet-evaluator/src/arr/spec.rs @@ -120,7 +120,7 @@ impl ArrayLike for BytesArray { } fn get_cheap(&self, index: usize) -> Option { - self.0.get(index).map(|v| Val::Num(f64::from(*v))) + self.0.get(index).map(|v| Val::Num((*v).into())) } fn is_cheap(&self) -> bool { true @@ -399,7 +399,7 @@ impl ArrayLike for RangeArray { } fn get_cheap(&self, index: usize) -> Option { - self.range().nth(index).map(|i| Val::Num(f64::from(i))) + self.range().nth(index).map(|i| Val::Num(i.into())) } fn is_cheap(&self) -> bool { true @@ -430,12 +430,12 @@ impl ArrayLike for ReverseArray { } #[derive(Trace, Debug, Clone)] -pub struct MappedArray { +pub struct MappedArray { inner: ArrValue, cached: Cc>>>, mapper: FuncVal, } -impl MappedArray { +impl MappedArray { pub fn new(inner: ArrValue, mapper: FuncVal) -> Self { let len = inner.len(); Self { @@ -444,8 +444,15 @@ impl MappedArray { mapper, } } + fn evaluate(&self, index: usize, value: Val) -> Result { + if WITH_INDEX { + self.mapper.evaluate_simple(&(index, value), false) + } else { + self.mapper.evaluate_simple(&(value,), false) + } + } } -impl ArrayLike for MappedArray { +impl ArrayLike for MappedArray { fn len(&self) -> 
usize { self.cached.borrow().len() } @@ -472,7 +479,7 @@ impl ArrayLike for MappedArray { .get(index) .transpose() .expect("index checked") - .and_then(|r| self.mapper.evaluate_simple(&(r,), false)); + .and_then(|r| self.evaluate(index, r)); let new_value = match val { Ok(v) => v, @@ -486,12 +493,12 @@ impl ArrayLike for MappedArray { } fn get_lazy(&self, index: usize) -> Option> { #[derive(Trace)] - struct ArrayElement { - arr_thunk: MappedArray, + struct ArrayElement { + arr_thunk: MappedArray, index: usize, } - impl ThunkValue for ArrayElement { + impl ThunkValue for ArrayElement { type Output = Val; fn get(self: Box) -> Result { diff --git a/crates/jrsonnet-evaluator/src/error.rs b/crates/jrsonnet-evaluator/src/error.rs index 50ee1cf0..37100a48 100644 --- a/crates/jrsonnet-evaluator/src/error.rs +++ b/crates/jrsonnet-evaluator/src/error.rs @@ -1,16 +1,23 @@ use std::{ cmp::Ordering, + convert::Infallible, fmt::{Debug, Display}, path::PathBuf, }; use jrsonnet_gcmodule::Trace; use jrsonnet_interner::IStr; -use jrsonnet_parser::{BinaryOpType, ExprLocation, LocExpr, Source, SourcePath, UnaryOpType}; +use jrsonnet_parser::{BinaryOpType, LocExpr, Source, SourcePath, Span, UnaryOpType}; use jrsonnet_types::ValType; use thiserror::Error; -use crate::{function::CallLocation, stdlib::format::FormatError, typed::TypeLocError, ObjValue}; +use crate::{ + function::{builtin::ParamDefault, CallLocation}, + stdlib::format::FormatError, + typed::TypeLocError, + val::ConvertNumValueError, + ObjValue, +}; pub(crate) fn format_found(list: &[IStr], what: &str) -> String { if list.is_empty() { @@ -43,7 +50,7 @@ fn format_signature(sig: &FunctionSignature) -> String { if sig.is_empty() { out.push_str("/*no arguments*/"); } else { - for (i, (name, has_default)) in sig.iter().enumerate() { + for (i, (name, default)) in sig.iter().enumerate() { if i != 0 { out.push_str(", "); } @@ -52,8 +59,13 @@ fn format_signature(sig: &FunctionSignature) -> String { } else { out.push_str(""); } - if 
*has_default { - out.push_str(" = "); + match default { + ParamDefault::None => {} + ParamDefault::Exists => out.push_str(" = "), + ParamDefault::Literal(lit) => { + out.push_str(" = "); + out.push_str(lit); + } } } } @@ -88,7 +100,7 @@ pub(crate) fn suggest_object_fields(v: &ObjValue, key: IStr) -> Vec { heap.into_iter().map(|v| v.1).collect() } -type FunctionSignature = Vec<(Option, bool)>; +type FunctionSignature = Vec<(Option, ParamDefault)>; /// Possible errors #[allow(missing_docs)] @@ -226,6 +238,9 @@ pub enum ErrorKind { #[error("invalid unicode codepoint: {0}")] InvalidUnicodeCodepointGot(u32), + #[error("convert num value: {0}")] + ConvertNumValue(#[from] ConvertNumValueError), + #[error("format error: {0}")] Format(#[from] FormatError), #[error("type error: {0}")] @@ -249,12 +264,18 @@ impl From for Error { } } +impl From for Error { + fn from(_value: Infallible) -> Self { + unreachable!() + } +} + /// Single stack trace frame #[derive(Clone, Debug, Trace)] pub struct StackTraceElement { /// Source of this frame /// Some frames only act as description, without attached source - pub location: Option, + pub location: Option, /// Frame description pub desc: String, } @@ -303,20 +324,20 @@ impl Debug for Error { impl std::error::Error for Error {} pub trait ErrorSource { - fn to_location(self) -> Option; + fn to_location(self) -> Option; } impl ErrorSource for &LocExpr { - fn to_location(self) -> Option { - Some(self.1.clone()) + fn to_location(self) -> Option { + Some(self.span()) } } -impl ErrorSource for &ExprLocation { - fn to_location(self) -> Option { +impl ErrorSource for &Span { + fn to_location(self) -> Option { Some(self.clone()) } } impl ErrorSource for CallLocation<'_> { - fn to_location(self) -> Option { + fn to_location(self) -> Option { self.0.cloned() } } diff --git a/crates/jrsonnet-evaluator/src/evaluate/destructure.rs b/crates/jrsonnet-evaluator/src/evaluate/destructure.rs index f3e90db0..73284792 100644 --- 
a/crates/jrsonnet-evaluator/src/evaluate/destructure.rs +++ b/crates/jrsonnet-evaluator/src/evaluate/destructure.rs @@ -110,10 +110,11 @@ pub fn destruct( fn get(self: Box) -> Result { let full = self.full.evaluate()?; let to = full.len() - self.end; - Ok(Val::Arr( - full.slice(Some(self.start), Some(to), None) - .expect("arguments checked"), - )) + Ok(Val::Arr(full.slice( + Some(self.start as i32), + Some(to as i32), + None, + ))) } } diff --git a/crates/jrsonnet-evaluator/src/evaluate/mod.rs b/crates/jrsonnet-evaluator/src/evaluate/mod.rs index a0e7f927..89e5cd1c 100644 --- a/crates/jrsonnet-evaluator/src/evaluate/mod.rs +++ b/crates/jrsonnet-evaluator/src/evaluate/mod.rs @@ -16,17 +16,18 @@ use crate::{ error::{suggest_object_fields, ErrorKind::*}, evaluate::operator::{evaluate_add_op, evaluate_binary_op_special, evaluate_unary_op}, function::{CallLocation, FuncDesc, FuncVal}, + in_frame, typed::Typed, - val::{CachedUnbound, IndexableVal, StrValue, Thunk, ThunkValue}, + val::{CachedUnbound, IndexableVal, NumValue, StrValue, Thunk, ThunkValue}, Context, Error, GcHashMap, ObjValue, ObjValueBuilder, ObjectAssertion, Pending, Result, - ResultExt, State, Unbound, Val, + ResultExt, Unbound, Val, }; pub mod destructure; pub mod operator; pub fn evaluate_trivial(expr: &LocExpr) -> Option { fn is_trivial(expr: &LocExpr) -> bool { - match &*expr.0 { + match expr.expr() { Expr::Str(_) | Expr::Num(_) | Expr::Literal(LiteralType::False | LiteralType::True | LiteralType::Null) => true, @@ -35,9 +36,11 @@ pub fn evaluate_trivial(expr: &LocExpr) -> Option { _ => false, } } - Some(match &*expr.0 { + Some(match expr.expr() { Expr::Str(s) => Val::string(s.clone()), - Expr::Num(n) => Val::Num(*n), + Expr::Num(n) => { + Val::Num(NumValue::new(*n).expect("parser will not allow non-finite values")) + } Expr::Literal(LiteralType::False) => Val::Bool(false), Expr::Literal(LiteralType::True) => Val::Bool(true), Expr::Literal(LiteralType::Null) => Val::Null, @@ -69,8 +72,8 @@ pub fn 
evaluate_method(ctx: Context, name: IStr, params: ParamsDesc, body: LocEx pub fn evaluate_field_name(ctx: Context, field_name: &FieldName) -> Result> { Ok(match field_name { FieldName::Fixed(n) => Some(n.clone()), - FieldName::Dyn(expr) => State::push( - CallLocation::new(&expr.1), + FieldName::Dyn(expr) => in_frame( + CallLocation::new(&expr.span()), || "evaluating field name".to_string(), || { let value = evaluate(ctx, expr)?; @@ -229,7 +232,7 @@ pub fn evaluate_field_member + Clone>( .field(name.clone()) .with_add(*plus) .with_visibility(*visibility) - .with_location(value.1.clone()) + .with_location(value.span()) .bindable(UnboundValue { uctx, value: value.clone(), @@ -264,7 +267,7 @@ pub fn evaluate_field_member + Clone>( builder .field(name.clone()) .with_visibility(*visibility) - .with_location(value.1.clone()) + .with_location(value.span()) .bindable(UnboundMethod { uctx, value: value.clone(), @@ -372,7 +375,7 @@ pub fn evaluate_apply( if tailstrict { body()? } else { - State::push(loc, || format!("function <{}> call", f.name()), body)? + in_frame(loc, || format!("function <{}> call", f.name()), body)? 
} } v => bail!(OnlyFunctionsCanBeCalledGot(v.value_type())), @@ -382,14 +385,14 @@ pub fn evaluate_apply( pub fn evaluate_assert(ctx: Context, assertion: &AssertStmt) -> Result<()> { let value = &assertion.0; let msg = &assertion.1; - let assertion_result = State::push( - CallLocation::new(&value.1), + let assertion_result = in_frame( + CallLocation::new(&value.span()), || "assertion condition".to_owned(), || bool::from_untyped(evaluate(ctx.clone(), value)?), )?; if !assertion_result { - State::push( - CallLocation::new(&value.1), + in_frame( + CallLocation::new(&value.span()), || "assertion failure".to_owned(), || { if let Some(msg) = msg { @@ -404,8 +407,7 @@ pub fn evaluate_assert(ctx: Context, assertion: &AssertStmt) -> Result<()> { pub fn evaluate_named(ctx: Context, expr: &LocExpr, name: IStr) -> Result { use Expr::*; - let LocExpr(raw_expr, _loc) = expr; - Ok(match &**raw_expr { + Ok(match expr.expr() { Function(params, body) => evaluate_method(ctx, name, params.clone(), body.clone()), _ => evaluate(ctx, expr)?, }) @@ -418,8 +420,8 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { if let Some(trivial) = evaluate_trivial(expr) { return Ok(trivial); } - let LocExpr(expr, loc) = expr; - Ok(match &**expr { + let loc = expr.span(); + Ok(match expr.expr() { Literal(LiteralType::This) => { Val::Obj(ctx.this().ok_or(CantUseSelfOutsideOfObject)?.clone()) } @@ -438,7 +440,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { Literal(LiteralType::Null) => Val::Null, Parened(e) => evaluate(ctx, e)?, Str(v) => Val::string(v.clone()), - Num(v) => Val::new_checked_num(*v)?, + Num(v) => Val::try_num(*v)?, // I have tried to remove special behavior from super by implementing standalone-super // expresion, but looks like this case still needs special treatment. 
// @@ -446,7 +448,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { // because the standalone super literal is not supported, that is because in other // implementations `in super` treated differently from in `smth_else`. BinaryOp(field, BinaryOpType::In, e) - if matches!(&*e.0, Expr::Literal(LiteralType::Super)) => + if matches!(e.expr(), Expr::Literal(LiteralType::Super)) => { let Some(super_obj) = ctx.super_obj() else { return Ok(Val::Bool(false)); @@ -456,53 +458,51 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { } BinaryOp(v1, o, v2) => evaluate_binary_op_special(ctx, v1, *o, v2)?, UnaryOp(o, v) => evaluate_unary_op(*o, &evaluate(ctx, v)?)?, - Var(name) => State::push( - CallLocation::new(loc), + Var(name) => in_frame( + CallLocation::new(&loc), || format!("variable <{name}> access"), || ctx.binding(name.clone())?.evaluate(), )?, Index { indexable, parts } => { let mut parts = parts.iter(); - let mut indexable = match &indexable { - // Cheaper to execute than creating object with overriden `this` - LocExpr(v, _) if matches!(&**v, Expr::Literal(LiteralType::Super)) => { - let part = parts.next().expect("at least part should exist"); - let Some(super_obj) = ctx.super_obj() else { - #[cfg(feature = "exp-null-coaelse")] - if part.null_coaelse { - return Ok(Val::Null); - } - bail!(NoSuperFound) - }; - let name = evaluate(ctx.clone(), &part.value)?; - - let Val::Str(name) = name else { - bail!(ValueIndexMustBeTypeGot( - ValType::Obj, - ValType::Str, - name.value_type(), - )) - }; - - let this = ctx - .this() - .expect("no this found, while super present, should not happen"); - let name = name.into_flat(); - match super_obj - .get_for(name.clone(), this.clone()) - .with_description_src(&part.value, || format!("field <{name}> access"))? 
- { - Some(v) => v, - #[cfg(feature = "exp-null-coaelse")] - None if part.null_coaelse => return Ok(Val::Null), - None => { - let suggestions = suggest_object_fields(super_obj, name.clone()); + let mut indexable = if matches!(indexable.expr(), Expr::Literal(LiteralType::Super)) { + let part = parts.next().expect("at least part should exist"); + let Some(super_obj) = ctx.super_obj() else { + #[cfg(feature = "exp-null-coaelse")] + if part.null_coaelse { + return Ok(Val::Null); + } + bail!(NoSuperFound) + }; + let name = evaluate(ctx.clone(), &part.value)?; - bail!(NoSuchField(name, suggestions)) - } + let Val::Str(name) = name else { + bail!(ValueIndexMustBeTypeGot( + ValType::Obj, + ValType::Str, + name.value_type(), + )) + }; + + let this = ctx + .this() + .expect("no this found, while super present, should not happen"); + let name = name.into_flat(); + match super_obj + .get_for(name.clone(), this.clone()) + .with_description_src(&part.value, || format!("field <{name}> access"))? + { + Some(v) => v, + #[cfg(feature = "exp-null-coaelse")] + None if part.null_coaelse => return Ok(Val::Null), + None => { + let suggestions = suggest_object_fields(super_obj, name.clone()); + + bail!(NoSuchField(name, suggestions)) } } - e => evaluate(ctx.clone(), e)?, + } else { + evaluate(ctx.clone(), indexable)? 
}; for part in parts { @@ -530,6 +530,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { n.value_type(), )), (Val::Arr(v), Val::Num(n)) => { + let n = n.get(); if n.fract() > f64::EPSILON { bail!(FractionalIndex) } @@ -553,13 +554,13 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { .clone() .into_flat() .chars() - .skip(n as usize) + .skip(n.get() as usize) .take(1) .collect::() .into(); if v.is_empty() { let size = s.into_flat().chars().count(); - bail!(StringBoundsError(n as usize, size)) + bail!(StringBoundsError(n.get() as usize, size)) } StrValue::Flat(v) }), @@ -636,7 +637,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { &Val::Obj(evaluate_object(ctx, b)?), )?, Apply(value, args, tailstrict) => { - evaluate_apply(ctx, value, args, CallLocation::new(loc), *tailstrict)? + evaluate_apply(ctx, value, args, CallLocation::new(&loc), *tailstrict)? } Function(params, body) => { evaluate_method(ctx, "anonymous".into(), params.clone(), body.clone()) @@ -645,8 +646,8 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { evaluate_assert(ctx.clone(), assert)?; evaluate(ctx, returned)? } - ErrorStmt(e) => State::push( - CallLocation::new(loc), + ErrorStmt(e) => in_frame( + CallLocation::new(&loc), || "error statement".to_owned(), || bail!(RuntimeError(evaluate(ctx, e)?.to_string()?,)), )?, @@ -655,8 +656,8 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { cond_then, cond_else, } => { - if State::push( - CallLocation::new(loc), + if in_frame( + CallLocation::new(&loc), || "if condition".to_owned(), || bool::from_untyped(evaluate(ctx.clone(), &cond.0)?), )? 
{ @@ -676,7 +677,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { desc: &'static str, ) -> Result> { if let Some(value) = expr { - Ok(Some(State::push( + Ok(Some(in_frame( loc, || format!("slice {desc}"), || T::from_untyped(evaluate(ctx.clone(), value)?), @@ -687,7 +688,7 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { } let indexable = evaluate(ctx.clone(), value)?; - let loc = CallLocation::new(loc); + let loc = CallLocation::new(&loc); let start = parse_idx(loc, &ctx, desc.start.as_ref(), "start")?; let end = parse_idx(loc, &ctx, desc.end.as_ref(), "end")?; @@ -696,15 +697,15 @@ pub fn evaluate(ctx: Context, expr: &LocExpr) -> Result { IndexableVal::into_untyped(indexable.into_indexable()?.slice(start, end, step)?)? } i @ (Import(path) | ImportStr(path) | ImportBin(path)) => { - let Expr::Str(path) = &*path.0 else { + let Expr::Str(path) = &path.expr() else { bail!("computed imports are not supported") }; let tmp = loc.clone().0; let s = ctx.state(); let resolved_path = s.resolve_from(tmp.source_path(), path as &str)?; match i { - Import(_) => State::push( - CallLocation::new(loc), + Import(_) => in_frame( + CallLocation::new(&loc), || format!("import {:?}", path.clone()), || s.import_resolved(resolved_path), )?, diff --git a/crates/jrsonnet-evaluator/src/evaluate/operator.rs b/crates/jrsonnet-evaluator/src/evaluate/operator.rs index c15a8281..4988d219 100644 --- a/crates/jrsonnet-evaluator/src/evaluate/operator.rs +++ b/crates/jrsonnet-evaluator/src/evaluate/operator.rs @@ -17,10 +17,10 @@ pub fn evaluate_unary_op(op: UnaryOpType, b: &Val) -> Result { use UnaryOpType::*; use Val::*; Ok(match (op, b) { - (Plus, Num(n)) => Num(*n), - (Minus, Num(n)) => Num(-*n), + (Plus, Num(n)) => Val::Num(*n), + (Minus, Num(n)) => Val::try_num(-n.get())?, (Not, Bool(v)) => Bool(!v), - (BitNot, Num(n)) => Num(!(*n as i64) as f64), + (BitNot, Num(n)) => Val::try_num(!(n.get() as i64) as f64)?, (op, o) => bail!(UnaryOperatorDoesNotOperateOnType(op, 
o.value_type())), }) } @@ -40,7 +40,7 @@ pub fn evaluate_add_op(a: &Val, b: &Val) -> Result { (Obj(v1), Obj(v2)) => Obj(v2.extend_from(v1.clone())), (Arr(a), Arr(b)) => Val::Arr(ArrValue::extended(a.clone(), b.clone())), - (Num(v1), Num(v2)) => Val::new_checked_num(v1 + v2)?, + (Num(v1), Num(v2)) => Val::try_num(v1.get() + v2.get())?, #[cfg(feature = "exp-bigint")] (BigInt(a), BigInt(b)) => BigInt(Box::new(&**a + &**b)), _ => bail!(BinaryOperatorDoesNotOperateOnValues( @@ -55,10 +55,10 @@ pub fn evaluate_mod_op(a: &Val, b: &Val) -> Result { use Val::*; match (a, b) { (Num(a), Num(b)) => { - if *b == 0.0 { + if b.get() == 0.0 { bail!(DivisionByZero) } - Ok(Num(a % b)) + Ok(Val::try_num(a.get() % b.get())?) } (Str(str), vals) => { String::into_untyped(std_format(&str.clone().into_flat(), vals.clone())?) @@ -94,26 +94,17 @@ pub fn evaluate_compare_op(a: &Val, b: &Val, op: BinaryOpType) -> Result a.cmp(b), - (Num(a), Num(b)) => a.partial_cmp(b).expect("jsonnet numbers are non NaN"), + (Num(a), Num(b)) => a.cmp(b), #[cfg(feature = "exp-bigint")] (BigInt(a), BigInt(b)) => a.cmp(b), (Arr(a), Arr(b)) => { - if let (Some(ai), Some(bi)) = (a.iter_cheap(), b.iter_cheap()) { - for (a, b) in ai.zip(bi) { - let ord = evaluate_compare_op(&a, &b, op)?; - if !ord.is_eq() { - return Ok(ord); - } - } - } else { - let ai = a.iter(); - let bi = b.iter(); - - for (a, b) in ai.zip(bi) { - let ord = evaluate_compare_op(&a?, &b?, op)?; - if !ord.is_eq() { - return Ok(ord); - } + let ai = a.iter(); + let bi = b.iter(); + + for (a, b) in ai.zip(bi) { + let ord = evaluate_compare_op(&a?, &b?, op)?; + if !ord.is_eq() { + return Ok(ord); } } a.len().cmp(&b.len()) @@ -143,39 +134,39 @@ pub fn evaluate_binary_op_normal(a: &Val, op: BinaryOpType, b: &Val) -> Result Bool(obj.has_field_ex(a.clone().into_flat(), true)), (a, Mod, b) => evaluate_mod_op(a, b)?, - (Str(v1), Mul, Num(v2)) => Val::string(v1.to_string().repeat(*v2 as usize)), + (Str(v1), Mul, Num(v2)) => 
Val::string(v1.to_string().repeat(v2.get() as usize)), // Bool X Bool (Bool(a), And, Bool(b)) => Bool(*a && *b), (Bool(a), Or, Bool(b)) => Bool(*a || *b), // Num X Num - (Num(v1), Mul, Num(v2)) => Val::new_checked_num(v1 * v2)?, + (Num(v1), Mul, Num(v2)) => Val::try_num(v1.get() * v2.get())?, (Num(v1), Div, Num(v2)) => { - if *v2 == 0.0 { + if v2.get() == 0.0 { bail!(DivisionByZero) } - Val::new_checked_num(v1 / v2)? + Val::try_num(v1.get() / v2.get())? } - (Num(v1), Sub, Num(v2)) => Val::new_checked_num(v1 - v2)?, + (Num(v1), Sub, Num(v2)) => Val::try_num(v1.get() - v2.get())?, - (Num(v1), BitAnd, Num(v2)) => Num((*v1 as i64 & *v2 as i64) as f64), - (Num(v1), BitOr, Num(v2)) => Num((*v1 as i64 | *v2 as i64) as f64), - (Num(v1), BitXor, Num(v2)) => Num((*v1 as i64 ^ *v2 as i64) as f64), + (Num(v1), BitAnd, Num(v2)) => Val::try_num((v1.get() as i64 & v2.get() as i64) as f64)?, + (Num(v1), BitOr, Num(v2)) => Val::try_num((v1.get() as i64 | v2.get() as i64) as f64)?, + (Num(v1), BitXor, Num(v2)) => Val::try_num((v1.get() as i64 ^ v2.get() as i64) as f64)?, (Num(v1), Lhs, Num(v2)) => { - if *v2 < 0.0 { + if v2.get() < 0.0 { bail!("shift by negative exponent") } - let exp = ((*v2 as i64) & 63) as u32; - Num((*v1 as i64).wrapping_shl(exp) as f64) + let exp = ((v2.get() as i64) & 63) as u32; + Val::try_num((v1.get() as i64).wrapping_shl(exp) as f64)? } (Num(v1), Rhs, Num(v2)) => { - if *v2 < 0.0 { + if v2.get() < 0.0 { bail!("shift by negative exponent") } - let exp = ((*v2 as i64) & 63) as u32; - Num((*v1 as i64).wrapping_shr(exp) as f64) + let exp = ((v2.get() as i64) & 63) as u32; + Val::try_num((v1.get() as i64).wrapping_shr(exp) as f64)? 
} // Bigint X Bigint diff --git a/crates/jrsonnet-evaluator/src/function/builtin.rs b/crates/jrsonnet-evaluator/src/function/builtin.rs index 8658d633..0af0f9e3 100644 --- a/crates/jrsonnet-evaluator/src/function/builtin.rs +++ b/crates/jrsonnet-evaluator/src/function/builtin.rs @@ -33,22 +33,40 @@ impl PartialEq for ParamName { } } +#[derive(Clone, Copy, Debug, Trace)] +pub enum ParamDefault { + None, + Exists, + Literal(&'static str), +} +impl ParamDefault { + pub const fn exists(is_exists: bool) -> Self { + if is_exists { + Self::Exists + } else { + Self::None + } + } +} + #[derive(Clone, Trace)] pub struct BuiltinParam { name: ParamName, - has_default: bool, + default: ParamDefault, } impl BuiltinParam { - pub const fn new(name: ParamName, has_default: bool) -> Self { - Self { name, has_default } + pub const fn new(name: ParamName, default: ParamDefault) -> Self { + Self { name, default } } /// Parameter name for named call parsing pub fn name(&self) -> &ParamName { &self.name } - /// Is implementation allowed to return empty value + pub fn default(&self) -> ParamDefault { + self.default + } pub fn has_default(&self) -> bool { - self.has_default + !matches!(self.default, ParamDefault::None) } } @@ -87,7 +105,7 @@ impl NativeCallback { .into_iter() .map(|n| BuiltinParam { name: ParamName::new_dynamic(n), - has_default: false, + default: ParamDefault::None, }) .collect(), handler: tb!(handler), diff --git a/crates/jrsonnet-evaluator/src/function/mod.rs b/crates/jrsonnet-evaluator/src/function/mod.rs index c26fdbc5..b3ac9e89 100644 --- a/crates/jrsonnet-evaluator/src/function/mod.rs +++ b/crates/jrsonnet-evaluator/src/function/mod.rs @@ -4,11 +4,11 @@ pub use arglike::{ArgLike, ArgsLike, TlaArg}; use jrsonnet_gcmodule::{Cc, Trace}; use jrsonnet_interner::IStr; pub use jrsonnet_macros::builtin; -use jrsonnet_parser::{Destruct, Expr, ExprLocation, LocExpr, ParamsDesc}; +use jrsonnet_parser::{Destruct, Expr, LocExpr, ParamsDesc, Span}; use self::{ 
arglike::OptionalContext, - builtin::{Builtin, BuiltinParam, ParamName, StaticBuiltin}, + builtin::{Builtin, BuiltinParam, ParamDefault, ParamName, StaticBuiltin}, native::NativeDesc, parse::{parse_default_function_call, parse_function_call}, }; @@ -22,10 +22,10 @@ pub mod parse; /// Function callsite location. /// Either from other jsonnet code, specified by expression location, or from native (without location). #[derive(Clone, Copy)] -pub struct CallLocation<'l>(pub Option<&'l ExprLocation>); +pub struct CallLocation<'l>(pub Option<&'l Span>); impl<'l> CallLocation<'l> { /// Construct new location for calls coming from specified jsonnet expression location. - pub const fn new(loc: &'l ExprLocation) -> Self { + pub const fn new(loc: &'l Span) -> Self { Self(Some(loc)) } } @@ -142,7 +142,7 @@ impl FuncVal { .as_ref() .map(IStr::to_string) .map_or(ParamName::ANONYMOUS, ParamName::new_dynamic), - p.1.is_some(), + ParamDefault::exists(p.1.is_some()), ) }) .collect(), @@ -225,7 +225,7 @@ impl FuncVal { #[cfg(feature = "exp-destruct")] _ => return false, }; - &desc.body.0 as &Expr == &Expr::Var(id.clone()) + desc.body.expr() == &Expr::Var(id.clone()) } _ => false, } diff --git a/crates/jrsonnet-evaluator/src/function/parse.rs b/crates/jrsonnet-evaluator/src/function/parse.rs index 86e297d2..03f014c8 100644 --- a/crates/jrsonnet-evaluator/src/function/parse.rs +++ b/crates/jrsonnet-evaluator/src/function/parse.rs @@ -10,6 +10,7 @@ use crate::{ destructure::destruct, error::{ErrorKind::*, Result}, evaluate_named, + function::builtin::ParamDefault, gc::GcHashMap, val::ThunkValue, Context, Pending, Thunk, Val, @@ -49,7 +50,10 @@ pub fn parse_function_call( if args.unnamed_len() > params.len() { bail!(TooManyArgsFunctionHas( params.len(), - params.iter().map(|p| (p.0.name(), p.1.is_some())).collect() + params + .iter() + .map(|p| (p.0.name(), ParamDefault::exists(p.1.is_some()))) + .collect() )) } @@ -127,7 +131,10 @@ pub fn parse_function_call( if !found { 
bail!(FunctionParameterNotBoundInCall( param.0.clone().name(), - params.iter().map(|p| (p.0.name(), p.1.is_some())).collect() + params + .iter() + .map(|p| (p.0.name(), ParamDefault::exists(p.1.is_some()))) + .collect() )); } } @@ -163,7 +170,7 @@ pub fn parse_builtin_call( params.len(), params .iter() - .map(|p| (p.name().as_str().map(IStr::from), p.has_default())) + .map(|p| (p.name().as_str().map(IStr::from), p.default())) .collect() )) } @@ -211,7 +218,7 @@ pub fn parse_builtin_call( param.name().as_str().map(IStr::from), params .iter() - .map(|p| (p.name().as_str().map(IStr::from), p.has_default())) + .map(|p| (p.name().as_str().map(IStr::from), p.default())) .collect() )); } @@ -232,7 +239,10 @@ pub fn parse_default_function_call(body_ctx: Context, params: &ParamsDesc) -> Re fn get(self: Box) -> Result { Err(FunctionParameterNotBoundInCall( Some(self.0.clone()), - self.1.iter().map(|p| (p.0.name(), p.1.is_some())).collect(), + self.1 + .iter() + .map(|p| (p.0.name(), ParamDefault::exists(p.1.is_some()))) + .collect(), ) .into()) } diff --git a/crates/jrsonnet-evaluator/src/import.rs b/crates/jrsonnet-evaluator/src/import.rs index 3cc0a1d5..53946968 100644 --- a/crates/jrsonnet-evaluator/src/import.rs +++ b/crates/jrsonnet-evaluator/src/import.rs @@ -1,6 +1,5 @@ use std::{ any::Any, - cell::RefCell, env::current_dir, fs, io::{ErrorKind, Read}, @@ -41,8 +40,10 @@ pub trait ImportResolver: Trace { /// this cannot be resolved using associated type, as evaluator uses object instead of generic for [`ImportResolver`] fn load_file_contents(&self, resolved: &SourcePath) -> Result>; - /// For downcasts + // For downcasts, will be removed after trait_upcasting_coercion + // stabilization. 
fn as_any(&self) -> &dyn Any; + fn as_any_mut(&mut self) -> &mut dyn Any; } /// Dummy resolver, can't resolve/load any file @@ -56,6 +57,9 @@ impl ImportResolver for DummyImportResolver { fn as_any(&self) -> &dyn Any { self } + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } } #[allow(clippy::use_self)] impl Default for Box { @@ -69,17 +73,15 @@ impl Default for Box { pub struct FileImportResolver { /// Library directories to search for file. /// Referred to as `jpath` in original jsonnet implementation. - library_paths: RefCell>, + library_paths: Vec, } impl FileImportResolver { - pub fn new(jpath: Vec) -> Self { - Self { - library_paths: RefCell::new(jpath), - } + pub fn new(library_paths: Vec) -> Self { + Self { library_paths } } /// Dynamically add new jpath, used by bindings - pub fn add_jpath(&self, path: PathBuf) { - self.library_paths.borrow_mut().push(path); + pub fn add_jpath(&mut self, path: PathBuf) { + self.library_paths.push(path); } } @@ -132,7 +134,7 @@ impl ImportResolver for FileImportResolver { if let Some(direct) = check_path(&direct)? { return Ok(direct); } - for library_path in self.library_paths.borrow().iter() { + for library_path in &self.library_paths { let mut cloned = library_path.clone(); cloned.push(path); if let Some(cloned) = check_path(&cloned)? 
{ @@ -165,11 +167,15 @@ impl ImportResolver for FileImportResolver { Ok(out) } + fn resolve_from_default(&self, path: &str) -> Result { + self.resolve_from(&SourcePath::default(), path) + } + fn as_any(&self) -> &dyn Any { self } - fn resolve_from_default(&self, path: &str) -> Result { - self.resolve_from(&SourcePath::default(), path) + fn as_any_mut(&mut self) -> &mut dyn Any { + self } } diff --git a/crates/jrsonnet-evaluator/src/integrations/serde.rs b/crates/jrsonnet-evaluator/src/integrations/serde.rs index 0511b61e..f42bf0b6 100644 --- a/crates/jrsonnet-evaluator/src/integrations/serde.rs +++ b/crates/jrsonnet-evaluator/src/integrations/serde.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use jrsonnet_interner::IStr; use serde::{ - de::Visitor, + de::{self, Visitor}, ser::{ Error, SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple, SerializeTupleStruct, SerializeTupleVariant, @@ -11,7 +11,8 @@ use serde::{ }; use crate::{ - arr::ArrValue, runtime_error, Error as JrError, ObjValue, ObjValueBuilder, Result, State, Val, + arr::ArrValue, in_description_frame, runtime_error, val::NumValue, Error as JrError, ObjValue, + ObjValueBuilder, Result, Val, }; impl<'de> Deserialize<'de> for Val { @@ -37,22 +38,21 @@ impl<'de> Deserialize<'de> for Val { fn visit_bool(self, v: bool) -> Result where - E: serde::de::Error, + E: de::Error, { Ok(Val::Bool(v)) } fn visit_f64(self, v: f64) -> Result where - E: serde::de::Error, + E: de::Error, { - if !v.is_finite() { - return Err(E::custom("only finite numbers are supported")); - } - Ok(Val::Num(v)) + Ok(Val::Num(NumValue::new(v).ok_or_else(|| { + E::custom("only finite numbers are supported") + })?)) } fn visit_str(self, v: &str) -> Result where - E: serde::de::Error, + E: de::Error, { Ok(Val::string(v)) } @@ -67,27 +67,27 @@ impl<'de> Deserialize<'de> for Val { // } fn visit_i64(self, v: i64) -> Result where - E: serde::de::Error, + E: de::Error, { - Ok(Val::Num(v as f64)) + Ok(Val::Num(NumValue::new(v 
as f64).expect("no overflow"))) } fn visit_u64(self, v: u64) -> Result where - E: serde::de::Error, + E: de::Error, { - Ok(Val::Num(v as f64)) + Ok(Val::Num(NumValue::new(v as f64).expect("no overflow"))) } fn visit_bytes(self, v: &[u8]) -> Result where - E: serde::de::Error, + E: de::Error, { Ok(Val::Arr(ArrValue::bytes(v.into()))) } fn visit_none(self) -> Result where - E: serde::de::Error, + E: de::Error, { Ok(Val::Null) } @@ -100,7 +100,7 @@ impl<'de> Deserialize<'de> for Val { fn visit_unit(self) -> Result where - E: serde::de::Error, + E: de::Error, { Ok(Val::Null) } @@ -114,7 +114,7 @@ impl<'de> Deserialize<'de> for Val { fn visit_seq(self, mut seq: A) -> Result where - A: serde::de::SeqAccess<'de>, + A: de::SeqAccess<'de>, { let mut out = seq.size_hint().map_or_else(Vec::new, Vec::with_capacity); @@ -127,7 +127,7 @@ impl<'de> Deserialize<'de> for Val { fn visit_map(self, mut map: A) -> Result where - A: serde::de::MapAccess<'de>, + A: de::MapAccess<'de>, { let mut out = map .size_hint() @@ -159,11 +159,12 @@ impl Serialize for Val { Self::Null => serializer.serialize_none(), Self::Str(s) => serializer.serialize_str(&s.clone().into_flat()), Self::Num(n) => { + let n = n.get(); if n.fract() == 0.0 { - let n = *n as i64; + let n = n as i64; serializer.serialize_i64(n) } else { - serializer.serialize_f64(*n) + serializer.serialize_f64(n) } } #[cfg(feature = "exp-bigint")] @@ -172,8 +173,7 @@ impl Serialize for Val { let mut seq = serializer.serialize_seq(Some(arr.len()))?; for (i, element) in arr.iter().enumerate() { let mut serde_error = None; - // TODO: rewrite using try{} after stabilization - State::push_description( + in_description_frame( || format!("array index [{i}]"), || { let e = element?; @@ -198,7 +198,7 @@ impl Serialize for Val { ) { let mut serde_error = None; // TODO: rewrite using try{} after stabilization - State::push_description( + in_description_frame( || format!("object field {field:?}"), || { let v = value?; @@ -449,15 +449,15 @@ impl 
Serializer for IntoValSerializer { } fn serialize_i8(self, v: i8) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_i16(self, v: i16) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_i32(self, v: i32) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_i64(self, v: i64) -> Result { @@ -465,15 +465,15 @@ impl Serializer for IntoValSerializer { } fn serialize_u8(self, v: u8) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_u16(self, v: u16) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_u32(self, v: u32) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::Num(v.into())) } fn serialize_u64(self, v: u64) -> Result { @@ -481,11 +481,11 @@ impl Serializer for IntoValSerializer { } fn serialize_f32(self, v: f32) -> Result { - Ok(Val::Num(f64::from(v))) + Ok(Val::try_num(f64::from(v))?) } fn serialize_f64(self, v: f64) -> Result { - Ok(Val::Num(v)) + Ok(Val::try_num(v)?) 
} fn serialize_char(self, v: char) -> Result { diff --git a/crates/jrsonnet-evaluator/src/lib.rs b/crates/jrsonnet-evaluator/src/lib.rs index 76c4d161..a543c93a 100644 --- a/crates/jrsonnet-evaluator/src/lib.rs +++ b/crates/jrsonnet-evaluator/src/lib.rs @@ -27,7 +27,7 @@ pub mod val; use std::{ any::Any, - cell::{Ref, RefCell, RefMut}, + cell::{RefCell, RefMut}, fmt::{self, Debug}, path::Path, }; @@ -45,7 +45,7 @@ pub use jrsonnet_interner::{IBytes, IStr}; #[doc(hidden)] pub use jrsonnet_macros; pub use jrsonnet_parser as parser; -use jrsonnet_parser::{ExprLocation, LocExpr, ParserSettings, Source, SourcePath}; +use jrsonnet_parser::{LocExpr, ParserSettings, Source, SourcePath}; pub use obj::*; use stack::check_depth; pub use tla::apply_tla; @@ -116,6 +116,29 @@ impl ContextInitializer for () { } } +impl ContextInitializer for Option +where + T: ContextInitializer, +{ + fn initialize(&self, state: State, for_file: Source) -> Context { + if let Some(ctx) = self { + ctx.initialize(state, for_file) + } else { + ().initialize(state, for_file) + } + } + + fn populate(&self, for_file: Source, builder: &mut ContextBuilder) { + if let Some(ctx) = self { + ctx.populate(for_file, builder); + } + } + + fn as_any(&self) -> &dyn Any { + self + } +} + macro_rules! impl_context_initializer { ($($gen:ident)*) => { #[allow(non_snake_case)] @@ -147,24 +170,6 @@ impl_context_initializer! 
{ A @ B C D E F G } -/// Dynamically reconfigurable evaluation settings -#[derive(Trace)] -pub struct EvaluationSettings { - /// Context initializer, which will be used for imports and everything - /// [`NoopContextInitializer`] is used by default, most likely you want to have `jrsonnet-stdlib` - pub context_initializer: TraceBox, - /// Used to resolve file locations/contents - pub import_resolver: TraceBox, -} -impl Default for EvaluationSettings { - fn default() -> Self { - Self { - context_initializer: tb!(()), - import_resolver: tb!(DummyImportResolver), - } - } -} - #[derive(Trace)] struct FileData { string: Option, @@ -207,16 +212,19 @@ impl FileData { } } -#[derive(Default, Trace)] +#[derive(Trace)] pub struct EvaluationStateInternals { /// Internal state file_cache: RefCell>, - /// Settings, safe to change at runtime - settings: RefCell, + /// Context initializer, which will be used for imports and everything + /// [`NoopContextInitializer`] is used by default, most likely you want to have `jrsonnet-stdlib` + context_initializer: TraceBox, + /// Used to resolve file locations/contents + import_resolver: TraceBox, } /// Maintains stack trace and import resolution -#[derive(Default, Clone, Trace)] +#[derive(Clone, Trace)] pub struct State(Cc); impl State { @@ -228,7 +236,7 @@ impl State { let file = match file { RawEntryMut::Occupied(ref mut d) => d.get_mut(), RawEntryMut::Vacant(v) => { - let data = self.settings().import_resolver.load_file_contents(&path)?; + let data = self.import_resolver().load_file_contents(&path)?; v.insert( path.clone(), FileData::new_string( @@ -252,7 +260,7 @@ impl State { let file = match file { RawEntryMut::Occupied(ref mut d) => d.get_mut(), RawEntryMut::Vacant(v) => { - let data = self.settings().import_resolver.load_file_contents(&path)?; + let data = self.import_resolver().load_file_contents(&path)?; v.insert(path.clone(), FileData::new_bytes(data.as_slice().into())) .1 } @@ -279,7 +287,7 @@ impl State { let file = match file 
{ RawEntryMut::Occupied(ref mut d) => d.get_mut(), RawEntryMut::Vacant(v) => { - let data = self.settings().import_resolver.load_file_contents(&path)?; + let data = self.import_resolver().load_file_contents(&path)?; v.insert( path.clone(), FileData::new_string( @@ -350,8 +358,7 @@ impl State { /// Creates context with all passed global variables pub fn create_default_context(&self, source: Source) -> Context { - let context_initializer = &self.settings().context_initializer; - context_initializer.initialize(self.clone(), source) + self.context_initializer().initialize(self.clone(), source) } /// Creates context with all passed global variables, calling custom modifier @@ -360,7 +367,7 @@ impl State { source: Source, context_initializer: impl ContextInitializer, ) -> Context { - let default_initializer = &self.settings().context_initializer; + let default_initializer = self.context_initializer(); let mut builder = ContextBuilder::with_capacity( self.clone(), default_initializer.reserve_vars() + context_initializer.reserve_vars(), @@ -370,38 +377,6 @@ impl State { builder.build() } - - /// Executes code creating a new stack frame - pub fn push( - e: CallLocation<'_>, - frame_desc: impl FnOnce() -> String, - f: impl FnOnce() -> Result, - ) -> Result { - let _guard = check_depth()?; - - f().with_description_src(e, frame_desc) - } - - /// Executes code creating a new stack frame - pub fn push_val( - &self, - e: &ExprLocation, - frame_desc: impl FnOnce() -> String, - f: impl FnOnce() -> Result, - ) -> Result { - let _guard = check_depth()?; - - f().with_description_src(e, frame_desc) - } - /// Executes code creating a new stack frame - pub fn push_description( - frame_desc: impl FnOnce() -> String, - f: impl FnOnce() -> Result, - ) -> Result { - let _guard = check_depth()?; - - f().with_description(frame_desc) - } } /// Internals @@ -409,49 +384,26 @@ impl State { fn file_cache(&self) -> RefMut<'_, GcHashMap> { self.0.file_cache.borrow_mut() } - pub fn settings(&self) -> 
Ref<'_, EvaluationSettings> { - self.0.settings.borrow() - } - pub fn settings_mut(&self) -> RefMut<'_, EvaluationSettings> { - self.0.settings.borrow_mut() - } - pub fn add_global(&self, name: IStr, value: Thunk) { - #[derive(Trace)] - struct GlobalsCtx { - globals: RefCell>>, - inner: TraceBox, - } - impl ContextInitializer for GlobalsCtx { - fn reserve_vars(&self) -> usize { - self.inner.reserve_vars() + self.globals.borrow().len() - } - fn populate(&self, for_file: Source, builder: &mut ContextBuilder) { - self.inner.populate(for_file, builder); - for (name, val) in self.globals.borrow().iter() { - builder.bind(name.clone(), val.clone()); - } - } +} +/// Executes code creating a new stack frame, to be replaced with try{} +pub fn in_frame( + e: CallLocation<'_>, + frame_desc: impl FnOnce() -> String, + f: impl FnOnce() -> Result, +) -> Result { + let _guard = check_depth()?; + + f().with_description_src(e, frame_desc) +} - fn as_any(&self) -> &dyn Any { - self - } - } - let mut settings = self.settings_mut(); - let initializer = &mut settings.context_initializer; - if let Some(global) = initializer.as_any().downcast_ref::() { - global.globals.borrow_mut().insert(name, value); - } else { - let inner = std::mem::replace(&mut settings.context_initializer, tb!(())); - settings.context_initializer = tb!(GlobalsCtx { - globals: { - let mut out = GcHashMap::with_capacity(1); - out.insert(name, value); - RefCell::new(out) - }, - inner - }); - } - } +/// Executes code creating a new stack frame, to be replaced with try{} +pub fn in_description_frame( + frame_desc: impl FnOnce() -> String, + f: impl FnOnce() -> Result, +) -> Result { + let _guard = check_depth()?; + + f().with_description(frame_desc) } #[derive(Trace)] @@ -523,16 +475,51 @@ impl State { pub fn resolve(&self, path: impl AsRef) -> Result { self.import_resolver().resolve(path.as_ref()) } - pub fn import_resolver(&self) -> Ref<'_, dyn ImportResolver> { - Ref::map(self.settings(), |s| &*s.import_resolver) + 
pub fn import_resolver(&self) -> &dyn ImportResolver { + &*self.0.import_resolver } - pub fn set_import_resolver(&self, resolver: impl ImportResolver) { - self.settings_mut().import_resolver = tb!(resolver); + pub fn context_initializer(&self) -> &dyn ContextInitializer { + &*self.0.context_initializer } - pub fn context_initializer(&self) -> Ref<'_, dyn ContextInitializer> { - Ref::map(self.settings(), |s| &*s.context_initializer) +} + +impl State { + pub fn builder() -> StateBuilder { + StateBuilder::default() + } +} + +impl Default for State { + fn default() -> Self { + Self::builder().build() + } +} + +#[derive(Default)] +pub struct StateBuilder { + import_resolver: Option>, + context_initializer: Option>, +} +impl StateBuilder { + pub fn import_resolver(&mut self, import_resolver: impl ImportResolver) -> &mut Self { + let _ = self.import_resolver.insert(tb!(import_resolver)); + self + } + pub fn context_initializer( + &mut self, + context_initializer: impl ContextInitializer, + ) -> &mut Self { + let _ = self.context_initializer.insert(tb!(context_initializer)); + self } - pub fn set_context_initializer(&self, initializer: impl ContextInitializer) { - self.settings_mut().context_initializer = tb!(initializer); + pub fn build(mut self) -> State { + State(Cc::new(EvaluationStateInternals { + file_cache: RefCell::new(GcHashMap::new()), + context_initializer: self.context_initializer.take().unwrap_or_else(|| tb!(())), + import_resolver: self + .import_resolver + .take() + .unwrap_or_else(|| tb!(DummyImportResolver)), + })) } } diff --git a/crates/jrsonnet-evaluator/src/manifest.rs b/crates/jrsonnet-evaluator/src/manifest.rs index 3e552451..6c3bf951 100644 --- a/crates/jrsonnet-evaluator/src/manifest.rs +++ b/crates/jrsonnet-evaluator/src/manifest.rs @@ -1,6 +1,6 @@ use std::{borrow::Cow, fmt::Write, ptr}; -use crate::{bail, Result, ResultExt, State, Val}; +use crate::{bail, in_description_frame, Result, ResultExt, Val}; pub trait ManifestFormat { fn 
manifest_buf(&self, val: Val, buf: &mut String) -> Result<()>; @@ -84,8 +84,9 @@ impl<'s> JsonFormat<'s> { debug_truncate_strings: None, } } - // Same format as std.toString - pub fn std_to_string() -> Self { + /// Same format as std.toString, except does not keeps top-level string as-is + /// To avoid confusion, the format is private in jrsonnet, use [`ToStringFormat`] instead + const fn std_to_string_helper() -> Self { Self { padding: Cow::Borrowed(""), mtype: JsonFormatting::ToString, @@ -183,6 +184,8 @@ fn manifest_json_ex_buf( cur_padding: &mut String, options: &JsonFormat<'_>, ) -> Result<()> { + use JsonFormatting::*; + let mtype = options.mtype; match val { Val::Bool(v) => { @@ -218,89 +221,117 @@ fn manifest_json_ex_buf( } Val::Arr(items) => { buf.push('['); - if !items.is_empty() { - if mtype != JsonFormatting::ToString && mtype != JsonFormatting::Minify { - buf.push_str(options.newline); - } - let old_len = cur_padding.len(); - cur_padding.push_str(&options.padding); - for (i, item) in items.iter().enumerate() { - if i != 0 { - buf.push(','); - if mtype == JsonFormatting::ToString { - buf.push(' '); - } else if mtype != JsonFormatting::Minify { - buf.push_str(options.newline); - } + let old_len = cur_padding.len(); + cur_padding.push_str(&options.padding); + + let mut had_items = false; + for (i, item) in items.iter().enumerate() { + had_items = true; + let item = item.with_description(|| format!("elem <{i}> evaluation"))?; + + if i != 0 { + buf.push(','); + } + match mtype { + Manifest | Std => { + buf.push_str(options.newline); + buf.push_str(cur_padding); } + ToString if i != 0 => buf.push(' '), + Minify | ToString => {} + }; + + in_description_frame( + || format!("elem <{i}> manifestification"), + || manifest_json_ex_buf(&item, buf, cur_padding, options), + )?; + } + + cur_padding.truncate(old_len); + + match mtype { + Manifest | ToString if !had_items => { + // Empty array as "[ ]" + buf.push(' '); + } + Manifest => { + 
buf.push_str(options.newline); buf.push_str(cur_padding); - manifest_json_ex_buf(&item?, buf, cur_padding, options) - .with_description(|| format!("elem <{i}> manifestification"))?; } - cur_padding.truncate(old_len); - - if mtype != JsonFormatting::ToString && mtype != JsonFormatting::Minify { + Std => { + if !had_items { + // Stdlib formats empty array as "[\n\n]" + buf.push_str(options.newline); + } buf.push_str(options.newline); buf.push_str(cur_padding); } - } else if mtype == JsonFormatting::Std { - buf.push_str(options.newline); - buf.push_str(options.newline); - buf.push_str(cur_padding); - } else if mtype == JsonFormatting::ToString || mtype == JsonFormatting::Manifest { - buf.push(' '); + Minify | ToString => {} } + buf.push(']'); } Val::Obj(obj) => { obj.run_assertions()?; buf.push('{'); - let fields = obj.fields( - #[cfg(feature = "exp-preserve-order")] - options.preserve_order, - ); - if !fields.is_empty() { - if mtype != JsonFormatting::ToString && mtype != JsonFormatting::Minify { - buf.push_str(options.newline); - } - let old_len = cur_padding.len(); - cur_padding.push_str(&options.padding); - for (i, field) in fields.into_iter().enumerate() { - if i != 0 { - buf.push(','); - if mtype == JsonFormatting::ToString { - buf.push(' '); - } else if mtype != JsonFormatting::Minify { - buf.push_str(options.newline); - } + let old_len = cur_padding.len(); + cur_padding.push_str(&options.padding); + + let mut had_fields = false; + for (i, (key, value)) in obj + .iter( + #[cfg(feature = "exp-preserve-order")] + options.preserve_order, + ) + .enumerate() + { + had_fields = true; + let value = value.with_description(|| format!("field <{key}> evaluation"))?; + + if i != 0 { + buf.push(','); + } + match mtype { + Manifest | Std => { + buf.push_str(options.newline); + buf.push_str(cur_padding); } - buf.push_str(cur_padding); - escape_string_json_buf(&field, buf); - buf.push_str(options.key_val_sep); - State::push_description( - || format!("field <{}> 
manifestification", field.clone()), - || { - let value = obj.get(field.clone())?.unwrap(); - manifest_json_ex_buf(&value, buf, cur_padding, options)?; - Ok(()) - }, - )?; + ToString if i != 0 => buf.push(' '), + Minify | ToString => {} } - cur_padding.truncate(old_len); - if mtype != JsonFormatting::ToString && mtype != JsonFormatting::Minify { + escape_string_json_buf(&key, buf); + buf.push_str(options.key_val_sep); + in_description_frame( + || format!("field <{key}> manifestification"), + || manifest_json_ex_buf(&value, buf, cur_padding, options), + )?; + } + + cur_padding.truncate(old_len); + + match mtype { + Manifest | ToString if !had_fields => { + // Empty object as "{ }" + buf.push(' '); + } + Manifest => { buf.push_str(options.newline); buf.push_str(cur_padding); } - } else if mtype == JsonFormatting::Std { - buf.push_str(options.newline); - buf.push_str(options.newline); - buf.push_str(cur_padding); - } else if mtype == JsonFormatting::ToString || mtype == JsonFormatting::Manifest { - buf.push(' '); + Std => { + if !had_fields { + // Stdlib formats empty object as "{\n\n}" + buf.push_str(options.newline); + } + buf.push_str(options.newline); + buf.push_str(cur_padding); + } + Minify | ToString => {} } + buf.push('}'); } Val::Func(_) => bail!("tried to manifest function"), @@ -314,10 +345,17 @@ impl ManifestFormat for JsonFormat<'_> { } } +/// Same as [`JsonFormat`] with pre-set options, but top-level string is serialized as-is, +/// without quoting. 
pub struct ToStringFormat; impl ManifestFormat for ToStringFormat { fn manifest_buf(&self, val: Val, out: &mut String) -> Result<()> { - JsonFormat::std_to_string().manifest_buf(val, out) + const JSON_TO_STRING: JsonFormat = JsonFormat::std_to_string_helper(); + if let Some(str) = val.as_str() { + out.push_str(&str); + return Ok(()); + } + JSON_TO_STRING.manifest_buf(val, out) } fn file_trailing_newline(&self) -> bool { false @@ -340,7 +378,28 @@ impl ManifestFormat for StringFormat { } } -pub struct YamlStreamFormat(pub I); +pub struct YamlStreamFormat { + inner: I, + c_document_end: bool, + end_newline: bool, +} +impl YamlStreamFormat { + pub fn std_yaml_stream(inner: I, c_document_end: bool) -> Self { + Self { + inner, + c_document_end, + // Stdlib format always inserts useless newline at the end + end_newline: true, + } + } + pub fn cli(inner: I) -> Self { + Self { + inner, + c_document_end: true, + end_newline: false, + } + } +} impl ManifestFormat for YamlStreamFormat { fn manifest_buf(&self, val: Val, out: &mut String) -> Result<()> { let Val::Arr(arr) = val else { @@ -350,14 +409,22 @@ impl ManifestFormat for YamlStreamFormat { ) }; if !arr.is_empty() { - for v in arr.iter() { - let v = v?; + for (i, v) in arr.iter().enumerate() { + let v = v.with_description(|| format!("elem <{i}> evaluation"))?; out.push_str("---\n"); - self.0.manifest_buf(v, out)?; + in_description_frame( + || format!("elem <{i}> manifestification"), + || self.inner.manifest_buf(v, out), + )?; out.push('\n'); } + } + if self.c_document_end { out.push_str("..."); } + if self.end_newline { + out.push('\n'); + } Ok(()) } } diff --git a/crates/jrsonnet-evaluator/src/obj.rs b/crates/jrsonnet-evaluator/src/obj.rs index 672d2505..e00b8d2f 100644 --- a/crates/jrsonnet-evaluator/src/obj.rs +++ b/crates/jrsonnet-evaluator/src/obj.rs @@ -8,7 +8,7 @@ use std::{ use jrsonnet_gcmodule::{Cc, Trace, Weak}; use jrsonnet_interner::IStr; -use jrsonnet_parser::{ExprLocation, Visibility}; +use 
jrsonnet_parser::{Span, Visibility}; use rustc_hash::FxHashMap; use crate::{ @@ -17,10 +17,11 @@ use crate::{ error::{suggest_object_fields, Error, ErrorKind::*}, function::{CallLocation, FuncVal}, gc::{GcHashMap, GcHashSet, TraceBox}, + in_frame, operator::evaluate_add_op, tb, val::{ArrValue, ThunkValue}, - MaybeUnbound, Result, State, Thunk, Unbound, Val, + MaybeUnbound, Result, Thunk, Unbound, Val, }; #[cfg(not(feature = "exp-preserve-order"))] @@ -135,7 +136,7 @@ pub struct ObjMember { flags: ObjFieldFlags, original_index: FieldIndex, pub invoke: MaybeUnbound, - pub location: Option, + pub location: Option, } pub trait ObjectAssertion: Trace { @@ -669,17 +670,18 @@ impl ObjectLike for OopObject { } fn len(&self) -> usize { + // Maybe it will be better to not compute sort key here? self.fields_visibility() .into_iter() .filter(|(_, (visible, _))| *visible) .count() } + /// Returns false only if there is any visible entry. + /// + /// Note that object with hidden fields `{a:: 1}` will be reported as empty here. 
fn is_empty(&self) -> bool { - if !self.this_entries.is_empty() { - return false; - } - self.sup.as_ref().map_or(true, ObjValue::is_empty) + self.len() == 0 } /// Run callback for every field found in object @@ -895,7 +897,7 @@ pub struct ObjMemberBuilder { add: bool, visibility: Visibility, original_index: FieldIndex, - location: Option, + location: Option, } #[allow(clippy::missing_const_for_fn)] @@ -925,7 +927,7 @@ impl ObjMemberBuilder { pub fn hide(self) -> Self { self.with_visibility(Visibility::Hidden) } - pub fn with_location(mut self, location: ExprLocation) -> Self { + pub fn with_location(mut self, location: Span) -> Self { self.location = Some(location); self } @@ -968,7 +970,7 @@ impl ObjMemberBuilder> { let location = member.location.clone(); let old = receiver.0.map.insert(name.clone(), member); if old.is_some() { - State::push( + in_frame( CallLocation(location.as_ref()), || format!("field <{}> initializtion", name.clone()), || bail!(DuplicateFieldName(name.clone())), diff --git a/crates/jrsonnet-evaluator/src/stdlib/format.rs b/crates/jrsonnet-evaluator/src/stdlib/format.rs index bc1b5902..fb50fa1c 100644 --- a/crates/jrsonnet-evaluator/src/stdlib/format.rs +++ b/crates/jrsonnet-evaluator/src/stdlib/format.rs @@ -604,10 +604,13 @@ pub fn format_code( } } ConvTypeV::Char => match value.clone() { - Val::Num(n) => tmp_out.push( - std::char::from_u32(n as u32) - .ok_or_else(|| InvalidUnicodeCodepointGot(n as u32))?, - ), + Val::Num(n) => { + let n = n.get(); + tmp_out.push( + std::char::from_u32(n as u32) + .ok_or_else(|| InvalidUnicodeCodepointGot(n as u32))?, + ); + } Val::Str(s) => { let s = s.into_flat(); if s.chars().count() != 1 { @@ -786,6 +789,7 @@ pub fn format_obj(str: &str, values: &ObjValue) -> Result { #[cfg(test)] pub mod test_format { use super::*; + use crate::val::NumValue; #[test] fn parse() { @@ -799,17 +803,21 @@ pub mod test_format { ); } + fn num(v: f64) -> Val { + Val::Num(NumValue::new(v).expect("finite")) + } + #[test] fn 
octals() { - assert_eq!(format_arr("%#o", &[Val::Num(8.0)]).unwrap(), "010"); - assert_eq!(format_arr("%#4o", &[Val::Num(8.0)]).unwrap(), " 010"); - assert_eq!(format_arr("%4o", &[Val::Num(8.0)]).unwrap(), " 10"); - assert_eq!(format_arr("%04o", &[Val::Num(8.0)]).unwrap(), "0010"); - assert_eq!(format_arr("%+4o", &[Val::Num(8.0)]).unwrap(), " +10"); - assert_eq!(format_arr("%+04o", &[Val::Num(8.0)]).unwrap(), "+010"); - assert_eq!(format_arr("%-4o", &[Val::Num(8.0)]).unwrap(), "10 "); - assert_eq!(format_arr("%+-4o", &[Val::Num(8.0)]).unwrap(), "+10 "); - assert_eq!(format_arr("%+-04o", &[Val::Num(8.0)]).unwrap(), "+10 "); + assert_eq!(format_arr("%#o", &[num(8.0)]).unwrap(), "010"); + assert_eq!(format_arr("%#4o", &[num(8.0)]).unwrap(), " 010"); + assert_eq!(format_arr("%4o", &[num(8.0)]).unwrap(), " 10"); + assert_eq!(format_arr("%04o", &[num(8.0)]).unwrap(), "0010"); + assert_eq!(format_arr("%+4o", &[num(8.0)]).unwrap(), " +10"); + assert_eq!(format_arr("%+04o", &[num(8.0)]).unwrap(), "+010"); + assert_eq!(format_arr("%-4o", &[num(8.0)]).unwrap(), "10 "); + assert_eq!(format_arr("%+-4o", &[num(8.0)]).unwrap(), "+10 "); + assert_eq!(format_arr("%+-04o", &[num(8.0)]).unwrap(), "+10 "); } #[test] @@ -817,7 +825,7 @@ pub mod test_format { assert_eq!( format_arr( "How much error budget is left looking at our %.3f%% availability gurantees?", - &[Val::Num(4.0)] + &[num(4.0)] ) .unwrap(), "How much error budget is left looking at our 4.000% availability gurantees?" 
diff --git a/crates/jrsonnet-evaluator/src/stdlib/mod.rs b/crates/jrsonnet-evaluator/src/stdlib/mod.rs index fbd1eccf..df2022a7 100644 --- a/crates/jrsonnet-evaluator/src/stdlib/mod.rs +++ b/crates/jrsonnet-evaluator/src/stdlib/mod.rs @@ -3,12 +3,12 @@ use format::{format_arr, format_obj}; -use crate::{function::CallLocation, Result, State, Val}; +use crate::{function::CallLocation, in_frame, Result, Val}; pub mod format; pub fn std_format(str: &str, vals: Val) -> Result { - State::push( + in_frame( CallLocation::native(), || format!("std.format of {str}"), || { diff --git a/crates/jrsonnet-evaluator/src/tla.rs b/crates/jrsonnet-evaluator/src/tla.rs index a34f568f..8e9b36eb 100644 --- a/crates/jrsonnet-evaluator/src/tla.rs +++ b/crates/jrsonnet-evaluator/src/tla.rs @@ -3,12 +3,12 @@ use jrsonnet_parser::Source; use crate::{ function::{ArgsLike, CallLocation}, - Result, State, Val, + in_description_frame, Result, State, Val, }; pub fn apply_tla(s: State, args: &A, val: Val) -> Result { Ok(if let Val::Func(func) = val { - State::push_description( + in_description_frame( || "during TLA call".to_owned(), || { func.evaluate( diff --git a/crates/jrsonnet-evaluator/src/trace/mod.rs b/crates/jrsonnet-evaluator/src/trace/mod.rs index 681c5e3d..30b3cbea 100644 --- a/crates/jrsonnet-evaluator/src/trace/mod.rs +++ b/crates/jrsonnet-evaluator/src/trace/mod.rs @@ -5,7 +5,7 @@ use std::{ }; use jrsonnet_gcmodule::Trace; -use jrsonnet_parser::{CodeLocation, ExprLocation, Source}; +use jrsonnet_parser::{CodeLocation, Source, Span}; use crate::{error::ErrorKind, Error}; @@ -380,7 +380,7 @@ impl TraceFormat for AssStrokeFormat { error: &Error, ) -> Result<(), std::fmt::Error> { struct ResetData { - loc: ExprLocation, + loc: Span, } use hi_doc::{source_to_ansi, Formatting, SnippetBuilder, Text}; @@ -399,7 +399,7 @@ impl TraceFormat for AssStrokeFormat { } let trace = &error.trace(); let snippet_builder: RefCell> = RefCell::new(None); - let mut last_location: Option = None; + let mut 
last_location: Option = None; let mut flush_builder = |data: Option| { use std::fmt::Write; let mut out = String::new(); diff --git a/crates/jrsonnet-evaluator/src/typed/conversions.rs b/crates/jrsonnet-evaluator/src/typed/conversions.rs index 4c77bcab..998723fe 100644 --- a/crates/jrsonnet-evaluator/src/typed/conversions.rs +++ b/crates/jrsonnet-evaluator/src/typed/conversions.rs @@ -10,8 +10,8 @@ use crate::{ bail, function::{native::NativeDesc, FuncDesc, FuncVal}, typed::CheckType, - val::{IndexableVal, StrValue, ThunkMapper}, - ObjValue, ObjValueBuilder, Result, Thunk, Val, + val::{IndexableVal, NumValue, StrValue, ThunkMapper}, + ObjValue, ObjValueBuilder, Result, ResultExt, Thunk, Val, }; #[derive(Trace)] @@ -120,7 +120,8 @@ where } } -const MAX_SAFE_INTEGER: f64 = ((1u64 << (f64::MANTISSA_DIGITS + 1)) - 1) as f64; +pub const MAX_SAFE_INTEGER: f64 = ((1u64 << (f64::MANTISSA_DIGITS + 1)) - 1) as f64; +pub const MIN_SAFE_INTEGER: f64 = -MAX_SAFE_INTEGER; macro_rules! impl_int { ($($ty:ty)*) => {$( @@ -131,6 +132,7 @@ macro_rules! impl_int { ::TYPE.check(&value)?; match value { Val::Num(n) => { + let n = n.get(); #[allow(clippy::float_cmp)] if n.trunc() != n { bail!( @@ -143,9 +145,8 @@ macro_rules! impl_int { _ => unreachable!(), } } - #[allow(clippy::cast_lossless)] fn into_untyped(value: Self) -> Result { - Ok(Val::Num(value as f64)) + Ok(Val::Num(value.into())) } } )*}; @@ -187,6 +188,7 @@ macro_rules! impl_bounded_int { ::TYPE.check(&value)?; match value { Val::Num(n) => { + let n = n.get(); #[allow(clippy::float_cmp)] if n.trunc() != n { bail!( @@ -202,7 +204,7 @@ macro_rules! impl_bounded_int { #[allow(clippy::cast_lossless)] fn into_untyped(value: Self) -> Result { - Ok(Val::Num(value.0 as f64)) + Ok(Val::try_num(value.0)?) } } )*}; @@ -220,13 +222,13 @@ impl Typed for f64 { const TYPE: &'static ComplexValType = &ComplexValType::Simple(ValType::Num); fn into_untyped(value: Self) -> Result { - Ok(Val::Num(value)) + Ok(Val::try_num(value)?) 
} fn from_untyped(value: Val) -> Result { ::TYPE.check(&value)?; match value { - Val::Num(n) => Ok(n), + Val::Num(n) => Ok(n.get()), _ => unreachable!(), } } @@ -237,13 +239,13 @@ impl Typed for PositiveF64 { const TYPE: &'static ComplexValType = &ComplexValType::BoundedNumber(Some(0.0), None); fn into_untyped(value: Self) -> Result { - Ok(Val::Num(value.0)) + Ok(Val::try_num(value.0)?) } fn from_untyped(value: Val) -> Result { ::TYPE.check(&value)?; match value { - Val::Num(n) => Ok(Self(n)), + Val::Num(n) => Ok(Self(n.get())), _ => unreachable!(), } } @@ -253,16 +255,14 @@ impl Typed for usize { &ComplexValType::BoundedNumber(Some(0.0), Some(MAX_SAFE_INTEGER)); fn into_untyped(value: Self) -> Result { - if value > MAX_SAFE_INTEGER as Self { - bail!("number is too large") - } - Ok(Val::Num(value as f64)) + Ok(Val::try_num(value)?) } fn from_untyped(value: Val) -> Result { ::TYPE.check(&value)?; match value { Val::Num(n) => { + let n = n.get(); #[allow(clippy::float_cmp)] if n.trunc() != n { bail!("cannot convert number with fractional part to usize") @@ -359,7 +359,12 @@ where unreachable!("typecheck should fail") }; a.iter() - .map(|r| r.and_then(T::from_untyped)) + .enumerate() + .map(|(i, r)| { + r.and_then(|t| { + T::from_untyped(t).with_description(|| format!("parsing elem <{i}>")) + }) + }) .collect::>() } } @@ -474,7 +479,7 @@ impl Typed for M1 { const TYPE: &'static ComplexValType = &ComplexValType::BoundedNumber(Some(-1.0), Some(-1.0)); fn into_untyped(_: Self) -> Result { - Ok(Val::Num(-1.0)) + Ok(Val::Num(NumValue::new(-1.0).expect("finite"))) } fn from_untyped(value: Val) -> Result { @@ -674,3 +679,19 @@ impl Typed for NativeFn { )) } } + +impl Typed for NumValue { + const TYPE: &'static ComplexValType = &ComplexValType::Simple(ValType::Num); + + fn into_untyped(typed: Self) -> Result { + Ok(Val::Num(typed)) + } + + fn from_untyped(untyped: Val) -> Result { + Self::TYPE.check(&untyped)?; + match untyped { + Val::Num(v) => Ok(v), + _ => unreachable!(), 
+ } + } +} diff --git a/crates/jrsonnet-evaluator/src/typed/mod.rs b/crates/jrsonnet-evaluator/src/typed/mod.rs index e4c343b4..15e69513 100644 --- a/crates/jrsonnet-evaluator/src/typed/mod.rs +++ b/crates/jrsonnet-evaluator/src/typed/mod.rs @@ -1,6 +1,6 @@ use std::{fmt::Display, rc::Rc}; -mod conversions; +pub(crate) mod conversions; pub use conversions::*; use jrsonnet_gcmodule::Trace; pub use jrsonnet_types::{ComplexValType, ValType}; @@ -8,7 +8,7 @@ use thiserror::Error; use crate::{ error::{Error, ErrorKind, Result}, - State, Val, + in_description_frame, Val, }; #[derive(Debug, Error, Clone, Trace)] @@ -89,7 +89,7 @@ fn push_type_description( path: impl Fn() -> ValuePathItem, item: impl Fn() -> Result<()>, ) -> Result<()> { - State::push_description(error_reason, || match item() { + in_description_frame(error_reason, || match item() { Ok(()) => Ok(()), Err(mut e) => { if let ErrorKind::TypeError(e) = &mut e.error_mut() { @@ -155,10 +155,11 @@ impl CheckType for ComplexValType { }, Self::BoundedNumber(from, to) => { if let Val::Num(n) = value { - if from.map(|from| from > *n).unwrap_or(false) - || to.map(|to| to < *n).unwrap_or(false) + let n = n.get(); + if from.map(|from| from > n).unwrap_or(false) + || to.map(|to| to < n).unwrap_or(false) { - return Err(TypeError::BoundsFailed(*n, *from, *to).into()); + return Err(TypeError::BoundsFailed(n, *from, *to).into()); } Ok(()) } else { diff --git a/crates/jrsonnet-evaluator/src/val.rs b/crates/jrsonnet-evaluator/src/val.rs index daea1c4a..259d0dce 100644 --- a/crates/jrsonnet-evaluator/src/val.rs +++ b/crates/jrsonnet-evaluator/src/val.rs @@ -1,13 +1,18 @@ use std::{ cell::RefCell, + cmp::Ordering, fmt::{self, Debug, Display}, mem::replace, + num::NonZeroU32, + ops::Deref, rc::Rc, }; +use derivative::Derivative; use jrsonnet_gcmodule::{Cc, Trace}; use jrsonnet_interner::IStr; use jrsonnet_types::ValType; +use thiserror::Error; pub use crate::arr::{ArrValue, ArrayLike}; use crate::{ @@ -219,6 +224,13 @@ pub enum 
IndexableVal { Arr(ArrValue), } impl IndexableVal { + pub fn is_empty(&self) -> bool { + match self { + Self::Str(s) => s.is_empty(), + Self::Arr(s) => s.is_empty(), + } + } + pub fn to_array(self) -> ArrValue { match self { Self::Str(s) => ArrValue::chars(s.chars()), @@ -277,26 +289,11 @@ impl IndexableVal { .into(), )) } - Self::Arr(arr) => { - let get_idx = |pos: Option, len: usize, default| match pos { - Some(v) if v < 0 => len.saturating_sub((-v) as usize), - Some(v) => (v as usize).min(len), - None => default, - }; - let index = get_idx(index, arr.len(), 0); - let end = get_idx(end, arr.len(), arr.len()); - let step = step.as_deref().copied().unwrap_or(1); - - if index >= end { - return Ok(Self::Arr(ArrValue::empty())); - } - - Ok(Self::Arr( - arr.clone() - .slice(Some(index), Some(end), Some(step)) - .expect("arguments checked"), - )) - } + Self::Arr(arr) => Ok(Self::Arr(arr.clone().slice( + index, + end, + step.map(|v| NonZeroU32::new(v.value() as u32).expect("bounded != 0")), + ))), } } } @@ -386,18 +383,135 @@ impl PartialEq for StrValue { } impl Eq for StrValue {} impl PartialOrd for StrValue { - fn partial_cmp(&self, other: &Self) -> Option { + fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } impl Ord for StrValue { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { + fn cmp(&self, other: &Self) -> Ordering { let a = self.clone().into_flat(); let b = other.clone().into_flat(); a.cmp(&b) } } +/// Represents jsonnet number +/// Jsonnet numbers are finite f64, with NaNs disallowed +#[derive(Trace, Clone, Copy, Derivative)] +#[derivative(Debug = "transparent")] +#[repr(transparent)] +pub struct NumValue(f64); +impl NumValue { + /// Creates a [`NumValue`], if value is finite and not NaN + pub fn new(v: f64) -> Option { + if !v.is_finite() { + return None; + } + Some(Self(v)) + } + #[inline] + pub const fn get(&self) -> f64 { + self.0 + } +} +impl PartialEq for NumValue { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + 
} +} +impl Eq for NumValue {} +impl Ord for NumValue { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + // Can't use `total_cmp`: its behavior for `-0` and `0` + // is not following wanted. + unsafe { self.0.partial_cmp(&other.0).unwrap_unchecked() } + } +} +impl PartialOrd for NumValue { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} +impl Display for NumValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&self.0, f) + } +} +impl Deref for NumValue { + type Target = f64; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.0 + } +} +macro_rules! impl_num { + ($($ty:ty),+) => {$( + impl From<$ty> for NumValue { + #[inline] + fn from(value: $ty) -> Self { + Self(value.into()) + } + } + )+}; +} +impl_num!(i8, u8, i16, u16, i32, u32); + +#[derive(Clone, Copy, Debug, Error, Trace)] +pub enum ConvertNumValueError { + #[error("overflow")] + Overflow, + #[error("underflow")] + Underflow, + #[error("non-finite")] + NonFinite, +} +impl From for Error { + fn from(e: ConvertNumValueError) -> Self { + Self::new(e.into()) + } +} + +macro_rules! impl_try_num { + ($($ty:ty),+) => {$( + impl TryFrom<$ty> for NumValue { + type Error = ConvertNumValueError; + #[inline] + fn try_from(value: $ty) -> Result { + use crate::typed::conversions::{MIN_SAFE_INTEGER, MAX_SAFE_INTEGER}; + let value = value as f64; + if value < MIN_SAFE_INTEGER { + return Err(ConvertNumValueError::Underflow) + } else if value > MAX_SAFE_INTEGER { + return Err(ConvertNumValueError::Overflow) + } + // Number is finite. 
+ Ok(Self(value)) + } + } + )+}; +} +impl_try_num!(usize, isize, i64, u64); + +impl TryFrom for NumValue { + type Error = ConvertNumValueError; + + #[inline] + fn try_from(value: f64) -> Result { + Self::new(value).ok_or(ConvertNumValueError::NonFinite) + } +} +impl TryFrom for NumValue { + type Error = ConvertNumValueError; + + #[inline] + fn try_from(value: f32) -> Result { + Self::new(f64::from(value)).ok_or(ConvertNumValueError::NonFinite) + } +} + /// Represents any valid Jsonnet value. #[derive(Debug, Clone, Trace, Default)] pub enum Val { @@ -411,7 +525,7 @@ pub enum Val { /// Represents a Jsonnet number. /// Should be finite, and not NaN /// This restriction isn't enforced by enum, as enum field can't be marked as private - Num(f64), + Num(NumValue), /// Experimental bigint #[cfg(feature = "exp-bigint")] BigInt(#[trace(skip)] Box), @@ -456,7 +570,7 @@ impl Val { } pub const fn as_num(&self) -> Option { match self { - Self::Num(n) => Some(*n), + Self::Num(n) => Some(n.get()), _ => None, } } @@ -479,16 +593,6 @@ impl Val { } } - /// Creates `Val::Num` after checking for numeric overflow. - /// As numbers are `f64`, we can just check for their finity. - pub fn new_checked_num(num: f64) -> Result { - if num.is_finite() { - Ok(Self::Num(num)) - } else { - bail!("overflow") - } - } - pub const fn value_type(&self) -> ValType { match self { Self::Str(..) 
=> ValType::Str, @@ -534,6 +638,15 @@ impl Val { pub fn string(string: impl Into) -> Self { Self::Str(string.into()) } + pub fn num(num: impl Into) -> Self { + Self::Num(num.into()) + } + pub fn try_num(num: V) -> Result + where + NumValue: TryFrom, + { + Ok(Self::Num(num.try_into()?)) + } } impl From for Val { @@ -567,7 +680,7 @@ pub fn primitive_equals(val_a: &Val, val_b: &Val) -> Result { (Val::Bool(a), Val::Bool(b)) => a == b, (Val::Null, Val::Null) => true, (Val::Str(a), Val::Str(b)) => a == b, - (Val::Num(a), Val::Num(b)) => (a - b).abs() <= f64::EPSILON, + (Val::Num(a), Val::Num(b)) => (a.get() - b.get()).abs() <= f64::EPSILON, #[cfg(feature = "exp-bigint")] (Val::BigInt(a), Val::BigInt(b)) => a == b, (Val::Arr(_), Val::Arr(_)) => { diff --git a/crates/jrsonnet-interner/Cargo.toml b/crates/jrsonnet-interner/Cargo.toml index 097ee567..5f692555 100644 --- a/crates/jrsonnet-interner/Cargo.toml +++ b/crates/jrsonnet-interner/Cargo.toml @@ -10,20 +10,8 @@ version.workspace = true [lints] workspace = true -[features] -default = [] -# Implement value serialization using structdump -structdump = ["dep:structdump"] -# Implement value serialization using serde -# -# Warning: serialized values won't be deduplicated -serde = ["dep:serde"] - [dependencies] jrsonnet-gcmodule.workspace = true -serde = { workspace = true, optional = true } -structdump = { workspace = true, optional = true } - rustc-hash.workspace = true hashbrown = { workspace = true, features = ["inline-more"] } diff --git a/crates/jrsonnet-interner/src/lib.rs b/crates/jrsonnet-interner/src/lib.rs index 4459caef..762de0e5 100644 --- a/crates/jrsonnet-interner/src/lib.rs +++ b/crates/jrsonnet-interner/src/lib.rs @@ -219,47 +219,46 @@ impl From<&[u8]> for IBytes { } } -#[cfg(feature = "serde")] -impl serde::Serialize for IStr { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.as_str().serialize(serializer) - } -} +type PoolMap = HashMap>; -#[cfg(feature = "serde")] 
-impl<'de> serde::Deserialize<'de> for IStr { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let str = <&str>::deserialize(deserializer)?; - Ok(intern_str(str)) - } +thread_local! { + static POOL: RefCell = RefCell::new(HashMap::with_capacity_and_hasher(200, BuildHasherDefault::default())); } -#[cfg(feature = "structdump")] -impl structdump::Codegen for IStr { - fn gen_code( - &self, - res: &mut structdump::CodegenResult, - _unique: bool, - ) -> structdump::TokenStream { - let s: &str = self; - res.add_code( - structdump::quote! { - structdump_import::IStr::from(#s) - }, - Some(structdump::quote![structdump_import::IStr]), - false, - ) +/// Jrsonnet golang bindings require that it is possible to move jsonnet +/// VM between OS threads, and this is not possible due to usage of +/// `thread_local`. Instead, there is two methods added, one should be +/// called at the end of current thread work, and one that should be +/// used when using other thread. +pub mod interop { + use std::mem; + + use crate::{PoolMap, POOL}; + + /// Type-erased interned string pool + pub enum PoolState {} + + /// Dump current interned string pool, to be restored by + /// `reenter_thread` + pub fn exit_thread() -> *mut PoolState { + Box::into_raw(Box::new(POOL.with_borrow_mut(mem::take))).cast() } -} -thread_local! { - static POOL: RefCell>> = RefCell::new(HashMap::with_capacity_and_hasher(200, BuildHasherDefault::default())); + /// Reenter thread, using state dumped by `exit_thread`. + /// + /// # Safety + /// + /// `state` should be acquired from `exit_thread`, it is not allowed + /// to reuse state to reenter multiple threads. + pub unsafe fn reenter_thread(state: *mut PoolState) { + let ptr: *mut PoolMap = state.cast(); + // SAFETY: ptr is an unique state per method safety requirements. 
+ let ptr: Box = unsafe { Box::from_raw(ptr) }; + let ptr: PoolMap = *ptr; + POOL.with_borrow_mut(|pool| { + let _ = mem::replace(pool, ptr); + }); + } } #[must_use] diff --git a/crates/jrsonnet-macros/src/lib.rs b/crates/jrsonnet-macros/src/lib.rs index 0880bf6f..c154e7fa 100644 --- a/crates/jrsonnet-macros/src/lib.rs +++ b/crates/jrsonnet-macros/src/lib.rs @@ -34,6 +34,12 @@ where Ok(Some(attr)) } +fn remove_attr(attrs: &mut Vec, ident: I) +where + Ident: PartialEq, +{ + attrs.retain(|a| !a.path().is_ident(&ident)); +} fn path_is(path: &Path, needed: &str) -> bool { path.leading_colon.is_none() @@ -121,10 +127,16 @@ impl Parse for BuiltinAttrs { } } +enum Optionality { + Required, + Optional, + Default(Expr), +} + enum ArgInfo { Normal { ty: Box, - is_option: bool, + optionality: Optionality, name: Option, cfg_attrs: Vec, }, @@ -138,7 +150,7 @@ enum ArgInfo { } impl ArgInfo { - fn parse(name: &str, arg: &FnArg) -> Result { + fn parse(name: &str, arg: &mut FnArg) -> Result { let FnArg::Typed(arg) = arg else { unreachable!() }; @@ -163,7 +175,10 @@ impl ArgInfo { _ => {} } - let (is_option, ty) = if let Some(ty) = extract_type_from_option(ty)? { + let (optionality, ty) = if let Some(default) = parse_attr::<_, _>(&arg.attrs, "default")? { + remove_attr(&mut arg.attrs, "default"); + (Optionality::Default(default), ty.clone()) + } else if let Some(ty) = extract_type_from_option(ty)? 
{ if type_is_path(ty, "Thunk").is_some() { return Ok(Self::Lazy { is_option: true, @@ -171,9 +186,9 @@ impl ArgInfo { }); } - (true, Box::new(ty.clone())) + (Optionality::Optional, Box::new(ty.clone())) } else { - (false, ty.clone()) + (Optionality::Required, ty.clone()) }; let cfg_attrs = arg @@ -185,7 +200,7 @@ impl ArgInfo { Ok(Self::Normal { ty, - is_option, + optionality, name: ident.map(|v| v.to_string()), cfg_attrs, }) @@ -198,21 +213,16 @@ pub fn builtin( item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let attr = parse_macro_input!(attr as BuiltinAttrs); - let item_fn = item.clone(); - let item_fn: ItemFn = parse_macro_input!(item_fn); + let item_fn = parse_macro_input!(item as ItemFn); - match builtin_inner(attr, item_fn, item.into()) { + match builtin_inner(attr, item_fn) { Ok(v) => v.into(), Err(e) => e.into_compile_error().into(), } } #[allow(clippy::too_many_lines)] -fn builtin_inner( - attr: BuiltinAttrs, - fun: ItemFn, - item: proc_macro2::TokenStream, -) -> syn::Result { +fn builtin_inner(attr: BuiltinAttrs, mut fun: ItemFn) -> syn::Result { let ReturnType::Type(_, result) = &fun.sig.output else { return Err(Error::new( fun.sig.span(), @@ -224,13 +234,13 @@ fn builtin_inner( let args = fun .sig .inputs - .iter() + .iter_mut() .map(|arg| ArgInfo::parse(&name, arg)) .collect::>>()?; let params_desc = args.iter().filter_map(|a| match a { ArgInfo::Normal { - is_option, + optionality, name, cfg_attrs, .. @@ -238,9 +248,14 @@ fn builtin_inner( let name = name .as_ref() .map_or_else(|| quote! {None}, |n| quote! {ParamName::new_static(#n)}); + let default = match optionality { + Optionality::Required => quote!(ParamDefault::None), + Optionality::Optional => quote!(ParamDefault::Exists), + Optionality::Default(e) => quote!(ParamDefault::Literal(stringify!(#e))), + }; Some(quote! 
{ #(#cfg_attrs)* - BuiltinParam::new(#name, #is_option), + BuiltinParam::new(#name, #default), }) } ArgInfo::Lazy { is_option, name } => { @@ -248,7 +263,7 @@ fn builtin_inner( .as_ref() .map_or_else(|| quote! {None}, |n| quote! {ParamName::new_static(#n)}); Some(quote! { - BuiltinParam::new(#name, #is_option), + BuiltinParam::new(#name, ParamDefault::exists(#is_option)), }) } ArgInfo::Context | ArgInfo::Location | ArgInfo::This => None, @@ -270,26 +285,31 @@ fn builtin_inner( .map(|(id, a)| match a { ArgInfo::Normal { ty, - is_option, + optionality, name, cfg_attrs, } => { let name = name.as_ref().map_or("", String::as_str); - let eval = quote! {jrsonnet_evaluator::State::push_description( + let eval = quote! {jrsonnet_evaluator::in_description_frame( || format!("argument <{}> evaluation", #name), || <#ty>::from_untyped(value.evaluate()?), )?}; - let value = if *is_option { - quote! {if let Some(value) = &parsed[#id] { + let value = match optionality { + Optionality::Required => quote! {{ + let value = parsed[#id].as_ref().expect("args shape is checked"); + #eval + },}, + Optionality::Optional => quote! {if let Some(value) = &parsed[#id] { Some(#eval) } else { None - },} - } else { - quote! {{ - let value = parsed[#id].as_ref().expect("args shape is checked"); + },}, + Optionality::Default(expr) => quote! {if let Some(value) = &parsed[#id] { #eval - },} + } else { + let v: #ty = #expr; + v + },}, }; quote! { #(#cfg_attrs)* @@ -302,7 +322,7 @@ fn builtin_inner( Some(value.clone()) } else { None - }} + },} } else { quote! { parsed[#id].as_ref().expect("args shape is correct").clone(), @@ -343,7 +363,7 @@ fn builtin_inner( }; Ok(quote! 
{ - #item + #fun #[doc(hidden)] #[allow(non_camel_case_types)] @@ -354,9 +374,9 @@ fn builtin_inner( const _: () = { use ::jrsonnet_evaluator::{ State, Val, - function::{builtin::{Builtin, StaticBuiltin, BuiltinParam, ParamName}, CallLocation, ArgsLike, parse::parse_builtin_call}, + function::{builtin::{Builtin, StaticBuiltin, BuiltinParam, ParamName, ParamDefault}, CallLocation, ArgsLike, parse::parse_builtin_call}, Result, Context, typed::Typed, - parser::ExprLocation, + parser::Span, }; const PARAMS: &'static [BuiltinParam] = &[ #(#params_desc)* @@ -373,7 +393,7 @@ fn builtin_inner( fn params(&self) -> &[BuiltinParam] { PARAMS } - #[allow(unused_variable)] + #[allow(unused_variables)] fn call(&self, ctx: Context, location: CallLocation, args: &dyn ArgsLike) -> Result { let parsed = parse_builtin_call(ctx.clone(), &PARAMS, args, false)?; diff --git a/crates/jrsonnet-parser/Cargo.toml b/crates/jrsonnet-parser/Cargo.toml index 2b4bac98..4bdea6da 100644 --- a/crates/jrsonnet-parser/Cargo.toml +++ b/crates/jrsonnet-parser/Cargo.toml @@ -11,21 +11,6 @@ version.workspace = true default = [] exp-destruct = [] exp-null-coaelse = [] -# Implement serialization of AST using structdump -# -# Structdump generates code, which exactly replicated passed AST -# Contrary to serde, has no code bloat problem, and is recommended -# -# The only limitation is serialized form is only useable if built from build script -structdump = ["dep:structdump", "jrsonnet-interner/structdump"] -# Implement serialization of AST using serde -# -# Warning: as serde doesn't deduplicate strings, `Source` struct will bloat -# output binary with repeating source code. 
To resolve this issue, you should either -# override serialization of this struct using custom `Serializer`/`Deserializer`, -# not rely on Source, and fill its `source_code` with empty value, or use `structdump` -# instead -serde = ["dep:serde"] [dependencies] jrsonnet-interner.workspace = true @@ -34,6 +19,3 @@ jrsonnet-gcmodule.workspace = true static_assertions.workspace = true peg.workspace = true - -serde = { workspace = true, features = ["derive", "rc"], optional = true } -structdump = { workspace = true, features = ["derive"], optional = true } diff --git a/crates/jrsonnet-parser/src/expr.rs b/crates/jrsonnet-parser/src/expr.rs index 110f9d71..d11ed119 100644 --- a/crates/jrsonnet-parser/src/expr.rs +++ b/crates/jrsonnet-parser/src/expr.rs @@ -6,15 +6,9 @@ use std::{ use jrsonnet_gcmodule::Trace; use jrsonnet_interner::IStr; -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; -#[cfg(feature = "structdump")] -use structdump::Codegen; use crate::source::Source; -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "structdump", derive(Codegen))] #[derive(Debug, PartialEq, Trace)] pub enum FieldName { /// {fixed: 2} @@ -23,8 +17,6 @@ pub enum FieldName { Dyn(LocExpr), } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, Copy, PartialEq, Eq, Trace)] #[repr(u8)] pub enum Visibility { @@ -42,13 +34,9 @@ impl Visibility { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Debug, PartialEq, Trace)] pub struct AssertStmt(pub LocExpr, pub Option); -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct FieldMember { pub name: FieldName, @@ -58,8 +46,6 @@ pub struct FieldMember { pub value: LocExpr, } -#[cfg_attr(feature = "structdump", 
derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub enum Member { Field(FieldMember), @@ -67,8 +53,6 @@ pub enum Member { AssertStmt(AssertStmt), } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, Copy, PartialEq, Eq, Trace)] pub enum UnaryOpType { Plus, @@ -93,8 +77,6 @@ impl Display for UnaryOpType { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, Copy, PartialEq, Eq, Trace)] pub enum BinaryOpType { Mul, @@ -164,14 +146,10 @@ impl Display for BinaryOpType { } /// name, default value -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct Param(pub Destruct, pub Option); /// Defined function parameters -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Trace)] pub struct ParamsDesc(pub Rc>); @@ -182,8 +160,6 @@ impl Deref for ParamsDesc { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct ArgsDesc { pub unnamed: Vec, @@ -195,8 +171,6 @@ impl ArgsDesc { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Eq, Trace)] pub enum DestructRest { /// ...rest @@ -205,8 +179,6 @@ pub enum DestructRest { Drop, } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Trace)] pub enum Destruct { Full(IStr), @@ -268,8 +240,6 @@ impl Destruct { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] 
-#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq, Trace)] pub enum BindSpec { Field { @@ -291,26 +261,18 @@ impl BindSpec { } } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct IfSpecData(pub LocExpr); -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct ForSpecData(pub Destruct, pub LocExpr); -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub enum CompSpec { IfSpec(IfSpecData), ForSpec(ForSpecData), } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct ObjComp { pub pre_locals: Vec, @@ -319,16 +281,12 @@ pub struct ObjComp { pub compspecs: Vec, } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub enum ObjBody { MemberList(Vec), ObjComp(ObjComp), } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Eq, Clone, Copy, Trace)] pub enum LiteralType { This, @@ -339,8 +297,6 @@ pub enum LiteralType { False, } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct SliceDesc { pub start: Option, @@ -349,8 +305,6 @@ pub struct SliceDesc { } /// Syntax base -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub enum Expr { Literal(LiteralType), @@ -420,8 +374,6 @@ pub enum Expr { 
Slice(LocExpr, SliceDesc), } -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, PartialEq, Trace)] pub struct IndexPart { pub value: LocExpr, @@ -430,44 +382,52 @@ pub struct IndexPart { } /// file, begin offset, end offset -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, PartialEq, Eq, Trace)] #[trace(skip)] #[repr(C)] -pub struct ExprLocation(pub Source, pub u32, pub u32); -impl ExprLocation { - pub fn belongs_to(&self, other: &ExprLocation) -> bool { +pub struct Span(pub Source, pub u32, pub u32); +impl Span { + pub fn belongs_to(&self, other: &Span) -> bool { other.0 == self.0 && other.1 <= self.1 && other.2 >= self.2 } } -#[cfg(target_pointer_width = "64")] -static_assertions::assert_eq_size!(ExprLocation, [u8; 16]); +static_assertions::assert_eq_size!(Span, (usize, usize)); -impl Debug for ExprLocation { +impl Debug for Span { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}:{:?}-{:?}", self.0, self.1, self.2) } } /// Holds AST expression and its location in source file -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "structdump", derive(Codegen))] #[derive(Clone, PartialEq, Trace)] -pub struct LocExpr(pub Rc, pub ExprLocation); +pub struct LocExpr(Rc<(Expr, Span)>); +impl LocExpr { + pub fn new(expr: Expr, span: Span) -> Self { + Self(Rc::new((expr, span))) + } + #[inline] + pub fn span(&self) -> Span { + self.0 .1.clone() + } + #[inline] + pub fn expr(&self) -> &Expr { + &self.0 .0 + } +} -#[cfg(target_pointer_width = "64")] -static_assertions::assert_eq_size!(LocExpr, [u8; 24]); +static_assertions::assert_eq_size!(LocExpr, usize); impl Debug for LocExpr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let expr = self.expr(); if f.alternate() { - write!(f, "{:#?}", self.0)?; + write!(f, "{:#?}", expr)?; } else { - 
write!(f, "{:?}", self.0)?; + write!(f, "{:?}", expr)?; } - write!(f, " from {:?}", self.1)?; + write!(f, " from {:?}", self.span())?; Ok(()) } } diff --git a/crates/jrsonnet-parser/src/lib.rs b/crates/jrsonnet-parser/src/lib.rs index 7a398c1c..ce419d57 100644 --- a/crates/jrsonnet-parser/src/lib.rs +++ b/crates/jrsonnet-parser/src/lib.rs @@ -232,7 +232,7 @@ parser! { pub rule var_expr(s: &ParserSettings) -> Expr = n:id() { expr::Expr::Var(n) } pub rule id_loc(s: &ParserSettings) -> LocExpr - = a:position!() n:id() b:position!() { LocExpr(Rc::new(expr::Expr::Str(n)), ExprLocation(s.source.clone(), a as u32,b as u32)) } + = a:position!() n:id() b:position!() { LocExpr::new(expr::Expr::Str(n), Span(s.source.clone(), a as u32,b as u32)) } pub rule if_then_else_expr(s: &ParserSettings) -> Expr = cond:ifspec(s) _ keyword("then") _ cond_then:expr(s) cond_else:(_ keyword("else") _ e:expr(s) {e})? {Expr::IfElse{ cond, @@ -299,7 +299,7 @@ parser! { use UnaryOpType::*; rule expr(s: &ParserSettings) -> LocExpr = precedence! { - start:position!() v:@ end:position!() { LocExpr(Rc::new(v), ExprLocation(s.source.clone(), start as u32, end as u32)) } + start:position!() v:@ end:position!() { LocExpr::new(v, Span(s.source.clone(), start as u32, end as u32)) } -- a:(@) _ binop(<"||">) _ b:@ {expr_bin!(a Or b)} a:(@) _ binop(<"??">) _ ensure_null_coaelse() b:@ { @@ -370,10 +370,7 @@ pub fn parse(str: &str, settings: &ParserSettings) -> Result LocExpr { let len = str.len(); - LocExpr( - Rc::new(Expr::Str(str)), - ExprLocation(settings.source.clone(), 0, len as u32), - ) + LocExpr::new(Expr::Str(str), Span(settings.source.clone(), 0, len as u32)) } #[cfg(test)] @@ -398,9 +395,9 @@ pub mod tests { macro_rules! el { ($expr:expr, $from:expr, $to:expr$(,)?) 
=> { - LocExpr( - std::rc::Rc::new($expr), - ExprLocation( + LocExpr::new( + $expr, + Span( Source::new_virtual("".into(), IStr::empty()), $from, $to, diff --git a/crates/jrsonnet-parser/src/source.rs b/crates/jrsonnet-parser/src/source.rs index 96d1083a..cb54de48 100644 --- a/crates/jrsonnet-parser/src/source.rs +++ b/crates/jrsonnet-parser/src/source.rs @@ -8,10 +8,6 @@ use std::{ use jrsonnet_gcmodule::{Trace, Tracer}; use jrsonnet_interner::{IBytes, IStr}; -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; -#[cfg(feature = "structdump")] -use structdump::Codegen; use crate::location::{location_to_offset, offset_to_location, CodeLocation}; @@ -133,31 +129,6 @@ impl Default for SourcePath { } } -#[cfg(feature = "structdump")] -impl Codegen for SourcePath { - fn gen_code( - &self, - res: &mut structdump::CodegenResult, - unique: bool, - ) -> structdump::TokenStream { - let source_virtual = self - .0 - .as_any() - .downcast_ref::() - .expect("can only codegen for virtual source paths!") - .0 - .clone(); - let val = res.add_value(source_virtual, false); - res.add_code( - structdump::quote! 
{ - structdump_import::SourcePath::new(structdump_import::SourceVirtual(#val)) - }, - Some(structdump::quote!(SourcePath)), - unique, - ) - } -} - #[derive(Trace, Hash, PartialEq, Eq, Debug)] struct SourceDefault; impl Display for SourceDefault { @@ -237,7 +208,6 @@ impl SourcePathT for SourceDirectory { /// /// It is used for --ext-code=.../--tla-code=.../standard library source code by default, /// and user can construct arbitrary values by hand, without asking import resolver -#[cfg_attr(feature = "structdump", derive(Codegen))] #[derive(Trace, Hash, PartialEq, Eq, Debug, Clone)] pub struct SourceVirtual(pub IStr); impl Display for SourceVirtual { @@ -288,8 +258,6 @@ impl SourcePathT for SourceFifo { /// Either real file, or virtual /// Hash of FileName always have same value as raw Path, to make it possible to use with raw_entry_mut -#[cfg_attr(feature = "structdump", derive(Codegen))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, PartialEq, Eq, Debug)] pub struct Source(pub Rc<(SourcePath, IStr)>); diff --git a/crates/jrsonnet-rowan-parser/src/lex.rs b/crates/jrsonnet-rowan-parser/src/lex.rs index a9cfadb8..77961a2c 100644 --- a/crates/jrsonnet-rowan-parser/src/lex.rs +++ b/crates/jrsonnet-rowan-parser/src/lex.rs @@ -37,9 +37,10 @@ impl<'a> Iterator for Lexer<'a> { // In kinds, string blocks is parsed at least as `|||` lexer.bump(3); let res = lex_str_block(&mut lexer); - debug_assert!(lexer.next().is_none(), "str_block is lexed"); + let next = lexer.next(); + assert!(next.is_none(), "str_block is lexed"); match res { - Ok(_) => {} + Ok(()) => {} Err(e) => { kind = Ok(match e { StringBlockError::UnexpectedEnd => ERROR_STRING_BLOCK_UNEXPECTED_END, @@ -48,7 +49,7 @@ impl<'a> Iterator for Lexer<'a> { ERROR_STRING_BLOCK_MISSING_TERMINATION } StringBlockError::MissingIndent => ERROR_STRING_BLOCK_MISSING_INDENT, - }) + }); } } } diff --git a/crates/jrsonnet-rowan-parser/src/marker.rs b/crates/jrsonnet-rowan-parser/src/marker.rs 
index 5bca186c..65d3c342 100644 --- a/crates/jrsonnet-rowan-parser/src/marker.rs +++ b/crates/jrsonnet-rowan-parser/src/marker.rs @@ -141,12 +141,7 @@ impl CompletedMarker { new_m } /// Create new node around existing marker, not counting anything that comes after it - fn wrap_raw( - self, - p: &mut Parser, - kind: SyntaxKind, - error: Option, - ) -> CompletedMarker { + fn wrap_raw(self, p: &mut Parser, kind: SyntaxKind, error: Option) -> Self { let new_m = p.start(); match &mut p.events[self.start_event_idx] { Event::Start { forward_parent, .. } => { @@ -173,10 +168,10 @@ impl CompletedMarker { } completed } - pub fn wrap(self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { + pub fn wrap(self, p: &mut Parser, kind: SyntaxKind) -> Self { self.wrap_raw(p, kind, None) } - pub fn wrap_error(self, p: &mut Parser, msg: impl AsRef) -> CompletedMarker { + pub fn wrap_error(self, p: &mut Parser, msg: impl AsRef) -> Self { self.wrap_raw( p, SyntaxKind::ERROR_CUSTOM, diff --git a/crates/jrsonnet-rowan-parser/src/parser.rs b/crates/jrsonnet-rowan-parser/src/parser.rs index afb64b74..0f9dea90 100644 --- a/crates/jrsonnet-rowan-parser/src/parser.rs +++ b/crates/jrsonnet-rowan-parser/src/parser.rs @@ -72,9 +72,9 @@ impl Parser { .rev() .take_while(|h| h.0 > self.entered) .count(); - self.hints.truncate(self.hints.len() - amount) + self.hints.truncate(self.hints.len() - amount); } - fn clear_expected_syntaxes(&mut self) { + fn clear_expected_syntaxes(&self) { self.expected_syntax_tracking_state .set(ExpectedSyntax::Unnamed(TS![])); } @@ -104,7 +104,7 @@ impl Parser { } pub(crate) fn expect(&mut self, kind: SyntaxKind) { - self.expect_with_recovery_set(kind, TS![]) + self.expect_with_recovery_set(kind, TS![]); } pub(crate) fn expect_with_recovery_set( @@ -153,7 +153,7 @@ impl Parser { m } fn bump_assert(&mut self, kind: SyntaxKind) { - assert!(self.at(kind), "expected {:?}", kind); + assert!(self.at(kind), "expected {kind:?}"); self.bump_remap(self.current()); } fn 
bump(&mut self) { @@ -168,11 +168,11 @@ impl Parser { fn step(&self) { use std::fmt::Write; let steps = self.steps.get(); - if steps >= 15000000 { + if steps >= 15_000_000 { let mut out = "seems like parsing is stuck".to_owned(); { let last = 20; - write!(out, "\n\nLast {} events:", last).unwrap(); + write!(out, "\n\nLast {last} events:").unwrap(); for (i, event) in self .events .iter() @@ -205,38 +205,38 @@ impl Parser { self.nth(0) } #[must_use] - pub(crate) fn expected_syntax_name(&mut self, name: &'static str) -> ExpectedSyntaxGuard { + pub(crate) fn expected_syntax_name(&self, name: &'static str) -> ExpectedSyntaxGuard { self.expected_syntax_tracking_state .set(ExpectedSyntax::Named(name)); ExpectedSyntaxGuard::new(Rc::clone(&self.expected_syntax_tracking_state)) } - pub fn at(&mut self, kind: SyntaxKind) -> bool { + pub fn at(&self, kind: SyntaxKind) -> bool { self.nth_at(0, kind) } - pub fn nth_at(&mut self, n: usize, kind: SyntaxKind) -> bool { + pub fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool { if n == 0 { if let ExpectedSyntax::Unnamed(kinds) = self.expected_syntax_tracking_state.get() { let kinds = kinds.with(kind); self.expected_syntax_tracking_state - .set(ExpectedSyntax::Unnamed(kinds)) + .set(ExpectedSyntax::Unnamed(kinds)); } } self.nth(n) == kind } - pub fn at_ts(&mut self, set: SyntaxKindSet) -> bool { + pub fn at_ts(&self, set: SyntaxKindSet) -> bool { if let ExpectedSyntax::Unnamed(kinds) = self.expected_syntax_tracking_state.get() { let kinds = kinds.union(set); self.expected_syntax_tracking_state - .set(ExpectedSyntax::Unnamed(kinds)) + .set(ExpectedSyntax::Unnamed(kinds)); } set.contains(self.current()) } - pub fn at_end(&mut self) -> bool { + pub fn at_end(&self) -> bool { self.at(EOF) } } -pub(crate) struct ExpectedSyntaxGuard { +pub struct ExpectedSyntaxGuard { expected_syntax_tracking_state: Rc>, } @@ -263,8 +263,8 @@ pub enum ExpectedSyntax { impl fmt::Display for ExpectedSyntax { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { match self { - ExpectedSyntax::Named(name) => write!(f, "{name}"), - ExpectedSyntax::Unnamed(set) => write!(f, "{set}"), + Self::Named(name) => write!(f, "{name}"), + Self::Unnamed(set) => write!(f, "{set}"), } } } @@ -298,8 +298,7 @@ fn expr(p: &mut Parser) -> CompletedMarker { } } match expr_binding_power(p, 0) { - Ok(m) => m, - Err(m) => m, + Ok(m) | Err(m) => m, }; m.complete(p, EXPR) } @@ -399,7 +398,7 @@ fn field_name(p: &mut Parser) { } fn visibility(p: &mut Parser) { if p.at_ts(TS![: :: :::]) { - p.bump() + p.bump(); } else { p.error_with_recovery_set(TS![=]); } @@ -556,7 +555,7 @@ fn args_desc(p: &mut Parser) { expr(p); let arg = m.complete(p, ARG); if started_named.get() { - unnamed_after_named.push(arg) + unnamed_after_named.push(arg); } } if comma(p) { @@ -566,7 +565,7 @@ fn args_desc(p: &mut Parser) { } p.expect(T![')']); if p.at(T![tailstrict]) { - p.bump() + p.bump(); } for errored in unnamed_after_named { @@ -719,7 +718,7 @@ fn destruct_rest(p: &mut Parser) { let m = p.start(); p.bump_assert(T![...]); if p.at(IDENT) { - p.bump() + p.bump(); } m.complete(p, DESTRUCT_REST); } diff --git a/crates/jrsonnet-rowan-parser/src/precedence.rs b/crates/jrsonnet-rowan-parser/src/precedence.rs index 6a501491..ae69d23d 100644 --- a/crates/jrsonnet-rowan-parser/src/precedence.rs +++ b/crates/jrsonnet-rowan-parser/src/precedence.rs @@ -13,8 +13,7 @@ impl BinaryOperatorKind { Self::BitXor => (8, 9), Self::BitOr => (6, 7), Self::And => (4, 5), - Self::NullCoaelse => (2, 3), - Self::Or => (2, 3), + Self::NullCoaelse | Self::Or => (2, 3), Self::ErrorNoOperator => (0, 1), } } @@ -23,9 +22,7 @@ impl BinaryOperatorKind { impl UnaryOperatorKind { pub fn binding_power(&self) -> ((), u8) { match self { - Self::Minus => ((), 20), - Self::Not => ((), 20), - Self::BitNot => ((), 20), + Self::Minus | Self::Not | Self::BitNot => ((), 20), } } } diff --git a/crates/jrsonnet-rowan-parser/src/string_block.rs b/crates/jrsonnet-rowan-parser/src/string_block.rs index 
a546dce4..db7d55e5 100644 --- a/crates/jrsonnet-rowan-parser/src/string_block.rs +++ b/crates/jrsonnet-rowan-parser/src/string_block.rs @@ -17,6 +17,7 @@ pub fn lex_str_block_test(lex: &mut Lexer) { let _ = lex_str_block(lex); } +#[allow(clippy::too_many_lines)] pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError> { struct Context<'a> { source: &'a str, @@ -78,6 +79,7 @@ pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError }; } + #[allow(clippy::range_plus_one)] fn pos(&self) -> Range { if self.index == self.source.len() { self.offset + self.index..self.offset + self.index @@ -120,8 +122,7 @@ pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError let end_index = ctx .rest() .find("|||") - .map(|v| v + 3) - .unwrap_or_else(|| ctx.rest().len()); + .map_or_else(|| ctx.rest().len(), |v| v + 3); lex.bump(ctx.index + end_index); } @@ -150,7 +151,7 @@ pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError } // Process leading blank lines before calculating string block indent - while let Some('\n') = ctx.peek() { + while ctx.peek() == Some('\n') { ctx.next(); } @@ -179,7 +180,7 @@ pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError } // Skip any blank lines - while let Some('\n') = ctx.peek() { + while ctx.peek() == Some('\n') { ctx.next(); } @@ -187,9 +188,11 @@ pub fn lex_str_block(lex: &mut Lexer) -> Result<(), StringBlockError num_whitespace = check_whitespace(str_block_indent, ctx.rest()); if num_whitespace == 0 { // End of the text block - let mut term_indent = String::with_capacity(num_whitespace); + // let mut term_indent = String::with_capacity(num_whitespace); while let Some(' ' | '\t') = ctx.peek() { - term_indent.push(ctx.next().unwrap()); + // term_indent.push( + ctx.next().unwrap(); + // ); } if !ctx.rest().starts_with("|||") { diff --git a/crates/jrsonnet-rowan-parser/src/token_set.rs b/crates/jrsonnet-rowan-parser/src/token_set.rs index 918395fc..24055fc4 100644 --- 
a/crates/jrsonnet-rowan-parser/src/token_set.rs +++ b/crates/jrsonnet-rowan-parser/src/token_set.rs @@ -10,21 +10,23 @@ impl SyntaxKindSet { pub const EMPTY: Self = Self(0); pub const ALL: Self = Self(u128::MAX); - pub const fn new(kinds: &[SyntaxKind]) -> SyntaxKindSet { + pub const fn new(kinds: &[SyntaxKind]) -> Self { let mut res = 0u128; let mut i = 0; while i < kinds.len() { res |= mask(kinds[i]); - i += 1 + i += 1; } - SyntaxKindSet(res) + Self(res) } - pub const fn union(self, other: SyntaxKindSet) -> SyntaxKindSet { - SyntaxKindSet(self.0 | other.0) + #[must_use] + pub const fn union(self, other: Self) -> Self { + Self(self.0 | other.0) } - pub const fn with(self, kind: SyntaxKind) -> SyntaxKindSet { - SyntaxKindSet(self.0 | mask(kind)) + #[must_use] + pub const fn with(self, kind: SyntaxKind) -> Self { + Self(self.0 | mask(kind)) } pub fn contains(&self, kind: SyntaxKind) -> bool { @@ -40,7 +42,7 @@ impl fmt::Display for SyntaxKindSet { let mut variants = >::new(); for i in 0..128 { if v & 1 == 1 { - variants.push(SyntaxKind::from_raw(i)) + variants.push(SyntaxKind::from_raw(i)); } v >>= 1; if v == 0 { @@ -65,7 +67,7 @@ impl fmt::Debug for SyntaxKindSet { let mut variants = >::new(); for i in 0..128 { if v & 1 == 1 { - variants.push(SyntaxKind::from_raw(i)) + variants.push(SyntaxKind::from_raw(i)); } v >>= 1; if v == 0 { @@ -77,9 +79,7 @@ impl fmt::Debug for SyntaxKindSet { } const fn mask(kind: SyntaxKind) -> u128 { - if kind as u32 > 128 { - panic!("mask for not a token kind") - } + assert!(kind as u32 <= 128, "mask for not a token kind"); 1u128 << (kind as u128) } diff --git a/crates/jrsonnet-stdlib/Cargo.toml b/crates/jrsonnet-stdlib/Cargo.toml index 3d655abd..910afc5e 100644 --- a/crates/jrsonnet-stdlib/Cargo.toml +++ b/crates/jrsonnet-stdlib/Cargo.toml @@ -11,16 +11,6 @@ version.workspace = true workspace = true [features] -default = ["codegenerated-stdlib"] -# Speed-up initialization by generating code for parsed stdlib, -# instead of invoking 
parser for it. -# This is mutually exclusive with `serialized-stdlib`. -codegenerated-stdlib = ["jrsonnet-parser/structdump"] -# Use the embedded serialized stdlib. -# This is mutually exclusive with `codegenerated-stdlib`. -serialized-stdlib = [] -# Enables legacy `std.thisFile` support, at the cost of worse caching -legacy-this-file = [] # Add order preservation flag to some functions exp-preserve-order = ["jrsonnet-evaluator/exp-preserve-order"] # Bigint type @@ -36,9 +26,7 @@ jrsonnet-macros.workspace = true jrsonnet-parser.workspace = true jrsonnet-gcmodule.workspace = true -# Used for stdlib AST serialization -bincode = { workspace = true, optional = true } -# Used both for stdlib AST serialization and std.parseJson/std.parseYaml +# Used for std.parseJson/std.parseYaml serde.workspace = true # std.md5 @@ -65,4 +53,3 @@ rustc-hash = { workspace = true, optional = true } [build-dependencies] jrsonnet-parser.workspace = true -structdump = { workspace = true, features = ["derive"] } diff --git a/crates/jrsonnet-stdlib/build.rs b/crates/jrsonnet-stdlib/build.rs deleted file mode 100644 index 7830f79d..00000000 --- a/crates/jrsonnet-stdlib/build.rs +++ /dev/null @@ -1,36 +0,0 @@ -fn main() { - #[cfg(feature = "codegenerated-stdlib")] - { - use std::{env, fs::File, io::Write, path::Path}; - - use jrsonnet_parser::{parse, ParserSettings, Source}; - use structdump::CodegenResult; - - let parsed = parse( - include_str!("./src/std.jsonnet"), - &ParserSettings { - source: Source::new_virtual( - "".into(), - include_str!("./src/std.jsonnet").into(), - ), - }, - ) - .expect("parse"); - - let mut out = CodegenResult::default(); - - let v = out.codegen(&parsed, true); - - { - let out_dir = env::var("OUT_DIR").unwrap(); - let dest_path = Path::new(&out_dir).join("stdlib.rs"); - let mut f = File::create(dest_path).unwrap(); - f.write_all( - ("#[allow(clippy::redundant_clone, clippy::similar_names)]".to_owned() - + &v.to_string()) - .as_bytes(), - ) - .unwrap(); - } - } -} diff 
--git a/crates/jrsonnet-stdlib/src/arrays.rs b/crates/jrsonnet-stdlib/src/arrays.rs index 543fe1d3..effa8dca 100644 --- a/crates/jrsonnet-stdlib/src/arrays.rs +++ b/crates/jrsonnet-stdlib/src/arrays.rs @@ -6,7 +6,7 @@ use jrsonnet_evaluator::{ runtime_error, typed::{BoundedI32, BoundedUsize, Either2, NativeFn, Typed}, val::{equals, ArrValue, IndexableVal}, - Either, IStr, ObjValueBuilder, Result, ResultExt, Thunk, Val, + Either, IStr, ObjValue, ObjValueBuilder, Result, ResultExt, Thunk, Val, }; pub fn eval_on_empty(on_empty: Option>) -> Result { @@ -61,6 +61,29 @@ pub fn builtin_map(func: FuncVal, arr: IndexableVal) -> ArrValue { arr.map(func) } +#[builtin] +pub fn builtin_map_with_index(func: FuncVal, arr: IndexableVal) -> ArrValue { + let arr = arr.to_array(); + arr.map_with_index(func) +} + +#[builtin] +pub fn builtin_map_with_key(func: FuncVal, obj: ObjValue) -> Result { + let mut out = ObjValueBuilder::new(); + for (k, v) in obj.iter( + // Makes sense mapped object should be ordered the same way, should not break anything when the output is not ordered (the default). + // The thrown error might be different, but jsonnet + // does not specify the evaluation order. 
+ #[cfg(feature = "exp-preserve-order")] + true, + ) { + let v = v?; + out.field(k.clone()) + .value(func.evaluate_simple(&(k, v), false)?); + } + Ok(out.build()) +} + #[builtin] pub fn builtin_flatmap( func: NativeFn<((Either![String, Val],), Val)>, @@ -194,6 +217,43 @@ pub fn builtin_join(sep: IndexableVal, arr: ArrValue) -> Result { }) } +#[builtin] +pub fn builtin_lines(arr: ArrValue) -> Result { + builtin_join( + IndexableVal::Str("\n".into()), + ArrValue::extended(arr, ArrValue::eager(vec![Val::string("")])), + ) +} + +#[builtin] +pub fn builtin_resolve_path(f: String, r: String) -> String { + let Some(pos) = f.rfind('/') else { + return r; + }; + format!("{}{}", &f[..=pos], r) +} + +pub fn deep_join_inner(out: &mut String, arr: IndexableVal) -> Result<()> { + use std::fmt::Write; + match arr { + IndexableVal::Str(s) => write!(out, "{s}").expect("no error"), + IndexableVal::Arr(arr) => { + for ele in arr.iter() { + let indexable = IndexableVal::from_untyped(ele?)?; + deep_join_inner(out, indexable)?; + } + } + } + Ok(()) +} + +#[builtin] +pub fn builtin_deep_join(arr: IndexableVal) -> Result { + let mut out = String::new(); + deep_join_inner(&mut out, arr)?; + Ok(out) +} + #[builtin] pub fn builtin_reverse(arr: ArrValue) -> ArrValue { arr.reversed() @@ -240,6 +300,18 @@ pub fn builtin_member(arr: IndexableVal, x: Val) -> Result { } } +#[builtin] +pub fn builtin_find(value: Val, arr: ArrValue) -> Result> { + let mut out = Vec::new(); + for (i, ele) in arr.iter().enumerate() { + let ele = ele?; + if equals(&ele, &value)? { + out.push(i); + } + } + Ok(out) +} + #[builtin] pub fn builtin_contains(arr: IndexableVal, elem: Val) -> Result { builtin_member(arr, elem) @@ -261,25 +333,22 @@ pub fn builtin_avg(arr: Vec, onEmpty: Option>) -> Result { if arr.is_empty() { return eval_on_empty(onEmpty); } - Ok(Val::Num(arr.iter().sum::() / (arr.len() as f64))) + Ok(Val::try_num(arr.iter().sum::() / (arr.len() as f64))?) 
} #[builtin] -pub fn builtin_remove_at(arr: ArrValue, at: usize) -> Result { +pub fn builtin_remove_at(arr: ArrValue, at: i32) -> Result { let newArrLeft = arr.clone().slice(None, Some(at), None); let newArrRight = arr.slice(Some(at + 1), None, None); - Ok(ArrValue::extended( - newArrLeft.unwrap_or_else(ArrValue::empty), - newArrRight.unwrap_or_else(ArrValue::empty), - )) + Ok(ArrValue::extended(newArrLeft, newArrRight)) } #[builtin] pub fn builtin_remove(arr: ArrValue, elem: Val) -> Result { for (index, item) in arr.iter().enumerate() { if equals(&item?, &elem)? { - return builtin_remove_at(arr.clone(), index); + return builtin_remove_at(arr.clone(), index as i32); } } Ok(arr) @@ -325,7 +394,10 @@ pub fn builtin_flatten_deep_array(value: Val) -> Result> { #[builtin] pub fn builtin_prune( a: Val, - #[cfg(feature = "exp-preserve-order")] preserve_order: bool, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Result { fn is_content(val: &Val) -> bool { match val { diff --git a/crates/jrsonnet-stdlib/src/compat.rs b/crates/jrsonnet-stdlib/src/compat.rs index 40a96cd0..d873a04d 100644 --- a/crates/jrsonnet-stdlib/src/compat.rs +++ b/crates/jrsonnet-stdlib/src/compat.rs @@ -1,6 +1,8 @@ use std::cmp::Ordering; -use jrsonnet_evaluator::{function::builtin, operator::evaluate_compare_op, Result, Val}; +use jrsonnet_evaluator::{ + function::builtin, operator::evaluate_compare_op, val::ArrValue, Result, Val, +}; #[builtin] #[allow(non_snake_case)] @@ -13,3 +15,34 @@ pub fn builtin___compare(v1: Val, v2: Val) -> Result { }, ) } + +#[builtin] +#[allow(non_snake_case)] +pub fn builtin___compare_array(arr1: ArrValue, arr2: ArrValue) -> Result { + builtin___compare(Val::Arr(arr1), Val::Arr(arr2)) +} + +macro_rules! 
arr_comp { + ($name:ident, $operator:expr) => { + #[builtin] + #[allow(non_snake_case)] + pub fn $name(arr1: ArrValue, arr2: ArrValue) -> Result { + let ordering = evaluate_compare_op( + &Val::Arr(arr1), + &Val::Arr(arr2), + jrsonnet_parser::BinaryOpType::Lt, + )?; + Ok($operator.contains(&ordering)) + } + }; +} +arr_comp!(builtin___array_less, [Ordering::Less]); +arr_comp!(builtin___array_greater, [Ordering::Greater]); +arr_comp!( + builtin___array_less_or_equal, + [Ordering::Less, Ordering::Equal] +); +arr_comp!( + builtin___array_greater_or_equal, + [Ordering::Greater, Ordering::Equal] +); diff --git a/crates/jrsonnet-stdlib/src/expr.rs b/crates/jrsonnet-stdlib/src/expr.rs deleted file mode 100644 index d3c530f7..00000000 --- a/crates/jrsonnet-stdlib/src/expr.rs +++ /dev/null @@ -1,109 +0,0 @@ -use jrsonnet_parser::LocExpr; - -pub fn stdlib_expr() -> LocExpr { - #[cfg(all(feature = "serialized-stdlib", feature = "codegenerated-stdlib"))] - compile_error!( - "features `serialized-stdlib` and `codegenerated-stdlib` are mutually exclusive" - ); - #[cfg(all(feature = "serialized-stdlib", not(feature = "codegenerated-stdlib")))] - { - use bincode::{BincodeRead, DefaultOptions, Options}; - use serde::{Deserialize, Deserializer}; - - struct LocDeserializer { - source: Source, - wrapped: bincode::Deserializer, - } - macro_rules! delegate { - ($(fn $name:ident($($arg:ident: $ty:ty),*))+) => {$( - fn $name(mut self $(, $arg: $ty)*, visitor: V) -> Result - where V: serde::de::Visitor<'de>, - { - self.wrapped.$name($($arg,)* visitor) - } - )+}; - } - impl<'de, R, O> Deserializer<'de> for LocDeserializer - where - R: BincodeRead<'de>, - O: Options, - { - type Error = <&'de mut bincode::Deserializer as Deserializer<'de>>::Error; - - delegate! 
{ - fn deserialize_any() - fn deserialize_bool() - fn deserialize_u16() - fn deserialize_u32() - fn deserialize_u64() - fn deserialize_i16() - fn deserialize_i32() - fn deserialize_i64() - fn deserialize_f32() - fn deserialize_f64() - fn deserialize_u128() - fn deserialize_i128() - fn deserialize_u8() - fn deserialize_i8() - fn deserialize_unit() - fn deserialize_char() - fn deserialize_str() - fn deserialize_string() - fn deserialize_bytes() - fn deserialize_byte_buf() - fn deserialize_enum(name: &'static str, variants: &'static [&'static str]) - fn deserialize_tuple(len: usize) - fn deserialize_option() - fn deserialize_seq() - fn deserialize_map() - fn deserialize_struct(name: &'static str, fields: &'static [&'static str]) - fn deserialize_identifier() - fn deserialize_newtype_struct(name: &'static str) - fn deserialize_unit_struct(name: &'static str) - fn deserialize_tuple_struct(name: &'static str, len: usize) - fn deserialize_ignored_any() - } - - fn is_human_readable(&self) -> bool { - false - } - } - - // In build.rs, Source object is populated with empty values, deserializer wrapper loads correct values on deserialize - let mut deserializer = bincode::Deserializer::from_slice( - include_bytes!(concat!(env!("OUT_DIR"), "/stdlib.bincode")), - DefaultOptions::new() - .with_fixint_encoding() - .allow_trailing_bytes(), - ); - - // Should not panic, stdlib.bincode is generated in build.rs - LocExpr::deserialize(&mut deserializer).unwrap() - } - - #[cfg(all(feature = "codegenerated-stdlib", not(feature = "serialized-stdlib")))] - { - mod structdump_import { - pub(super) use std::{option::Option, rc::Rc, vec}; - - pub(super) use jrsonnet_parser::*; - } - - include!(concat!(env!("OUT_DIR"), "/stdlib.rs")) - } - - #[cfg(not(any(feature = "serialized-stdlib", feature = "codegenerated-stdlib")))] - { - use jrsonnet_parser::Source; - - const STDLIB_STR: &str = include_str!("./std.jsonnet"); - - jrsonnet_parser::parse( - STDLIB_STR, - &jrsonnet_parser::ParserSettings { 
- source: Source::new_virtual("".into(), STDLIB_STR.into()), - }, - ) - .unwrap() - } -} diff --git a/crates/jrsonnet-stdlib/src/lib.rs b/crates/jrsonnet-stdlib/src/lib.rs index 95e525f1..de84777f 100644 --- a/crates/jrsonnet-stdlib/src/lib.rs +++ b/crates/jrsonnet-stdlib/src/lib.rs @@ -13,9 +13,8 @@ pub use hash::*; use jrsonnet_evaluator::{ error::{ErrorKind::*, Result}, function::{CallLocation, FuncVal, TlaArg}, - tb, trace::PathResolver, - ContextBuilder, IStr, ObjValue, ObjValueBuilder, State, Thunk, Val, + ContextBuilder, IStr, ObjValue, ObjValueBuilder, Thunk, Val, }; use jrsonnet_gcmodule::Trace; use jrsonnet_parser::Source; @@ -36,7 +35,6 @@ pub use crate::regex::*; mod arrays; mod compat; mod encoding; -mod expr; mod hash; mod manifest; mod math; @@ -55,14 +53,6 @@ mod types; pub fn stdlib_uncached(settings: Rc>) -> ObjValue { let mut builder = ObjValueBuilder::new(); - let expr = expr::stdlib_expr(); - let eval = jrsonnet_evaluator::evaluate(ContextBuilder::dangerous_empty_state().build(), &expr) - .expect("stdlib.jsonnet should have no errors") - .as_obj() - .expect("stdlib.jsonnet should evaluate to object"); - - builder.with_super(eval); - // FIXME: Use PHF for (name, builtin) in [ // Types @@ -78,16 +68,22 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("repeat", builtin_repeat::INST), ("slice", builtin_slice::INST), ("map", builtin_map::INST), + ("mapWithIndex", builtin_map_with_index::INST), + ("mapWithKey", builtin_map_with_key::INST), ("flatMap", builtin_flatmap::INST), ("filter", builtin_filter::INST), ("foldl", builtin_foldl::INST), ("foldr", builtin_foldr::INST), ("range", builtin_range::INST), ("join", builtin_join::INST), + ("lines", builtin_lines::INST), + ("resolvePath", builtin_resolve_path::INST), + ("deepJoin", builtin_deep_join::INST), ("reverse", builtin_reverse::INST), ("any", builtin_any::INST), ("all", builtin_all::INST), ("member", builtin_member::INST), + ("find", builtin_find::INST), ("contains", builtin_contains::INST), 
("count", builtin_count::INST), ("avg", builtin_avg::INST), @@ -102,6 +98,7 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("sign", builtin_sign::INST), ("max", builtin_max::INST), ("min", builtin_min::INST), + ("clamp", builtin_clamp::INST), ("sum", builtin_sum::INST), ("modulo", builtin_modulo::INST), ("floor", builtin_floor::INST), @@ -163,11 +160,21 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("objectRemoveKey", builtin_object_remove_key::INST), // Manifest ("escapeStringJson", builtin_escape_string_json::INST), + ("escapeStringPython", builtin_escape_string_python::INST), + ("escapeStringXML", builtin_escape_string_xml::INST), ("manifestJsonEx", builtin_manifest_json_ex::INST), + ("manifestJson", builtin_manifest_json::INST), + ("manifestJsonMinified", builtin_manifest_json_minified::INST), ("manifestYamlDoc", builtin_manifest_yaml_doc::INST), + ("manifestYamlStream", builtin_manifest_yaml_stream::INST), ("manifestTomlEx", builtin_manifest_toml_ex::INST), + ("manifestToml", builtin_manifest_toml::INST), ("toString", builtin_to_string::INST), - // Parsing + ("manifestPython", builtin_manifest_python::INST), + ("manifestPythonVars", builtin_manifest_python_vars::INST), + ("manifestXmlJsonml", builtin_manifest_xml_jsonml::INST), + ("manifestIni", builtin_manifest_ini::INST), + // Parse ("parseJson", builtin_parse_json::INST), ("parseYaml", builtin_parse_yaml::INST), // Strings @@ -175,10 +182,13 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("substr", builtin_substr::INST), ("char", builtin_char::INST), ("strReplace", builtin_str_replace::INST), + ("escapeStringBash", builtin_escape_string_bash::INST), + ("escapeStringDollars", builtin_escape_string_dollars::INST), ("isEmpty", builtin_is_empty::INST), ("equalsIgnoreCase", builtin_equals_ignore_case::INST), ("splitLimit", builtin_splitlimit::INST), ("splitLimitR", builtin_splitlimitr::INST), + ("split", builtin_split::INST), ("asciiUpper", builtin_ascii_upper::INST), ("asciiLower", 
builtin_ascii_lower::INST), ("findSubstr", builtin_find_substr::INST), @@ -188,10 +198,16 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("parseOctal", builtin_parse_octal::INST), ("parseHex", builtin_parse_hex::INST), ("stringChars", builtin_string_chars::INST), + ("lstripChars", builtin_lstrip_chars::INST), + ("rstripChars", builtin_rstrip_chars::INST), + ("stripChars", builtin_strip_chars::INST), // Misc ("length", builtin_length::INST), + ("get", builtin_get::INST), ("startsWith", builtin_starts_with::INST), ("endsWith", builtin_ends_with::INST), + ("assertEqual", builtin_assert_equal::INST), + ("mergePatch", builtin_merge_patch::INST), // Sets ("setMember", builtin_set_member::INST), ("setInter", builtin_set_inter::INST), @@ -202,6 +218,14 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ("regexQuoteMeta", builtin_regex_quote_meta::INST), // Compat ("__compare", builtin___compare::INST), + ("__compare_array", builtin___compare_array::INST), + ("__array_less", builtin___array_less::INST), + ("__array_greater", builtin___array_greater::INST), + ("__array_less_or_equal", builtin___array_less_or_equal::INST), + ( + "__array_greater_or_equal", + builtin___array_greater_or_equal::INST, + ), ] .iter() .copied() @@ -303,19 +327,12 @@ fn extvar_source(name: &str, code: impl Into) -> Source { #[derive(Trace, Clone)] pub struct ContextInitializer { - /// When we don't need to support legacy-this-file, we can reuse same context for all files - #[cfg(not(feature = "legacy-this-file"))] - context: jrsonnet_evaluator::Context, - /// For `populate` - #[cfg(not(feature = "legacy-this-file"))] - stdlib_thunk: Thunk, - /// Otherwise, we can only keep first stdlib layer, and then stack thisFile on top of it - #[cfg(feature = "legacy-this-file")] + /// std without applied thisFile overlay stdlib_obj: ObjValue, settings: Rc>, } impl ContextInitializer { - pub fn new(s: State, resolver: PathResolver) -> Self { + pub fn new(resolver: PathResolver) -> Self { let settings 
= Settings { ext_vars: HashMap::new(), ext_natives: HashMap::new(), @@ -324,20 +341,7 @@ impl ContextInitializer { }; let settings = Rc::new(RefCell::new(settings)); let stdlib_obj = stdlib_uncached(settings.clone()); - #[cfg(not(feature = "legacy-this-file"))] - let stdlib_thunk = Thunk::evaluated(Val::Obj(stdlib_obj)); - #[cfg(feature = "legacy-this-file")] - let _ = s; Self { - #[cfg(not(feature = "legacy-this-file"))] - context: { - let mut context = ContextBuilder::with_capacity(s, 1); - context.bind("std", stdlib_thunk.clone()); - context.build() - }, - #[cfg(not(feature = "legacy-this-file"))] - stdlib_thunk, - #[cfg(feature = "legacy-this-file")] stdlib_obj, settings, } @@ -387,15 +391,6 @@ impl jrsonnet_evaluator::ContextInitializer for ContextInitializer { fn reserve_vars(&self) -> usize { 1 } - #[cfg(not(feature = "legacy-this-file"))] - fn initialize(&self, _s: State, _source: Source) -> jrsonnet_evaluator::Context { - self.context.clone() - } - #[cfg(not(feature = "legacy-this-file"))] - fn populate(&self, _for_file: Source, builder: &mut ContextBuilder) { - builder.bind("std", self.stdlib_thunk.clone()); - } - #[cfg(feature = "legacy-this-file")] fn populate(&self, source: Source, builder: &mut ContextBuilder) { let mut std = ObjValueBuilder::new(); std.with_super(self.stdlib_obj.clone()); @@ -414,15 +409,3 @@ impl jrsonnet_evaluator::ContextInitializer for ContextInitializer { self } } - -pub trait StateExt { - /// This method was previously implemented in jrsonnet-evaluator itself - fn with_stdlib(&self); -} - -impl StateExt for State { - fn with_stdlib(&self) { - let initializer = ContextInitializer::new(self.clone(), PathResolver::new_cwd_fallback()); - self.settings_mut().context_initializer = tb!(initializer); - } -} diff --git a/crates/jrsonnet-stdlib/src/manifest/ini.rs b/crates/jrsonnet-stdlib/src/manifest/ini.rs new file mode 100644 index 00000000..77289c10 --- /dev/null +++ b/crates/jrsonnet-stdlib/src/manifest/ini.rs @@ -0,0 +1,121 @@ +use 
std::collections::BTreeMap; + +use jrsonnet_evaluator::{ + manifest::{ManifestFormat, ToStringFormat}, + typed::Typed, + ObjValue, Result, ResultExt, Val, +}; +use jrsonnet_parser::IStr; + +pub struct IniFormat { + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, + final_newline: bool, +} + +impl IniFormat { + pub fn std(#[cfg(feature = "exp-preserve-order")] preserve_order: bool) -> Self { + Self { + #[cfg(feature = "exp-preserve-order")] + preserve_order, + final_newline: true, + } + } + pub fn cli(#[cfg(feature = "exp-preserve-order")] preserve_order: bool) -> Self { + Self { + #[cfg(feature = "exp-preserve-order")] + preserve_order, + final_newline: false, + } + } +} + +impl ManifestFormat for IniFormat { + fn manifest_buf(&self, val: Val, buf: &mut String) -> Result<()> { + manifest_ini_obj( + self, + IniObj::from_untyped(val).description("ini object structure")?, + buf, + ) + } +} + +fn manifest_ini_body( + #[cfg(feature = "exp-preserve-order")] format: &IniFormat, + body: ObjValue, + out: &mut String, +) -> Result<()> { + for (i, (key, value)) in body + .iter( + #[cfg(feature = "exp-preserve-order")] + format.preserve_order, + ) + .enumerate() + { + if i != 0 || !out.is_empty() { + out.push('\n'); + } + let value = value.with_description(|| format!("field <{key}> evaluation"))?; + let manifest_desc = || format!("field <{key}> manifestification"); + if let Some(arr) = value.as_arr() { + for (i, ele) in arr.iter().enumerate() { + if i != 0 { + out.push('\n'); + } + let ele = ele + .with_description(|| format!("elem <{i}> evaluation")) + .with_description(manifest_desc)?; + out.push_str(&key); + out.push_str(" = "); + ToStringFormat + .manifest_buf(ele, out) + .with_description(manifest_desc)?; + } + } else { + out.push_str(&key); + out.push_str(" = "); + ToStringFormat + .manifest_buf(value, out) + .with_description(manifest_desc)?; + } + } + Ok(()) +} + +#[derive(Typed)] +struct IniObj { + main: Option, + // TODO: Preserve section order? 
+ sections: BTreeMap, +} + +fn manifest_ini_obj(format: &IniFormat, obj: IniObj, out: &mut String) -> Result<()> { + if let Some(main) = obj.main { + manifest_ini_body( + #[cfg(feature = "exp-preserve-order")] + format, + main, + out, + ) + .description("

manifestification")?; + } + for (i, (section, val)) in obj.sections.into_iter().enumerate() { + if i != 0 || !out.is_empty() { + out.push('\n'); + } + out.push('['); + out.push_str(§ion); + out.push(']'); + manifest_ini_body( + #[cfg(feature = "exp-preserve-order")] + format, + val, + out, + ) + .with_description(|| format!("<{section}> section manifestification"))?; + } + if format.final_newline { + out.push('\n'); + } + Ok(()) +} diff --git a/crates/jrsonnet-stdlib/src/manifest/mod.rs b/crates/jrsonnet-stdlib/src/manifest/mod.rs index eb84ff20..e0fd37f1 100644 --- a/crates/jrsonnet-stdlib/src/manifest/mod.rs +++ b/crates/jrsonnet-stdlib/src/manifest/mod.rs @@ -1,12 +1,18 @@ +mod ini; +mod python; mod toml; +mod xml; mod yaml; +pub use ini::IniFormat; use jrsonnet_evaluator::{ function::builtin, - manifest::{escape_string_json, JsonFormat}, + manifest::{escape_string_json, JsonFormat, YamlStreamFormat}, IStr, ObjValue, Result, Val, }; +pub use python::{PythonFormat, PythonVarsFormat}; pub use toml::TomlFormat; +pub use xml::XmlJsonmlFormat; pub use yaml::YamlFormat; #[builtin] @@ -14,54 +20,189 @@ pub fn builtin_escape_string_json(str_: IStr) -> Result { Ok(escape_string_json(&str_)) } +#[builtin] +pub fn builtin_escape_string_python(str: IStr) -> Result { + Ok(escape_string_json(&str)) +} + #[builtin] pub fn builtin_manifest_json_ex( value: Val, - indent: IStr, + indent: String, newline: Option, key_val_sep: Option, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Result { let newline = newline.as_deref().unwrap_or("\n"); let key_val_sep = key_val_sep.as_deref().unwrap_or(": "); value.manifest(JsonFormat::std_to_json( - indent.to_string(), + indent, newline, key_val_sep, #[cfg(feature = "exp-preserve-order")] - preserve_order.unwrap_or(false), + preserve_order, + )) +} + +#[builtin] +pub fn builtin_manifest_json( + value: Val, + + #[default(false)] + 
#[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + builtin_manifest_json_ex( + value, + " ".to_owned(), + None, + None, + #[cfg(feature = "exp-preserve-order")] + preserve_order, + ) +} + +#[builtin] +pub fn builtin_manifest_json_minified( + value: Val, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + value.manifest(JsonFormat::minify( + #[cfg(feature = "exp-preserve-order")] + preserve_order, )) } #[builtin] pub fn builtin_manifest_yaml_doc( value: Val, - indent_array_in_object: Option, - quote_keys: Option, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + #[default(false)] indent_array_in_object: bool, + #[default(true)] quote_keys: bool, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Result { value.manifest(YamlFormat::std_to_yaml( - indent_array_in_object.unwrap_or(false), - quote_keys.unwrap_or(true), + indent_array_in_object, + quote_keys, #[cfg(feature = "exp-preserve-order")] - preserve_order.unwrap_or(false), + preserve_order, + )) +} + +#[builtin] +pub fn builtin_manifest_yaml_stream( + value: Val, + #[default(false)] indent_array_in_object: bool, + #[default(true)] c_document_end: bool, + #[default(true)] quote_keys: bool, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + value.manifest(YamlStreamFormat::std_yaml_stream( + YamlFormat::std_to_yaml( + indent_array_in_object, + quote_keys, + #[cfg(feature = "exp-preserve-order")] + preserve_order, + ), + c_document_end, )) } #[builtin] pub fn builtin_manifest_toml_ex( value: ObjValue, - indent: IStr, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + indent: String, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Result { Val::Obj(value).manifest(TomlFormat::std_to_toml( - indent.to_string(), + indent, #[cfg(feature = "exp-preserve-order")] - 
preserve_order.unwrap_or(false), + preserve_order, )) } +#[builtin] +pub fn builtin_manifest_toml( + value: ObjValue, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + builtin_manifest_toml_ex( + value, + " ".to_owned(), + #[cfg(feature = "exp-preserve-order")] + preserve_order, + ) +} + #[builtin] pub fn builtin_to_string(a: Val) -> Result { a.to_string() } + +#[builtin] +pub fn builtin_manifest_python( + v: Val, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + v.manifest(PythonFormat::std( + #[cfg(feature = "exp-preserve-order")] + preserve_order, + )) +} +#[builtin] +pub fn builtin_manifest_python_vars( + conf: Val, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + conf.manifest(PythonVarsFormat::std( + #[cfg(feature = "exp-preserve-order")] + preserve_order, + )) +} + +#[builtin] +pub fn builtin_escape_string_xml(str_: String) -> String { + xml::escape_string_xml(str_.as_str()) +} + +#[builtin] +pub fn builtin_manifest_xml_jsonml(value: Val) -> Result { + value.manifest(XmlJsonmlFormat::std_to_xml()) +} + +#[builtin] +pub fn builtin_manifest_ini( + ini: Val, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +) -> Result { + ini.manifest(IniFormat::std( + #[cfg(feature = "exp-preserve-order")] + preserve_order, + )) +} diff --git a/crates/jrsonnet-stdlib/src/manifest/python.rs b/crates/jrsonnet-stdlib/src/manifest/python.rs new file mode 100644 index 00000000..bca68d93 --- /dev/null +++ b/crates/jrsonnet-stdlib/src/manifest/python.rs @@ -0,0 +1,105 @@ +use jrsonnet_evaluator::{ + bail, + manifest::{escape_string_json_buf, ManifestFormat, ToStringFormat}, + Result, Val, +}; + +pub struct PythonFormat { + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +} + +impl PythonFormat { + pub fn std(#[cfg(feature = "exp-preserve-order")] preserve_order: bool) -> 
Self { + Self { + #[cfg(feature = "exp-preserve-order")] + preserve_order, + } + } +} + +impl ManifestFormat for PythonFormat { + fn manifest_buf(&self, val: Val, buf: &mut String) -> Result<()> { + match val { + Val::Bool(true) => buf.push_str("True"), + Val::Bool(false) => buf.push_str("False"), + Val::Null => buf.push_str("None"), + Val::Str(s) => escape_string_json_buf(&s.to_string(), buf), + Val::Num(_) => ToStringFormat.manifest_buf(val, buf)?, + #[cfg(feature = "exp-bigint")] + Val::BigInt(_) => ToStringFormat.manifest_buf(val, buf)?, + Val::Arr(arr) => { + buf.push('['); + for (i, el) in arr.iter().enumerate() { + let el = el?; + if i != 0 { + buf.push_str(", "); + } + self.manifest_buf(el, buf)?; + } + buf.push(']'); + } + Val::Obj(obj) => { + obj.run_assertions()?; + buf.push('{'); + let fields = obj.fields( + #[cfg(feature = "exp-preserve-order")] + self.preserve_order, + ); + for (i, field) in fields.into_iter().enumerate() { + if i != 0 { + buf.push_str(", "); + } + escape_string_json_buf(&field, buf); + buf.push_str(": "); + let value = obj.get(field)?.expect("field exists"); + self.manifest_buf(value, buf)?; + } + buf.push('}'); + } + Val::Func(_) => bail!("tried to manifest function"), + } + Ok(()) + } +} + +pub struct PythonVarsFormat { + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, +} + +impl PythonVarsFormat { + pub fn std(#[cfg(feature = "exp-preserve-order")] preserve_order: bool) -> Self { + Self { + #[cfg(feature = "exp-preserve-order")] + preserve_order, + } + } +} + +impl ManifestFormat for PythonVarsFormat { + fn manifest_buf(&self, val: Val, buf: &mut String) -> Result<()> { + let inner = PythonFormat { + #[cfg(feature = "exp-preserve-order")] + preserve_order: self.preserve_order, + }; + let Val::Obj(obj) = val else { + bail!("python vars root should be object"); + }; + obj.run_assertions()?; + + let fields = obj.fields( + #[cfg(feature = "exp-preserve-order")] + self.preserve_order, + ); + + for field in fields { + // 
Yep, no escaping + buf.push_str(&field); + buf.push_str(" = "); + inner.manifest_buf(obj.get(field)?.expect("field exists"), buf)?; + buf.push('\n'); + } + Ok(()) + } +} diff --git a/crates/jrsonnet-stdlib/src/manifest/toml.rs b/crates/jrsonnet-stdlib/src/manifest/toml.rs index f878fd5c..12dcae95 100644 --- a/crates/jrsonnet-stdlib/src/manifest/toml.rs +++ b/crates/jrsonnet-stdlib/src/manifest/toml.rs @@ -1,10 +1,10 @@ use std::borrow::Cow; use jrsonnet_evaluator::{ - bail, + bail, in_description_frame, manifest::{escape_string_json_buf, ManifestFormat}, val::ArrValue, - IStr, ObjValue, Result, Val, + IStr, ObjValue, Result, ResultExt, Val, }; pub struct TomlFormat<'s> { @@ -106,16 +106,15 @@ fn manifest_value( #[cfg(feature = "exp-bigint")] Val::BigInt(n) => write!(buf, "{n}").unwrap(), Val::Arr(a) => { - if a.is_empty() { - buf.push_str("[]"); - return Ok(()); - } + buf.push('['); + + let mut had_items = false; for (i, e) in a.iter().enumerate() { - let e = e?; + had_items = true; + let e = e.with_description(|| format!("elem <{i}> evaluation"))?; + if i != 0 { buf.push(','); - } else { - buf.push('['); } if inline { buf.push(' '); @@ -124,9 +123,15 @@ fn manifest_value( buf.push_str(cur_padding); buf.push_str(&options.padding); } - manifest_value(&e, true, buf, "", options)?; + + in_description_frame( + || format!("elem <{i}> manifestification"), + || manifest_value(&e, true, buf, "", options), + )?; } - if inline { + + if !had_items { + } else if inline { buf.push(' '); } else { buf.push('\n'); @@ -135,10 +140,10 @@ fn manifest_value( buf.push(']'); } Val::Obj(o) => { - if o.is_empty() { - buf.push_str("{}"); - } - buf.push_str("{ "); + o.run_assertions()?; + buf.push('{'); + + let mut had_fields = false; for (i, (k, v)) in o .iter( #[cfg(feature = "exp-preserve-order")] @@ -146,15 +151,27 @@ fn manifest_value( ) .enumerate() { - let v = v?; + had_fields = true; + let v = v.with_description(|| format!("field <{k}> evaluation"))?; + if i != 0 { - buf.push_str(", 
"); + buf.push(','); } + buf.push(' '); + escape_key_toml_buf(&k, buf); buf.push_str(" = "); - manifest_value(&v, true, buf, "", options)?; + in_description_frame( + || format!("field <{k}> manifestification"), + || manifest_value(&v, true, buf, "", options), + )?; } - buf.push_str(" }"); + + if had_fields { + buf.push(' '); + } + + buf.push('}'); } Val::Null => { bail!("tried to manifest null") @@ -179,7 +196,7 @@ fn manifest_table_internal( #[cfg(feature = "exp-preserve-order")] options.preserve_order, ) { - let value = value?; + let value = value.with_description(|| format!("field <{key}> evaluation"))?; if is_section(&value)? { sections.push((key, value)); } else { diff --git a/crates/jrsonnet-stdlib/src/manifest/xml.rs b/crates/jrsonnet-stdlib/src/manifest/xml.rs new file mode 100644 index 00000000..596b875a --- /dev/null +++ b/crates/jrsonnet-stdlib/src/manifest/xml.rs @@ -0,0 +1,184 @@ +use jrsonnet_evaluator::{ + bail, in_description_frame, + manifest::{ManifestFormat, ToStringFormat}, + typed::{ComplexValType, Either2, Typed, ValType}, + val::ArrValue, + Either, ObjValue, Result, ResultExt, Val, +}; + +pub struct XmlJsonmlFormat { + force_closing: bool, +} +impl XmlJsonmlFormat { + pub fn std_to_xml() -> Self { + Self { + force_closing: true, + } + } + pub fn cli() -> Self { + Self { + force_closing: false, + } + } +} + +enum JSONMLValue { + Tag { + tag: String, + attrs: ObjValue, + children: Vec, + }, + String(String), +} +impl Typed for JSONMLValue { + const TYPE: &'static ComplexValType = &ComplexValType::Simple(ValType::Arr); + + fn into_untyped(_typed: Self) -> Result { + unreachable!("not used, reserved for parseXML?") + } + + fn from_untyped(untyped: Val) -> Result { + let val = ::from_untyped(untyped) + .description("parsing JSONML value (an array or string)")?; + let arr = match val { + Either2::A(a) => a, + Either2::B(s) => return Ok(Self::String(s)), + }; + if arr.is_empty() { + bail!("JSONML value should have tag (array length should be >=1)"); 
+ }; + let tag = String::from_untyped( + arr.get(0) + .description("getting JSONML tag")? + .expect("length checked"), + ) + .description("parsing JSONML tag")?; + + let (has_attrs, attrs) = if arr.len() >= 2 { + let maybe_attrs = arr + .get(1) + .with_description(|| "getting JSONML attrs")? + .expect("length checked"); + if let Val::Obj(attrs) = maybe_attrs { + (true, attrs) + } else { + (false, ObjValue::new_empty()) + } + } else { + (false, ObjValue::new_empty()) + }; + Ok(Self::Tag { + tag, + attrs, + children: in_description_frame( + || "parsing children".to_owned(), + || { + Typed::from_untyped(Val::Arr(arr.slice( + Some(if has_attrs { 2 } else { 1 }), + None, + None, + ))) + }, + )?, + }) + } +} + +impl ManifestFormat for XmlJsonmlFormat { + fn manifest_buf(&self, val: Val, buf: &mut String) -> Result<()> { + let val = JSONMLValue::from_untyped(val).with_description(|| "parsing JSONML value")?; + manifest_jsonml(&val, buf, self) + } +} + +fn manifest_jsonml(v: &JSONMLValue, buf: &mut String, opts: &XmlJsonmlFormat) -> Result<()> { + match v { + JSONMLValue::Tag { + tag, + attrs, + children, + } => { + let has_children = !children.is_empty(); + buf.push('<'); + buf.push_str(tag); + attrs.run_assertions()?; + for (key, value) in attrs.iter( + // Not much sense to preserve order here + #[cfg(feature = "exp-preserve-order")] + false, + ) { + buf.push(' '); + buf.push_str(&key); + buf.push('='); + buf.push('"'); + let value = value?; + let value = if let Val::Str(s) = value { + s.to_string() + } else { + ToStringFormat.manifest(value)? 
+ }; + escape_string_xml_buf(&value, buf); + buf.push('"'); + } + if !has_children && !opts.force_closing { + buf.push('/'); + } + buf.push('>'); + for child in children { + manifest_jsonml(child, buf, opts)?; + } + if has_children || opts.force_closing { + buf.push('<'); + buf.push('/'); + buf.push_str(tag); + buf.push('>'); + } + Ok(()) + } + JSONMLValue::String(s) => { + escape_string_xml_buf(s, buf); + Ok(()) + } + } +} + +pub fn escape_string_xml(str: &str) -> String { + let mut out = String::new(); + escape_string_xml_buf(str, &mut out); + out +} + +fn escape_string_xml_buf(str: &str, out: &mut String) { + if str.is_empty() { + return; + } + let mut remaining = str; + + let mut found = false; + while let Some(position) = remaining + .bytes() + .position(|c| matches!(c, b'<' | b'>' | b'&' | b'"' | b'\'')) + { + found = true; + + let (plain, rem) = remaining.split_at(position); + out.push_str(plain); + + out.push_str(match rem.as_bytes()[0] { + b'<' => "<", + b'>' => ">", + b'&' => "&", + b'"' => """, + b'\'' => "'", + _ => unreachable!("position() searches for those matches"), + }); + + remaining = &rem[1..]; + } + if !found { + // No match - no escapes required + out.push_str(str); + return; + } + out.push_str(remaining); +} diff --git a/crates/jrsonnet-stdlib/src/manifest/yaml.rs b/crates/jrsonnet-stdlib/src/manifest/yaml.rs index 20ee5731..fe683d49 100644 --- a/crates/jrsonnet-stdlib/src/manifest/yaml.rs +++ b/crates/jrsonnet-stdlib/src/manifest/yaml.rs @@ -1,9 +1,9 @@ use std::{borrow::Cow, fmt::Write}; use jrsonnet_evaluator::{ - bail, + bail, in_description_frame, manifest::{escape_string_json_buf, ManifestFormat}, - Result, Val, + Result, ResultExt, Val, }; pub struct YamlFormat<'s> { @@ -152,80 +152,87 @@ fn manifest_yaml_ex_buf( #[cfg(feature = "exp-bigint")] Val::BigInt(n) => write!(buf, "{}", *n).unwrap(), Val::Arr(a) => { - if a.is_empty() { - buf.push_str("[]"); - } else { - for (i, item) in a.iter().enumerate() { - if i != 0 { + let mut had_items 
= false; + for (i, item) in a.iter().enumerate() { + had_items = true; + let item = item.with_description(|| format!("elem <{i}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(cur_padding); + } + buf.push('-'); + match &item { + Val::Arr(a) if !a.is_empty() => { buf.push('\n'); buf.push_str(cur_padding); + buf.push_str(&options.padding); } - let item = item?; - buf.push('-'); - match &item { - Val::Arr(a) if !a.is_empty() => { - buf.push('\n'); - buf.push_str(cur_padding); - buf.push_str(&options.padding); - } - _ => buf.push(' '), - } - let extra_padding = match &item { - Val::Arr(a) => !a.is_empty(), - Val::Obj(o) => !o.is_empty(), - _ => false, - }; - let prev_len = cur_padding.len(); - if extra_padding { - cur_padding.push_str(&options.padding); - } - manifest_yaml_ex_buf(&item, buf, cur_padding, options)?; - cur_padding.truncate(prev_len); + _ => buf.push(' '), + } + let extra_padding = match &item { + Val::Arr(a) => !a.is_empty(), + Val::Obj(o) => !o.is_empty(), + _ => false, + }; + let prev_len = cur_padding.len(); + if extra_padding { + cur_padding.push_str(&options.padding); } + in_description_frame( + || format!("elem <{i}> manifestification"), + || manifest_yaml_ex_buf(&item, buf, cur_padding, options), + )?; + cur_padding.truncate(prev_len); + } + if !had_items { + buf.push_str("[]"); } } Val::Obj(o) => { - if o.is_empty() { - buf.push_str("{}"); - } else { - for (i, key) in o - .fields( - #[cfg(feature = "exp-preserve-order")] - options.preserve_order, - ) - .iter() - .enumerate() - { - if i != 0 { + let mut had_fields = false; + for (i, (key, value)) in o + .iter( + #[cfg(feature = "exp-preserve-order")] + options.preserve_order, + ) + .enumerate() + { + had_fields = true; + let value = value.with_description(|| format!("field <{key}> evaluation"))?; + if i != 0 { + buf.push('\n'); + buf.push_str(cur_padding); + } + if !options.quote_keys && !yaml_needs_quotes(&key) { + buf.push_str(&key); + } else { + escape_string_json_buf(&key, 
buf); + } + buf.push(':'); + let prev_len = cur_padding.len(); + match &value { + Val::Arr(a) if !a.is_empty() => { buf.push('\n'); buf.push_str(cur_padding); + buf.push_str(&options.arr_element_padding); + cur_padding.push_str(&options.arr_element_padding); } - if !options.quote_keys && !yaml_needs_quotes(key) { - buf.push_str(key); - } else { - escape_string_json_buf(key, buf); - } - buf.push(':'); - let prev_len = cur_padding.len(); - let item = o.get(key.clone())?.expect("field exists"); - match &item { - Val::Arr(a) if !a.is_empty() => { - buf.push('\n'); - buf.push_str(cur_padding); - buf.push_str(&options.arr_element_padding); - cur_padding.push_str(&options.arr_element_padding); - } - Val::Obj(o) if !o.is_empty() => { - buf.push('\n'); - buf.push_str(cur_padding); - buf.push_str(&options.padding); - cur_padding.push_str(&options.padding); - } - _ => buf.push(' '), + Val::Obj(o) if !o.is_empty() => { + buf.push('\n'); + buf.push_str(cur_padding); + buf.push_str(&options.padding); + cur_padding.push_str(&options.padding); } - manifest_yaml_ex_buf(&item, buf, cur_padding, options)?; - cur_padding.truncate(prev_len); + _ => buf.push(' '), } + in_description_frame( + || format!("field <{key}> manifestification"), + || manifest_yaml_ex_buf(&value, buf, cur_padding, options), + )?; + cur_padding.truncate(prev_len); + } + if !had_fields { + buf.push_str("{}"); } } Val::Func(_) => bail!("tried to manifest function"), diff --git a/crates/jrsonnet-stdlib/src/math.rs b/crates/jrsonnet-stdlib/src/math.rs index ac8bffa9..cd0e1d6e 100644 --- a/crates/jrsonnet-stdlib/src/math.rs +++ b/crates/jrsonnet-stdlib/src/math.rs @@ -24,6 +24,12 @@ pub fn builtin_min(a: f64, b: f64) -> f64 { a.min(b) } +#[allow(non_snake_case)] +#[builtin] +pub fn builtin_clamp(x: f64, minVal: f64, maxVal: f64) -> f64 { + x.clamp(minVal, maxVal) +} + #[builtin] pub fn builtin_sum(arr: Vec) -> f64 { arr.iter().sum() diff --git a/crates/jrsonnet-stdlib/src/misc.rs b/crates/jrsonnet-stdlib/src/misc.rs 
index 2933258e..8c1c813c 100644 --- a/crates/jrsonnet-stdlib/src/misc.rs +++ b/crates/jrsonnet-stdlib/src/misc.rs @@ -1,4 +1,4 @@ -use std::{cell::RefCell, rc::Rc}; +use std::{cell::RefCell, collections::BTreeSet, rc::Rc}; use jrsonnet_evaluator::{ bail, @@ -7,7 +7,7 @@ use jrsonnet_evaluator::{ manifest::JsonFormat, typed::{Either2, Either4}, val::{equals, ArrValue}, - Context, Either, IStr, ObjValue, Thunk, Val, + Context, Either, IStr, ObjValue, ObjValueBuilder, ResultExt, Thunk, Val, }; use crate::{extvar_source, Settings}; @@ -23,6 +23,29 @@ pub fn builtin_length(x: Either![IStr, ArrValue, ObjValue, FuncVal]) -> usize { } } +#[builtin] +pub fn builtin_get( + o: ObjValue, + f: IStr, + default: Option>, + #[default(true)] inc_hidden: bool, +) -> Result { + let do_default = move || { + let Some(default) = default else { + return Ok(Val::Null); + }; + default.evaluate() + }; + // Happy path for invisible fields + if !inc_hidden && !o.has_field_ex(f.clone(), false) { + return do_default(); + } + let Some(v) = o.get(f)? else { + return do_default(); + }; + Ok(v) +} + #[builtin(fields( settings: Rc>, ))] @@ -118,3 +141,73 @@ pub fn builtin_ends_with(a: Either![IStr, ArrValue], b: Either![IStr, ArrValue]) _ => bail!("both arguments should be of the same type"), }) } + +#[builtin] +pub fn builtin_assert_equal(a: Val, b: Val) -> Result { + if equals(&a, &b)? 
{ + return Ok(true); + } + // TODO: Use debug output format + let format = JsonFormat::std_to_json( + " ".to_owned(), + "\n", + ": ", + #[cfg(feature = "exp-preserve-order")] + true, + ); + let a = a.manifest(&format).description(" manifestification")?; + let b = b.manifest(&format).description(" manifestification")?; + bail!("assertion failed: A != B\nA: {a}\nB: {b}") +} + +#[builtin] +pub fn builtin_merge_patch(target: Val, patch: Val) -> Result { + let Some(patch) = patch.as_obj() else { + return Ok(patch); + }; + let Some(target) = target.as_obj() else { + return Ok(Val::Obj(patch)); + }; + let target_fields = target + .fields( + // FIXME: Makes no sense to preserve order for BTreeSet, it would be better to use IndexSet here? + // But IndexSet won't allow fast ordered union... + // // Makes sense to preserve source ordering where possible. + // // May affect evaluation order, but it is not specified by jsonnet spec. + // #[cfg(feature = "exp-preserve-order")] + // true, + #[cfg(feature = "exp-preserve-order")] + false, + ) + .into_iter() + .collect::>(); + let patch_fields = patch + .fields( + // No need to look at the patch field order, I think? + // New fields (that will be appended at the end) will be alphabeticaly-ordered, + // but it is fine for jsonpatch, I don't think people write jsonpatch in jsonnet, + // when they can use mixins. + #[cfg(feature = "exp-preserve-order")] + false, + ) + .into_iter() + .collect::>(); + + let mut out = ObjValueBuilder::new(); + for field in target_fields.union(&patch_fields) { + let Some(field_patch) = patch.get(field.clone())? else { + out.field(field.clone()).value(target.get(field.clone())?.expect("we're iterating over fields union, if field is missing in patch - it exists in target")); + continue; + }; + if matches!(field_patch, Val::Null) { + continue; + } + let Some(field_target) = target.get(field.clone())? 
else { + out.field(field.clone()).value(field_patch); + continue; + }; + out.field(field.clone()) + .value(builtin_merge_patch(field_target, field_patch)?); + } + Ok(out.build().into()) +} diff --git a/crates/jrsonnet-stdlib/src/objects.rs b/crates/jrsonnet-stdlib/src/objects.rs index 1f36bf20..1089c21c 100644 --- a/crates/jrsonnet-stdlib/src/objects.rs +++ b/crates/jrsonnet-stdlib/src/objects.rs @@ -8,10 +8,11 @@ use jrsonnet_evaluator::{ pub fn builtin_object_fields_ex( obj: ObjValue, hidden: bool, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, -) -> Vec { + + #[default(false)] #[cfg(feature = "exp-preserve-order")] - let preserve_order = preserve_order.unwrap_or(false); + preserve_order: bool, +) -> Vec { let out = obj.fields_ex( hidden, #[cfg(feature = "exp-preserve-order")] @@ -23,7 +24,10 @@ pub fn builtin_object_fields_ex( #[builtin] pub fn builtin_object_fields( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Vec { builtin_object_fields_ex( o, @@ -36,7 +40,10 @@ pub fn builtin_object_fields( #[builtin] pub fn builtin_object_fields_all( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> Vec { builtin_object_fields_ex( o, @@ -49,10 +56,9 @@ pub fn builtin_object_fields_all( pub fn builtin_object_values_ex( o: ObjValue, include_hidden: bool, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[cfg(feature = "exp-preserve-order")] preserve_order: bool, ) -> ArrValue { - #[cfg(feature = "exp-preserve-order")] - let preserve_order = preserve_order.unwrap_or(false); o.values_ex( include_hidden, #[cfg(feature = "exp-preserve-order")] @@ -62,7 +68,10 @@ pub fn builtin_object_values_ex( #[builtin] pub fn builtin_object_values( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: 
Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> ArrValue { builtin_object_values_ex( o, @@ -74,7 +83,10 @@ pub fn builtin_object_values( #[builtin] pub fn builtin_object_values_all( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> ArrValue { builtin_object_values_ex( o, @@ -87,10 +99,9 @@ pub fn builtin_object_values_all( pub fn builtin_object_keys_values_ex( o: ObjValue, include_hidden: bool, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[cfg(feature = "exp-preserve-order")] preserve_order: bool, ) -> ArrValue { - #[cfg(feature = "exp-preserve-order")] - let preserve_order = preserve_order.unwrap_or(false); o.key_values_ex( include_hidden, #[cfg(feature = "exp-preserve-order")] @@ -100,7 +111,10 @@ pub fn builtin_object_keys_values_ex( #[builtin] pub fn builtin_object_keys_values( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> ArrValue { builtin_object_keys_values_ex( o, @@ -112,7 +126,10 @@ pub fn builtin_object_keys_values( #[builtin] pub fn builtin_object_keys_values_all( o: ObjValue, - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, + + #[default(false)] + #[cfg(feature = "exp-preserve-order")] + preserve_order: bool, ) -> ArrValue { builtin_object_keys_values_ex( o, @@ -141,12 +158,13 @@ pub fn builtin_object_has_all(o: ObjValue, f: IStr) -> bool { pub fn builtin_object_remove_key( obj: ObjValue, key: IStr, + // Standard implementation uses std.objectFields without such argument, we can't // assume order preservation should always be enabled/disabled - #[cfg(feature = "exp-preserve-order")] preserve_order: Option, -) -> ObjValue { + #[default(false)] #[cfg(feature = "exp-preserve-order")] - let preserve_order = 
preserve_order.unwrap_or(false); + preserve_order: bool, +) -> ObjValue { let mut new_obj = ObjValueBuilder::with_capacity(obj.len() - 1); for (k, v) in obj.iter( #[cfg(feature = "exp-preserve-order")] diff --git a/crates/jrsonnet-stdlib/src/operator.rs b/crates/jrsonnet-stdlib/src/operator.rs index 3fddd436..acaf3ef8 100644 --- a/crates/jrsonnet-stdlib/src/operator.rs +++ b/crates/jrsonnet-stdlib/src/operator.rs @@ -6,12 +6,12 @@ use jrsonnet_evaluator::{ operator::evaluate_mod_op, stdlib::std_format, typed::{Either, Either2}, - val::{equals, primitive_equals}, + val::{equals, primitive_equals, NumValue}, IStr, Result, Val, }; #[builtin] -pub fn builtin_mod(a: Either![f64, IStr], b: Val) -> Result { +pub fn builtin_mod(a: Either![NumValue, IStr], b: Val) -> Result { use Either2::*; evaluate_mod_op( &match a { diff --git a/crates/jrsonnet-stdlib/src/sort.rs b/crates/jrsonnet-stdlib/src/sort.rs index 18de08ad..6554c609 100644 --- a/crates/jrsonnet-stdlib/src/sort.rs +++ b/crates/jrsonnet-stdlib/src/sort.rs @@ -20,20 +20,6 @@ enum SortKeyType { Unknown, } -#[derive(PartialEq)] -struct NonNaNf64(f64); -impl PartialOrd for NonNaNf64 { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} -impl Eq for NonNaNf64 {} -impl Ord for NonNaNf64 { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.partial_cmp(&other.0).expect("non nan") - } -} - fn get_sort_type(values: &[T], key_getter: impl Fn(&T) -> &Val) -> Result { let mut sort_type = SortKeyType::Unknown; for i in values { @@ -56,7 +42,7 @@ fn sort_identity(mut values: Vec) -> Result> { let sort_type = get_sort_type(&values, |k| k)?; match sort_type { SortKeyType::Number => values.sort_unstable_by_key(|v| match v { - Val::Num(n) => NonNaNf64(*n), + Val::Num(n) => *n, _ => unreachable!(), }), SortKeyType::String => values.sort_unstable_by_key(|v| match v { @@ -95,7 +81,7 @@ fn sort_keyf(values: ArrValue, keyf: FuncVal) -> Result>> { let sort_type = get_sort_type(&vk, |v| &v.1)?; match 
sort_type { SortKeyType::Number => vk.sort_by_key(|v| match v.1 { - Val::Num(n) => NonNaNf64(n), + Val::Num(n) => n, _ => unreachable!(), }), SortKeyType::String => vk.sort_by_key(|v| match &v.1 { @@ -139,8 +125,12 @@ pub fn sort(values: ArrValue, key_getter: FuncVal) -> Result { } #[builtin] -pub fn builtin_sort(arr: ArrValue, keyF: Option) -> Result { - super::sort::sort(arr, keyF.unwrap_or_else(FuncVal::identity)) +pub fn builtin_sort( + arr: ArrValue, + + #[default(FuncVal::identity())] keyF: FuncVal, +) -> Result { + super::sort::sort(arr, keyF) } fn uniq_identity(arr: Vec) -> Result> { @@ -174,11 +164,14 @@ fn uniq_keyf(arr: ArrValue, keyf: FuncVal) -> Result>> { #[builtin] #[allow(non_snake_case)] -pub fn builtin_uniq(arr: ArrValue, keyF: Option) -> Result { +pub fn builtin_uniq( + arr: ArrValue, + + #[default(FuncVal::identity())] keyF: FuncVal, +) -> Result { if arr.len() <= 1 { return Ok(arr); } - let keyF = keyF.unwrap_or(FuncVal::identity()); if keyF.is_identity() { Ok(ArrValue::eager(uniq_identity( arr.iter().collect::>>()?, @@ -190,11 +183,14 @@ pub fn builtin_uniq(arr: ArrValue, keyF: Option) -> Result { #[builtin] #[allow(non_snake_case)] -pub fn builtin_set(arr: ArrValue, keyF: Option) -> Result { +pub fn builtin_set( + arr: ArrValue, + + #[default(FuncVal::identity())] keyF: FuncVal, +) -> Result { if arr.len() <= 1 { return Ok(arr); } - let keyF = keyF.unwrap_or(FuncVal::identity()); if keyF.is_identity() { let arr = arr.iter().collect::>>()?; let arr = sort_identity(arr)?; diff --git a/crates/jrsonnet-stdlib/src/std.jsonnet b/crates/jrsonnet-stdlib/src/std.jsonnet deleted file mode 100644 index 9cce7d1b..00000000 --- a/crates/jrsonnet-stdlib/src/std.jsonnet +++ /dev/null @@ -1,219 +0,0 @@ -{ - local std = self, - local id = std.id, - - thisFile:: error 'std.thisFile is deprecated, to enable its support in jrsonnet - recompile it with "legacy-this-file" support.\nThis will slow down stdlib caching a bit, though', - - lstripChars(str, chars):: - 
if std.length(str) > 0 && std.member(chars, str[0]) then - std.lstripChars(str[1:], chars) - else - str, - - rstripChars(str, chars):: - local len = std.length(str); - if len > 0 && std.member(chars, str[len - 1]) then - std.rstripChars(str[:len - 1], chars) - else - str, - - stripChars(str, chars):: - std.lstripChars(std.rstripChars(str, chars), chars), - - split(str, c):: std.splitLimit(str, c, -1), - - mapWithIndex(func, arr):: - if !std.isFunction(func) then - error ('std.mapWithIndex first param must be function, got ' + std.type(func)) - else if !std.isArray(arr) && !std.isString(arr) then - error ('std.mapWithIndex second param must be array, got ' + std.type(arr)) - else - std.makeArray(std.length(arr), function(i) func(i, arr[i])), - - mapWithKey(func, obj):: - if !std.isFunction(func) then - error ('std.mapWithKey first param must be function, got ' + std.type(func)) - else if !std.isObject(obj) then - error ('std.mapWithKey second param must be object, got ' + std.type(obj)) - else - { [k]: func(k, obj[k]) for k in std.objectFields(obj) }, - - lines(arr):: - std.join('\n', arr + ['']), - - deepJoin(arr):: - if std.isString(arr) then - arr - else if std.isArray(arr) then - std.join('', [std.deepJoin(x) for x in arr]) - else - error 'Expected string or array, got %s' % std.type(arr), - - assertEqual(a, b):: - if a == b then - true - else - error 'Assertion failed. 
' + a + ' != ' + b, - - clamp(x, minVal, maxVal):: - if x < minVal then minVal - else if x > maxVal then maxVal - else x, - - manifestIni(ini):: - local body_lines(body) = - std.join([], [ - local value_or_values = body[k]; - if std.isArray(value_or_values) then - ['%s = %s' % [k, value] for value in value_or_values] - else - ['%s = %s' % [k, value_or_values]] - - for k in std.objectFields(body) - ]); - - local section_lines(sname, sbody) = ['[%s]' % [sname]] + body_lines(sbody), - main_body = if std.objectHas(ini, 'main') then body_lines(ini.main) else [], - all_sections = [ - section_lines(k, ini.sections[k]) - for k in std.objectFields(ini.sections) - ]; - std.join('\n', main_body + std.flattenArrays(all_sections) + ['']), - - manifestToml(value):: std.manifestTomlEx(value, ' '), - - escapeStringPython(str):: - std.escapeStringJson(str), - - escapeStringBash(str_):: - local str = std.toString(str_); - local trans(ch) = - if ch == "'" then - "'\"'\"'" - else - ch; - "'%s'" % std.join('', [trans(ch) for ch in std.stringChars(str)]), - - escapeStringDollars(str_):: - local str = std.toString(str_); - local trans(ch) = - if ch == '$' then - '$$' - else - ch; - std.foldl(function(a, b) a + trans(b), std.stringChars(str), ''), - - local xml_escapes = { - '<': '<', - '>': '>', - '&': '&', - '"': '"', - "'": ''', - }, - - escapeStringXML(str_):: - local str = std.toString(str_); - std.join('', [std.get(xml_escapes, ch, ch) for ch in std.stringChars(str)]), - - manifestJson(value):: std.manifestJsonEx(value, ' ') tailstrict, - - manifestJsonMinified(value):: std.manifestJsonEx(value, '', '', ':'), - - manifestYamlStream(value, indent_array_in_object=false, c_document_end=true, quote_keys=true):: - if !std.isArray(value) then - error 'manifestYamlStream only takes arrays, got ' + std.type(value) - else - '---\n' + std.join( - '\n---\n', [std.manifestYamlDoc(e, indent_array_in_object, quote_keys) for e in value] - ) + if c_document_end then '\n...\n' else '\n', - - 
manifestPython(v):: - if std.isObject(v) then - local fields = [ - '%s: %s' % [std.escapeStringPython(k), std.manifestPython(v[k])] - for k in std.objectFields(v) - ]; - '{%s}' % [std.join(', ', fields)] - else if std.isArray(v) then - '[%s]' % [std.join(', ', [std.manifestPython(v2) for v2 in v])] - else if std.isString(v) then - '%s' % [std.escapeStringPython(v)] - else if std.isFunction(v) then - error 'cannot manifest function' - else if std.isNumber(v) then - std.toString(v) - else if v == true then - 'True' - else if v == false then - 'False' - else if v == null then - 'None', - - manifestPythonVars(conf):: - local vars = ['%s = %s' % [k, std.manifestPython(conf[k])] for k in std.objectFields(conf)]; - std.join('\n', vars + ['']), - - manifestXmlJsonml(value):: - if !std.isArray(value) then - error 'Expected a JSONML value (an array), got %s' % std.type(value) - else - local aux(v) = - if std.isString(v) then - v - else - local tag = v[0]; - local has_attrs = std.length(v) > 1 && std.isObject(v[1]); - local attrs = if has_attrs then v[1] else {}; - local children = if has_attrs then v[2:] else v[1:]; - local attrs_str = - std.join('', [' %s="%s"' % [k, attrs[k]] for k in std.objectFields(attrs)]); - std.deepJoin(['<', tag, attrs_str, '>', [aux(x) for x in children], '']); - - aux(value), - - mergePatch(target, patch):: - if std.isObject(patch) then - local target_object = - if std.isObject(target) then target else {}; - - local target_fields = - if std.isObject(target_object) then std.objectFields(target_object) else []; - - local null_fields = [k for k in std.objectFields(patch) if patch[k] == null]; - local both_fields = std.setUnion(target_fields, std.objectFields(patch)); - - { - [k]: - if !std.objectHas(patch, k) then - target_object[k] - else if !std.objectHas(target_object, k) then - std.mergePatch(null, patch[k]) tailstrict - else - std.mergePatch(target_object[k], patch[k]) tailstrict - for k in std.setDiff(both_fields, null_fields) - } - else - 
patch, - - get(o, f, default=null, inc_hidden=true):: - if std.objectHasEx(o, f, inc_hidden) then o[f] else default, - - resolvePath(f, r):: - local arr = std.split(f, '/'); - std.join('/', std.makeArray(std.length(arr) - 1, function(i) arr[i]) + [r]), - - find(value, arr):: - if !std.isArray(arr) then - error 'find second parameter should be an array, got ' + std.type(arr) - else - std.filter(function(i) arr[i] == value, std.range(0, std.length(arr) - 1)), - - // Compat - __compare_array(arr1, arr2):: - assert std.isArray(arr1) && std.isArray(arr2); - std.__compare(arr1, arr2), - __array_less(arr1, arr2):: std.__compare_array(arr1, arr2) == -1, - __array_greater(arr1, arr2):: std.__compare_array(arr1, arr2) == 1, - __array_less_or_equal(arr1, arr2):: std.__compare_array(arr1, arr2) <= 0, - __array_greater_or_equal(arr1, arr2):: std.__compare_array(arr1, arr2) >= 0, -} diff --git a/crates/jrsonnet-stdlib/src/strings.rs b/crates/jrsonnet-stdlib/src/strings.rs index c46cc149..b214b957 100644 --- a/crates/jrsonnet-stdlib/src/strings.rs +++ b/crates/jrsonnet-stdlib/src/strings.rs @@ -1,9 +1,11 @@ +use std::collections::BTreeSet; + use jrsonnet_evaluator::{ bail, error::{ErrorKind::*, Result}, function::builtin, - typed::{Either2, M1}, - val::ArrValue, + typed::{Either2, Typed, M1}, + val::{ArrValue, IndexableVal}, Either, IStr, Val, }; @@ -27,6 +29,20 @@ pub fn builtin_str_replace(str: String, from: IStr, to: IStr) -> String { str.replace(&from as &str, &to as &str) } +#[builtin] +pub fn builtin_escape_string_bash(str_: String) -> String { + const QUOTE: char = '\''; + let mut out = str_.replace(QUOTE, "'\"'\"'"); + out.insert(0, QUOTE); + out.push(QUOTE); + out +} + +#[builtin] +pub fn builtin_escape_string_dollars(str_: String) -> String { + str_.replace('$', "$$") +} + #[builtin] pub fn builtin_is_empty(str: String) -> bool { str.is_empty() @@ -65,6 +81,12 @@ pub fn builtin_splitlimitr(str: IStr, c: IStr, maxsplits: Either![usize, M1]) -> } } +#[builtin] +pub fn 
builtin_split(str: IStr, c: IStr) -> ArrValue { + use Either2::*; + builtin_splitlimit(str, c, B(M1)) +} + #[builtin] pub fn builtin_ascii_upper(str: IStr) -> String { str.to_ascii_uppercase() @@ -94,7 +116,9 @@ pub fn builtin_find_substr(pat: IStr, str: IStr) -> ArrValue { .enumerate() { if &strb[i..i + pat.len()] == pat { - out.push(Val::Num(ch_idx as f64)); + out.push(Val::Num( + ch_idx.try_into().expect("unrealisticly long string"), + )); } } out.into() @@ -195,6 +219,53 @@ pub fn builtin_bigint(v: Either![f64, IStr]) -> Result { }) } +#[builtin] +pub fn builtin_string_chars(str: IStr) -> ArrValue { + ArrValue::chars(str.chars()) +} + +#[builtin] +pub fn builtin_lstrip_chars(str: IStr, chars: IndexableVal) -> Result { + if str.is_empty() || chars.is_empty() { + return Ok(str); + } + + let pattern = new_trim_pattern(chars)?; + Ok(str.as_str().trim_start_matches(pattern).into()) +} + +#[builtin] +pub fn builtin_rstrip_chars(str: IStr, chars: IndexableVal) -> Result { + if str.is_empty() || chars.is_empty() { + return Ok(str); + } + + let pattern = new_trim_pattern(chars)?; + Ok(str.as_str().trim_end_matches(pattern).into()) +} + +#[builtin] +pub fn builtin_strip_chars(str: IStr, chars: IndexableVal) -> Result { + if str.is_empty() || chars.is_empty() { + return Ok(str); + } + + let pattern = new_trim_pattern(chars)?; + Ok(str.as_str().trim_matches(pattern).into()) +} + +fn new_trim_pattern(chars: IndexableVal) -> Result bool> { + let chars: BTreeSet = match chars { + IndexableVal::Str(chars) => chars.chars().collect(), + IndexableVal::Arr(chars) => chars + .iter() + .filter_map(|it| it.map(|it| char::from_untyped(it).ok()).transpose()) + .collect::>()?, + }; + + Ok(move |char| chars.contains(&char)) +} + #[cfg(test)] mod tests { use super::*; @@ -223,8 +294,3 @@ mod tests { assert_eq!(parse_nat::<16>("BbC").unwrap(), 0xBBC as f64); } } - -#[builtin] -pub fn builtin_string_chars(str: IStr) -> ArrValue { - ArrValue::chars(str.chars()) -} diff --git a/flake.lock 
b/flake.lock index 267fb5d4..783377eb 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1714536327, - "narHash": "sha256-zu4+LcygJwdyFHunTMeDFltBZ9+hoWvR/1A7IEy7ChA=", + "lastModified": 1715274763, + "narHash": "sha256-3Iv1PGHJn9sV3HO4FlOVaaztOxa9uGLfOmUWrH7v7+A=", "owner": "ipetkov", "repo": "crane", - "rev": "3124551aebd8db15d4560716d4f903bd44c64e4a", + "rev": "27025ab71bdca30e7ed0a16c88fd74c5970fc7f5", "type": "github" }, "original": { @@ -40,11 +40,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1714595735, - "narHash": "sha256-8MDOiHrg2mOylcC7wpx1U1mk9V5VranG7wKyenhHnic=", + "lastModified": 1715551360, + "narHash": "sha256-fGYt2XnTYUS4Q0eH8tVu3ki1+m9YTgZ+NjlfkMKzko0=", "owner": "nixos", "repo": "nixpkgs", - "rev": "9d7a1659bc5c6be24ac46407b91807c6e3e0227d", + "rev": "836306cd7bbb9e0f89c557b2ae14df09e573ee89", "type": "github" }, "original": { @@ -71,11 +71,11 @@ ] }, "locked": { - "lastModified": 1714529851, - "narHash": "sha256-YMKJW880f7LHXVRzu93xa6Ek+QLECIu0IRQbXbzZe38=", + "lastModified": 1715480255, + "narHash": "sha256-gEZl8nYidQwqJhOigJ91JDjoBFoPEWVsd82AKnaE7Go=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "9ca720fdcf7865385ae3b93ecdf65f1a64cb475e", + "rev": "d690205a4f01ec0930303c4204e5063958e51255", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 632351b0..981186b0 100644 --- a/flake.nix +++ b/flake.nix @@ -116,7 +116,7 @@ }; }; devShells.default = craneLib.devShell { - nativeBuildInputs = with pkgs; [ + packages = with pkgs; [ alejandra cargo-edit cargo-asm @@ -126,6 +126,9 @@ lld hyperfine graphviz + ] ++ lib.optionals (!stdenv.isDarwin) [ + valgrind + kcachegrind ]; }; } diff --git a/nix/jrsonnet-release.nix b/nix/jrsonnet-release.nix index 1b54e875..a7d120f7 100644 --- a/nix/jrsonnet-release.nix +++ b/nix/jrsonnet-release.nix @@ -2,18 +2,21 @@ fetchFromGitHub, rustPlatform, makeWrapper, + # This derivation should only be used for benchmarks-against-release task + 
forBenchmarks ? true, + _unused ? forBenchmarks, }: rustPlatform.buildRustPackage rec { pname = "jrsonnet"; - version = "pre9"; + version = "release"; src = fetchFromGitHub { owner = "CertainLach"; repo = pname; - rev = "5dc3b98bcc3b9848031f17165bcc2e86e8a65ba3"; - hash = "sha256-KM1yqsFzt7Vj4xiEzJJiuFaG49/utF80r9A2dSwCAjo="; + rev = "ad68a2495da324ce7a893992a6b32851849c64eb"; + hash = "sha256-N2z0JcJG6iQ+eAE1GGF+c1+T7Pti8oCgx+QWdhT+33M="; }; - cargoHash = "sha256-y2YiktT1h263vpFaC+kRL8yaAWQThhEkS+NSQ6B6Ylk="; + cargoHash = "sha256-A/sdqI51kD7Tfo9R95ep2CecaSEzSz3suhZXdND6/nQ="; cargoTestFlags = ["--package=jrsonnet --features=mimalloc,legacy-this-file"]; cargoBuildFlags = ["--package=jrsonnet --features=mimalloc,legacy-this-file"]; diff --git a/nix/jrsonnet.nix b/nix/jrsonnet.nix index 773d99b4..1090137a 100644 --- a/nix/jrsonnet.nix +++ b/nix/jrsonnet.nix @@ -17,7 +17,7 @@ with lib; pname = "jrsonnet"; version = "current${optionalString withNightlyFeatures "-nightly"}${optionalString withExperimentalFeatures "-experimental"}"; - cargoExtraArgs = "--locked --features=mimalloc,legacy-this-file${optionalString withNightlyFeatures ",nightly"}${optionalString withExperimentalFeatures ",experimental"}"; + cargoExtraArgs = "--locked --features=mimalloc${optionalString withNightlyFeatures ",nightly"}${optionalString withExperimentalFeatures ",experimental"}"; nativeBuildInputs = [makeWrapper]; diff --git a/tests/Cargo.toml b/tests/Cargo.toml index fb4d5f54..3c3fa3a3 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -12,3 +12,5 @@ jrsonnet-evaluator.workspace = true jrsonnet-gcmodule.workspace = true jrsonnet-stdlib.workspace = true serde.workspace = true +json-structural-diff.workspace = true +serde_json.workspace = true diff --git a/tests/golden/builtin_strings_string.jsonnet b/tests/golden/builtin_strings_string.jsonnet new file mode 100644 index 00000000..8464de5f --- /dev/null +++ b/tests/golden/builtin_strings_string.jsonnet @@ -0,0 +1,21 @@ +{ + 
lstripChars_singleChar: std.lstripChars("aaabcdef", "a"), + lstripChars_multipleChars: std.lstripChars("klmn", "kql"), + lstripChars_array: std.lstripChars("forward", [1, "f", [], "o", "d", "for"]), + + rstripChars_singleChar: std.rstripChars("nice_boy", "y"), + rstripChars_multipleChars: std.rstripChars("amoguass", "sa"), + rstripChars_array: std.rstripChars("cool just cool", ["o", "l", 12.2323443]), + + stripChars_singleCharL: std.stripChars("feefoofaa", "f"), + stripChars_singleCharR: std.stripChars("lolkekw", "w"), + stripChars_singleChar: std.stripChars("joper jej", "j"), + + stripChars_multipleCharsL: std.stripChars("abcdefg", "cab"), + stripChars_multipleCharsR: std.stripChars("still breathing", "gthin"), + stripChars_multipleChars: std.stripChars("sus sus sus", "us"), + + stripChars_arrayL: std.stripChars("chel medvedo svin", ["c", 3204990, {"svin": {}}, "vi"]), + stripChars_arrayR: std.stripChars("lach-vs-miri", ["r", "i", "craft", "is", "mine"]), + stripChars_array: std.stripChars("UwU Lel Stosh", ["h", "U", "s", {}, [], null, "w", [1, 2, 3]]), +} diff --git a/tests/golden/builtin_strings_string.jsonnet.golden b/tests/golden/builtin_strings_string.jsonnet.golden new file mode 100644 index 00000000..47ef53c8 --- /dev/null +++ b/tests/golden/builtin_strings_string.jsonnet.golden @@ -0,0 +1,17 @@ +{ + "lstripChars_array": "rward", + "lstripChars_multipleChars": "mn", + "lstripChars_singleChar": "bcdef", + "rstripChars_array": "cool just c", + "rstripChars_multipleChars": "amogu", + "rstripChars_singleChar": "nice_bo", + "stripChars_array": " Lel Sto", + "stripChars_arrayL": "hel medvedo svin", + "stripChars_arrayR": "lach-vs-m", + "stripChars_multipleChars": " sus ", + "stripChars_multipleCharsL": "defg", + "stripChars_multipleCharsR": "still brea", + "stripChars_singleChar": "oper je", + "stripChars_singleCharL": "eefoofaa", + "stripChars_singleCharR": "lolkek" +} \ No newline at end of file diff --git a/tests/tests/as_native.rs b/tests/tests/as_native.rs 
index 78ff61ec..54d4058f 100644 --- a/tests/tests/as_native.rs +++ b/tests/tests/as_native.rs @@ -1,14 +1,16 @@ -use jrsonnet_evaluator::{Result, State}; -use jrsonnet_stdlib::StateExt; +use jrsonnet_evaluator::{trace::PathResolver, FileImportResolver, Result, State}; +use jrsonnet_stdlib::ContextInitializer; mod common; #[test] fn as_native() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())) + .import_resolver(FileImportResolver::default()); + let s = s.build(); - let val = s.evaluate_snippet("snip".to_owned(), r#"function(a, b) a + b"#)?; + let val = s.evaluate_snippet("snip".to_owned(), r"function(a, b) a + b")?; let func = val.as_func().expect("this is function"); let native = func.into_native::<((u32, u32), u32)>(); diff --git a/tests/tests/builtin.rs b/tests/tests/builtin.rs index cadf389d..8f806578 100644 --- a/tests/tests/builtin.rs +++ b/tests/tests/builtin.rs @@ -2,10 +2,13 @@ mod common; use jrsonnet_evaluator::{ function::{builtin, builtin::Builtin, CallLocation, FuncVal}, + parser::Source, + trace::PathResolver, typed::Typed, - ContextBuilder, Result, State, Thunk, Val, + ContextBuilder, ContextInitializer, FileImportResolver, Result, State, Thunk, Val, }; -use jrsonnet_stdlib::StateExt; +use jrsonnet_gcmodule::Trace; +use jrsonnet_stdlib::ContextInitializer as StdContextInitializer; #[builtin] fn a() -> Result { @@ -29,15 +32,30 @@ fn basic_function() -> Result<()> { fn native_add(a: u32, b: u32) -> Result { Ok(a + b) } +#[derive(Trace)] +struct NativeAddContextInitializer; +impl ContextInitializer for NativeAddContextInitializer { + fn populate(&self, _for_file: Source, builder: &mut ContextBuilder) { + builder.bind( + "nativeAdd", + Thunk::evaluated(Val::function(native_add::INST)), + ); + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } +} #[test] fn call_from_code() -> Result<()> { - let s = State::default(); - 
s.with_stdlib(); - s.add_global( - "nativeAdd".into(), - Thunk::evaluated(Val::function(native_add::INST)), - ); + let mut s = State::builder(); + s.context_initializer(( + StdContextInitializer::new(PathResolver::new_cwd_fallback()), + NativeAddContextInitializer, + )) + .import_resolver(FileImportResolver::default()); + let s = s.build(); let v = s.evaluate_snippet( "snip".to_owned(), @@ -62,15 +80,27 @@ fn curried_add(this: &curried_add, b: u32) -> Result { fn curry_add(a: u32) -> Result { Ok(FuncVal::builtin(curried_add { a })) } +#[derive(Trace)] +struct CurryAddContextInitializer; +impl ContextInitializer for CurryAddContextInitializer { + fn populate(&self, _for_file: Source, builder: &mut ContextBuilder) { + builder.bind("curryAdd", Thunk::evaluated(Val::function(curry_add::INST))); + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } +} #[test] fn nonstatic_builtin() -> Result<()> { - let s = State::default(); - s.with_stdlib(); - s.add_global( - "curryAdd".into(), - Thunk::evaluated(Val::function(curry_add::INST)), - ); + let mut s = State::builder(); + s.context_initializer(( + StdContextInitializer::new(PathResolver::new_cwd_fallback()), + CurryAddContextInitializer, + )) + .import_resolver(FileImportResolver::default()); + let s = s.build(); let v = s.evaluate_snippet( "snip".to_owned(), diff --git a/tests/tests/common.rs b/tests/tests/common.rs index 0dcaa361..481883f5 100644 --- a/tests/tests/common.rs +++ b/tests/tests/common.rs @@ -1,8 +1,10 @@ use jrsonnet_evaluator::{ bail, function::{builtin, FuncVal}, - ObjValueBuilder, Result, State, Thunk, Val, + parser::Source, + ContextBuilder, ContextInitializer as ContextInitializerT, ObjValueBuilder, Result, Thunk, Val, }; +use jrsonnet_gcmodule::Trace; #[macro_export] macro_rules! 
ensure_eq { @@ -74,11 +76,18 @@ fn param_names(fun: FuncVal) -> Vec { } } -#[allow(dead_code)] -pub fn with_test(s: &State) { - let mut bobj = ObjValueBuilder::new(); - bobj.method("assertThrow", assert_throw::INST); - bobj.method("paramNames", param_names::INST); +#[derive(Trace)] +pub struct ContextInitializer; +impl ContextInitializerT for ContextInitializer { + fn populate(&self, _for_file: Source, builder: &mut ContextBuilder) { + let mut bobj = ObjValueBuilder::new(); + bobj.method("assertThrow", assert_throw::INST); + bobj.method("paramNames", param_names::INST); - s.add_global("test".into(), Thunk::evaluated(Val::Obj(bobj.build()))) + builder.bind("test", Thunk::evaluated(Val::Obj(bobj.build()))); + } + + fn as_any(&self) -> &dyn std::any::Any { + self + } } diff --git a/tests/tests/golden.rs b/tests/tests/golden.rs index 0ec1379f..c9e10f99 100644 --- a/tests/tests/golden.rs +++ b/tests/tests/golden.rs @@ -8,15 +8,19 @@ use jrsonnet_evaluator::{ trace::{CompactFormat, PathResolver, TraceFormat}, FileImportResolver, State, }; -use jrsonnet_stdlib::StateExt; - +use jrsonnet_stdlib::ContextInitializer; mod common; +use common::ContextInitializer as TestContextInitializer; fn run(file: &Path) -> String { - let s = State::default(); - s.with_stdlib(); - common::with_test(&s); - s.set_import_resolver(FileImportResolver::default()); + let mut s = State::builder(); + s.context_initializer(( + ContextInitializer::new(PathResolver::new_cwd_fallback()), + TestContextInitializer, + )) + .import_resolver(FileImportResolver::default()); + let s = s.build(); + let trace_format = CompactFormat { resolver: PathResolver::FileName, max_trace: 20, @@ -35,6 +39,8 @@ fn run(file: &Path) -> String { #[test] fn test() -> io::Result<()> { + use json_structural_diff::JsonDiff; + let mut root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); root.push("golden"); @@ -54,6 +60,35 @@ fn test() -> io::Result<()> { } else { let golden = fs::read_to_string(golden_path)?; + match 
(serde_json::from_str(&result), serde_json::from_str(&golden)) { + (Err(_), Ok(_)) => assert_eq!( + result, + golden, + "unexpected error for golden {}", + entry.path().display() + ), + (Ok(_), Err(_)) => assert_eq!( + result, + golden, + "expected error for golden {}", + entry.path().display() + ), + (Ok(result), Ok(golden)) => { + // Show diff relative to golden`. + let diff = JsonDiff::diff_string(&golden, &result, false); + if let Some(diff) = diff { + panic!( + "Result \n{result:#}\n\ + and golden \n{golden:#}\n\ + did not match structurally:\n{diff:#}\n\ + for golden {}", + entry.path().display() + ); + } + } + (Err(_), Err(_)) => {} + }; + assert_eq!( result, golden, diff --git a/tests/tests/sanity.rs b/tests/tests/sanity.rs index 705b2f4a..0b7c5a24 100644 --- a/tests/tests/sanity.rs +++ b/tests/tests/sanity.rs @@ -1,16 +1,18 @@ use jrsonnet_evaluator::{ bail, - trace::{CompactFormat, TraceFormat}, - Result, State, Val, + trace::{CompactFormat, PathResolver, TraceFormat}, + FileImportResolver, Result, State, Val, }; -use jrsonnet_stdlib::StateExt; +use jrsonnet_stdlib::ContextInitializer; mod common; #[test] fn assert_positive() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())) + .import_resolver(FileImportResolver::default()); + let s = s.build(); let v = s.evaluate_snippet("snip".to_owned(), "assert 1 == 1: 'fail'; null")?; ensure_val_eq!(v, Val::Null); @@ -22,8 +24,11 @@ fn assert_positive() -> Result<()> { #[test] fn assert_negative() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())) + .import_resolver(FileImportResolver::default()); + let s = s.build(); + let trace_format = CompactFormat::default(); { @@ -38,7 +43,7 @@ fn assert_negative() -> Result<()> { bail!("assertion should fail") }; let e = 
trace_format.format(&e).unwrap(); - ensure!(e.starts_with("runtime error: Assertion failed. 1 != 2")) + ensure!(e.starts_with("runtime error: assertion failed: A != B\nA: 1\nB: 2\n")); } Ok(()) diff --git a/tests/tests/std_native.rs b/tests/tests/std_native.rs index 381cb56f..3f62b32e 100644 --- a/tests/tests/std_native.rs +++ b/tests/tests/std_native.rs @@ -8,10 +8,11 @@ fn example_native(a: u32, b: u32) -> u32 { #[test] fn std_native() { - let state = State::default(); - let std = ContextInitializer::new(state.clone(), PathResolver::Absolute); + let mut state = State::builder(); + let std = ContextInitializer::new(PathResolver::Absolute); std.add_native("example", example_native::INST); - state.set_context_initializer(std); + state.context_initializer(std); + let state = state.build(); assert!(state .evaluate_snippet("test", "std.native('example')(1, 3) == 4") diff --git a/tests/tests/suite.rs b/tests/tests/suite.rs index f7045eb2..b137b51d 100644 --- a/tests/tests/suite.rs +++ b/tests/tests/suite.rs @@ -4,18 +4,23 @@ use std::{ }; use jrsonnet_evaluator::{ - trace::{CompactFormat, TraceFormat}, + trace::{CompactFormat, PathResolver, TraceFormat}, FileImportResolver, State, Val, }; -use jrsonnet_stdlib::StateExt; +use jrsonnet_stdlib::ContextInitializer; mod common; +use common::ContextInitializer as TestContextInitializer; fn run(file: &Path) { - let s = State::default(); - s.with_stdlib(); - common::with_test(&s); - s.set_import_resolver(FileImportResolver::default()); + let mut s = State::builder(); + s.context_initializer(( + ContextInitializer::new(PathResolver::new_cwd_fallback()), + TestContextInitializer, + )) + .import_resolver(FileImportResolver::default()); + let s = s.build(); + let trace_format = CompactFormat::default(); match s.import(file) { diff --git a/tests/tests/typed_obj.rs b/tests/tests/typed_obj.rs index e97bdf43..544ea220 100644 --- a/tests/tests/typed_obj.rs +++ b/tests/tests/typed_obj.rs @@ -2,8 +2,8 @@ mod common; use std::fmt::Debug; 
-use jrsonnet_evaluator::{typed::Typed, Result, State}; -use jrsonnet_stdlib::StateExt; +use jrsonnet_evaluator::{trace::PathResolver, typed::Typed, Result, State}; +use jrsonnet_stdlib::ContextInitializer; #[derive(Clone, Typed, PartialEq, Debug)] struct A { @@ -23,8 +23,10 @@ fn test_roundtrip(value: T) -> Result<()> #[test] fn simple_object() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let a = A::from_untyped(s.evaluate_snippet("snip".to_owned(), "{a: 1, b: 2}")?)?; ensure_eq!(a, A { a: 1, b: 2 }); test_roundtrip(a)?; @@ -40,8 +42,10 @@ struct B { #[test] fn renamed_field() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let b = B::from_untyped(s.evaluate_snippet("snip".to_owned(), "{a: 1, c: 2}")?)?; ensure_eq!(b, B { a: 1, b: 2 }); ensure_eq!( @@ -69,8 +73,10 @@ struct Object { #[test] fn flattened_object() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let obj = Object::from_untyped( s.evaluate_snippet("snip".to_owned(), "{apiVersion: 'ver', kind: 'kind', b: 2}")?, )?; @@ -100,8 +106,10 @@ struct C { #[test] fn optional_field_some() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let c = C::from_untyped(s.evaluate_snippet("snip".to_owned(), "{a: 1, b: 2}")?)?; ensure_eq!(c, C { a: Some(1), b: 2 }); ensure_eq!( @@ -114,8 +122,10 @@ fn optional_field_some() -> Result<()> { #[test] fn optional_field_none() -> Result<()> { - let s = State::default(); - 
s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let c = C::from_untyped(s.evaluate_snippet("snip".to_owned(), "{b: 2}")?)?; ensure_eq!(c, C { a: None, b: 2 }); ensure_eq!( @@ -140,8 +150,10 @@ struct E { #[test] fn flatten_optional_some() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let d = D::from_untyped(s.evaluate_snippet("snip".to_owned(), "{b: 2, v:1}")?)?; ensure_eq!( d, @@ -160,8 +172,10 @@ fn flatten_optional_some() -> Result<()> { #[test] fn flatten_optional_none() -> Result<()> { - let s = State::default(); - s.with_stdlib(); + let mut s = State::builder(); + s.context_initializer(ContextInitializer::new(PathResolver::new_cwd_fallback())); + let s = s.build(); + let d = D::from_untyped(s.evaluate_snippet("snip".to_owned(), "{b: 2, v: '1'}")?)?; ensure_eq!(d, D { e: None, b: 2 }); ensure_eq!( diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index fde51b3b..920c0026 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -3,6 +3,7 @@ name = "xtask" version = "0.1.0" edition = "2021" publish = false +build = "build.rs" [lints] workspace = true @@ -15,3 +16,4 @@ proc-macro2.workspace = true quote.workspace = true ungrammar.workspace = true xshell.workspace = true +clap = {workspace = true, features = ["derive"]} diff --git a/xtask/build.rs b/xtask/build.rs new file mode 100644 index 00000000..6fdc47bf --- /dev/null +++ b/xtask/build.rs @@ -0,0 +1,7 @@ +fn main() { + println!( + "cargo:rustc-env=TARGET_PLATFORM={}", + &std::env::var("TARGET").unwrap() + ); + println!("cargo:rerun-if-changed-env=TARGET"); +} diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 058f07bc..e21533c6 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -1,7 +1,110 @@ use anyhow::Result; +use clap::Parser; +use 
xshell::{cmd, Shell}; mod sourcegen; +#[derive(Parser)] +enum Opts { + /// Generate files for rowan parser + Sourcegen, + /// Profile file execution + Profile { + #[arg(long, default_value = "true")] + hyperfine: bool, + #[arg(long)] + callgrind: bool, + #[arg(long)] + cachegrind: bool, + #[arg(long, default_value = env!("TARGET_PLATFORM"))] + target: String, + args: Vec, + }, + /// Run all lints enforced by this repo + Lint { + /// Also fix found issues when possible. + #[arg(long)] + fix: bool, + }, + /// Build and run test file from `bindings/c` + TestCBindings { + #[arg(long, default_value = env!("TARGET_PLATFORM"))] + target: String, + /// Which bindings file to build and run + #[arg(long, default_value = "libjsonnet_test_file")] + test_file: String, + args: Vec, + }, +} + fn main() -> Result<()> { - sourcegen::generate_ungrammar() + let sh = Shell::new()?; + match Opts::parse() { + Opts::Sourcegen => sourcegen::generate_ungrammar(), + Opts::Profile { + hyperfine, + callgrind, + cachegrind, + args, + target, + } => { + let out = sh.create_temp_dir()?; + + // build-std + cmd!( + sh, + "cargo build -Zbuild-std --target={target} --profile releasedebug" + ) + .run()?; + let built = format!("./target/{target}/releasedebug/jrsonnet"); + let bench_cmd = format!("{built} {}", args.join(" ")); + if hyperfine { + cmd!(sh, "hyperfine {bench_cmd}").run()?; + } + if callgrind { + let args = args.clone(); + let mut callgrind_out = out.path().to_owned(); + callgrind_out.push("callgrind.out.1"); + cmd!(sh, "valgrind --tool=callgrind --dump-instr=yes --collect-jumps=yes --callgrind-out-file={callgrind_out} {built} {args...}").run()?; + cmd!(sh, "kcachegrind {callgrind_out}").run()?; + } + if cachegrind { + let mut cachegrind_out = out.path().to_owned(); + cachegrind_out.push("cachegrind.out.1"); + cmd!(sh, "valgrind --tool=cachegrind --cachegrind-out-file={cachegrind_out} {built} {args...}").run()?; + cmd!(sh, "kcachegrind {cachegrind_out}").run()?; + } + + Ok(()) + } + 
Opts::Lint { fix } => { + let fmt_check = if fix { None } else { Some("--check") }; + cmd!(sh, "cargo fmt {fmt_check...}").run()?; + Ok(()) + } + Opts::TestCBindings { + target, + test_file, + args, + } => { + cmd!( + sh, + "cargo build -p libjsonnet --target={target} --release --no-default-features --features=interop-common,interop-threading" + ) + .run()?; + let built = format!("./target/{target}/release/libjsonnet.a"); + let c_bindings = "./bindings/c/"; + cmd!(sh, "cp {built} {c_bindings}").run()?; + sh.change_dir(c_bindings); + + // TODO: Pass target to gcc? + cmd!(sh, "gcc -c {test_file}.c").run()?; + cmd!(sh, "gcc -o {test_file} -lc -lm {test_file}.o libjsonnet.a").run()?; + let sh = Shell::new()?; + + cmd!(sh, "{c_bindings}{test_file} {args...}").run()?; + + Ok(()) + } + } } diff --git a/xtask/src/sourcegen/ast.rs b/xtask/src/sourcegen/ast.rs index b3c8b9f1..c18a8ff6 100644 --- a/xtask/src/sourcegen/ast.rs +++ b/xtask/src/sourcegen/ast.rs @@ -72,7 +72,7 @@ impl Field { pub fn is_many(&self) -> bool { matches!( self, - Field::Node { + Self::Node { cardinality: Cardinality::Many, .. } @@ -81,44 +81,41 @@ impl Field { pub fn token_name(&self) -> Option { match self { - Field::Token(token) => Some(token.clone()), - _ => None, + Self::Token(token) => Some(token.clone()), + Self::Node { .. } => None, } } pub fn token_kind(&self, kinds: &KindsSrc) -> Option { match self { - Field::Token(token) => Some(kinds.token(token).expect("token exists").reference()), - _ => None, + Self::Token(token) => Some(kinds.token(token).expect("token exists").reference()), + Self::Node { .. } => None, } } pub fn is_token_enum(&self, grammar: &AstSrc) -> bool { match self { - Field::Node { ty, .. } => grammar.token_enums.iter().any(|e| &e.name == ty), - _ => false, + Self::Node { ty, .. 
} => grammar.token_enums.iter().any(|e| &e.name == ty), + Self::Token(_) => false, } } pub fn method_name(&self, kinds: &KindsSrc) -> proc_macro2::Ident { match self { - Field::Token(name) => kinds.token(name).expect("token exists").method_name(), - Field::Node { name, .. } => { + Self::Token(name) => kinds.token(name).expect("token exists").method_name(), + Self::Node { name, .. } => { format_ident!("{}", name) } } } pub fn ty(&self) -> proc_macro2::Ident { match self { - Field::Token(_) => format_ident!("SyntaxToken"), - Field::Node { ty, .. } => format_ident!("{}", ty), + Self::Token(_) => format_ident!("SyntaxToken"), + Self::Node { ty, .. } => format_ident!("{}", ty), } } } pub fn lower(kinds: &KindsSrc, grammar: &Grammar) -> AstSrc { - let mut res = AstSrc { - // tokens, - ..Default::default() - }; + let mut res = AstSrc::default(); let nodes = grammar.iter().collect::>(); @@ -135,16 +132,15 @@ pub fn lower(kinds: &KindsSrc, grammar: &Grammar) -> AstSrc { }; res.enums.push(enum_src); } - None => match lower_token_enum(grammar, rule) { - Some(variants) => { + None => { + if let Some(variants) = lower_token_enum(grammar, rule) { let tokens_enum_src = AstTokenEnumSrc { doc: Vec::new(), name, variants, }; res.token_enums.push(tokens_enum_src); - } - None => { + } else { let mut fields = Vec::new(); lower_rule(&mut fields, grammar, None, rule, false); let mut types = HashMap::new(); @@ -173,7 +169,7 @@ pub fn lower(kinds: &KindsSrc, grammar: &Grammar) -> AstSrc { fields, }); } - }, + } } } @@ -240,7 +236,7 @@ fn lower_rule( acc.push(field); } Rule::Token(token) => { - assert!(label.is_none(), "uexpected label: {:?}", label); + assert!(label.is_none(), "uexpected label: {label:?}"); let name = grammar[*token].name.clone(); let field = Field::Token(name); acc.push(field); @@ -267,7 +263,7 @@ fn lower_rule( } Rule::Seq(rules) | Rule::Alt(rules) => { for rule in rules { - lower_rule(acc, grammar, label, rule, in_optional) + lower_rule(acc, grammar, label, rule, 
in_optional); } } Rule::Opt(rule) => lower_rule(acc, grammar, label, rule, true), diff --git a/xtask/src/sourcegen/kinds.rs b/xtask/src/sourcegen/kinds.rs index 6ad67e0e..dad68aad 100644 --- a/xtask/src/sourcegen/kinds.rs +++ b/xtask/src/sourcegen/kinds.rs @@ -41,33 +41,33 @@ pub enum TokenKind { impl TokenKind { pub fn grammar_name(&self) -> &str { match self { - TokenKind::Keyword { code, .. } => code, - TokenKind::Literal { grammar_name, .. } => grammar_name, - TokenKind::Meta { grammar_name, .. } => grammar_name, - TokenKind::Error { grammar_name, .. } => grammar_name, + Self::Keyword { code, .. } => code, + Self::Literal { grammar_name, .. } + | Self::Meta { grammar_name, .. } + | Self::Error { grammar_name, .. } => grammar_name, } } /// How this keyword should appear in kinds enum, screaming snake cased pub fn name(&self) -> &str { match self { - TokenKind::Keyword { name, .. } => name, - TokenKind::Literal { name, .. } => name, - TokenKind::Meta { name, .. } => name, - TokenKind::Error { name, .. } => name, + Self::Keyword { name, .. } + | Self::Literal { name, .. } + | Self::Meta { name, .. } + | Self::Error { name, .. } => name, } } pub fn expand_kind(&self) -> TokenStream { let name = format_ident!("{}", self.name()); let attr = match self { - TokenKind::Keyword { code, .. } => quote! {#[token(#code)]}, - TokenKind::Literal { regex, lexer, .. } => { + Self::Keyword { code, .. } => quote! {#[token(#code)]}, + Self::Literal { regex, lexer, .. } => { let lexer = lexer .as_deref() .map(TokenStream::from_str) .map(|r| r.expect("path is correct")); quote! {#[regex(#regex, #lexer)]} } - TokenKind::Error { + Self::Error { regex, priority, .. } if regex.is_some() => { let priority = priority.map(|p| quote! 
{, priority = #p}); @@ -82,7 +82,7 @@ impl TokenKind { } pub fn expand_t_macros(&self) -> Option { match self { - TokenKind::Keyword { code, name } => { + Self::Keyword { code, name } => { let code = escape_token_macro(code); let name = format_ident!("{name}"); Some(quote! { @@ -98,29 +98,26 @@ impl TokenKind { /// Keywords are referenced with `T![_]` macro, /// and literals are referenced directly by name pub fn reference(&self) -> TokenStream { - match self { - TokenKind::Keyword { code, .. } => { - let code = escape_token_macro(code); - quote! {T![#code]} - } - _ => { - let name = self.name(); - let ident = format_ident!("{name}"); - quote! {#ident} - } + if let Self::Keyword { code, .. } = self { + let code = escape_token_macro(code); + quote! {T![#code]} + } else { + let name = self.name(); + let ident = format_ident!("{name}"); + quote! {#ident} } } pub fn method_name(&self) -> Ident { match self { - TokenKind::Keyword { name, .. } => { + Self::Keyword { name, .. } => { format_ident!("{}_token", name.to_lowercase()) } - TokenKind::Literal { name, .. } => { + Self::Literal { name, .. } => { format_ident!("{}_lit", name.to_lowercase()) } - TokenKind::Meta { name, .. } => format_ident!("{}_meta", name.to_lowercase()), - TokenKind::Error { name, .. } => format_ident!("{}_error", name.to_lowercase()), + Self::Meta { name, .. } => format_ident!("{}_meta", name.to_lowercase()), + Self::Error { name, .. 
} => format_ident!("{}_error", name.to_lowercase()), } } } @@ -188,15 +185,14 @@ impl KindsSrc { .is_none(), "token already defined: {}", token.grammar_name() - ) + ); } pub fn define_node(&mut self, node: &str) { assert!( self.defined_node_names.insert(node.to_owned()), - "node name already defined: {}", - node + "node name already defined: {node}" ); - self.nodes.push(node.to_string()) + self.nodes.push(node.to_string()); } pub fn token(&self, tok: &str) -> Option<&TokenKind> { self.defined_tokens.get(tok) diff --git a/xtask/src/sourcegen/mod.rs b/xtask/src/sourcegen/mod.rs index 723ac5c1..27737f7e 100644 --- a/xtask/src/sourcegen/mod.rs +++ b/xtask/src/sourcegen/mod.rs @@ -49,27 +49,27 @@ pub fn generate_ungrammar() -> Result<()> { match special { SpecialName::Literal => panic!("literal is not defined: {name}"), SpecialName::Meta => { - eprintln!("implicit meta: {}", name); + eprintln!("implicit meta: {name}"); kinds.define_token(TokenKind::Meta { grammar_name: token.to_owned(), - name: format!("META_{}", name), - }) + name: format!("META_{name}"), + }); } SpecialName::Error => { - eprintln!("implicit error: {}", name); + eprintln!("implicit error: {name}"); kinds.define_token(TokenKind::Error { grammar_name: token.to_owned(), - name: format!("ERROR_{}", name), + name: format!("ERROR_{name}"), regex: None, priority: None, is_lexer_error: true, - }) + }); } }; continue; }; let name = to_upper_snake_case(token); - eprintln!("implicit kw: {}", token); + eprintln!("implicit kw: {token}"); kinds.define_token(TokenKind::Keyword { code: token.to_owned(), name: format!("{name}_KW"), @@ -98,14 +98,14 @@ pub fn generate_ungrammar() -> Result<()> { "/../crates/jrsonnet-rowan-parser/src/generated/syntax_kinds.rs", )), &syntax_kinds, - )?; + ); ensure_file_contents( &PathBuf::from(concat!( env!("CARGO_MANIFEST_DIR"), "/../crates/jrsonnet-rowan-parser/src/generated/nodes.rs", )), &nodes, - )?; + ); Ok(()) } @@ -189,6 +189,7 @@ fn generate_syntax_kinds(kinds: &KindsSrc, 
grammar: &AstSrc) -> Result { reformat(&ast.to_string()) } +#[allow(clippy::too_many_lines, clippy::cognitive_complexity)] fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar .nodes @@ -524,7 +525,7 @@ fn generate_nodes(kinds: &KindsSrc, grammar: &AstSrc) -> Result { fn write_doc_comment(contents: &[String], dest: &mut String) { use std::fmt::Write; for line in contents { - writeln!(dest, "///{}", line).unwrap(); + writeln!(dest, "///{line}").unwrap(); } } diff --git a/xtask/src/sourcegen/util.rs b/xtask/src/sourcegen/util.rs index 0f63f951..b335f730 100644 --- a/xtask/src/sourcegen/util.rs +++ b/xtask/src/sourcegen/util.rs @@ -1,3 +1,5 @@ +// FIXME: Replace various helper here with inflector? + use std::{fs, path::Path}; use anyhow::Result; @@ -5,11 +7,11 @@ use xshell::{cmd, Shell}; /// Checks that the `file` has the specified `contents`. If that is not the /// case, updates the file and then fails the test. -pub fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> { +pub fn ensure_file_contents(file: &Path, contents: &str) { if let Ok(old_contents) = fs::read_to_string(file) { if normalize_newlines(&old_contents) == normalize_newlines(contents) { // File is already up to date. - return Ok(()); + return; } } @@ -18,7 +20,6 @@ pub fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> { let _ = fs::create_dir_all(parent); } fs::write(file, contents).unwrap(); - Ok(()) } // Eww, someone configured git to use crlf? @@ -26,8 +27,9 @@ fn normalize_newlines(s: &str) -> String { s.replace("\r\n", "\n") } -pub(crate) fn pluralize(s: &str) -> String { - format!("{}s", s) +pub fn pluralize(s: &str) -> String { + // FIXME: Inflector? 
+ format!("{s}s") } pub fn to_upper_snake_case(s: &str) -> String { @@ -35,7 +37,7 @@ pub fn to_upper_snake_case(s: &str) -> String { let mut prev = false; for c in s.chars() { if c.is_ascii_uppercase() && prev { - buf.push('_') + buf.push('_'); } prev = true; @@ -48,7 +50,7 @@ pub fn to_lower_snake_case(s: &str) -> String { let mut prev = false; for c in s.chars() { if c.is_ascii_uppercase() && prev { - buf.push('_') + buf.push('_'); } prev = true;