diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml new file mode 100644 index 00000000..8d97cbfc --- /dev/null +++ b/.github/workflows/checks.yaml @@ -0,0 +1,25 @@ +name: Checks +on: [ pull_request ] +jobs: + tests: + name: Tests + uses: ./.github/workflows/test.yaml + lints: + name: Lints + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.4 + - uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 + - uses: auguwu/clippy-action@1.3.0 + with: + token: ${{ secrets.GITHUB_TOKEN }} + permissions: + checks: write + formatting: + name: Formatting + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.4 + - uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 + - uses: actions-rust-lang/rustfmt@v1.1.0 + diff --git a/.github/workflows/clippy_check.yml b/.github/workflows/clippy_check.yml deleted file mode 100644 index 0791a4a4..00000000 --- a/.github/workflows/clippy_check.yml +++ /dev/null @@ -1,15 +0,0 @@ -on: [push, pull_request] -name: Clippy check -jobs: - clippy_check: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-10-28 - components: clippy - override: true - - uses: actions-rs/clippy-check@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yaml similarity index 70% rename from .github/workflows/release.yml rename to .github/workflows/release.yaml index 2ced5cc0..9817dd0e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yaml @@ -1,54 +1,27 @@ -name: CI -on: [push] +name: Release +on: + push: + tags: [ 'v*' ] jobs: - test: - name: Test Suite - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v2 - - name: Install stable toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - name: Run tests - uses: actions-rs/cargo@v1 - with: - command: test - args: --all + tests: + uses: 
./.github/workflows/test.yaml cargo-release: - if: startsWith(github.ref, 'refs/tags/') && !endsWith(github.ref, '-test') - needs: [test] + if: ${{ !endsWith(github.ref, '-test') }} + needs: [ tests ] runs-on: ubuntu-latest steps: - - name: Checkout sources - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.4 - name: Install stable toolchain - uses: actions-rs/toolchain@v1 + uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 with: toolchain: stable - override: true - - name: Install cargo release command - uses: actions-rs/cargo@v1 - with: - command: install - args: cargo-release - - name: Run cargo login - uses: actions-rs/cargo@v1 - with: - command: login - args: ${{ secrets.CARGO_TOKEN }} - - name: Publish crates - uses: actions-rs/cargo@v1 - with: - command: release - args: --no-dev-version --skip-push --skip-tag --no-confirm + - run: cargo install cargo-release + - run: cargo login ${{ secrets.CARGO_TOKEN }} + - run: cargo release --no-dev-version --skip-push --skip-tag --no-confirm github-release: - if: startsWith(github.ref, 'refs/tags/') - needs: [test] + needs: [ tests ] strategy: matrix: target: @@ -108,14 +81,12 @@ jobs: run: sudo apt update - name: Install stable toolchain - uses: actions-rs/toolchain@v1 + uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 with: toolchain: stable - override: true target: ${{ matrix.target }} - - name: Checkout - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.4 - name: Add experimental flags if: ${{ endsWith(github.ref, '-test' )}} @@ -147,10 +118,7 @@ jobs: - name: Run build if: ${{ !startsWith(matrix.target, 'aarch64-unknown-linux-') }} - uses: actions-rs/cargo@v1 - with: - command: build - args: --bin=jrsonnet --release --target ${{ matrix.target }} ${{ env.EXPERIMENTAL_FLAGS }} + run: cargo build --bin=jrsonnet --release --target ${{ matrix.target }} ${{ env.EXPERIMENTAL_FLAGS }} - name: Package shell: bash @@ -164,7 +132,7 @@ run: shasum -a 256 ${{ matrix.name }} > ${{ matrix.name }}.sha256 - 
name: Publish - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2.0.4 with: draft: true files: "jrsonnet*" diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 00000000..0bee7a4f --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,21 @@ +name: Test +on: [ workflow_call ] +jobs: + test: + name: Test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.4 + - uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 + - run: cargo test --all + test-stable: + name: Test on stable + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.4 + - name: Install the latest stable toolchain + uses: actions-rust-lang/setup-rust-toolchain@v1.8.0 + with: + toolchain: stable + - run: cargo test --all + diff --git a/Cargo.toml b/Cargo.toml index 2ace9a6e..0ed1e381 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,7 +30,7 @@ jrsonnet-gcmodule = "0.3.6" # and other libraries want to handle spans etc by itself, which is okay for compiler diagnostics, but is # bad for interpreter, where interpreter and parser are paired much closer. hi-doc = "0.1.1" -annotate-snippets = "0.10.1" +annotate-snippets = "0.11.2" # CLI clap = "4.5" @@ -39,16 +39,16 @@ clap_complete = "4.5" # Parsing, manifestification is implemented manually everywhere # Note on serde_yaml_with_quirks: This is a fork of serde-yaml with legacy yaml 1.1 support: # https://github.com/dtolnay/serde-yaml/pull/225 -serde = "1.0.197" -serde_json = "1.0.114" +serde = "1.0.202" +serde_json = "1.0.117" serde_yaml_with_quirks = "0.8.24" # Error handling -anyhow = "1.0.80" -thiserror = "1.0" +anyhow = "1.0.83" +thiserror = "1.0.60" # Code formatting -dprint-core = "0.65.0" +dprint-core = "0.66.2" # Stdlib hashing functions md5 = "0.7.0" @@ -63,36 +63,36 @@ bincode = "1.3" # Source code parsing. # Jrsonnet has two parsers for jsonnet - one is for execution, and another is for better parsing diagnostics/lints/LSP. 
# First (and fast one) is based on peg, second is based on rowan. -peg = "0.8.2" +peg = "0.8.3" logos = "0.14.0" ungrammar = "1.16.1" -rowan = "0.15" +rowan = "0.15.15" mimallocator = "0.1.3" indoc = "2.0" -insta = "1.35" +insta = "1.39" tempfile = "3.10" pathdiff = "0.2.1" -hashbrown = "0.14.3" +hashbrown = "0.14.5" static_assertions = "1.1" rustc-hash = "1.1" -num-bigint = "0.4.4" -derivative = "2.2.0" -strsim = "0.11.0" +num-bigint = "0.4.5" +derivative = "2.2" +strsim = "0.11.1" structdump = "0.2.0" -proc-macro2 = "1.0" -quote = "1.0" -syn = "2.0" +proc-macro2 = "1.0.82" +quote = "1.0.36" +syn = "2.0.63" drop_bomb = "0.1.5" -base64 = "0.21.7" -indexmap = "2.2.3" +base64 = "0.22.1" +indexmap = "2.2" itertools = "0.12.1" -xshell = "0.2.5" +xshell = "0.2.6" lsp-server = "0.7.6" -lsp-types = "0.95.0" +lsp-types = "0.95.1" -regex = "1.10.3" +regex = "1.10" lru = "0.12.2" [workspace.lints.rust] diff --git a/cmds/jrsonnet-fmt/src/tests.rs b/cmds/jrsonnet-fmt/src/tests.rs index 992d2353..e774fb03 100644 --- a/cmds/jrsonnet-fmt/src/tests.rs +++ b/cmds/jrsonnet-fmt/src/tests.rs @@ -23,7 +23,7 @@ fn reformat(input: &str) -> String { #[test] fn complex_comments_snapshot() { - insta::assert_display_snapshot!(reformat(indoc!( + insta::assert_snapshot!(reformat(indoc!( "{ comments: { _: '', diff --git a/crates/jrsonnet-cli/src/tla.rs b/crates/jrsonnet-cli/src/tla.rs index ddd906f3..6d1c4174 100644 --- a/crates/jrsonnet-cli/src/tla.rs +++ b/crates/jrsonnet-cli/src/tla.rs @@ -14,7 +14,7 @@ use crate::{ExtFile, ExtStr}; pub struct TlaOpts { /// Add top level string argument. /// Top level arguments will be passed to function before manifestification stage. - /// This is preferred to ExtVars method. + /// This is preferred to [`ExtVars`] method. /// If [=data] is not set then it will be read from `name` env variable. 
#[clap(long, short = 'A', name = "name[=tla data]", number_of_values = 1)] tla_str: Vec, diff --git a/crates/jrsonnet-evaluator/src/arr/mod.rs b/crates/jrsonnet-evaluator/src/arr/mod.rs index dc1fe794..7aefaa48 100644 --- a/crates/jrsonnet-evaluator/src/arr/mod.rs +++ b/crates/jrsonnet-evaluator/src/arr/mod.rs @@ -10,8 +10,7 @@ use jrsonnet_parser::LocExpr; use crate::{function::FuncVal, gc::TraceBox, tb, Context, Result, Thunk, Val}; mod spec; -pub use spec::ArrayLike; -pub(crate) use spec::*; +pub use spec::{ArrayLike, *}; /// Represents a Jsonnet array value. #[derive(Debug, Clone, Trace)] diff --git a/crates/jrsonnet-evaluator/src/function/arglike.rs b/crates/jrsonnet-evaluator/src/function/arglike.rs index 48eb786a..ec8490d8 100644 --- a/crates/jrsonnet-evaluator/src/function/arglike.rs +++ b/crates/jrsonnet-evaluator/src/function/arglike.rs @@ -76,13 +76,6 @@ impl ArgLike for TlaArg { } } -mod sealed { - /// Implemented for `ArgsLike`, where only unnamed arguments present - pub trait Unnamed {} - /// Implemented for `ArgsLike`, where only named arguments present - pub trait Named {} -} - pub trait ArgsLike { fn unnamed_len(&self) -> usize; fn unnamed_iter( @@ -182,7 +175,6 @@ impl ArgsLike for ArgsDesc { } } -impl sealed::Named for HashMap {} impl ArgsLike for HashMap { fn unnamed_len(&self) -> usize { 0 @@ -247,7 +239,6 @@ impl ArgsLike for GcHashMap { macro_rules! impl_args_like { ($count:expr; $($gen:ident)*) => { - impl<$($gen: ArgLike,)*> sealed::Unnamed for ($($gen,)*) {} impl<$($gen: ArgLike,)*> ArgsLike for ($($gen,)*) { fn unnamed_len(&self) -> usize { $count @@ -279,7 +270,6 @@ macro_rules! 
impl_args_like { } impl<$($gen: ArgLike,)*> OptionalContext for ($($gen,)*) where $($gen: OptionalContext),* {} - impl<$($gen: ArgLike,)*> sealed::Named for ($((IStr, $gen),)*) {} impl<$($gen: ArgLike,)*> ArgsLike for ($((IStr, $gen),)*) { fn unnamed_len(&self) -> usize { 0 diff --git a/crates/jrsonnet-evaluator/src/integrations/serde.rs b/crates/jrsonnet-evaluator/src/integrations/serde.rs index 4fc20b9e..0511b61e 100644 --- a/crates/jrsonnet-evaluator/src/integrations/serde.rs +++ b/crates/jrsonnet-evaluator/src/integrations/serde.rs @@ -346,9 +346,9 @@ impl SerializeMap for IntoObjValueSerializer { type Ok = Val; type Error = JrError; - fn serialize_key(&mut self, key: &T) -> Result<()> + fn serialize_key(&mut self, key: &T) -> Result<()> where - T: Serialize, + T: ?Sized + Serialize, { let key = key.serialize(IntoValSerializer)?; let key = key.to_string()?; @@ -356,9 +356,9 @@ impl SerializeMap for IntoObjValueSerializer { Ok(()) } - fn serialize_value(&mut self, value: &T) -> Result<()> + fn serialize_value(&mut self, value: &T) -> Result<()> where - T: Serialize, + T: ?Sized + Serialize, { let key = self.key.take().expect("no serialize_key called"); let value = value.serialize(IntoValSerializer)?; @@ -367,10 +367,10 @@ impl SerializeMap for IntoObjValueSerializer { } // TODO: serialize_key/serialize_value - fn serialize_entry(&mut self, key: &K, value: &V) -> Result<()> + fn serialize_entry(&mut self, key: &K, value: &V) -> Result<()> where - K: Serialize, - V: Serialize, + K: ?Sized + Serialize, + V: ?Sized + Serialize, { let key = key.serialize(IntoValSerializer)?; let key = key.to_string()?; @@ -394,9 +394,9 @@ impl SerializeStruct for IntoObjValueSerializer { type Ok = Val; type Error = JrError; - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<()> + fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<()> where - T: Serialize, + T: ?Sized + Serialize, { SerializeMap::serialize_entry(self, key, value)?; Ok(()) @@ 
-411,9 +411,9 @@ impl SerializeStructVariant for IntoObjValueSerializer { type Error = JrError; - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<()> + fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<()> where - T: Serialize, + T: ?Sized + Serialize, { SerializeMap::serialize_entry(self, key, value)?; Ok(()) @@ -504,9 +504,9 @@ impl Serializer for IntoValSerializer { Ok(Val::Null) } - fn serialize_some(self, value: &T) -> Result + fn serialize_some(self, value: &T) -> Result where - T: Serialize, + T: ?Sized + Serialize, { value.serialize(self) } @@ -528,14 +528,14 @@ impl Serializer for IntoValSerializer { Ok(Val::Str(variant.into())) } - fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> Result + fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> Result where - T: Serialize, + T: ?Sized + Serialize, { value.serialize(self) } - fn serialize_newtype_variant( + fn serialize_newtype_variant( self, _name: &'static str, _variant_index: u32, @@ -543,7 +543,7 @@ impl Serializer for IntoValSerializer { value: &T, ) -> Result where - T: Serialize, + T: ?Sized + Serialize, { let mut out = ObjValue::builder_with_capacity(1); let value = value.serialize(self)?; diff --git a/crates/jrsonnet-evaluator/src/manifest.rs b/crates/jrsonnet-evaluator/src/manifest.rs index 97affc72..6cb66379 100644 --- a/crates/jrsonnet-evaluator/src/manifest.rs +++ b/crates/jrsonnet-evaluator/src/manifest.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, fmt::Write}; +use std::{borrow::Cow, fmt::Write, ptr}; use crate::{bail, Result, ResultExt, State, Val}; @@ -464,7 +464,7 @@ static ESCAPE: [u8; 256] = [ pub fn escape_string_json_buf(value: &str, buf: &mut String) { // Safety: we only write correct utf-8 in this function - let buf: &mut Vec = unsafe { &mut *(buf as *mut String).cast::>() }; + let buf: &mut Vec = unsafe { &mut *ptr::from_mut(buf).cast::>() }; let bytes = value.as_bytes(); // Perfect for ascii strings, removes 
any reallocations diff --git a/crates/jrsonnet-evaluator/src/obj.rs b/crates/jrsonnet-evaluator/src/obj.rs index 13524775..672d2505 100644 --- a/crates/jrsonnet-evaluator/src/obj.rs +++ b/crates/jrsonnet-evaluator/src/obj.rs @@ -88,7 +88,7 @@ mod ordering { } } -use ordering::*; +use ordering::{FieldIndex, FieldSortKey, SuperDepth}; // 0 - add // 12 - visibility diff --git a/crates/jrsonnet-evaluator/src/stack.rs b/crates/jrsonnet-evaluator/src/stack.rs index 22dfbffe..d770ea9b 100644 --- a/crates/jrsonnet-evaluator/src/stack.rs +++ b/crates/jrsonnet-evaluator/src/stack.rs @@ -8,6 +8,7 @@ struct StackLimit { } #[cfg(feature = "nightly")] +#[allow(clippy::thread_local_initializer_can_be_made_const)] #[thread_local] static STACK_LIMIT: StackLimit = StackLimit { max_stack_size: Cell::new(200), @@ -15,9 +16,11 @@ static STACK_LIMIT: StackLimit = StackLimit { }; #[cfg(not(feature = "nightly"))] thread_local! { - static STACK_LIMIT: StackLimit = StackLimit { - max_stack_size: Cell::new(200), - current_depth: Cell::new(0), + static STACK_LIMIT: StackLimit = const { + StackLimit { + max_stack_size: Cell::new(200), + current_depth: Cell::new(0), + } }; } @@ -40,7 +43,7 @@ impl Drop for StackDepthGuard { fn drop(&mut self) { STACK_LIMIT .current_depth - .set(STACK_LIMIT.current_depth.get() - 1) + .set(STACK_LIMIT.current_depth.get() - 1); } #[cfg(not(feature = "nightly"))] fn drop(&mut self) { @@ -75,7 +78,7 @@ pub struct StackDepthLimitOverrideGuard { impl Drop for StackDepthLimitOverrideGuard { #[cfg(feature = "nightly")] fn drop(&mut self) { - STACK_LIMIT.max_stack_size.set(self.old_limit) + STACK_LIMIT.max_stack_size.set(self.old_limit); } #[cfg(not(feature = "nightly"))] fn drop(&mut self) { diff --git a/crates/jrsonnet-evaluator/src/typed/conversions.rs b/crates/jrsonnet-evaluator/src/typed/conversions.rs index 49cff9c6..75f65341 100644 --- a/crates/jrsonnet-evaluator/src/typed/conversions.rs +++ b/crates/jrsonnet-evaluator/src/typed/conversions.rs @@ -143,6 +143,7 @@ 
macro_rules! impl_int { _ => unreachable!(), } } + #[allow(clippy::cast_lossless)] fn into_untyped(value: Self) -> Result { Ok(Val::Num(value as f64)) } @@ -199,6 +200,7 @@ macro_rules! impl_bounded_int { } } + #[allow(clippy::cast_lossless)] fn into_untyped(value: Self) -> Result { Ok(Val::Num(value.0 as f64)) } diff --git a/crates/jrsonnet-rowan-parser/src/marker.rs b/crates/jrsonnet-rowan-parser/src/marker.rs index 1a4a97ed..5bca186c 100644 --- a/crates/jrsonnet-rowan-parser/src/marker.rs +++ b/crates/jrsonnet-rowan-parser/src/marker.rs @@ -22,6 +22,7 @@ use crate::{ pub struct FinishedRanger { pub start_token: usize, + #[allow(dead_code)] pub end_token: usize, } impl FinishedRanger { diff --git a/crates/jrsonnet-rowan-parser/src/tests.rs b/crates/jrsonnet-rowan-parser/src/tests.rs index 8f22f949..34181c1b 100644 --- a/crates/jrsonnet-rowan-parser/src/tests.rs +++ b/crates/jrsonnet-rowan-parser/src/tests.rs @@ -1,4 +1,5 @@ -#![cfg(never)] +// `never` +#![cfg(any())] use miette::{ Diagnostic, GraphicalReportHandler, GraphicalTheme, LabeledSpan, ThemeCharacters, ThemeStyles, diff --git a/crates/jrsonnet-stdlib/Cargo.toml b/crates/jrsonnet-stdlib/Cargo.toml index 51997394..3d655abd 100644 --- a/crates/jrsonnet-stdlib/Cargo.toml +++ b/crates/jrsonnet-stdlib/Cargo.toml @@ -12,19 +12,23 @@ workspace = true [features] default = ["codegenerated-stdlib"] -# Speed-up initialization by generating code for parsed stdlib, instead -# of invoking parser for it +# Speed-up initialization by generating code for parsed stdlib, +# instead of invoking parser for it. +# This is mutually exclusive with `serialized-stdlib`. codegenerated-stdlib = ["jrsonnet-parser/structdump"] +# Use the embedded serialized stdlib. +# This is mutually exclusive with `codegenerated-stdlib`. 
+serialized-stdlib = [] # Enables legacy `std.thisFile` support, at the cost of worse caching legacy-this-file = [] # Add order preservation flag to some functions exp-preserve-order = ["jrsonnet-evaluator/exp-preserve-order"] # Bigint type -exp-bigint = ["num-bigint", "jrsonnet-evaluator/exp-bigint"] +exp-bigint = ["dep:num-bigint", "jrsonnet-evaluator/exp-bigint"] exp-null-coaelse = ["jrsonnet-parser/exp-null-coaelse", "jrsonnet-evaluator/exp-null-coaelse"] # std.regexMatch and other helpers -exp-regex = ["regex", "lru", "rustc-hash"] +exp-regex = ["dep:regex", "dep:lru", "dep:rustc-hash"] [dependencies] jrsonnet-evaluator.workspace = true diff --git a/crates/jrsonnet-stdlib/src/expr.rs b/crates/jrsonnet-stdlib/src/expr.rs index 8ada6ddf..d3c530f7 100644 --- a/crates/jrsonnet-stdlib/src/expr.rs +++ b/crates/jrsonnet-stdlib/src/expr.rs @@ -1,7 +1,11 @@ use jrsonnet_parser::LocExpr; pub fn stdlib_expr() -> LocExpr { - #[cfg(feature = "serialized-stdlib")] + #[cfg(all(feature = "serialized-stdlib", feature = "codegenerated-stdlib"))] + compile_error!( + "features `serialized-stdlib` and `codegenerated-stdlib` are mutually exclusive" + ); + #[cfg(all(feature = "serialized-stdlib", not(feature = "codegenerated-stdlib")))] { use bincode::{BincodeRead, DefaultOptions, Options}; use serde::{Deserialize, Deserializer}; @@ -77,7 +81,7 @@ pub fn stdlib_expr() -> LocExpr { LocExpr::deserialize(&mut deserializer).unwrap() } - #[cfg(feature = "codegenerated-stdlib")] + #[cfg(all(feature = "codegenerated-stdlib", not(feature = "serialized-stdlib")))] { mod structdump_import { pub(super) use std::{option::Option, rc::Rc, vec}; @@ -88,7 +92,7 @@ pub fn stdlib_expr() -> LocExpr { include!(concat!(env!("OUT_DIR"), "/stdlib.rs")) } - #[cfg(not(feature = "codegenerated-stdlib"))] + #[cfg(not(any(feature = "serialized-stdlib", feature = "codegenerated-stdlib")))] { use jrsonnet_parser::Source; diff --git a/crates/jrsonnet-stdlib/src/lib.rs b/crates/jrsonnet-stdlib/src/lib.rs index 
00e55ad1..71a57c1f 100644 --- a/crates/jrsonnet-stdlib/src/lib.rs +++ b/crates/jrsonnet-stdlib/src/lib.rs @@ -1,9 +1,15 @@ +#![allow(clippy::similar_names)] + use std::{ cell::{Ref, RefCell, RefMut}, collections::HashMap, rc::Rc, }; +pub use arrays::*; +pub use compat::*; +pub use encoding::*; +pub use hash::*; use jrsonnet_evaluator::{ error::{ErrorKind::*, Result}, function::{CallLocation, FuncVal, TlaArg}, @@ -13,40 +19,37 @@ use jrsonnet_evaluator::{ }; use jrsonnet_gcmodule::Trace; use jrsonnet_parser::Source; - -mod expr; -mod types; -pub use types::*; -mod arrays; -pub use arrays::*; -mod math; +pub use manifest::*; pub use math::*; -mod operator; +pub use misc::*; +pub use objects::*; pub use operator::*; -mod sort; +pub use parse::*; +pub use sets::*; pub use sort::*; -mod hash; -pub use hash::*; +pub use strings::*; +pub use types::*; + +#[cfg(feature = "exp-regex")] +pub use crate::regex::*; + +mod arrays; +mod compat; mod encoding; -pub use encoding::*; -mod objects; -pub use objects::*; +mod expr; +mod hash; mod manifest; -pub use manifest::*; -mod parse; -pub use parse::*; -mod strings; -pub use strings::*; +mod math; mod misc; -pub use misc::*; -mod sets; -pub use sets::*; -mod compat; -pub use compat::*; +mod objects; +mod operator; +mod parse; #[cfg(feature = "exp-regex")] mod regex; -#[cfg(feature = "exp-regex")] -pub use crate::regex::*; +mod sets; +mod sort; +mod strings; +mod types; #[allow(clippy::too_many_lines)] pub fn stdlib_uncached(settings: Rc>) -> ObjValue { @@ -258,9 +261,7 @@ pub fn stdlib_uncached(settings: Rc>) -> ObjValue { ); builder.method( "regexGlobalReplace", - builtin_regex_global_replace { - cache: regex_cache.clone(), - }, + builtin_regex_global_replace { cache: regex_cache }, ); }; @@ -409,16 +410,15 @@ impl jrsonnet_evaluator::ContextInitializer for ContextInitializer { } #[cfg(feature = "legacy-this-file")] fn populate(&self, source: Source, builder: &mut ContextBuilder) { - use jrsonnet_evaluator::val::StrValue; - let 
mut std = ObjValueBuilder::new(); std.with_super(self.stdlib_obj.clone()); - std.field("thisFile") - .hide() - .value(match source.source_path().path() { - Some(p) => self.settings().path_resolver.resolve(p), - None => source.source_path().to_string(), - }); + std.field("thisFile").hide().value({ + let source_path = source.source_path(); + source_path.path().map_or_else( + || source_path.to_string(), + |p| self.settings().path_resolver.resolve(p), + ) + }); let stdlib_with_this_file = std.build(); builder.bind("std", Thunk::evaluated(Val::Obj(stdlib_with_this_file))); diff --git a/crates/jrsonnet-stdlib/src/regex.rs b/crates/jrsonnet-stdlib/src/regex.rs index 0ce334bf..3c80e3dc 100644 --- a/crates/jrsonnet-stdlib/src/regex.rs +++ b/crates/jrsonnet-stdlib/src/regex.rs @@ -50,16 +50,16 @@ pub fn regex_match_inner(regex: &Regex, str: String) -> Result { for ele in captured.iter().skip(1) { if let Some(ele) = ele { - captures.push(Val::Str(StrValue::Flat(ele.as_str().into()))) + captures.push(Val::Str(StrValue::Flat(ele.as_str().into()))); } else { - captures.push(Val::Str(StrValue::Flat(IStr::empty()))) + captures.push(Val::Str(StrValue::Flat(IStr::empty()))); } } for (i, name) in regex .capture_names() .skip(1) .enumerate() - .flat_map(|(i, v)| Some((i, v?))) + .filter_map(|(i, v)| Some((i, v?))) { let capture = captures[i].clone(); named_captures.field(name).try_value(capture)?; diff --git a/flake.lock b/flake.lock index c8bda9c0..a019bebc 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1711299236, - "narHash": "sha256-6/JsyozOMKN8LUGqWMopKTSiK8N79T8Q+hcxu2KkTXg=", + "lastModified": 1715274763, + "narHash": "sha256-3Iv1PGHJn9sV3HO4FlOVaaztOxa9uGLfOmUWrH7v7+A=", "owner": "ipetkov", "repo": "crane", - "rev": "880573f80d09e18a11713f402b9e6172a085449f", + "rev": "27025ab71bdca30e7ed0a16c88fd74c5970fc7f5", "type": "github" }, "original": { @@ -25,11 +25,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1705309234, - 
"narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=", + "lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26", + "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", "type": "github" }, "original": { @@ -40,11 +40,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1705391267, - "narHash": "sha256-gGVm9QudiRtYTX8PN9cTTy7uuJcL4I2lRMoPx496kXk=", + "lastModified": 1715790495, + "narHash": "sha256-qozzVYpBdBjHkCJGEPc0QowlfdbesySYn5Mx0prTjmo=", "owner": "nixos", "repo": "nixpkgs", - "rev": "41a9a7f170c740acb24f3390323877d11c69d5ee", + "rev": "34f9145a6b46d78586fa31a4bea9a976221ed220", "type": "github" }, "original": { @@ -71,11 +71,11 @@ ] }, "locked": { - "lastModified": 1705371439, - "narHash": "sha256-P1kulUXpYWkcrjiX3sV4j8ACJZh9XXSaaD+jDLBDLKo=", + "lastModified": 1715739484, + "narHash": "sha256-5zlSuCM54jH6tXi8OILZ7opT+lBYUkGU9eOMEvJh9HU=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "b21f3c0d5bf0f0179f5f0140e8e0cd099618bd04", + "rev": "3d27c65641a61d36f1c7616d6150524cd9a2a5f7", "type": "github" }, "original": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 7ea3efab..a0080e68 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2024-01-10" +channel = "nightly-2024-05-10" components = ["rustfmt", "clippy", "rust-analyzer", "rust-src"] diff --git a/tests/tests/golden.rs b/tests/tests/golden.rs index 9e678578..0ec1379f 100644 --- a/tests/tests/golden.rs +++ b/tests/tests/golden.rs @@ -27,11 +27,7 @@ fn run(file: &Path) -> String { Ok(v) => v, Err(e) => return trace_format.format(&e).unwrap(), }; - match v.manifest( - JsonFormat::default(), - #[cfg(feature = "exp-preserve-order")] - false, - ) { + match v.manifest(JsonFormat::default()) { Ok(v) => v.to_string(), Err(e) => trace_format.format(&e).unwrap(), } diff --git a/xtask/src/sourcegen/ast.rs 
b/xtask/src/sourcegen/ast.rs index 1cf29178..b3c8b9f1 100644 --- a/xtask/src/sourcegen/ast.rs +++ b/xtask/src/sourcegen/ast.rs @@ -62,6 +62,7 @@ pub struct AstEnumSrc { #[derive(Debug, Clone)] pub struct AstTokenEnumSrc { + #[allow(dead_code)] pub doc: Vec, pub name: String, pub variants: Vec, diff --git a/xtask/src/sourcegen/kinds.rs b/xtask/src/sourcegen/kinds.rs index 6dd2a707..6ad67e0e 100644 --- a/xtask/src/sourcegen/kinds.rs +++ b/xtask/src/sourcegen/kinds.rs @@ -14,6 +14,7 @@ pub enum TokenKind { Error { grammar_name: String, name: String, + #[allow(dead_code)] /// Is this error returned by lexer directly, or from lex.rs is_lexer_error: bool, regex: Option,