Skip to content

Commit

Permalink
use schema analyzer to integrate with dynamic resource limits + reso…
Browse files Browse the repository at this point in the history
…urce limits section
  • Loading branch information
mssalemi committed Sep 12, 2024
1 parent cead712 commit 529a290
Show file tree
Hide file tree
Showing 9 changed files with 302 additions and 18 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ rmp-serde = "1.3"
is-terminal = "0.4.13"
wasmprof = "0.7.0"
bluejay-core = { version = "=0.2.0" }
bluejay-parser = { version = "=0.2.0" }
bluejay-parser = { version = "=0.2.0", features = ["format-errors"] }
bluejay-validator = { version = "=0.2.0" }

[dev-dependencies]
Expand Down
4 changes: 2 additions & 2 deletions src/bluejay_schema_analyzer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ mod tests {
}

#[test]
fn test_accurate_scale_limits_for_nested_array() {
fn test_no_double_counting_for_duplicate_fields_with_array() {
let schema_string = r#"
directive @scaleLimits(rate: Float!) on FIELD_DEFINITION
type Query {
Expand Down Expand Up @@ -241,7 +241,7 @@ mod tests {
}

#[test]
fn test_no_double_counting_for_duplicate_fields_with_nested_array() {
fn test_scale_factor_with_nested_array() {
let schema_string = r#"
directive @scaleLimits(rate: Float!) on FIELD_DEFINITION
type Query {
Expand Down
3 changes: 3 additions & 0 deletions src/engine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ pub struct FunctionRunParams<'a> {
pub input: Vec<u8>,
pub export: &'a str,
pub profile_opts: Option<&'a ProfileOpts>,
pub scale_factor: f64,
}

const STARTING_FUEL: u64 = u64::MAX;
Expand Down Expand Up @@ -114,6 +115,7 @@ pub fn run(params: FunctionRunParams) -> Result<FunctionRunResult> {
input,
export,
profile_opts,
scale_factor,
} = params;

let engine = Engine::new(
Expand Down Expand Up @@ -231,6 +233,7 @@ pub fn run(params: FunctionRunParams) -> Result<FunctionRunResult> {
input: function_run_input,
output,
profile: profile_data,
scale_factor,
};

Ok(function_run_result)
Expand Down
129 changes: 121 additions & 8 deletions src/function_run_result.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,27 +28,66 @@ pub struct FunctionRunResult {
pub output: FunctionOutput,
#[serde(skip)]
pub profile: Option<String>,
#[serde(skip)]
pub scale_factor: f64,
}

const DEFAULT_INSTRUCTIONS_LIMIT: u64 = 11_000_000;
const DEFAULT_INPUT_SIZE_LIMIT: u64 = 64_000;
const DEFAULT_OUTPUT_SIZE_LIMIT: u64 = 20_000;

/// Byte length of `value` once serialized to JSON.
///
/// Returns 0 when serialization fails, so callers can treat the size as
/// a best-effort metric rather than a hard error.
pub fn get_json_size_as_bytes(value: &serde_json::Value) -> usize {
    match serde_json::to_vec(value) {
        Ok(bytes) => bytes.len(),
        Err(_) => 0,
    }
}

impl FunctionRunResult {
    /// Serializes the run result as pretty-printed JSON; if serialization
    /// fails, the serializer's error message is returned instead so the
    /// caller always gets a displayable string.
    pub fn to_json(&self) -> String {
        match serde_json::to_string_pretty(&self) {
            Ok(json) => json,
            Err(error) => error.to_string(),
        }
    }

    /// Size of the function input in bytes, measured on its JSON serialization.
    pub fn input_size(&self) -> usize {
        get_json_size_as_bytes(&self.input)
    }

    /// Size of the function output in bytes, measured on its JSON
    /// serialization; invalid (non-JSON) output counts as 0.
    pub fn output_size(&self) -> usize {
        if let FunctionOutput::JsonOutput(value) = &self.output {
            get_json_size_as_bytes(value)
        } else {
            0
        }
    }
}

/// Formats `size_bytes` as a human-readable size (B / KB / MB / GB),
/// prefixed with `title`. The whole line is rendered red when the size
/// exceeds `size_limit`.
fn humanize_size(title: &str, size_bytes: u64, size_limit: u64) -> String {
    const KIB: u64 = 1024;
    const MIB: u64 = 1_048_576;
    const GIB: u64 = 1_073_741_824;

    let pretty = if size_bytes < KIB {
        format!("{}B", size_bytes)
    } else if size_bytes < MIB {
        format!("{:.2}KB", size_bytes as f64 / KIB as f64)
    } else if size_bytes < GIB {
        format!("{:.2}MB", size_bytes as f64 / MIB as f64)
    } else {
        format!("{:.2}GB", size_bytes as f64 / GIB as f64)
    };

    let line = format!("{}: {}", title, pretty);
    // Highlight only when the limit is strictly exceeded; hitting the
    // limit exactly is still considered within bounds.
    if size_bytes > size_limit {
        line.red().to_string()
    } else {
        line
    }
}

fn humanize_instructions(instructions: u64) -> String {
fn humanize_instructions(title: &str, instructions: u64, instructions_limit: u64) -> String {
let instructions_humanized = match instructions {
0..=999 => instructions.to_string(),
1000..=999_999 => format!("{}K", instructions as f64 / 1000.0),
1_000_000..=999_999_999 => format!("{}M", instructions as f64 / 1_000_000.0),
1_000_000_000..=u64::MAX => format!("{}B", instructions as f64 / 1_000_000_000.0),
};

match instructions {
0..=11_000_000 => format!("Instructions: {instructions_humanized}"),
11_000_001..=u64::MAX => format!("Instructions: {instructions_humanized}")
if instructions > instructions_limit {
format!("{}: {}", title, instructions_humanized)
.red()
.to_string(),
.to_string()
} else {
format!("{}: {}", title, instructions_humanized)
}
}

Expand Down Expand Up @@ -107,15 +146,83 @@ impl fmt::Display for FunctionRunResult {
}
}

let input_size_limit = self.scale_factor * DEFAULT_INPUT_SIZE_LIMIT as f64;
let output_size_limit = self.scale_factor * DEFAULT_OUTPUT_SIZE_LIMIT as f64;
let instructions_size_limit = self.scale_factor * DEFAULT_INSTRUCTIONS_LIMIT as f64;

writeln!(
formatter,
"\n{}\n\n",
" Resource Limits "
.black()
.on_bright_magenta()
)?;

writeln!(
formatter,
"{}",
humanize_size(
"Input Size",
input_size_limit as u64,
input_size_limit as u64
)
)?;

writeln!(
formatter,
"{}",
humanize_size(
"Output Size",
output_size_limit as u64,
output_size_limit as u64
)
)?;
writeln!(
formatter,
"{}",
humanize_instructions(
"Instructions",
instructions_size_limit as u64,
instructions_size_limit as u64
)
)?;

let title = " Benchmark Results "
.black()
.on_truecolor(150, 191, 72);

write!(formatter, "\n\n{title}\n\n")?;
writeln!(formatter, "Name: {}", self.name)?;
writeln!(formatter, "Linear Memory Usage: {}KB", self.memory_usage)?;
writeln!(formatter, "{}", humanize_instructions(self.instructions))?;
writeln!(formatter, "Size: {}KB\n", self.size)?;
writeln!(
formatter,
"{}",
humanize_instructions(
"Instructions",
self.instructions,
instructions_size_limit as u64
)
)?;
writeln!(
formatter,
"{}",
humanize_size(
"Input Size",
self.input_size() as u64,
input_size_limit as u64,
)
)?;
writeln!(
formatter,
"{}",
humanize_size(
"Output Size",
self.output_size() as u64,
output_size_limit as u64,
)
)?;

writeln!(formatter, "Module Size: {}KB\n", self.size)?;

Ok(())
}
Expand Down Expand Up @@ -145,11 +252,15 @@ mod tests {
"test": "test"
})),
profile: None,
scale_factor: 1.0,
};

let predicate = predicates::str::contains("Instructions: 1.001K")
.and(predicates::str::contains("Linear Memory Usage: 1000KB"))
.and(predicates::str::contains(expected_input_display));
.and(predicates::str::contains(expected_input_display))
.and(predicates::str::contains("Input Size: 28B"))
.and(predicates::str::contains("Output Size: 15B"));
assert!(predicate.eval(&function_run_result.to_string()));

assert!(predicate.eval(&function_run_result.to_string()));
Ok(())
Expand All @@ -172,6 +283,7 @@ mod tests {
"test": "test"
})),
profile: None,
scale_factor: 1.0,
};

let predicate = predicates::str::contains("Instructions: 1")
Expand All @@ -198,6 +310,7 @@ mod tests {
"test": "test"
})),
profile: None,
scale_factor: 1.0,
};

let predicate = predicates::str::contains("Instructions: 999")
Expand Down
2 changes: 2 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
pub mod bluejay_schema_analyzer;
pub mod engine;
pub mod function_run_result;
pub mod logs;
pub mod scale_limits_analyzer;
66 changes: 59 additions & 7 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,15 @@ use std::{

use anyhow::{anyhow, Result};
use clap::{Parser, ValueEnum};
use function_runner::engine::{run, FunctionRunParams, ProfileOpts};
use function_runner::{
bluejay_schema_analyzer::BluejaySchemaAnalyzer,
engine::{run, FunctionRunParams, ProfileOpts},
};

use is_terminal::IsTerminal;

const PROFILE_DEFAULT_INTERVAL: u32 = 500_000; // every 5us
const DEFAULT_SCALE_FACTOR: f64 = 1.0;

/// Supported input flavors
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
Expand Down Expand Up @@ -60,6 +64,14 @@ struct Opts {

#[clap(short = 'c', long, value_enum, default_value = "json")]
codec: Codec,

/// Path to graphql file containing Function schema; if omitted, defaults will be used to calculate limits.
#[clap(short = 's', long)]
schema_path: Option<PathBuf>,

/// Path to graphql file containing Function input query; if omitted, defaults will be used to calculate limits.
#[clap(short = 'q', long)]
query_path: Option<PathBuf>,
}

impl Opts {
Expand Down Expand Up @@ -89,6 +101,25 @@ impl Opts {

path
}

/// Reads the Function schema file into a string when `--schema-path`
/// was supplied; `None` means no schema path was given (callers fall
/// back to default limits).
pub fn read_schema_to_string(&self) -> Option<Result<String>> {
self.schema_path.as_ref().map(read_file_to_string)
}

/// Reads the Function input query file into a string when
/// `--query-path` was supplied; `None` means no query path was given
/// (callers fall back to default limits).
pub fn read_query_to_string(&self) -> Option<Result<String>> {
self.query_path.as_ref().map(read_file_to_string)
}
}

/// Reads the entire contents of `file_path` into a `String`.
///
/// Produces a path-annotated error when the file cannot be opened or
/// its contents cannot be read (e.g. not valid UTF-8).
fn read_file_to_string(file_path: &PathBuf) -> Result<String> {
    let mut contents = String::new();

    File::open(file_path)
        .map_err(|e| anyhow!("Couldn't open file {}: {}", file_path.to_string_lossy(), e))?
        .read_to_string(&mut contents)
        .map_err(|e| anyhow!("Couldn't read file {}: {}", file_path.to_string_lossy(), e))?;

    Ok(contents)
}

fn main() -> Result<()> {
Expand All @@ -109,27 +140,48 @@ fn main() -> Result<()> {
let mut buffer = Vec::new();
input.read_to_end(&mut buffer)?;

let buffer = match opts.codec {
let schema_string = opts.read_schema_to_string().transpose()?;

let query_string = opts.read_query_to_string().transpose()?;

let (json_value, buffer) = match opts.codec {
Codec::Json => {
let _ = serde_json::from_slice::<serde_json::Value>(&buffer)
let json = serde_json::from_slice::<serde_json::Value>(&buffer)
.map_err(|e| anyhow!("Invalid input JSON: {}", e))?;
buffer
(Some(json), buffer)
}
Codec::Raw => buffer,
Codec::Raw => (None, buffer),
Codec::JsonToMessagepack => {
let json: serde_json::Value = serde_json::from_slice(&buffer)
.map_err(|e| anyhow!("Invalid input JSON: {}", e))?;
rmp_serde::to_vec(&json)
.map_err(|e| anyhow!("Couldn't convert JSON to MessagePack: {}", e))?
let bytes = rmp_serde::to_vec(&json)
.map_err(|e| anyhow!("Couldn't convert JSON to MessagePack: {}", e))?;
(Some(json), bytes)
}
};

let scale_factor = if let (Some(schema_string), Some(query_string), Some(json_value)) =
(schema_string, query_string, json_value)
{
BluejaySchemaAnalyzer::analyze_schema_definition(
&schema_string,
opts.schema_path.as_ref().and_then(|p| p.to_str()),
&query_string,
opts.query_path.as_ref().and_then(|p| p.to_str()),
&json_value,
)?
} else {
DEFAULT_SCALE_FACTOR // Use default scale factor when schema or query is missing
};

let profile_opts = opts.profile_opts();

let function_run_result = run(FunctionRunParams {
function_path: opts.function,
input: buffer,
export: opts.export.as_ref(),
profile_opts: profile_opts.as_ref(),
scale_factor,
})?;

if opts.json {
Expand Down
7 changes: 7 additions & 0 deletions tests/fixtures/query/query.graphql
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Test fixture: input query selecting cart line quantities; paired with
# tests/fixtures/schema/schema.graphql to exercise scale-limit analysis.
query {
cart {
lines {
quantity
}
}
}
22 changes: 22 additions & 0 deletions tests/fixtures/schema/schema.graphql
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Test fixture: minimal Function schema used to exercise scale-limit
# analysis; `Cart.lines` carries a @scaleLimits directive so the
# analyzer can derive a scale factor from the input's line count.
schema {
query: Query
}

# Declares the rate at which a field contributes to the scale factor.
directive @scaleLimits(rate: Float!) on FIELD_DEFINITION

type Attribute {
key: String!
value: String
}

type Cart {
lines: [CartLine!]! @scaleLimits(rate: 0.005)
}

type CartLine {
quantity: Int!
}

type Query {
cart: Cart
}
Loading

0 comments on commit 529a290

Please sign in to comment.