Compare commits

...

18 Commits

Author SHA1 Message Date
Douglas Creager  70a5373791  Include scope kind in scope_count metric, add bar graph  2025-02-07 11:39:01 -05:00
Douglas Creager  8e880c8f61  List comprehension  2025-02-07 10:18:30 -05:00
Douglas Creager  5e34d79cc1  Use default_missing_value instead of double-Option  2025-02-07 10:17:11 -05:00
Douglas Creager  b0beb93517  pre-commit  2025-02-06 17:36:55 -05:00
Douglas Creager  391917bc87  Describe before/after comparisons  2025-02-06 17:28:40 -05:00
Douglas Creager  f587a89a3b  Optionally save output to file  2025-02-06 17:24:41 -05:00
Douglas Creager  5a7650d5ee  Add README  2025-02-06 17:22:23 -05:00
Douglas Creager  ff5e65f6f5  Add histogram subcommand  2025-02-06 17:13:15 -05:00
Douglas Creager  88ef456757  Add initial plot script  2025-02-06 16:57:30 -05:00
Douglas Creager  1e1470073c  Clippy  2025-02-06 14:24:07 -05:00
Douglas Creager  ca237345d9  Add --metrics flag  2025-02-06 14:22:34 -05:00
Douglas Creager  5ec0cb32f8  Include time since start of process  2025-02-06 14:01:23 -05:00
Douglas Creager  e73374c146  Include executable name in metrics  2025-02-06 13:56:46 -05:00
Douglas Creager  382349f85a  Better documentation  2025-02-06 13:46:48 -05:00
Douglas Creager  80efb01e5d  Hide the thread-local buffer better  2025-02-06 13:46:48 -05:00
Douglas Creager  c8b2cc4e00  Include timestamp in JSON metrics  2025-02-06 13:46:48 -05:00
Douglas Creager  25a84c7b18  Initial JSON metrics exporter  2025-02-06 13:46:48 -05:00
Douglas Creager  1c71f9b8c4  Start recording some metrics  2025-02-06 13:46:48 -05:00
14 changed files with 641 additions and 9 deletions

Cargo.lock (generated)
View File

@@ -471,7 +471,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -480,7 +480,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
dependencies = [
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -855,6 +855,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]]
name = "endian-type"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
[[package]]
name = "env_filter"
version = "0.1.3"
@@ -897,7 +903,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1475,7 +1481,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37"
dependencies = [
"hermit-abi 0.4.0",
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1728,6 +1734,36 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "metrics"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a7deb012b3b2767169ff203fadb4c6b0b82b947512e5eb9e0b78c2e186ad9e3"
dependencies = [
"ahash",
"portable-atomic",
]
[[package]]
name = "metrics-util"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbd4884b1dd24f7d6628274a2f5ae22465c337c5ba065ec9b6edccddf8acc673"
dependencies = [
"aho-corasick",
"crossbeam-epoch",
"crossbeam-utils",
"hashbrown 0.15.2",
"indexmap",
"metrics",
"ordered-float",
"quanta",
"radix_trie",
"rand 0.8.5",
"rand_xoshiro",
"sketches-ddsketch",
]
[[package]]
name = "mimalloc"
version = "0.1.43"
@@ -1789,6 +1825,15 @@ dependencies = [
"uuid",
]
[[package]]
name = "nibble_vec"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43"
dependencies = [
"smallvec",
]
[[package]]
name = "nix"
version = "0.29.0"
@@ -1904,6 +1949,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordered-float"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951"
dependencies = [
"num-traits",
]
[[package]]
name = "ordermap"
version = "0.5.5"
@@ -2255,6 +2309,21 @@ dependencies = [
"toml",
]
[[package]]
name = "quanta"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bd1fe6824cea6538803de3ff1bc0cf3949024db3d43c9643024bfb33a807c0e"
dependencies = [
"crossbeam-utils",
"libc",
"once_cell",
"raw-cpuid",
"wasi 0.11.0+wasi-snapshot-preview1",
"web-sys",
"winapi",
]
[[package]]
name = "quick-junit"
version = "0.5.1"
@@ -2308,6 +2377,16 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "radix_trie"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd"
dependencies = [
"endian-type",
"nibble_vec",
]
[[package]]
name = "rand"
version = "0.8.5"
@@ -2369,6 +2448,24 @@ dependencies = [
"zerocopy 0.8.14",
]
[[package]]
name = "rand_xoshiro"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "raw-cpuid"
version = "11.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6928fa44c097620b706542d428957635951bade7143269085389d42c8a4927e"
dependencies = [
"bitflags 2.8.0",
]
[[package]]
name = "rayon"
version = "1.10.0"
@@ -2403,12 +2500,14 @@ dependencies = [
"filetime",
"insta",
"insta-cmd",
"metrics",
"rayon",
"red_knot_project",
"red_knot_python_semantic",
"red_knot_server",
"regex",
"ruff_db",
"ruff_metrics",
"ruff_python_trivia",
"salsa",
"tempfile",
@@ -2461,6 +2560,7 @@ dependencies = [
"insta",
"itertools 0.14.0",
"memchr",
"metrics",
"ordermap",
"quickcheck",
"quickcheck_macros",
@@ -2943,6 +3043,15 @@ dependencies = [
"syn 2.0.98",
]
[[package]]
name = "ruff_metrics"
version = "0.0.0"
dependencies = [
"metrics",
"metrics-util",
"serde_json",
]
[[package]]
name = "ruff_notebook"
version = "0.0.0"
@@ -3283,7 +3392,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -3533,6 +3642,12 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "sketches-ddsketch"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a"
[[package]]
name = "smallvec"
version = "1.13.2"
@@ -3661,7 +3776,7 @@ dependencies = [
"getrandom 0.3.1",
"once_cell",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -4425,7 +4540,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]

View File

@@ -22,6 +22,7 @@ ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
ruff_metrics = { path = "crates/ruff_metrics" }
ruff_notebook = { path = "crates/ruff_notebook" }
ruff_python_ast = { path = "crates/ruff_python_ast" }
ruff_python_codegen = { path = "crates/ruff_python_codegen" }
@@ -105,6 +106,8 @@ lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
metrics = { version = "0.24.1" }
metrics-util = { version = "0.19.0" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }

View File

@@ -16,6 +16,7 @@ red_knot_python_semantic = { workspace = true }
red_knot_project = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_metrics = { workspace = true }
anyhow = { workspace = true }
chrono = { workspace = true }
@@ -24,6 +25,7 @@ colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
metrics = { workspace = true }
rayon = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }

View File

@@ -63,6 +63,13 @@ pub(crate) struct CheckCommand {
#[clap(flatten)]
pub(crate) verbosity: Verbosity,
/// Whether to output metrics about type-checking performance. If you provide a path, metrics
/// will be written to that file. If you provide this option but don't provide a path, metrics
/// will be written to a file called `metrics.json` in the current directory. We will _append_
/// metrics to the file if it already exists.
#[arg(long, value_name = "PATH", default_missing_value="metrics.json", num_args=0..=1)]
pub(crate) metrics: Option<SystemPathBuf>,
#[clap(flatten)]
pub(crate) rules: RulesArg,
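As a quick sketch of how the three ways of invoking this option map onto the field above (hypothetical command lines that mirror the README added later in this PR; the behavior follows from the doc comment, `default_missing_value`, and `num_args=0..=1`):

```console
$ red_knot check [rest of arguments]                                  # metrics == None; nothing is collected
$ red_knot check --metrics [rest of arguments]                        # metrics == Some("metrics.json"); appends to ./metrics.json
$ red_knot check --metrics some-other-file.json [rest of arguments]   # metrics == Some("some-other-file.json")
```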

View File

@@ -2,8 +2,10 @@
use anyhow::Context;
use colored::Colorize;
use ruff_db::system::SystemPathBuf;
use ruff_metrics::JsonRecorder;
use std::fmt;
use std::fs::File;
use std::fs::{File, OpenOptions};
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
@@ -252,3 +254,18 @@ where
writeln!(writer)
}
}
pub(crate) fn setup_metrics(dest: Option<&SystemPathBuf>) {
// If --metrics is not provided at all, don't collect any metrics.
let Some(dest) = dest else {
return;
};
let dest = OpenOptions::new()
.append(true)
.create(true)
.open(dest.as_std_path())
.expect("cannot open metrics file");
let recorder = JsonRecorder::new(dest);
metrics::set_global_recorder(recorder).expect("metrics recorder already registered");
}

View File

@@ -5,7 +5,7 @@ use anyhow::Result;
use std::sync::Mutex;
use crate::args::{Args, CheckCommand, Command};
use crate::logging::setup_tracing;
use crate::logging::{setup_metrics, setup_tracing};
use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
@@ -68,6 +68,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
let verbosity = args.verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
setup_metrics(args.metrics.as_ref());
// The base path to which all CLI arguments are relative to.
let cli_base_path = {

View File

@@ -30,6 +30,7 @@ countme = { workspace = true }
drop_bomb = { workspace = true }
indexmap = { workspace = true }
itertools = { workspace = true }
metrics = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }

View File

@@ -61,6 +61,9 @@ pub(super) struct SemanticIndexBuilder<'db> {
// Builder state
db: &'db dyn Db,
file: File,
// A shared clone of the path of the file being analyzed. We use this as a label for all of the
// metrics that we export, and this avoids cloning the path into a new string each time.
file_path: Arc<str>,
module: &'db ParsedModule,
scope_stack: Vec<ScopeInfo>,
/// The assignments we're currently visiting, with
@@ -95,9 +98,11 @@ pub(super) struct SemanticIndexBuilder<'db> {
impl<'db> SemanticIndexBuilder<'db> {
pub(super) fn new(db: &'db dyn Db, file: File, parsed: &'db ParsedModule) -> Self {
let file_path = Arc::from(file.path(db).as_str());
let mut builder = Self {
db,
file,
file_path,
module: parsed,
scope_stack: Vec::new(),
current_assignments: vec![],
@@ -186,6 +191,13 @@ impl<'db> SemanticIndexBuilder<'db> {
};
self.try_node_context_stack_manager.enter_nested_scope();
metrics::counter!(
"semantic_index.scope_count",
"file" => self.file_path.clone(),
"kind" => scope.kind().as_str(),
)
.increment(1);
let file_scope_id = self.scopes.push(scope);
self.symbol_tables.push(SymbolTableBuilder::default());
self.use_def_maps.push(UseDefMapBuilder::default());

View File

@@ -213,6 +213,18 @@ impl ScopeKind {
pub const fn is_comprehension(self) -> bool {
matches!(self, ScopeKind::Comprehension)
}
pub const fn as_str(self) -> &'static str {
match self {
Self::Module => "Module",
Self::Annotation => "Annotation",
Self::Class => "Class",
Self::Function => "Function",
Self::Lambda => "Lambda",
Self::Comprehension => "Comprehension",
Self::TypeAlias => "TypeAlias",
}
}
}
/// Symbol table for a specific [`Scope`].

View File

@@ -0,0 +1,19 @@
[package]
name = "ruff_metrics"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[dependencies]
metrics = { workspace = true }
metrics-util = { workspace = true }
serde_json = { workspace = true }
[lints]
workspace = true

View File

@@ -0,0 +1,87 @@
# Metrics
red-knot can generate metrics that describe its performance while type-checking Python code. To
activate metrics collection, pass the `--metrics` option when invoking red-knot:
```console
$ red_knot check --metrics [rest of arguments]
```
This will cause red-knot to _append_ metrics for the current run to a file called `metrics.json` in
the current directory.
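Each metric data point is written as one JSON object per line. A line for the `semantic_index.scope_count` counter might look roughly like this (the field order, file name, timestamps, and values here are illustrative, not real output):

```json
{"executable":"red_knot","key":"semantic_index.scope_count","file":"example.py","kind":"Function","timestamp":1738886341.123456,"since_start":0.052817,"delta":1,"value":42}
```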
You can then use the `plot_metrics.py` script to generate graphs of those metrics:
```console
$ uv run crates/ruff_metrics/plot_metrics.py counter semantic_index.scope_count --group-by file
```
## Available plots
### `counter`
Shows how the value of a counter increases over time. You can optionally group by one of the counter
metric's fields. Times are shown relative to the start of the process.
```console
$ uv run crates/ruff_metrics/plot_metrics.py counter semantic_index.scope_count --group-by file
```
### `histogram`
Shows the distribution of a counter's values. You must provide a metric field to group by; the
histogram shows the maximum value that the counter reached for each value of that field.
```console
$ uv run crates/ruff_metrics/plot_metrics.py histogram semantic_index.scope_count --group-by file
```
## Before/after comparisons
red-knot will include the executable name as a field called `executable` in every metric data point
that it outputs. You can use this to compare the values of a metric between two versions of
red-knot:
```console
$ git switch main
$ cargo build --bin red_knot --profile=profiling
$ cp target/profiling/red_knot red_knot_main
$ ./red_knot_main check --metrics [rest of arguments]
$ git switch feature-branch
$ cargo build --bin red_knot --profile=profiling
$ cp target/profiling/red_knot red_knot_feature
$ ./red_knot_feature check --metrics [rest of arguments]
$ uv run crates/ruff_metrics/plot_metrics.py -o output.png counter semantic_index.scope_count --group-by executable
```
## Saving output to a file
You can save the plot to a file instead of displaying it by passing in the `-o` or `--output`
option:
```console
$ uv run crates/ruff_metrics/plot_metrics.py -o output.png counter semantic_index.scope_count --group-by file
```
(Note that the `--output` option must come before the subcommand selecting which kind of plot you
want.)
## Overriding the metrics file
You can optionally provide a filename for the `--metrics` option, in which case we will output
metrics data to that file instead of `./metrics.json`:
```console
$ red_knot check --metrics some-other-file.json [rest of arguments]
```
You can then pass the same filename to the `plot_metrics.py` script:
```console
$ uv run crates/ruff_metrics/plot_metrics.py --metrics some-other-file.json counter semantic_index.scope_count --group-by file
```
(Note that the `--metrics` option must come before the subcommand selecting which kind of plot you
want.)

View File

@@ -0,0 +1,107 @@
#!/usr/bin/env uv run
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "pyqt6",
# "matplotlib",
# "numpy",
# "pandas",
# ]
# ///
"""Render metrics that have been produced by the ruff_metrics crate."""
from __future__ import annotations
import argparse
import json
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
    "--metrics", help="JSON file with metrics data", default="metrics.json"
)
parser.add_argument("-o", "--output", help="save image to the given filename")
subparsers = parser.add_subparsers(dest="command")

bar_parser = subparsers.add_parser("bar")
bar_parser.add_argument("key", help="the metric to render")
bar_parser.add_argument("--group-by")

counter_parser = subparsers.add_parser("counter")
counter_parser.add_argument("key", help="the counter metric to render")
counter_parser.add_argument("--group-by", required=False)

histogram_parser = subparsers.add_parser("histogram")
histogram_parser.add_argument("key", help="the metric to render")
histogram_parser.add_argument("--group-by")
histogram_parser.add_argument("--bins", help="number of bins (default: auto)")

args = parser.parse_args()

with open(args.metrics) as f:
    results = [json.loads(line) for line in f]
all_data = pd.DataFrame(results)


def get_metric(d: pd.DataFrame, key: str) -> pd.DataFrame:
    return d[d["key"] == key]


def plot_counter(d: pd.DataFrame, label: str | None = None) -> None:
    plt.xlabel("Time [s]")
    d["total"] = d["delta"].cumsum()
    plt.plot("since_start", "total", data=d, label=label)


def show_plot():
    if args.output:
        plt.savefig(args.output, dpi=600)
    else:
        plt.show()


def cmd_bar() -> None:
    data = get_metric(all_data, args.key)
    groups = data.groupby(args.group_by, as_index=False)
    plt.xlabel(args.group_by)
    plt.ylabel("Count")
    plt.bar(args.group_by, "delta", data=groups["delta"].sum())
    show_plot()


def cmd_counter() -> None:
    data = get_metric(all_data, args.key)
    plt.ylabel(args.key)
    if args.group_by is None:
        plot_counter(data)
    else:
        for group, gd in data.groupby(args.group_by):
            plot_counter(gd, group)
        if data[args.group_by].nunique() <= 10:
            plt.legend(loc="best")
    show_plot()


def cmd_histogram() -> None:
    data = get_metric(all_data, args.key)
    bins = int(args.bins) if args.bins else "auto"
    data = data.groupby(args.group_by).last()
    plt.xlabel(args.key)
    plt.ylabel("Count")
    plt.yscale("log")
    plt.hist(data["value"], bins=bins)
    show_plot()


if args.command == "bar":
    cmd_bar()
elif args.command == "counter":
    cmd_counter()
elif args.command == "histogram":
    cmd_histogram()
else:
    print("Missing command")
    parser.print_usage()

View File

@@ -0,0 +1,246 @@
use std::cell::RefCell;
use std::fmt::Write as _;
use std::io::Write;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, Mutex};
use std::time::{SystemTime, UNIX_EPOCH};
use metrics::{
Counter, CounterFn, Gauge, GaugeFn, Histogram, HistogramFn, Key, KeyName, Metadata, Recorder,
SharedString, Unit,
};
use metrics_util::registry::{Registry, Storage};
use serde_json::value::Map;
use serde_json::Value;
/// A [metrics] recorder that outputs metrics in a simple JSON format, typically to a file for
/// later analysis. We do not buffer the metrics.
///
/// Each output record will include a `key` field with the name of the metric. Any labels will also
/// appear as additional JSON fields.
///
/// Counters and gauges will include `delta` and `value` fields, providing the amount that the
/// counter changed by, and the resulting total value.
///
/// Histograms will include `value` and `count` fields. We do not aggregate histogram data in any
/// way.
pub struct JsonRecorder {
registry: Registry<Key, PrerenderedAtomicStorage>,
}
impl JsonRecorder {
/// Creates a new `JsonRecorder` that will output JSON metrics to a destination that implements
/// [`std::io::Write`].
pub fn new<D>(dest: D) -> JsonRecorder
where
D: Write + Send + 'static,
{
let dest = Arc::new(Mutex::new(dest));
let executable = std::env::current_exe()
.unwrap()
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_string();
let start = SystemTime::now();
let storage = PrerenderedAtomicStorage {
dest,
executable,
start,
};
let registry = Registry::new(storage);
JsonRecorder { registry }
}
}
impl Recorder for JsonRecorder {
// We currently ignore metrics descriptions.
fn describe_counter(&self, _key: KeyName, _unit: Option<Unit>, _description: SharedString) {}
fn describe_gauge(&self, _key: KeyName, _unit: Option<Unit>, _description: SharedString) {}
fn describe_histogram(&self, _key: KeyName, _unit: Option<Unit>, _description: SharedString) {}
fn register_counter(&self, key: &Key, _metadata: &Metadata<'_>) -> Counter {
self.registry
.get_or_create_counter(key, |existing| Counter::from_arc(Arc::clone(existing)))
}
fn register_gauge(&self, key: &Key, _metadata: &Metadata<'_>) -> Gauge {
self.registry
.get_or_create_gauge(key, |existing| Gauge::from_arc(Arc::clone(existing)))
}
fn register_histogram(&self, key: &Key, _metadata: &Metadata<'_>) -> Histogram {
self.registry
.get_or_create_histogram(key, |existing| Histogram::from_arc(Arc::clone(existing)))
}
}
struct PrerenderedAtomicStorage {
dest: Arc<Mutex<dyn Write + Send>>,
executable: String,
start: SystemTime,
}
impl Storage<Key> for PrerenderedAtomicStorage {
type Counter = Arc<Metric>;
type Gauge = Arc<Metric>;
type Histogram = Arc<Metric>;
fn counter(&self, key: &Key) -> Self::Counter {
Arc::new(Metric::new(
self.executable.clone(),
self.start,
key,
self.dest.clone(),
))
}
fn gauge(&self, key: &Key) -> Self::Gauge {
Arc::new(Metric::new(
self.executable.clone(),
self.start,
key,
self.dest.clone(),
))
}
fn histogram(&self, key: &Key) -> Self::Histogram {
Arc::new(Metric::new(
self.executable.clone(),
self.start,
key,
self.dest.clone(),
))
}
}
struct Metric {
/// The metric key's name and labels, rendered into JSON on a single line, with the trailing
/// `}` removed. (This makes it easy to append the JSON rendering of each data point without
/// having to re-render the information about the metrics key.)
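/// As a purely illustrative (hypothetical) example: for a counter named
/// `semantic_index.scope_count` with a `file` label, this field might hold
/// `{"executable":"red_knot","key":"semantic_index.scope_count","file":"example.py"`
/// (note the missing closing brace), so each data point only has to append a suffix such as
/// `,"timestamp":...,"delta":1,"value":42}` to form a complete JSON object.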
name_and_labels: String,
dest: Arc<Mutex<dyn Write + Send>>,
start: SystemTime,
value: AtomicU64,
}
impl Metric {
fn new(
executable: String,
start: SystemTime,
key: &Key,
dest: Arc<Mutex<dyn Write + Send>>,
) -> Metric {
let mut json = Map::default();
json.insert("executable".to_string(), executable.into());
json.insert("key".to_string(), key.name().into());
for label in key.labels() {
json.insert(label.key().to_string(), label.value().into());
}
let mut name_and_labels = serde_json::to_string(&Value::Object(json))
.expect("should always be able to render JSON object containing only strings");
// Trim the trailing '}'
let _ = name_and_labels.pop();
Metric {
name_and_labels,
dest,
start,
value: AtomicU64::default(),
}
}
fn output<F>(&self, f: F)
where
F: FnOnce(&mut String),
{
// Render into a thread-local String buffer, and then output the resulting line in a single
// call. This ensures that the output from multiple threads does not get intermingled.
thread_local! {
static BUFFERS: RefCell<String> = const { RefCell::new(String::new()) };
}
BUFFERS.with(|buffer| {
let mut buffer = buffer.borrow_mut();
buffer.clear();
buffer.push_str(&self.name_and_labels);
if let Ok(timestamp) = SystemTime::now().duration_since(UNIX_EPOCH) {
write!(&mut buffer, ",\"timestamp\":{}", timestamp.as_secs_f64()).unwrap();
}
if let Ok(since_start) = SystemTime::now().duration_since(self.start) {
write!(
&mut buffer,
",\"since_start\":{}",
since_start.as_secs_f64()
)
.unwrap();
}
f(&mut buffer);
buffer.push_str("}\n");
let _ = self.dest.lock().unwrap().write(buffer.as_bytes());
});
}
}
impl CounterFn for Metric {
fn increment(&self, delta: u64) {
let old_value = self.value.fetch_add(delta, Ordering::Relaxed);
let new_value = old_value + delta;
self.output(|buffer| {
write!(buffer, ",\"delta\":{delta},\"value\":{new_value}").unwrap();
});
}
fn absolute(&self, value: u64) {
self.value.store(value, Ordering::Relaxed);
self.output(|buffer| {
write!(buffer, ",\"value\":{value}").unwrap();
});
}
}
impl GaugeFn for Metric {
fn increment(&self, delta: f64) {
let mut new_value: f64 = 0.0;
self.value
.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |old_value| {
new_value = f64::from_bits(old_value) + delta;
Some(f64::to_bits(new_value))
})
.expect("should never fail to update gauge");
self.output(|buffer| {
write!(buffer, ",\"delta\":{delta},\"value\":{new_value}").unwrap();
});
}
fn decrement(&self, delta: f64) {
let mut new_value: f64 = 0.0;
self.value
.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |old_value| {
new_value = f64::from_bits(old_value) - delta;
Some(f64::to_bits(new_value))
})
.expect("should never fail to update gauge");
self.output(|buffer| {
write!(buffer, ",\"delta\":{delta},\"value\":{new_value}").unwrap();
});
}
fn set(&self, value: f64) {
self.value.store(value.to_bits(), Ordering::Relaxed);
self.output(|buffer| {
write!(buffer, ",\"value\":{value}").unwrap();
});
}
}
impl HistogramFn for Metric {
fn record(&self, value: f64) {
self.record_many(value, 1);
}
fn record_many(&self, value: f64, count: usize) {
self.output(|buffer| {
write!(buffer, ",\"value\":{value},\"count\":{count}").unwrap();
});
}
}

View File

@@ -0,0 +1,3 @@
mod json;
pub use json::JsonRecorder;