Skip to content

Commit

Permalink
feat: cell GC, TTL, new metrics format
Browse files Browse the repository at this point in the history
  • Loading branch information
marvin-j97 committed Jan 13, 2024
1 parent 020a928 commit e00fa33
Show file tree
Hide file tree
Showing 16 changed files with 711 additions and 396 deletions.
10 changes: 2 additions & 8 deletions app/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,10 @@
</div>
<script type="module" src="/src/index.tsx"></script>
</body>
<!-- <script id="system-metrics-data" type="application/json; utf-8">
{"key":"sys","columns":{"stats":{"cpu":[{"timestamp":1702428799599715991,"value":{"F64":1.16}},{"timestamp":1702428462658821508,"value":{"F64":0.85}},{"timestamp":1702428420787418324,"value":{"F64":0.91}},{"timestamp":1702428360679595803,"value":{"F64":0.57}},{"timestamp":1702428300599798949,"value":{"F64":0.98}},{"timestamp":1702428240504143600,"value":{"F64":1.39}},{"timestamp":1702428180417658163,"value":{"F64":2.18}}],"mem":[{"timestamp":1702428799599724801,"value":{"U64":7250874368}},{"timestamp":1702428462658830486,"value":{"U64":8368549888}},{"timestamp":1702428420787432495,"value":{"U64":8173436928}},{"timestamp":1702428360679604890,"value":{"U64":8188993536}},{"timestamp":1702428300599813565,"value":{"U64":8119185408}},{"timestamp":1702428240504151855,"value":{"U64":8173551616}},{"timestamp":1702428180417694632,"value":{"U64":8214417408}}]}}}
</script> -->
<script id="system-metrics-data" type="application/json; utf-8">
{{system_metrics}}
</script>
<script id="disk-usage-data" type="application/json; utf-8">
{{disk_usage}}
</script>
<script id="latency-data" type="application/json; utf-8">
{{latency}}
<script id="table-stats-data" type="application/json; utf-8">
{{table_stats}}
</script>
</html>
131 changes: 51 additions & 80 deletions app/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -195,89 +195,44 @@ function StackedAreaChart(props: { yFormatter: (x: any) => string; title: string
/>
}

/**
 * Builds one chart series per table from the table-stats map.
 *
 * `tableStatsMap` maps a table name to its metric rows; for each table we
 * look up the row whose `row_key` equals `name` and turn its anonymous
 * value column (`columns.value[""]`) into `{ x: Date, y: number }` points.
 * Tables with no matching row or no data points are omitted entirely.
 */
function extractTimeseries(tableStatsMap: any, name: string) {
    const series = [];

    for (const [tableName, rows] of Object.entries<any>(tableStatsMap)) {
        const metricRow = rows.find((row) => row.row_key === name);
        const cells = metricRow?.columns.value[""] ?? [];

        const points = cells.map((cell) => ({
            // Timestamps are nanoseconds (see sample payload); Date wants milliseconds.
            x: new Date(cell.timestamp / 1000 / 1000),
            y: cell.value.F64,
        }));

        if (points.length > 0) {
            series.push({ name: tableName, data: points });
        }
    }

    return series;
}

function App() {
const [sysRow, _] = createSignal(JSON.parse(document.getElementById("system-metrics-data")!.textContent!));
const [tableRows, __] = createSignal(JSON.parse(document.getElementById("disk-usage-data")!.textContent!));
const [latencyRows, ___] = createSignal(JSON.parse(document.getElementById("latency-data")!.textContent!));
const [sysRows, _] = createSignal(JSON.parse(document.getElementById("system-metrics-data")!.textContent!));
const [tableStatsMap, __] = createSignal(JSON.parse(document.getElementById("table-stats-data")!.textContent!));

const cpu = () => sysRow().columns.stats.cpu.map(({ timestamp, value: { F64: pct } }) => ({
const cpu = () => (sysRows().find(x => x.row_key === "sys#cpu")?.columns.value[""] ?? []).map(({ timestamp, value: { F64: pct } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: pct,
}));

const mem = () => sysRow().columns.stats.mem.map(({ timestamp, value: { F64: bytes } }) => ({
const mem = () => (sysRows().find(x => x.row_key === "sys#mem")?.columns.value[""] ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}));

const journalCount = () => sysRow().columns.stats.wal_cnt.map(({ timestamp, value: { U8: y } }) => ({
const journalCount = () => (sysRows().find(x => x.row_key === "wal#len")?.columns.value[""] ?? []).map(({ timestamp, value: { Byte: y } }) => ({
x: new Date(timestamp / 1000 / 1000),
y,
}));

const tablesDiskUsage = () => tableRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.stats.du ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}))
})).filter(({ data }) => data.length > 0);

const segmentCounts = () => tableRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.stats.seg_cnt ?? []).map(({ timestamp, value: { F64: count } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: count,
}))
})).filter(({ data }) => data.length > 0);

const rowCounts = () => tableRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.stats.row_cnt ?? []).map(({ timestamp, value: { F64: count } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: count,
}))
})).filter(({ data }) => data.length > 0);

const cellCounts = () => tableRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.stats.cell_cnt ?? []).map(({ timestamp, value: { F64: count } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: count,
}))
})).filter(({ data }) => data.length > 0);

const tablesWriteLatency = () => latencyRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.lat["w"] ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}))
})).filter(({ data }) => data.length > 0);

const tablesPointReadLatency = () => latencyRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.lat["r#row"] ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}))
})).filter(({ data }) => data.length > 0);

const tablesPrefixLatency = () => latencyRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.lat["r#pfx"] ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}))
})).filter(({ data }) => data.length > 0);

const tablesDeletesLatency = () => latencyRows().map((row) => ({
name: row.row_key.replace("t#", "").replace("usr_", ""),
data: (row.columns.lat["del#row"] ?? []).map(({ timestamp, value: { F64: bytes } }) => ({
x: new Date(timestamp / 1000 / 1000),
y: bytes,
}))
})).filter(({ data }) => data.length > 0);
const writeLatency = () => extractTimeseries(tableStatsMap(), "lat#write#batch");
const rowReadLatency = () => extractTimeseries(tableStatsMap(), "lat#read#row");
const prefixLatency = () => extractTimeseries(tableStatsMap(), "lat#read#pfx");
const rowDeleteLatency = () => extractTimeseries(tableStatsMap(), "lat#del#row");
const diskUsage = () => extractTimeseries(tableStatsMap(), "stats#du");
const segmentCount = () => extractTimeseries(tableStatsMap(), "stats#seg_cnt");
const rowCount = () => extractTimeseries(tableStatsMap(), "stats#row_cnt");
const cellCount = () => extractTimeseries(tableStatsMap(), "stats#cell_cnt");
const gcDeleteCount = () => extractTimeseries(tableStatsMap(), "gc#del_cnt");

onMount(() => {
setTimeout(() => window.location.reload(), 60 * 1000)
Expand All @@ -288,7 +243,7 @@ function App() {
<div class="text-center text-xl text-white">
Smoltable
</div>
<div class="grid grid-cols-2 gap-3">
<div class="grid sm:grid-cols-2 gap-3">
<LineChart
title="CPU usage (system)"
yFormatter={(n) => `${Math.round(n)} %`}
Expand Down Expand Up @@ -323,7 +278,7 @@ function App() {
title="Disk usage"
yFormatter={prettyBytes}
series={[
...tablesDiskUsage(),
...diskUsage(),
]}
/>
<LineChart
Expand All @@ -338,7 +293,7 @@ function App() {
return `${(x / 1_000 / 1_000)}M`;
}}
series={[
...segmentCounts(),
...segmentCount(),
]}
/>
<LineChart
Expand All @@ -353,7 +308,7 @@ function App() {
return `${(x / 1_000 / 1_000)}M`;
}}
series={[
...rowCounts(),
...rowCount(),
]}
/>
<LineChart
Expand All @@ -368,7 +323,7 @@ function App() {
return `${(x / 1_000 / 1_000)}M`;
}}
series={[
...cellCounts(),
...cellCount(),
]}
/>
<LineChart
Expand All @@ -384,12 +339,12 @@ function App() {
return `${(x / 1000 / 1000).toFixed(2)} s`
}}
series={[
...tablesWriteLatency(),
...writeLatency(),
]}
/>
<LineChart
alwaysShowLegend
title="Point read latency"
title="Row read latency"
yFormatter={x => {
if (x < 1_000) {
return `${x} µs`
Expand All @@ -400,12 +355,12 @@ function App() {
return `${(x / 1000 / 1000).toFixed(2)} s`
}}
series={[
...tablesPointReadLatency(),
...rowReadLatency(),
]}
/>
<LineChart
alwaysShowLegend
title="Scan latency"
title="Prefix scan latency"
yFormatter={x => {
if (x < 1_000) {
return `${x} µs`
Expand All @@ -416,7 +371,7 @@ function App() {
return `${(x / 1000 / 1000).toFixed(2)} s`
}}
series={[
...tablesPrefixLatency(),
...prefixLatency(),
]}
/>
<LineChart
Expand All @@ -432,7 +387,23 @@ function App() {
return `${(x / 1000 / 1000).toFixed(2)} s`
}}
series={[
...tablesDeletesLatency(),
...rowDeleteLatency(),
]}
/>
<LineChart
alwaysShowLegend
title="Cell GC delete count"
yFormatter={x => {
if (x < 1_000) {
return x;
}
if (x < 1_000_000) {
return `${(x / 1_000)}k`;
}
return `${(x / 1_000 / 1_000)}M`;
}}
series={[
...gcDeleteCount(),
]}
/>
</div>
Expand Down
38 changes: 24 additions & 14 deletions src/api/delete_row.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,16 @@ use actix_web::{
};
use serde::Deserialize;
use serde_json::json;
use std::ops::Deref;

#[derive(Debug, Deserialize)]
pub struct Input {
row_key: String,
// column_filter: Option<ColumnKey>,
}

// TODO: change input format
// TODO: change input format to Vec, atomic multi-row deletes...?

#[delete("/v1/table/{name}/row")]
#[delete("/v1/table/{name}/rows")]
pub async fn handler(
path: Path<String>,
app_state: web::Data<AppState>,
Expand Down Expand Up @@ -62,18 +61,29 @@ pub async fn handler(
None
} else {
Some(micros_total / count as u128)
};
}
.unwrap_or_default();

TableWriter::write_raw(
app_state.metrics_table.deref().clone(),
&RowWriteItem {
row_key: format!("t#{table_name}"),
cells: vec![ColumnWriteItem {
column_key: ColumnKey::try_from("lat:del#row").expect("should be column key"),
timestamp: None,
value: CellValue::F64(micros_total as f64),
}],
},
TableWriter::write_batch(
table.metrics.clone(),
&[
RowWriteItem {
row_key: "lat#del#row".to_string(),
cells: vec![ColumnWriteItem {
column_key: ColumnKey::try_from("value").expect("should be column key"),
timestamp: None,
value: CellValue::F64(micros_total as f64),
}],
},
RowWriteItem {
row_key: "lat#del#cell".to_string(),
cells: vec![ColumnWriteItem {
column_key: ColumnKey::try_from("value").expect("should be column key"),
timestamp: None,
value: CellValue::F64(micros_per_item as f64),
}],
},
],
)
.ok();

Expand Down
13 changes: 6 additions & 7 deletions src/api/get_rows.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ use actix_web::{
};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::ops::Deref;

#[derive(Debug, Deserialize, Serialize)]
struct Input {
Expand Down Expand Up @@ -64,16 +63,16 @@ pub async fn handler(
Some(micros_total / result.rows.len() as u128)
};

TableWriter::write_raw(
app_state.metrics_table.deref().clone(),
&RowWriteItem {
row_key: format!("t#{table_name}"),
TableWriter::write_batch(
table.metrics.clone(),
&[RowWriteItem {
row_key: "lat#read#row".to_string(),
cells: vec![ColumnWriteItem {
column_key: ColumnKey::try_from("lat:r#row").expect("should be column key"),
column_key: ColumnKey::try_from("value").expect("should be column key"),
timestamp: None,
value: CellValue::F64(micros_per_row.unwrap_or_default() as f64),
}],
},
}],
)
.ok();

Expand Down
13 changes: 6 additions & 7 deletions src/api/prefix.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ use actix_web::{
HttpResponse,
};
use serde_json::json;
use std::ops::Deref;

#[post("/v1/table/{name}/prefix")]
pub async fn handler(
Expand Down Expand Up @@ -58,16 +57,16 @@ pub async fn handler(
Some(micros_total / result.rows.len() as u128)
};

TableWriter::write_raw(
app_state.metrics_table.deref().clone(),
&RowWriteItem {
row_key: format!("t#{table_name}"),
TableWriter::write_batch(
table.metrics.clone(),
&[RowWriteItem {
row_key: "lat#read#pfx".to_string(),
cells: vec![ColumnWriteItem {
column_key: ColumnKey::try_from("lat:r#pfx").expect("should be column key"),
column_key: ColumnKey::try_from("value").expect("should be column key"),
timestamp: None,
value: CellValue::F64(micros_total as f64),
}],
},
}],
)
.ok();

Expand Down
Loading

0 comments on commit e00fa33

Please sign in to comment.