
Commit 3da8fe4

use the trace_metric crate
1 parent 7c41bde commit 3da8fe4

16 files changed, +70 -188 lines

Cargo.lock (+4)

Generated file; the diff is not rendered by default.

analytic_engine/Cargo.toml (+1)

@@ -46,6 +46,7 @@ table_engine = { workspace = true }
 table_kv = { workspace = true }
 tempfile = { workspace = true, optional = true }
 tokio = { workspace = true }
+trace_metric = { workspace = true }
 wal = { workspace = true }
 xorfilter-rs = { workspace = true }

analytic_engine/src/instance/flush_compaction.rs (+2 -1)

@@ -28,6 +28,7 @@ use log::{debug, error, info};
 use snafu::{Backtrace, ResultExt, Snafu};
 use table_engine::{predicate::Predicate, table::Result as TableResult};
 use tokio::sync::oneshot;
+use trace_metric::Collector;
 use wal::manager::WalLocation;
 
 use crate::{
@@ -855,7 +856,7 @@ impl SpaceStore {
         let sequence = table_data.last_sequence();
         let mut builder = MergeBuilder::new(MergeConfig {
             request_id,
-            metrics_collector: None,
+            metrics_collector: Collector::new("compaction".to_string()),
             // no need to set deadline for compaction
             deadline: None,
             space_id,
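
The caller-side shape of this change is the same across compaction, reads, tests, and benchmarks: instead of passing `Option<ReadMetricsCollector>` (usually `None`), every `MergeConfig` now carries a `trace_metric::Collector` built with a name. A minimal sketch, assuming only the `Collector::new(String)` constructor that appears in these hunks; the helper function below is hypothetical, not part of the commit.

```rust
// Sketch only: `Collector::new` is taken from the hunks above; the helper
// name `collector_for_job` is hypothetical.
use trace_metric::Collector;

fn collector_for_job(job: &str) -> Collector {
    // Background jobs label the collector with the job kind ("compaction"),
    // while tests and benchmarks in this commit pass an empty name.
    Collector::new(job.to_string())
}

fn main() {
    let _compaction_collector = collector_for_job("compaction");
}
```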

analytic_engine/src/instance/read.rs (+3 -2)

@@ -20,9 +20,10 @@ use table_engine::{
     stream::{
         self, ErrWithSource, PartitionedStreams, RecordBatchStream, SendableRecordBatchStream,
     },
-    table::{Metric, ReadRequest},
+    table::ReadRequest,
 };
 use tokio::sync::mpsc::{self, Receiver};
+use trace_metric::Metric;
 
 use crate::{
     instance::Instance,
@@ -179,7 +180,7 @@ impl Instance {
         for read_view in read_views {
             let merge_config = MergeConfig {
                 request_id: request.request_id,
-                metrics_collector: Some(request.metrics_collector.clone()),
+                metrics_collector: request.metrics_collector.clone(),
                 deadline: request.opts.deadline,
                 space_id: table_data.space_id,
                 table_id: table_data.id,

analytic_engine/src/row_iter/merge.rs (+30 -60)

@@ -20,12 +20,10 @@ use common_types::{
 };
 use common_util::{define_result, error::GenericError};
 use futures::{future::try_join_all, StreamExt};
-use log::{debug, info, trace};
+use log::{debug, trace};
 use snafu::{ensure, Backtrace, ResultExt, Snafu};
-use table_engine::{
-    predicate::PredicateRef,
-    table::{Metric, ReadMetricsCollector, TableId},
-};
+use table_engine::{predicate::PredicateRef, table::TableId};
+use trace_metric::{Collector, TracedMetrics};
 
 use crate::{
     row_iter::{
@@ -86,7 +84,7 @@ define_result!(Error);
 #[derive(Debug)]
 pub struct MergeConfig<'a> {
     pub request_id: RequestId,
-    pub metrics_collector: Option<ReadMetricsCollector>,
+    pub metrics_collector: Collector,
     /// None for background jobs, such as: compaction
     pub deadline: Option<Instant>,
     pub space_id: SpaceId,
@@ -231,8 +229,12 @@ impl<'a> MergeBuilder<'a> {
             self.ssts,
             self.config.merge_iter_options,
             self.config.reverse,
-            Metrics::new(self.memtables.len(), sst_streams_num, sst_ids),
-            self.config.metrics_collector,
+            Metrics::new(
+                self.memtables.len(),
+                sst_streams_num,
+                sst_ids,
+                self.config.metrics_collector.clone(),
+            ),
         ))
     }
 }
@@ -562,28 +564,44 @@ impl Ord for HeapBufferedStream {
 }
 
 /// Metrics for merge iterator.
+#[derive(TracedMetrics)]
 pub struct Metrics {
+    #[metric(counter)]
     num_memtables: usize,
+    #[metric(counter)]
     num_ssts: usize,
     sst_ids: Vec<FileId>,
     /// Total rows collected using fetch_rows_from_one_stream().
+    #[metric(counter)]
     total_rows_fetch_from_one: usize,
     /// Times to fetch rows from one stream.
+    #[metric(counter)]
     times_fetch_rows_from_one: usize,
    /// Times to fetch one row from multiple stream.
+    #[metric(counter)]
     times_fetch_row_from_multiple: usize,
     /// Create time of the metrics.
     create_at: Instant,
     /// Init time cost of the metrics.
+    #[metric(elapsed)]
     init_duration: Duration,
     /// Scan time cost of the metrics.
+    #[metric(elapsed)]
     scan_duration: Duration,
     /// Scan count
+    #[metric(counter)]
     scan_count: usize,
+    #[metric(collector)]
+    metrics_collector: Collector,
 }
 
 impl Metrics {
-    fn new(num_memtables: usize, num_ssts: usize, sst_ids: Vec<FileId>) -> Self {
+    fn new(
+        num_memtables: usize,
+        num_ssts: usize,
+        sst_ids: Vec<FileId>,
+        collector: Collector,
+    ) -> Self {
         Self {
             num_memtables,
             num_ssts,
@@ -595,39 +613,9 @@ impl Metrics {
             init_duration: Duration::default(),
             scan_duration: Duration::default(),
             scan_count: 0,
+            metrics_collector: collector,
         }
     }
-
-    fn collect(&self, collector: &ReadMetricsCollector) {
-        // TODO: maybe we can define a macro to generate the code.
-        collector.collect(Metric::counter(
-            "num_memtables".to_string(),
-            self.num_memtables,
-        ));
-
-        collector.collect(Metric::counter("num_ssts".to_string(), self.num_ssts));
-        collector.collect(Metric::counter(
-            "times_fetch_rows_from_one".to_string(),
-            self.times_fetch_rows_from_one,
-        ));
-        collector.collect(Metric::counter(
-            "times_rows_fetch_from_one".to_string(),
-            self.times_fetch_row_from_multiple,
-        ));
-        collector.collect(Metric::counter(
-            "total_rows_fetch_from_one".to_string(),
-            self.total_rows_fetch_from_one,
-        ));
-        collector.collect(Metric::elapsed(
-            "init_duration".to_string(),
-            self.init_duration,
-        ));
-        collector.collect(Metric::elapsed(
-            "scan_duration".to_string(),
-            self.scan_duration,
-        ));
-        collector.collect(Metric::counter("scan_count".to_string(), self.scan_count));
-    }
 }
 
 impl fmt::Debug for Metrics {
@@ -667,7 +655,6 @@ pub struct MergeIterator {
     iter_options: IterOptions,
     reverse: bool,
     metrics: Metrics,
-    metrics_collector: Option<ReadMetricsCollector>,
 }
 
 impl MergeIterator {
@@ -681,7 +668,6 @@ impl MergeIterator {
         iter_options: IterOptions,
         reverse: bool,
         metrics: Metrics,
-        metrics_collector: Option<ReadMetricsCollector>,
     ) -> Self {
         let heap_cap = streams.len();
         let record_batch_builder =
@@ -699,7 +685,6 @@ impl MergeIterator {
             iter_options,
             reverse,
             metrics,
-            metrics_collector,
         }
     }
 
@@ -893,19 +878,6 @@ impl MergeIterator {
     }
 }
 
-impl Drop for MergeIterator {
-    fn drop(&mut self) {
-        if let Some(collector) = &self.metrics_collector {
-            self.metrics.collect(collector);
-        }
-
-        info!(
-            "Merge iterator dropped, table_id:{:?}, request_id:{}, metrics:{:?}, iter_options:{:?},",
-            self.table_id, self.request_id, self.metrics, self.iter_options,
-        );
-    }
-}
-
 #[async_trait]
 impl RecordBatchWithKeyIterator for MergeIterator {
     type Error = Error;
@@ -968,8 +940,7 @@ mod tests {
             Vec::new(),
             IterOptions::default(),
             false,
-            Metrics::new(1, 1, vec![]),
-            None,
+            Metrics::new(1, 1, vec![], Collector::new("".to_string())),
         );
 
         check_iterator(
@@ -1022,8 +993,7 @@ mod tests {
             Vec::new(),
             IterOptions::default(),
             true,
-            Metrics::new(1, 1, vec![]),
-            None,
+            Metrics::new(1, 1, vec![], Collector::new("".to_string())),
         );
 
         check_iterator(
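
The hand-written `collect()` method and the `Drop` impl removed above are what the `#[derive(TracedMetrics)]` attributes now stand in for. Below is a minimal sketch of the annotated-struct pattern, assuming (this diff does not show it) that the derive reports each `#[metric(...)]` field through the embedded `#[metric(collector)]` field when the struct is dropped; the struct and field names are illustrative, and only `Collector`, `TracedMetrics`, and the attribute names come from the hunks above.

```rust
// Illustrative sketch of the derive-based pattern introduced in merge.rs.
// Assumption: the derive flushes annotated fields into the collector on drop,
// which is consistent with the manual `collect()`/`Drop` code being removed.
use std::time::Duration;

use trace_metric::{Collector, TracedMetrics};

#[derive(TracedMetrics)]
struct ExampleScanMetrics {
    /// Reported as a counter metric.
    #[metric(counter)]
    rows_fetched: usize,
    /// Reported as an elapsed-time metric.
    #[metric(elapsed)]
    scan_duration: Duration,
    /// Destination for the generated metric reports.
    #[metric(collector)]
    collector: Collector,
}

fn scan_with_metrics(collector: Collector) {
    let mut metrics = ExampleScanMetrics {
        rows_fetched: 0,
        scan_duration: Duration::default(),
        collector,
    };

    // ... perform the scan, updating the fields along the way ...
    metrics.rows_fetched += 128;
    metrics.scan_duration += Duration::from_millis(5);

    // When `metrics` goes out of scope here, the derive is expected to push
    // the annotated fields into `collector` (assumption, see the lead-in).
}
```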

analytic_engine/src/table/mod.rs (+4 -3)

@@ -17,11 +17,12 @@ use table_engine::{
     stream::{PartitionedStreams, SendableRecordBatchStream},
     table::{
         AlterOptions, AlterSchema, AlterSchemaRequest, Compact, Flush, FlushRequest, Get,
-        GetInvalidPrimaryKey, GetNullPrimaryKey, GetRequest, ReadMetricsCollector, ReadOptions,
-        ReadOrder, ReadRequest, Result, Scan, Table, TableId, TableStats, Write, WriteRequest,
+        GetInvalidPrimaryKey, GetNullPrimaryKey, GetRequest, ReadOptions, ReadOrder, ReadRequest,
+        Result, Scan, Table, TableId, TableStats, Write, WriteRequest,
     },
 };
 use tokio::sync::oneshot;
+use trace_metric::Collector;
 
 use self::data::TableDataRef;
 use crate::{
@@ -179,7 +180,7 @@ impl Table for TableImpl {
             projected_schema: request.projected_schema,
             predicate,
             order: ReadOrder::None,
-            metrics_collector: ReadMetricsCollector::new(),
+            metrics_collector: Collector::new("".to_string()),
         };
         let mut batch_stream = self
             .read(read_request)

analytic_engine/src/tests/table.rs (+3 -5)

@@ -20,11 +20,9 @@ use table_engine::{
     self,
     engine::{CreateTableRequest, TableState},
     predicate::Predicate,
-    table::{
-        GetRequest, ReadMetricsCollector, ReadOptions, ReadOrder, ReadRequest, SchemaId, TableId,
-        TableSeq,
-    },
+    table::{GetRequest, ReadOptions, ReadOrder, ReadRequest, SchemaId, TableId, TableSeq},
 };
+use trace_metric::Collector;
 
 use crate::{table_options, tests::row_util};
 
@@ -188,7 +186,7 @@ pub fn new_read_all_request_with_order(
         projected_schema: ProjectedSchema::no_projection(schema),
         predicate: Arc::new(Predicate::empty()),
         order,
-        metrics_collector: ReadMetricsCollector::new(),
+        metrics_collector: Collector::new("".to_string()),
     }
 }

benchmarks/Cargo.toml (+1)

@@ -32,6 +32,7 @@ snafu = { workspace = true }
 table_engine = { workspace = true }
 table_kv = { workspace = true }
 tokio = { workspace = true }
+trace_metric = { workspace = true }
 wal = { workspace = true }
 zstd = { workspace = true }

benchmarks/src/merge_memtable_bench.rs (+2 -1)

@@ -35,6 +35,7 @@ use common_util::runtime::Runtime;
 use log::info;
 use object_store::{LocalFileSystem, ObjectStoreRef};
 use table_engine::{predicate::Predicate, table::TableId};
+use trace_metric::Collector;
 
 use crate::{config::MergeMemTableBenchConfig, util};
 
@@ -142,7 +143,7 @@ impl MergeMemTableBench {
         let store_picker: ObjectStorePickerRef = Arc::new(self.store.clone());
         let mut builder = MergeBuilder::new(MergeConfig {
             request_id,
-            metrics_collector: None,
+            metrics_collector: Collector::new("".to_string()),
             deadline: None,
             space_id,
             table_id,

benchmarks/src/merge_sst_bench.rs (+2 -1)

@@ -29,6 +29,7 @@ use log::info;
 use object_store::{LocalFileSystem, ObjectStoreRef};
 use table_engine::{predicate::Predicate, table::TableId};
 use tokio::sync::mpsc::{self, UnboundedReceiver};
+use trace_metric::Collector;
 
 use crate::{config::MergeSstBenchConfig, util};
 
@@ -125,7 +126,7 @@ impl MergeSstBench {
         let store_picker: ObjectStorePickerRef = Arc::new(self.store.clone());
         let mut builder = MergeBuilder::new(MergeConfig {
             request_id,
-            metrics_collector: None,
+            metrics_collector: Collector::new("".to_string()),
             deadline: None,
             space_id,
             table_id,

benchmarks/src/sst_tools.rs (+2 -1)

@@ -33,6 +33,7 @@ use object_store::{LocalFileSystem, ObjectStoreRef, Path};
 use serde::Deserialize;
 use table_engine::{predicate::Predicate, table::TableId};
 use tokio::sync::mpsc;
+use trace_metric::Collector;
 
 use crate::{config::BenchPredicate, util};
 
@@ -220,7 +221,7 @@ pub async fn merge_sst(config: MergeSstConfig, runtime: Arc<Runtime>) {
 
     let mut builder = MergeBuilder::new(MergeConfig {
         request_id,
-        metrics_collector: None,
+        metrics_collector: Collector::new("".to_string()),
         deadline: None,
         space_id,
         table_id,

system_catalog/Cargo.toml (+1)

@@ -24,3 +24,4 @@ prost = { workspace = true }
 snafu = { workspace = true }
 table_engine = { workspace = true }
 tokio = { workspace = true }
+trace_metric = { workspace = true }
