Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: warn when a query's timestamp exceeds the table's TTL #1054

Merged
merged 12 commits into from
Jul 11, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 11 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,10 @@ lazy_static = "1.4.0"
log = "0.4"
logger = { path = "components/logger" }
lru = "0.7.6"
influxql-logical-planner = { git = "https://github.com/CeresDB/influxql", rev = "935e037a5ad6eb142a93f3e9eb321ee72e28cbad", package = "iox_query_influxql" }
influxql-parser = { git = "https://github.com/CeresDB/influxql", rev = "935e037a5ad6eb142a93f3e9eb321ee72e28cbad", package = "influxdb_influxql_parser" }
influxql-query = { git = "https://github.com/CeresDB/influxql", rev = "935e037a5ad6eb142a93f3e9eb321ee72e28cbad", package = "iox_query" }
influxql-schema = { git = "https://github.com/CeresDB/influxql", rev = "935e037a5ad6eb142a93f3e9eb321ee72e28cbad", package = "schema" }
influxql-logical-planner = { git = "https://github.com/CeresDB/influxql", rev = "b65a125b9cdfa3121a3c8843bc48441b91049e31", package = "iox_query_influxql" }
influxql-parser = { git = "https://github.com/CeresDB/influxql", rev = "b65a125b9cdfa3121a3c8843bc48441b91049e31", package = "influxdb_influxql_parser" }
influxql-query = { git = "https://github.com/CeresDB/influxql", rev = "b65a125b9cdfa3121a3c8843bc48441b91049e31", package = "iox_query" }
influxql-schema = { git = "https://github.com/CeresDB/influxql", rev = "b65a125b9cdfa3121a3c8843bc48441b91049e31", package = "schema" }
interpreters = { path = "interpreters" }
itertools = "0.10.5"
meta_client = { path = "meta_client" }
Expand Down
2 changes: 1 addition & 1 deletion analytic_engine/src/compaction/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

use std::{collections::HashMap, fmt, str::FromStr, sync::Arc};

use common_types::COMPACTION_STRATEGY;
use common_util::config::{ReadableSize, TimeUnit};
use serde::{Deserialize, Serialize};
use snafu::{ensure, Backtrace, GenerateBacktrace, ResultExt, Snafu};
Expand All @@ -13,7 +14,6 @@ use crate::{
compaction::picker::{CommonCompactionPicker, CompactionPickerRef},
sst::file::{FileHandle, Level},
table::data::TableDataRef,
table_options::COMPACTION_STRATEGY,
};

mod metrics;
Expand Down
39 changes: 13 additions & 26 deletions analytic_engine/src/table_options.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,32 +5,24 @@
use std::{collections::HashMap, string::ToString, time::Duration};

use ceresdbproto::manifest as manifest_pb;
use common_types::time::Timestamp;
use common_types::{
time::Timestamp, ARENA_BLOCK_SIZE, COMPACTION_STRATEGY, COMPRESSION, ENABLE_TTL,
NUM_ROWS_PER_ROW_GROUP, OPTION_KEY_ENABLE_TTL, SEGMENT_DURATION, STORAGE_FORMAT, TTL,
UPDATE_MODE, WRITE_BUFFER_SIZE,
};
use common_util::{
config::{ReadableDuration, ReadableSize, TimeUnit},
define_result,
time::DurationExt,
time::{parse_duration, DurationExt},
};
use datafusion::parquet::basic::Compression as ParquetCompression;
use serde::{Deserialize, Serialize};
use snafu::{Backtrace, GenerateBacktrace, OptionExt, ResultExt, Snafu};
use table_engine::OPTION_KEY_ENABLE_TTL;

use crate::compaction::{
self, CompactionStrategy, SizeTieredCompactionOptions, TimeWindowCompactionOptions,
};

pub const SEGMENT_DURATION: &str = "segment_duration";
pub const ENABLE_TTL: &str = OPTION_KEY_ENABLE_TTL;
pub const TTL: &str = "ttl";
pub const ARENA_BLOCK_SIZE: &str = "arena_block_size";
pub const WRITE_BUFFER_SIZE: &str = "write_buffer_size";
pub const COMPACTION_STRATEGY: &str = "compaction_strategy";
pub const NUM_ROWS_PER_ROW_GROUP: &str = "num_rows_per_row_group";
pub const UPDATE_MODE: &str = "update_mode";
pub const COMPRESSION: &str = "compression";
pub const STORAGE_FORMAT: &str = "storage_format";

const UPDATE_MODE_OVERWRITE: &str = "OVERWRITE";
const UPDATE_MODE_APPEND: &str = "APPEND";
const COMPRESSION_UNCOMPRESSED: &str = "UNCOMPRESSED";
Expand Down Expand Up @@ -64,8 +56,11 @@ const MAX_NUM_ROWS_PER_ROW_GROUP: usize = 10_000_000;
#[derive(Debug, Snafu)]
#[allow(clippy::enum_variant_names)]
pub enum Error {
#[snafu(display("Failed to parse duration, err:{}.\nBacktrace:\n{}", err, backtrace))]
ParseDuration { err: String, backtrace: Backtrace },
#[snafu(display("Failed to parse duration, err:{}.\nBacktrace:\n{}", source, backtrace))]
ParseDuration {
source: common_util::time::Error,
backtrace: Backtrace,
},

#[snafu(display("Failed to parse size, err:{}.\nBacktrace:\n{}", err, backtrace))]
ParseSize { err: String, backtrace: Backtrace },
Expand Down Expand Up @@ -707,15 +702,15 @@ fn merge_table_options(
let mut table_opts = table_old_opts.clone();
if is_create {
if let Some(v) = options.get(SEGMENT_DURATION) {
table_opts.segment_duration = Some(parse_duration(v)?);
table_opts.segment_duration = Some(parse_duration(v).context(ParseDuration)?);
}
if let Some(v) = options.get(UPDATE_MODE) {
table_opts.update_mode = UpdateMode::parse_from(v)?;
}
}

if let Some(v) = options.get(TTL) {
table_opts.ttl = parse_duration(v)?;
table_opts.ttl = parse_duration(v).context(ParseDuration)?;
}
if let Some(v) = options.get(OPTION_KEY_ENABLE_TTL) {
table_opts.enable_ttl = v.parse::<bool>().context(ParseBool)?;
Expand Down Expand Up @@ -744,14 +739,6 @@ fn merge_table_options(
Ok(table_opts)
}

fn parse_duration(v: &str) -> Result<ReadableDuration> {
v.parse::<ReadableDuration>()
.map_err(|err| Error::ParseDuration {
err,
backtrace: Backtrace::generate(),
})
}

fn parse_size(v: &str) -> Result<ReadableSize> {
v.parse::<ReadableSize>().map_err(|err| Error::ParseSize {
err,
Expand Down
4 changes: 2 additions & 2 deletions analytic_engine/src/tests/table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ impl Builder {

pub fn enable_ttl(mut self, enable_ttl: bool) -> Self {
self.create_request.options.insert(
table_engine::OPTION_KEY_ENABLE_TTL.to_string(),
common_types::OPTION_KEY_ENABLE_TTL.to_string(),
enable_ttl.to_string(),
);
self
Expand All @@ -286,7 +286,7 @@ impl Builder {
pub fn ttl(mut self, duration: ReadableDuration) -> Self {
self.create_request
.options
.insert(table_options::TTL.to_string(), duration.to_string());
.insert(common_types::TTL.to_string(), duration.to_string());
self
}

Expand Down
13 changes: 13 additions & 0 deletions common_types/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,18 @@ pub const MAX_SEQUENCE_NUMBER: u64 = u64::MAX;
/// sequence number should starts from 1.
pub const MIN_SEQUENCE_NUMBER: u64 = 0;

/// Enable ttl key
pub const OPTION_KEY_ENABLE_TTL: &str = "enable_ttl";
/// Table option key: duration covered by one storage segment.
pub const SEGMENT_DURATION: &str = "segment_duration";
/// Alias of [`OPTION_KEY_ENABLE_TTL`]; both names refer to the same option.
pub const ENABLE_TTL: &str = OPTION_KEY_ENABLE_TTL;
/// Table option key: time-to-live of rows in the table.
pub const TTL: &str = "ttl";
/// Table option key: block size of the memtable arena allocator.
pub const ARENA_BLOCK_SIZE: &str = "arena_block_size";
/// Table option key: size of the write buffer.
pub const WRITE_BUFFER_SIZE: &str = "write_buffer_size";
/// Table option key: which compaction strategy to use.
pub const COMPACTION_STRATEGY: &str = "compaction_strategy";
/// Table option key: number of rows per row group (e.g. in parquet output).
pub const NUM_ROWS_PER_ROW_GROUP: &str = "num_rows_per_row_group";
/// Table option key: update mode (e.g. overwrite vs append semantics).
pub const UPDATE_MODE: &str = "update_mode";
/// Table option key: compression codec for stored data.
pub const COMPRESSION: &str = "compression";
/// Table option key: on-disk storage format.
pub const STORAGE_FORMAT: &str = "storage_format";

#[cfg(any(test, feature = "test"))]
pub mod tests;
20 changes: 20 additions & 0 deletions common_util/src/time.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,18 @@ use std::{

use chrono::{DateTime, Utc};
use common_types::time::Timestamp;
use snafu::{Backtrace, GenerateBacktrace, Snafu};

use crate::config::ReadableDuration;

/// Errors produced by the time utilities in this module.
#[derive(Debug, Snafu)]
#[allow(clippy::enum_variant_names)]
pub enum Error {
    /// Returned when a string cannot be parsed as a duration; `err` carries
    /// the underlying parser's message (a plain `String`, so it is stored by
    /// value rather than as a `source`).
    #[snafu(display("Failed to parse duration, err:{}.\nBacktrace:\n{}", err, backtrace))]
    ParseDuration { err: String, backtrace: Backtrace },
}

// Defines the module-local `Result<T>` alias bound to this `Error`.
define_result!(Error);

pub trait DurationExt {
/// Convert into u64.
Expand Down Expand Up @@ -78,6 +90,14 @@ pub fn try_to_millis(ts: i64) -> Option<Timestamp> {
None
}

pub fn parse_duration(v: &str) -> Result<ReadableDuration> {
v.parse::<ReadableDuration>()
.map_err(|err| Error::ParseDuration {
err,
backtrace: Backtrace::generate(),
})
}

#[cfg(test)]
mod tests {
use std::thread;
Expand Down
1 change: 1 addition & 0 deletions proxy/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ df_operator = { workspace = true }
futures = { workspace = true }
http = "0.2"
influxdb-line-protocol = "1.0"
influxql-query = { workspace = true }
interpreters = { workspace = true }
itertools = { workspace = true }
lazy_static = { workspace = true }
Expand Down
5 changes: 5 additions & 0 deletions proxy/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,17 @@ pub enum Error {
msg: String,
source: GenericError,
},

#[snafu(display("Query warning, msg:{msg}"))]
QueryMaybeExceedTTL { msg: String },
}

impl Error {
pub fn code(&self) -> StatusCode {
match *self {
Error::ErrNoCause { code, .. } => code,
Error::ErrWithCause { code, .. } => code,
Error::QueryMaybeExceedTTL { .. } => StatusCode::OK,
Error::Internal { .. } | Error::InternalNoCause { .. } => {
StatusCode::INTERNAL_SERVER_ERROR
}
Expand All @@ -50,6 +54,7 @@ impl Error {
let first_line = error_util::remove_backtrace_from_err(&err_string);
format!("{msg}. Caused by: {first_line}")
}
Error::QueryMaybeExceedTTL { msg } => msg.clone(),
}
}
}
Expand Down
Loading