Simple ad-hoc solution for plot discontinuities #7184

Merged · 4 commits · Aug 14, 2024
Changes from 3 commits
61 changes: 0 additions & 61 deletions crates/store/re_entity_db/tests/clear.rs
@@ -390,64 +390,3 @@ fn clears_respect_index_order() -> anyhow::Result<()> {

    Ok(())
}

#[test]
fn clear_and_gc() -> anyhow::Result<()> {
    if true {
        // TODO(#6552): Keeping this around for now so we don't forget about it, but this cannot work with
        // read-time clears.
        // We will replace this with a dedicated store API to remove an entity path in its entirety.
        return Ok(());
    }

    re_log::setup_logging();

    let mut db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));

    let timepoint = TimePoint::default().with(Timeline::new_sequence("blueprint"), 0);
    let entity_path: EntityPath = "space_view".into();

    // Insert a component, then clear it, then GC.
    {
        // EntityTree is Empty when we start
        assert!(db.tree().is_empty(db.store()));

        let point = MyPoint::new(1.0, 2.0);

        let chunk = Chunk::builder(entity_path.clone())
            .with_component_batch(RowId::new(), timepoint.clone(), &[point] as _)
            .build()?;
        db.add_chunk(&Arc::new(chunk))?;
        eprintln!("{}", db.store());

        db.gc_everything_but_the_latest_row_on_non_default_timelines();

        let stats = db.store().stats();
        assert_eq!(stats.temporal_chunks.num_rows, 1);

        let chunk = Chunk::builder(entity_path.clone())
            .with_component_batches(
                RowId::new(),
                timepoint.clone(),
                Clear::recursive()
                    .as_component_batches()
                    .iter()
                    .map(|b| b.as_ref()),
            )
            .build()?;

        db.add_chunk(&Arc::new(chunk))?;
        eprintln!("{}", db.store());

        db.gc_everything_but_the_latest_row_on_non_default_timelines();

        // No rows should remain because the table should have been purged
        let stats = db.store().stats();
        assert_eq!(stats.temporal_chunks.num_rows, 0);

        // EntityTree should be empty again when we end since everything was GC'd
        assert!(db.tree().is_empty(db.store()));
    }

    Ok(())
}
@@ -1,9 +1,10 @@
use itertools::Itertools;

use re_log_types::TimeInt;
use re_chunk_store::{RangeQuery, RowId};
use re_log_types::{EntityPath, TimeInt};
use re_space_view::range_with_blueprint_resolved_data;
use re_types::archetypes;
use re_types::components::AggregationPolicy;
use re_types::components::{AggregationPolicy, ClearIsRecursive};
use re_types::external::arrow2::datatypes::DataType as ArrowDatatype;
use re_types::{
archetypes::SeriesLine,
@@ -393,9 +394,26 @@ impl SeriesLineSystem {
            lhs_time_max <= rhs_time_min
        });

        let has_discontinuities = {
            // Find all clears that may apply, in order to render discontinuities properly.

            re_tracing::profile_scope!("discontinuities");

            let cleared_indices = collect_recursive_clears(ctx, &query, entity_path);
            let has_discontinuities = !cleared_indices.is_empty();
            points.extend(cleared_indices.into_iter().map(|(data_time, _)| {
                let mut point = default_point.clone();
                point.time = data_time.as_i64();
                point.attrs.kind = PlotSeriesKind::Clear;
                point
            }));

            has_discontinuities
        };

        // This is _almost_ sorted already: all the individual chunks are sorted, but we still
        // have to deal with overlap chunks.
        if !all_chunks_sorted_and_not_overlapped {
        // have to deal with overlapped chunks, or discontinuities introduced by query-time clears.
        if !all_chunks_sorted_and_not_overlapped || has_discontinuities {
            re_tracing::profile_scope!("sort");
            points.sort_by_key(|p| p.time);
        }
@@ -413,3 +431,54 @@ impl SeriesLineSystem {
        }
    }
}

fn collect_recursive_clears(
    ctx: &ViewContext<'_>,
    query: &RangeQuery,
    entity_path: &EntityPath,
) -> Vec<(TimeInt, RowId)> {
    re_tracing::profile_function!();

    let mut cleared_indices = Vec::new();

    let mut clear_entity_path = entity_path.clone();
    loop {
        let results = ctx.recording().query_caches().range(
            ctx.recording_store(),
            query,
            &clear_entity_path,
            [ClearIsRecursive::name()],
        );

        let empty = Vec::new();
        let chunks = results
            .components
            .get(&ClearIsRecursive::name())
            .unwrap_or(&empty);

        for chunk in chunks {
            cleared_indices.extend(
                itertools::izip!(
                    chunk.iter_component_indices(&query.timeline(), &ClearIsRecursive::name()),
                    chunk
                        .iter_component::<ClearIsRecursive>()
                        .map(|is_recursive| {
                            is_recursive.as_slice().first().map_or(false, |v| *v.0)
                        })
                )
                .filter_map(|(index, is_recursive)| {
                    let is_recursive = is_recursive || clear_entity_path == *entity_path;
                    is_recursive.then_some(index)
                }),
            );
        }

        let Some(parent_entity_path) = clear_entity_path.parent() else {
            break;
        };

        clear_entity_path = parent_entity_path;
    }

    cleared_indices
}
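In short, every recursive clear found by collect_recursive_clears becomes a synthetic plot point whose kind is PlotSeriesKind::Clear, and the extra sort slots those markers into the series at the right time so the renderer can break the line there. A minimal, self-contained sketch of that idea, using hypothetical stand-in types rather than the actual Rerun ones:

#[derive(Clone, Copy, Debug)]
enum PointKind {
    Scalar(f64),
    Clear, // marks a discontinuity: the line must not connect across it
}

#[derive(Clone, Copy, Debug)]
struct PlotPoint {
    time: i64,
    kind: PointKind,
}

/// Sorts the points by time, then splits them into contiguous segments at every `Clear`.
fn split_at_clears(mut points: Vec<PlotPoint>) -> Vec<Vec<PlotPoint>> {
    // Clear markers collected from parent entities arrive out of order, hence the sort.
    points.sort_by_key(|p| p.time);

    let mut segments = vec![Vec::new()];
    for p in points {
        match p.kind {
            PointKind::Scalar(_) => segments.last_mut().unwrap().push(p),
            PointKind::Clear => {
                if !segments.last().unwrap().is_empty() {
                    segments.push(Vec::new());
                }
            }
        }
    }
    segments.retain(|s| !s.is_empty());
    segments
}

fn main() {
    let points = vec![
        PlotPoint { time: 0, kind: PointKind::Scalar(1.0) },
        PlotPoint { time: 2, kind: PointKind::Scalar(3.0) },
        PlotPoint { time: 1, kind: PointKind::Clear }, // out-of-order clear marker
        PlotPoint { time: 3, kind: PointKind::Scalar(4.0) },
    ];
    let segments = split_at_clears(points);
    assert_eq!(segments.len(), 2); // the clear at t=1 breaks the series in two
    println!("{segments:?}");
}

The real code keeps everything in the single points vector and leaves the splitting to the downstream handling of PlotSeriesKind::Clear; the sketch only illustrates why the extra sort is needed once clears from parent entities are mixed in.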
7 changes: 3 additions & 4 deletions tests/python/release_checklist/check_scalar_clears.py
@@ -6,10 +6,7 @@

import rerun as rr

# TODO(#6552): bring this back one we have read-time range clears working.
README = """\
**⚠⚠⚠⚠⚠⚠⚠⚠⚠ [THIS TEST IS TEMPORARILY BROKEN ON PURPOSE (CLICK HERE)](https://github.com/rerun-io/rerun/pull/6586) ⚠⚠⚠⚠⚠⚠⚠⚠⚠**

# Scalar clears

This checks whether scalar time series correctly behave with `Clear`s.
@@ -22,7 +19,9 @@


def log_readme() -> None:
    rr.log("readme", rr.TextDocument(README, media_type=rr.MediaType.MARKDOWN), static=True)
    rr.log(
        "readme", rr.TextDocument(README, media_type=rr.MediaType.MARKDOWN), static=True
    )


def log_plots() -> None:
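For context on what the restored check exercises: it logs scalar time series interleaved with Clears and verifies that the plots show breaks instead of connecting across the cleared region. A rough equivalent using the Rust logging SDK (the entity path, timeline name, values, and break frame are made up for illustration; the actual check is the Python script above):

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let rec = rerun::RecordingStreamBuilder::new("scalar_clears_sketch").spawn()?;

    for frame in 0..64i64 {
        rec.set_time_sequence("frame", frame);

        if frame == 32 {
            // A clear logged at this frame empties the entity from here on (until new
            // data arrives), so the plot should show a break instead of a line drawn
            // across the cleared region.
            rec.log("plots/line", &rerun::Clear::recursive())?;
        } else {
            rec.log("plots/line", &rerun::Scalar::new((frame as f64 * 0.2).sin()))?;
        }
    }

    Ok(())
}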