refactor: enable RUF005 (#6434)
hoxbro authored Nov 16, 2024
1 parent c227510 · commit 713d92b
Showing 72 changed files with 216 additions and 221 deletions.
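RUF005 is Ruff's collection-literal-concatenation rule: it flags code that extends a list or tuple by concatenating it with a literal (for example seq + [item]) and recommends rebuilding the collection with iterable unpacking ([*seq, item]) instead, which avoids creating an intermediate sequence and keeps the result's contents explicit. Every hunk below is this mechanical rewrite; enabling the rule presumably also adds RUF005 to the project's Ruff configuration, though that file is not among the hunks shown here. A minimal, self-contained sketch of the before/after shape (illustrative only, not code from this commit):

    base = ["dataframe", "dict"]

    # Before: concatenation with a list literal, flagged by RUF005
    datatypes_old = ["xarray"] + base

    # After: unpack the existing sequence into a new list literal
    datatypes_new = ["xarray", *base]

    # The same rewrite applies to tuple concatenation
    key = ("group", "label")
    cache_key_old = key + ("backend",)
    cache_key_new = (*key, "backend")

    assert datatypes_old == datatypes_new
    assert cache_key_old == cache_key_new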
7 changes: 4 additions & 3 deletions holoviews/core/__init__.py
@@ -40,6 +40,7 @@ def public(obj):
SheetCoordinateSystem, AttrTree]
return any([issubclass(obj, bc) for bc in baseclasses])

_public = list({_k for _k, _v in locals().items() if public(_v)})
__all__ = _public + ["boundingregion", "dimension", "layer", "layout",
"ndmapping", "operation", "options", "sheetcoords", "tree", "element"]
__all__ = [
*{_k for _k, _v in locals().items() if public(_v)},
"boundingregion", "dimension", "layer", "layout", "ndmapping", "operation", "options", "sheetcoords", "tree", "element"
]
10 changes: 3 additions & 7 deletions holoviews/core/accessors.py
@@ -56,9 +56,7 @@ def pipelined_call(*args, **kwargs):

if isinstance(result, Dataset):
result._pipeline = inst_pipeline.instance(
operations=inst_pipeline.operations + [
init_op, call_op
],
operations=[*inst_pipeline.operations, init_op, call_op],
output_type=type(result),
)
elif isinstance(result, MultiDimensionalMapping):
@@ -69,9 +67,7 @@ def pipelined_call(*args, **kwargs):
args=[key],
)
element._pipeline = inst_pipeline.instance(
operations=inst_pipeline.operations + [
init_op, call_op, getitem_op
],
operations=[*inst_pipeline.operations, init_op, call_op, getitem_op],
output_type=type(result),
)
finally:
@@ -431,7 +427,7 @@ def __call__(self, specs=None, **dimensions):
if renames:
data = obj.interface.redim(obj, renames)
transform = self._create_expression_transform(kdims, vdims, list(renames.values()))
transforms = obj._transforms + [transform]
transforms = [*obj._transforms, transform]
clone = obj.clone(data, kdims=kdims, vdims=vdims, transforms=transforms)
if self._obj.dimensions(label='name') == clone.dimensions(label='name'):
# Ensure that plot_id is inherited as long as dimension
16 changes: 7 additions & 9 deletions holoviews/core/data/__init__.py
@@ -206,7 +206,7 @@ def pipelined_fn(*args, **kwargs):
if not in_method:
if isinstance(result, Dataset):
result._pipeline = inst_pipeline.instance(
operations=inst_pipeline.operations + [op],
operations=[*inst_pipeline.operations, op],
output_type=type(result),
)

@@ -219,9 +219,7 @@ def pipelined_fn(*args, **kwargs):
args=[key]
)
element._pipeline = inst_pipeline.instance(
operations=inst_pipeline.operations + [
op, getitem_op
],
operations=[*inst_pipeline.operations, op, getitem_op],
output_type=type(result),
)
finally:
@@ -343,7 +341,7 @@ def __init__(self, data, kdims=None, vdims=None, **kwargs):
kwargs=dict(kwargs, kdims=self.kdims, vdims=self.vdims),
)
self._pipeline = input_pipeline.instance(
operations=input_pipeline.operations + [init_op],
operations=[*input_pipeline.operations, init_op],
output_type=type(self),
)
self._transforms = input_transforms or []
@@ -606,8 +604,8 @@ def select(self, selection_expr=None, selection_specs=None, **selection):

if selection_specs is not None and not isinstance(selection_specs, (list, tuple)):
selection_specs = [selection_specs]
selection = {dim_name: sel for dim_name, sel in selection.items()
if dim_name in self.dimensions()+['selection_mask']}
sel_dims = (*self.dimensions(), 'selection_mask')
selection = {dim: sel for dim, sel in selection.items() if dim in sel_dims}
if (selection_specs and not any(self.matches(sp) for sp in selection_specs)
or (not selection and not selection_expr)):
return self
@@ -811,7 +809,7 @@ def sample(self, samples=None, bounds=None, closest=True, **kwargs):
reindexed = selection.clone(new_type=Dataset, datatype=datatype).reindex(kdims)
selection = tuple(reindexed.columns(kdims+self.vdims).values())

datatype = list(core_util.unique_iterator(self.datatype+['dataframe', 'dict']))
datatype = list(core_util.unique_iterator([*self.datatype, 'dataframe', 'dict']))
return self.clone(selection, kdims=kdims, new_type=new_type,
datatype=datatype)

@@ -1185,7 +1183,7 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True,
Cloned object
"""
if 'datatype' not in overrides:
datatypes = [self.interface.datatype] + self.datatype
datatypes = [self.interface.datatype, *self.datatype]
overrides['datatype'] = list(core_util.unique_iterator(datatypes))

if data is None:
2 changes: 1 addition & 1 deletion holoviews/core/data/array.py
@@ -247,7 +247,7 @@ def assign(cls, dataset, new_data):
idx = dataset.get_dimension_index(d)
data[:, idx] = arr
new_cols = [arr for d, arr in new_data.items() if dataset.get_dimension(d) is None]
return np.column_stack([data]+new_cols)
return np.column_stack([data, *new_cols])


@classmethod
2 changes: 1 addition & 1 deletion holoviews/core/data/grid.py
@@ -122,7 +122,7 @@ def init(cls, eltype, data, kdims, vdims):
if shape[-1] != len(vdims):
raise error('The shape of the value array does not match the number of value dimensions.')
shape = shape[:-1]
if (not expected and shape == (1,)) or (len(set((shape,)+shapes)) == 1 and len(shape) > 1):
if (not expected and shape == (1,)) or (len(shape) > 1 and len({shape, *shapes}) == 1):
# If empty or an irregular mesh
pass
elif len(shape) != len(expected):
2 changes: 1 addition & 1 deletion holoviews/core/data/image.py
@@ -253,7 +253,7 @@ def sample(cls, dataset, samples=None):
if len(samples[0]) == 1:
select = {dataset.kdims[0].name: [s[0] for s in samples]}
return tuple(dataset.select(**select).columns().values())
return [c+(dataset.data[dataset._coord2matrix(c)],) for c in samples]
return [(*c, dataset.data[dataset._coord2matrix(c)]) for c in samples]


@classmethod
6 changes: 3 additions & 3 deletions holoviews/core/data/interface.py
@@ -39,7 +39,7 @@ def __getitem__(self, index):
args=[index],
)
res._pipeline = self.dataset.pipeline.instance(
operations=self.dataset.pipeline.operations + [getitem_op],
operations=[*self.dataset.pipeline.operations, getitem_op],
output_type=type(self.dataset)
)
finally:
@@ -91,7 +91,7 @@ def _perform_getitem(cls, dataset, index):
kdims = [d for d in dims if d in kdims]
vdims = [d for d in dims if d in vdims]

datatypes = util.unique_iterator([dataset.interface.datatype]+dataset.datatype)
datatypes = util.unique_iterator([dataset.interface.datatype, *dataset.datatype])
datatype = [dt for dt in datatypes if dt in Interface.interfaces and
not Interface.interfaces[dt].gridded]
if not datatype: datatype = ['dataframe', 'dictionary']
@@ -118,7 +118,7 @@ def _perform_getitem(cls, dataset, indices):
params = {}
if hasattr(ds, 'bounds'):
params['bounds'] = None
return dataset.clone(selected, datatype=[ds.interface.datatype]+ds.datatype, **params)
return dataset.clone(selected, datatype=[ds.interface.datatype, *ds.datatype], **params)


class Interface(param.Parameterized):
4 changes: 2 additions & 2 deletions holoviews/core/data/multipath.py
@@ -567,8 +567,8 @@ def ensure_ring(geom, values=None):
values = geom

breaks = np.where(np.isnan(geom.astype('float')).sum(axis=1))[0]
starts = [0] + list(breaks+1)
ends = list(breaks-1) + [len(geom)-1]
starts = [0, *(breaks + 1)]
ends = [*(breaks - 1), len(geom) - 1]
zipped = zip(geom[starts], geom[ends], ends, values[starts])
unpacked = tuple(zip(*[(v, i+1) for s, e, i, v in zipped
if (s!=e).any()]))
2 changes: 1 addition & 1 deletion holoviews/core/data/spatialpandas.py
@@ -815,7 +815,7 @@ def to_spatialpandas(data, xdim, ydim, columns=None, geom='point'):
converted['geometry'] = GeoSeries(geom_array)
else:
converted['geometry'] = GeoSeries(single_array([]))
return GeoDataFrame(converted, columns=['geometry']+columns)
return GeoDataFrame(converted, columns=['geometry', *columns])


def to_geom_dict(eltype, data, kdims, vdims, interface=None):
2 changes: 1 addition & 1 deletion holoviews/core/data/xarray.py
@@ -267,7 +267,7 @@ def validate(cls, dataset, vdims=True):
nonmatching = [f'{kd}: {dims}' for kd, dims in irregular[1:]
if set(dims) != set(irregular[0][1])]
if nonmatching:
nonmatching = ['{}: {}'.format(*irregular[0])] + nonmatching
nonmatching = ['{}: {}'.format(*irregular[0]), *nonmatching]
raise DataError("The dimensions of coordinate arrays "
"on irregular data must match. The "
"following kdims were found to have "
10 changes: 5 additions & 5 deletions holoviews/core/dimension.py
@@ -1067,7 +1067,7 @@ def select(self, selection_specs=None, **kwargs):
selection_specs = [selection_specs]

# Apply all indexes applying on this object
vdims = self.vdims+['value'] if self.vdims else []
vdims = [*self.vdims, 'value'] if self.vdims else []
kdims = self.kdims
local_kwargs = {k: v for k, v in kwargs.items()
if k in kdims+vdims}
@@ -1106,14 +1106,14 @@ def select(self, selection_specs=None, **kwargs):
return selection
elif type(selection) is not type(self) and isinstance(selection, Dimensioned):
# Apply the selection on the selected object of a different type
dimensions = selection.dimensions() + ['value']
dimensions = [*selection.dimensions(), 'value']
if any(kw in dimensions for kw in kwargs):
selection = selection.select(selection_specs=selection_specs, **kwargs)
elif isinstance(selection, Dimensioned) and selection._deep_indexable:
# Apply the deep selection on each item in local selection
items = []
for k, v in selection.items():
dimensions = v.dimensions() + ['value']
dimensions = [*v.dimensions(), 'value']
if any(kw in dimensions for kw in kwargs):
items.append((k, v.select(selection_specs=selection_specs, **kwargs)))
else:
@@ -1308,7 +1308,7 @@ class ViewableTree(AttrTree, Dimensioned):
def __init__(self, items=None, identifier=None, parent=None, **kwargs):
if items and all(isinstance(item, Dimensioned) for item in items):
items = self._process_items(items)
params = {p: kwargs.pop(p) for p in list(self.param)+['id', 'plot_id'] if p in kwargs}
params = {p: kwargs.pop(p) for p in [*self.param, 'id', 'plot_id'] if p in kwargs}

AttrTree.__init__(self, items, identifier, parent, **kwargs)
Dimensioned.__init__(self, self.data, **params)
@@ -1355,7 +1355,7 @@ def _deduplicate_items(cls, items):
counts = defaultdict(lambda: 0)
for path, item in items:
if counter[path] > 1:
path = path + (util.int_to_roman(counts[path]+1),)
path = (*path, util.int_to_roman(counts[path] + 1))
else:
inc = 1
while counts[path]:
2 changes: 1 addition & 1 deletion holoviews/core/layout.py
@@ -46,7 +46,7 @@ def __lshift__(self, other):
if isinstance(other, (ViewableElement, NdMapping, Empty)):
return AdjointLayout([self, other])
elif isinstance(other, AdjointLayout):
return AdjointLayout(other.data.values()+[self])
return AdjointLayout([*other.data.values(), self])
else:
raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout')

4 changes: 1 addition & 3 deletions holoviews/core/operation.py
@@ -155,9 +155,7 @@ def _apply(self, element, key=None):
and isinstance(element, Dataset) and not in_method):
ret._dataset = element.dataset.clone()
ret._pipeline = element_pipeline.instance(
operations=element_pipeline.operations + [
self.instance(**self.p)
],
operations=[*element_pipeline.operations, self.instance(**self.p)],
)
ret._transforms = element._transforms
return ret
6 changes: 3 additions & 3 deletions holoviews/core/options.py
@@ -767,7 +767,7 @@ def closest(self, obj, group, defaults=True, backend=None):
# Try to get a cache hit in the backend lookup cache
backend = backend or Store.current_backend
cache = Store._lookup_cache.get(backend, {})
cache_key = opts_spec+(group, defaults, id(self.root))
cache_key = (*opts_spec, group, defaults, id(self.root))
if cache_key in cache:
return cache[cache_key]

@@ -1344,7 +1344,7 @@ def add_style_opts(cls, component, new_options, backend=None):
for option in new_options:
if option not in cls.registry[backend][component].style_opts:
plot_class = cls.registry[backend][component]
plot_class.style_opts = sorted(plot_class.style_opts+[option])
plot_class.style_opts = sorted([*plot_class.style_opts, option])
cls._options[backend][component.name] = Options(
'style', merge_keywords=True, allowed_keywords=new_options
)
@@ -1592,7 +1592,7 @@ def validate_spec(cls, spec, backends=None):
error_key = (error.invalid_keyword,
error.allowed_keywords.target,
error.group_name)
error_info[error_key+(backend,)] = error.allowed_keywords
error_info[(*error_key, backend)] = error.allowed_keywords
backend_errors[error_key].add(backend)

for ((keyword, target, group_name), backend_error) in backend_errors.items():
6 changes: 4 additions & 2 deletions holoviews/core/overlay.py
@@ -328,5 +328,7 @@ def decollate(self):
return decollate(self)


__all__ = list({_k for _k, _v in locals().items()
if isinstance(_v, type) and issubclass(_v, Dimensioned)}) + ['Overlayable']
__all__ = [
*{_k for _k, _v in locals().items() if isinstance(_v, type) and issubclass(_v, Dimensioned)},
"Overlayable"
]
6 changes: 3 additions & 3 deletions holoviews/core/spaces.py
@@ -297,7 +297,7 @@ def __lshift__(self, other):
if isinstance(other, (ViewableElement, UniformNdMapping, Empty)):
return AdjointLayout([self, other])
elif isinstance(other, AdjointLayout):
return AdjointLayout(other.data+[self])
return AdjointLayout([*other.data, self])
else:
raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout')

@@ -1048,7 +1048,7 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True,
overrides['plot_id'] = self._plot_id
clone = super(UniformNdMapping, self).clone(
callback, shared_data, new_type, link,
*(data,) + args, **overrides)
*(data, *args), **overrides)

# Ensure the clone references this object to ensure
# stream sources are inherited
@@ -1747,7 +1747,7 @@ def __lshift__(self, other):
if isinstance(other, (ViewableElement, UniformNdMapping)):
return AdjointLayout([self, other])
elif isinstance(other, AdjointLayout):
return AdjointLayout(other.data+[self])
return AdjointLayout([*other.data, self])
else:
raise TypeError(f'Cannot append {type(other).__name__} to a AdjointLayout')

2 changes: 1 addition & 1 deletion holoviews/core/tree.py
@@ -160,7 +160,7 @@ def _propagate(self, path, val):
else:
self.data[path] = val
if self.parent is not None:
self.parent._propagate((self.identifier,)+path, val)
self.parent._propagate((self.identifier, *path), val)


def __setitem__(self, identifier, val):
12 changes: 6 additions & 6 deletions holoviews/core/util.py
@@ -994,7 +994,7 @@ def max_range(ranges, combined=True):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered')
values = [tuple(np.nan if v is None else v for v in r) for r in ranges]
if any(isinstance(v, datetime_types) and not isinstance(v, cftime_types+(dt.time,))
if any(isinstance(v, datetime_types) and not isinstance(v, (*cftime_types, dt.time))
for r in values for v in r):
converted = []
for l, h in values:
@@ -1281,7 +1281,7 @@ def dimension_sort(odict, kdims, vdims, key_index):
indexes = [(dimensions[i], int(i not in range(ndims)),
i if i in range(ndims) else i-ndims)
for i in key_index]
cached_values = {d.name: [None]+list(d.values) for d in dimensions}
cached_values = {d.name: [None, *d.values] for d in dimensions}

if len(set(key_index)) != len(key_index):
raise ValueError("Cannot sort on duplicated dimensions")
@@ -1434,8 +1434,8 @@ def get_overlay_spec(o, k, v):
Gets the type.group.label + key spec from an Element in an Overlay.
"""
k = wrap_tuple(k)
return ((type(v).__name__, v.group, v.label) + k if len(o.kdims) else
(type(v).__name__,) + k)
return ((type(v).__name__, v.group, v.label, *k) if len(o.kdims) else
(type(v).__name__, *k))


def layer_sort(hmap):
@@ -1887,9 +1887,9 @@ def make_path_unique(path, counts, new):
path = path[:-1]
else:
added = True
path = path + (int_to_roman(count),)
path = (*path, int_to_roman(count))
if len(path) == 1:
path = path + (int_to_roman(counts.get(path, 1)),)
path = (*path, int_to_roman(counts.get(path, 1)))
if path not in counts:
counts[path] = 1
return path