Skip to content

Commit

Permalink
Swapped out the unordered_map cache for a Boost.Compute LRU cache implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
ghoshkaj committed Apr 1, 2018
1 parent 3a70a13 commit 0659fe2
Show file tree
Hide file tree
Showing 2 changed files with 150 additions and 36 deletions.
47 changes: 11 additions & 36 deletions include/engine/unpacking_cache.hpp
Original file line number Diff line number Diff line change
@@ -1,75 +1,50 @@
#ifndef UNPACKING_CACHE_HPP
#define UNPACKING_CACHE_HPP

#include "util/typedefs.hpp"

#include <boost/functional/hash_fwd.hpp>
#include <unordered_map>
#include <utility>

namespace std
{
template <> struct hash<std::tuple<NodeID, NodeID, std::size_t>>
{
typedef std::tuple<NodeID, NodeID, std::size_t> argument_type;
typedef std::size_t result_type;
result_type operator()(argument_type const &tuple) const noexcept
{
result_type seed = 0;
result_type const h1(std::hash<unsigned int>{}(std::get<0>(tuple)));
result_type const h2(std::hash<unsigned int>{}(std::get<1>(tuple)));
result_type const h3(std::hash<unsigned int>{}(std::get<2>(tuple)));
#include <boost/optional/optional_io.hpp>

boost::hash_combine(seed, h1);
boost::hash_combine(seed, h2);
boost::hash_combine(seed, h3);
#include "../../third_party/compute_detail/lru_cache.hpp"
#include "util/typedefs.hpp"

return seed;
}
};
}
namespace osrm
{
namespace engine
{
// Memoizes the unpacked durations of shortcut edges so repeated path
// unpacking can skip recomputation. Entries are bounded by an LRU cache
// and invalidated wholesale when the underlying dataset changes.
class UnpackingCache
{
  private:
    // LRU cache keyed by (source node, target node, exclude-index tuple)
    // holding the unpacked duration of the corresponding shortcut edge.
    boost::compute::detail::lru_cache<std::tuple<NodeID, NodeID, std::size_t>, EdgeDuration> cache;
    // Timestamp of the dataset the cached entries were computed against.
    unsigned current_data_timestamp = 0;

  public:
    // cache_size generalizes the previously hard-coded capacity of 200
    // entries; the default keeps existing callers unchanged.
    UnpackingCache(unsigned timestamp, std::size_t cache_size = 200)
        : cache(cache_size), current_data_timestamp(timestamp)
    {
    }

    // Drop every cached duration when a new dataset is loaded; stale
    // durations from an old graph must never be served.
    void Clear(unsigned new_data_timestamp)
    {
        if (current_data_timestamp != new_data_timestamp)
        {
            cache.clear();
            current_data_timestamp = new_data_timestamp;
        }
    }

    bool IsEdgeInCache(std::tuple<NodeID, NodeID, std::size_t> edge) { return cache.contains(edge); }

    void AddEdge(std::tuple<NodeID, NodeID, std::size_t> edge, EdgeDuration duration)
    {
        // Touch the entry first so an already-cached edge is promoted to
        // most-recently-used (lru_cache::insert ignores existing keys and
        // does not refresh their recency).
        GetDuration(edge);
        cache.insert(edge, duration);
    }

    // Returns the cached duration for `edge`, or MAXIMAL_EDGE_DURATION on a
    // cache miss (a miss must not masquerade as a real duration).
    EdgeDuration GetDuration(std::tuple<NodeID, NodeID, std::size_t> edge)
    {
        // BUG FIX: previous code wrote `*duration ? ...`, which (a) is
        // undefined behavior when the optional is empty (cache miss) and
        // (b) misreported a legitimately cached duration of 0 as a miss.
        // Test the optional itself, then dereference.
        boost::optional<EdgeDuration> duration = cache.get(edge);
        return duration ? *duration : MAXIMAL_EDGE_DURATION;
    }
};
} // engine
} // osrm

#endif // UNPACKING_CACHE_HPP
#endif // UNPACKING_CACHE_HPP
139 changes: 139 additions & 0 deletions third_party/compute_detail/lru_cache.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
//---------------------------------------------------------------------------//
// Copyright (c) 2013 Kyle Lutz <kyle.r.lutz@gmail.com>
//
// Distributed under the Boost Software License, Version 1.0
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//
// See http://boostorg.github.com/compute for more information.
//---------------------------------------------------------------------------//

#ifndef BOOST_COMPUTE_DETAIL_LRU_CACHE_HPP
#define BOOST_COMPUTE_DETAIL_LRU_CACHE_HPP

#include <map>
#include <list>
#include <utility>

#include <boost/optional.hpp>

namespace boost {
namespace compute {
namespace detail {

// A fixed-capacity cache that evicts the least-recently-used item when a new
// key is inserted while the cache is full.
//
// Bookkeeping invariant: m_map maps key -> (value, iterator into m_list);
// m_list orders keys from most-recently-used (front) to least-recently-used
// (back). Every map entry's stored iterator must always point at that key's
// node in m_list.
//
// NOTE(review): vendored verbatim from Boost.Compute
// (boost/compute/detail/lru_cache.hpp) — avoid local behavior changes so it
// can be kept in sync with upstream.
template<class Key, class Value>
class lru_cache
{
public:
    typedef Key key_type;
    typedef Value value_type;
    typedef std::list<key_type> list_type;
    typedef std::map<
                key_type,
                std::pair<value_type, typename list_type::iterator>
            > map_type;

    // capacity: maximum number of entries held before insertion evicts.
    lru_cache(size_t capacity)
        : m_capacity(capacity)
    {
    }

    ~lru_cache()
    {
    }

    // Current number of cached entries (always <= capacity()).
    size_t size() const
    {
        return m_map.size();
    }

    size_t capacity() const
    {
        return m_capacity;
    }

    bool empty() const
    {
        return m_map.empty();
    }

    // Pure membership test — does NOT update the entry's recency.
    bool contains(const key_type &key)
    {
        return m_map.find(key) != m_map.end();
    }

    // Inserts (key, value), evicting the LRU entry first when at capacity.
    // NOTE: if the key is already present this is a no-op — neither the
    // stored value nor the entry's recency is updated (upstream semantics).
    void insert(const key_type &key, const value_type &value)
    {
        typename map_type::iterator i = m_map.find(key);
        if(i == m_map.end()){
            // insert item into the cache, but first check if it is full
            if(size() >= m_capacity){
                // cache is full, evict the least recently used item
                evict();
            }

            // insert the new item
            m_list.push_front(key);
            m_map[key] = std::make_pair(value, m_list.begin());
        }
    }

    // Looks up `key`, promoting it to most-recently-used on a hit.
    // Returns boost::none on a miss.
    boost::optional<value_type> get(const key_type &key)
    {
        // lookup value in the cache
        typename map_type::iterator i = m_map.find(key);
        if(i == m_map.end()){
            // value not in cache
            return boost::none;
        }

        // return the value, but first update its place in the most
        // recently used list
        typename list_type::iterator j = i->second.second;
        if(j != m_list.begin()){
            // move item to the front of the most recently used list
            m_list.erase(j);
            m_list.push_front(key);

            // update iterator in map
            j = m_list.begin();
            const value_type &value = i->second.first;
            m_map[key] = std::make_pair(value, j);

            // return the value
            return value;
        }
        else {
            // the item is already at the front of the most recently
            // used list so just return it
            return i->second.first;
        }
    }

    // Remove all entries; capacity is unchanged.
    void clear()
    {
        m_map.clear();
        m_list.clear();
    }

private:
    // Removes the least-recently-used entry (the back of m_list).
    // Precondition: the cache is non-empty — callers must guarantee this,
    // since --m_list.end() on an empty list is undefined behavior.
    void evict()
    {
        // evict item from the end of most recently used list
        typename list_type::iterator i = --m_list.end();
        m_map.erase(*i);
        m_list.erase(i);
    }

private:
    map_type m_map;
    list_type m_list;
    size_t m_capacity;
};

} // end detail namespace
} // end compute namespace
} // end boost namespace

#endif // BOOST_COMPUTE_DETAIL_LRU_CACHE_HPP

0 comments on commit 0659fe2

Please sign in to comment.