// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <algorithm>
#include <mutex>
#include <set>
#include <unordered_map>
#include <vector>

#include <boost/icl/interval_map.hpp>
#include <boost/range/iterator_range_core.hpp>

#include "common/common_types.h"
#include "core/settings.h"
#include "video_core/gpu.h"
#include "video_core/rasterizer_interface.h"
2018-08-23 19:38:57 +00:00
|
|
|
|
2018-10-16 20:51:53 +00:00
|
|
|
class RasterizerCacheObject {
|
|
|
|
public:
|
2019-02-19 01:58:32 +00:00
|
|
|
explicit RasterizerCacheObject(const u8* host_ptr)
|
|
|
|
: host_ptr{host_ptr}, cache_addr{ToCacheAddr(host_ptr)} {}
|
|
|
|
|
2018-11-08 05:31:35 +00:00
|
|
|
virtual ~RasterizerCacheObject();
|
|
|
|
|
2019-02-19 01:58:32 +00:00
|
|
|
CacheAddr GetCacheAddr() const {
|
|
|
|
return cache_addr;
|
|
|
|
}
|
|
|
|
|
|
|
|
const u8* GetHostPtr() const {
|
|
|
|
return host_ptr;
|
|
|
|
}
|
|
|
|
|
2018-10-16 20:51:53 +00:00
|
|
|
/// Gets the address of the shader in guest memory, required for cache management
|
2019-02-19 01:58:32 +00:00
|
|
|
virtual VAddr GetCpuAddr() const = 0;
|
2018-10-16 20:51:53 +00:00
|
|
|
|
|
|
|
/// Gets the size of the shader in guest memory, required for cache management
|
|
|
|
virtual std::size_t GetSizeInBytes() const = 0;
|
|
|
|

    /// Writes any cached resources back to memory
    virtual void Flush() = 0;

    /// Sets whether the cached object should be considered registered
    void SetIsRegistered(bool registered) {
        is_registered = registered;
    }

    /// Returns true if the cached object is registered
    bool IsRegistered() const {
        return is_registered;
    }

    /// Returns true if the cached object is dirty
    bool IsDirty() const {
        return is_dirty;
    }

    /// Returns ticks from when this cached object was last modified
    u64 GetLastModifiedTicks() const {
        return last_modified_ticks;
    }

    /// Marks an object as recently modified, used to specify whether it is clean or dirty
    template <class T>
    void MarkAsModified(bool dirty, T& cache) {
        is_dirty = dirty;
        last_modified_ticks = cache.GetModifiedTicks();
    }

private:
    bool is_registered{};      ///< Whether the object is currently registered with the cache
    bool is_dirty{};           ///< Whether the object is dirty (out of sync with guest memory)
    u64 last_modified_ticks{}; ///< When the object was last modified, used for in-order flushing
    const u8* host_ptr{};      ///< Pointer to the memory backing this cached region
    CacheAddr cache_addr{};    ///< Cache address memory, unique from emulated virtual address space
};
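
// Example (illustrative sketch, not part of this interface): a concrete cached object derives from
// RasterizerCacheObject and implements the three pure virtuals. CachedFoo and
// FlushFooToGuestMemory are hypothetical names used only to show the expected shape.
//
//     class CachedFoo final : public RasterizerCacheObject {
//     public:
//         explicit CachedFoo(VAddr cpu_addr, std::size_t size, u8* host_ptr)
//             : RasterizerCacheObject{host_ptr}, cpu_addr{cpu_addr}, size{size} {}
//
//         VAddr GetCpuAddr() const override {
//             return cpu_addr;
//         }
//
//         std::size_t GetSizeInBytes() const override {
//             return size;
//         }
//
//         void Flush() override {
//             // Write the cached data back to guest memory (hypothetical helper).
//             FlushFooToGuestMemory(GetHostPtr(), size);
//         }
//
//     private:
//         VAddr cpu_addr{};
//         std::size_t size{};
//     };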

/// Generic cache that tracks RasterizerCacheObject-derived resources. T is expected to be a
/// pointer-like handle (e.g. a shared_ptr) to the cached type; objects are indexed both by cache
/// address (for exact lookups) and by memory interval (for region flushes and invalidations).
template <class T>
class RasterizerCache : NonCopyable {
    friend class RasterizerCacheObject;

public:
    explicit RasterizerCache(VideoCore::RasterizerInterface& rasterizer) : rasterizer{rasterizer} {}

    /// Write any cached resources overlapping the specified region back to memory
    void FlushRegion(CacheAddr addr, std::size_t size) {
        std::lock_guard lock{mutex};

        const auto& objects{GetSortedObjectsFromRegion(addr, size)};
        for (auto& object : objects) {
            FlushObject(object);
        }
    }

    /// Mark the specified region as being invalidated
    void InvalidateRegion(CacheAddr addr, u64 size) {
        std::lock_guard lock{mutex};

        const auto& objects{GetSortedObjectsFromRegion(addr, size)};
        for (auto& object : objects) {
            if (!object->IsRegistered()) {
                // Skip duplicates
                continue;
            }
            Unregister(object);
        }
    }

    /// Invalidates everything in the cache
    void InvalidateAll() {
        std::lock_guard lock{mutex};

        while (interval_cache.begin() != interval_cache.end()) {
            Unregister(*interval_cache.begin()->second.begin());
        }
    }

protected:
    /// Tries to get an object from the cache with the specified cache address
    T TryGet(CacheAddr addr) const {
        const auto iter = map_cache.find(addr);
        if (iter != map_cache.end())
            return iter->second;
        return nullptr;
    }

    /// Tries to get an object from the cache using the host pointer backing it
    T TryGet(const void* addr) const {
        const auto iter = map_cache.find(ToCacheAddr(addr));
        if (iter != map_cache.end())
            return iter->second;
        return nullptr;
    }

    /// Register an object into the cache
    virtual void Register(const T& object) {
        std::lock_guard lock{mutex};

        object->SetIsRegistered(true);
        interval_cache.add({GetInterval(object), ObjectSet{object}});
        map_cache.insert({object->GetCacheAddr(), object});
        rasterizer.UpdatePagesCachedCount(object->GetCpuAddr(), object->GetSizeInBytes(), 1);
    }

    /// Unregisters an object from the cache
    virtual void Unregister(const T& object) {
        std::lock_guard lock{mutex};

        object->SetIsRegistered(false);
        rasterizer.UpdatePagesCachedCount(object->GetCpuAddr(), object->GetSizeInBytes(), -1);
        interval_cache.subtract({GetInterval(object), ObjectSet{object}});
        map_cache.erase(object->GetCacheAddr());
    }

    /// Returns a ticks counter used for tracking when cached objects were last modified
    u64 GetModifiedTicks() {
        std::lock_guard lock{mutex};

        return ++modified_ticks;
    }

    /// Flushes the specified object, updating appropriate cache state as needed
    void FlushObject(const T& object) {
        std::lock_guard lock{mutex};

        if (!object->IsDirty()) {
            return;
        }
        object->Flush();
        object->MarkAsModified(false, *this);
    }
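
    // Example (illustrative sketch, not part of this interface): a derived cache typically
    // combines TryGet, Register and MarkAsModified along these lines. FooCache, CachedFoo and
    // GetFoo are hypothetical names used only to show the intended call pattern.
    //
    //     class FooCache final : public RasterizerCache<std::shared_ptr<CachedFoo>> {
    //     public:
    //         explicit FooCache(VideoCore::RasterizerInterface& rasterizer)
    //             : RasterizerCache{rasterizer} {}
    //
    //         std::shared_ptr<CachedFoo> GetFoo(VAddr cpu_addr, std::size_t size, u8* host_ptr) {
    //             // Reuse an existing entry backed by the same host memory, if any.
    //             if (auto found = TryGet(host_ptr)) {
    //                 return found;
    //             }
    //             // Otherwise create a new entry, register it and mark it as modified.
    //             auto object = std::make_shared<CachedFoo>(cpu_addr, size, host_ptr);
    //             Register(object);
    //             object->MarkAsModified(true, *this);
    //             return object;
    //         }
    //     };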

private:
    /// Returns a list of cached objects from the specified memory region, ordered by the time
    /// they were last modified
    std::vector<T> GetSortedObjectsFromRegion(CacheAddr addr, u64 size) {
        if (size == 0) {
            return {};
        }

        // Gather every object registered in an interval that overlaps [addr, addr + size)
        std::vector<T> objects;
        const ObjectInterval interval{addr, addr + size};
        for (auto& pair : boost::make_iterator_range(interval_cache.equal_range(interval))) {
            for (auto& cached_object : pair.second) {
                if (!cached_object) {
                    continue;
                }
                objects.push_back(cached_object);
            }
        }

        // Sort by modification ticks so that older modifications are flushed first
        std::sort(objects.begin(), objects.end(), [](const T& a, const T& b) -> bool {
            return a->GetLastModifiedTicks() < b->GetLastModifiedTicks();
        });

        return objects;
    }

    using ObjectSet = std::set<T>;
    using ObjectCache = std::unordered_map<CacheAddr, T>;
    using IntervalCache = boost::icl::interval_map<CacheAddr, ObjectSet>;
    using ObjectInterval = typename IntervalCache::interval_type;

    static auto GetInterval(const T& object) {
        return ObjectInterval::right_open(object->GetCacheAddr(),
                                          object->GetCacheAddr() + object->GetSizeInBytes());
    }

    ObjectCache map_cache;        ///< Cache of objects accessed by their starting cache address
    IntervalCache interval_cache; ///< Cache of objects accessed by the memory interval they cover
    u64 modified_ticks{};         ///< Counter of cache state ticks, used for in-order flushing
    VideoCore::RasterizerInterface& rasterizer;
    std::recursive_mutex mutex;
};
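
// Example (illustrative sketch): code that tracks guest memory traffic is expected to drive the
// public FlushRegion/InvalidateRegion entry points roughly as below. FooCache refers to the
// hypothetical cache sketched above; the callback names are placeholders.
//
//     void OnGuestMemoryRead(FooCache& cache, const u8* host_ptr, u64 size) {
//         // Write any dirty cached data back so the guest observes up-to-date memory.
//         cache.FlushRegion(ToCacheAddr(host_ptr), size);
//     }
//
//     void OnGuestMemoryWrite(FooCache& cache, const u8* host_ptr, u64 size) {
//         // Drop cached objects overlapping the written range; they are rebuilt on demand.
//         cache.InvalidateRegion(ToCacheAddr(host_ptr), size);
//     }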