#include "base/trace.hh"
#include "debug/CacheComp.hh"
+#include "mem/cache/base.hh"
#include "mem/cache/tags/super_blk.hh"
#include "params/BaseCacheCompressor.hh"
compExtraLatency(p.comp_extra_latency),
decompChunksPerCycle(p.decomp_chunks_per_cycle),
decompExtraLatency(p.decomp_extra_latency),
- stats(*this)
+ cache(nullptr), stats(*this)
{
fatal_if(64 % chunkSizeBits,
"64 must be a multiple of the chunk granularity.");
fatal_if(blkSize < sizeThreshold, "Compressed data must fit in a block");
}
+/**
+ * Associate this compressor with its owning cache.
+ *
+ * @param _cache The cache this compressor belongs to. Must be non-null
+ *               in practice; stored in the `cache` member for later use.
+ */
+void
+Base::setCache(BaseCache *_cache)
+{
+    // A compressor belongs to exactly one cache, so this setter may only
+    // be called once (cache is initialized to nullptr in the constructor).
+    assert(!cache);
+    cache = _cache;
+}
+
std::vector<Base::Chunk>
Base::toChunks(const uint64_t* data) const
{
+// Statistics group for the base cache compressor. This change converts the
+// hand-written Stats constructor calls to the ADD_STAT macro, attaching an
+// explicit unit (count, bit, or bit/count rate) to each statistic. The stat
+// names and descriptions are preserved from the removed lines below.
Base::BaseStats::BaseStats(Base& _compressor)
    : Stats::Group(&_compressor), compressor(_compressor),
-      compressions(this, "compressions",
-          "Total number of compressions"),
-      failedCompressions(this, "failed_compressions",
-          "Total number of failed compressions"),
-      compressionSize(this, "compression_size",
-          "Number of blocks that were compressed to this power of two size"),
-      compressionSizeBits(this, "compression_size_bits",
-          "Total compressed data size, in bits"),
-      avgCompressionSizeBits(this, "avg_compression_size_bits",
-          "Average compression size, in bits"),
-      decompressions(this, "total_decompressions",
-          "Total number of decompressions")
+      ADD_STAT(compressions, UNIT_COUNT, "Total number of compressions"),
+      ADD_STAT(failedCompressions, UNIT_COUNT,
+               "Total number of failed compressions"),
+      ADD_STAT(compressionSize, UNIT_COUNT,
+               "Number of blocks that were compressed to this power of two "
+               "size"),
+      ADD_STAT(compressionSizeBits, UNIT_BIT,
+               "Total compressed data size, in bits"),
+      ADD_STAT(avgCompressionSizeBits,
+               UNIT_RATE(Stats::Units::Bit, Stats::Units::Count),
+               "Average compression size, in bits"),
+      ADD_STAT(decompressions, UNIT_COUNT, "Total number of decompressions")
{
}