#include "mem/cache/compressors/base.hh"

#include <algorithm>
#include <climits>
#include <cmath>
#include <cstdint>
#include <cstring>
#include <string>

#include "base/trace.hh"
#include "debug/CacheComp.hh"
#include "mem/cache/base.hh"
#include "mem/cache/tags/super_blk.hh"
#include "params/BaseCacheCompressor.hh"
return std::ceil(_size/8);
}
-Base::Base(const Params *p)
- : SimObject(p), blkSize(p->block_size), sizeThreshold(p->size_threshold),
- stats(*this)
+Base::Base(const Params &p)
+ : SimObject(p), blkSize(p.block_size), chunkSizeBits(p.chunk_size_bits),
+ sizeThreshold((blkSize * p.size_threshold_percentage) / 100),
+ compChunksPerCycle(p.comp_chunks_per_cycle),
+ compExtraLatency(p.comp_extra_latency),
+ decompChunksPerCycle(p.decomp_chunks_per_cycle),
+ decompExtraLatency(p.decomp_extra_latency),
+ cache(nullptr), stats(*this)
{
+ fatal_if(64 % chunkSizeBits,
+ "64 must be a multiple of the chunk granularity.");
+
+ fatal_if(((CHAR_BIT * blkSize) / chunkSizeBits) < compChunksPerCycle,
+ "Compressor processes more chunks per cycle than the number of "
+ "chunks in the input");
+ fatal_if(((CHAR_BIT * blkSize) / chunkSizeBits) < decompChunksPerCycle,
+ "Decompressor processes more chunks per cycle than the number of "
+ "chunks in the input");
+
fatal_if(blkSize < sizeThreshold, "Compressed data must fit in a block");
}
void
-Base::compress(const uint64_t* data, Cycles& comp_lat,
- Cycles& decomp_lat, std::size_t& comp_size_bits)
+Base::setCache(BaseCache *_cache)
+{
+ assert(!cache);
+ cache = _cache;
+}
+
+std::vector<Base::Chunk>
+Base::toChunks(const uint64_t* data) const
+{
+ // Number of chunks in a 64-bit value
+ const unsigned num_chunks_per_64 =
+ (sizeof(uint64_t) * CHAR_BIT) / chunkSizeBits;
+
+ // Turn a 64-bit array into a chunkSizeBits-array
+ std::vector<Chunk> chunks((blkSize * CHAR_BIT) / chunkSizeBits, 0);
+ for (int i = 0; i < chunks.size(); i++) {
+ const int index_64 = std::floor(i / (double)num_chunks_per_64);
+ const unsigned start = i % num_chunks_per_64;
+ chunks[i] = bits(data[index_64],
+ (start + 1) * chunkSizeBits - 1, start * chunkSizeBits);
+ }
+
+ return chunks;
+}
+
+void
+Base::fromChunks(const std::vector<Chunk>& chunks, uint64_t* data) const
+{
+ // Number of chunks in a 64-bit value
+ const unsigned num_chunks_per_64 =
+ (sizeof(uint64_t) * CHAR_BIT) / chunkSizeBits;
+
+ // Turn a chunkSizeBits-array into a 64-bit array
+ std::memset(data, 0, blkSize);
+ for (int i = 0; i < chunks.size(); i++) {
+ const int index_64 = std::floor(i / (double)num_chunks_per_64);
+ const unsigned start = i % num_chunks_per_64;
+ replaceBits(data[index_64], (start + 1) * chunkSizeBits - 1,
+ start * chunkSizeBits, chunks[i]);
+ }
+}
+
+std::unique_ptr<Base::CompressionData>
+Base::compress(const uint64_t* data, Cycles& comp_lat, Cycles& decomp_lat)
{
// Apply compression
std::unique_ptr<CompressionData> comp_data =
- compress(data, comp_lat, decomp_lat);
+ compress(toChunks(data), comp_lat, decomp_lat);
// If we are in debug mode apply decompression just after the compression.
// If the results do not match, we've got an error
// Get compression size. If compressed size is greater than the size
// threshold, the compression is seen as unsuccessful
- comp_size_bits = comp_data->getSizeBits();
- if (comp_size_bits >= sizeThreshold * 8) {
- comp_size_bits = blkSize * 8;
+ std::size_t comp_size_bits = comp_data->getSizeBits();
+ if (comp_size_bits > sizeThreshold * CHAR_BIT) {
+ comp_size_bits = blkSize * CHAR_BIT;
+ comp_data->setSizeBits(comp_size_bits);
+ stats.failedCompressions++;
}
// Update stats
stats.compressions++;
stats.compressionSizeBits += comp_size_bits;
- stats.compressionSize[std::ceil(std::log2(comp_size_bits))]++;
+ if (comp_size_bits != 0) {
+ stats.compressionSize[1 + std::ceil(std::log2(comp_size_bits))]++;
+ } else {
+ stats.compressionSize[0]++;
+ }
// Print debug information
DPRINTF(CacheComp, "Compressed cache line from %d to %d bits. " \
"Compression latency: %llu, decompression latency: %llu\n",
blkSize*8, comp_size_bits, comp_lat, decomp_lat);
+
+ return comp_data;
}
Cycles
/**
 * Register the base compressor statistics with the stats framework.
 *
 * @param _compressor The compressor these stats belong to.
 */
Base::BaseStats::BaseStats(Base& _compressor)
    : Stats::Group(&_compressor), compressor(_compressor),
      ADD_STAT(compressions, UNIT_COUNT, "Total number of compressions"),
      ADD_STAT(failedCompressions, UNIT_COUNT,
               "Total number of failed compressions"),
      ADD_STAT(compressionSize, UNIT_COUNT,
               "Number of blocks that were compressed to this power of two "
               "size"),
      ADD_STAT(compressionSizeBits, UNIT_BIT,
               "Total compressed data size, in bits"),
      ADD_STAT(avgCompressionSizeBits,
               UNIT_RATE(Stats::Units::Bit, Stats::Units::Count),
               "Average compression size, in bits"),
      ADD_STAT(decompressions, UNIT_COUNT, "Total number of decompressions")
{
}
{
Stats::Group::regStats();
- compressionSize.init(std::log2(compressor.blkSize*8) + 1);
+ // Values comprised are {0, 1, 2, 4, ..., blkSize}
+ compressionSize.init(std::log2(compressor.blkSize*8) + 2);
+ compressionSize.subname(0, "0");
+ compressionSize.subdesc(0,
+ "Number of blocks that compressed to fit in 0 bits");
for (unsigned i = 0; i <= std::log2(compressor.blkSize*8); ++i) {
std::string str_i = std::to_string(1 << i);
- compressionSize.subname(i, str_i);
- compressionSize.subdesc(i,
+ compressionSize.subname(1+i, str_i);
+ compressionSize.subdesc(1+i,
"Number of blocks that compressed to fit in " + str_i + " bits");
}