2 * Copyright (c) 2012-2014,2016-2018 ARM Limited
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder. You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * All rights reserved.
17 * Redistribution and use in source and binary forms, with or without
18 * modification, are permitted provided that the following conditions are
19 * met: redistributions of source code must retain the above copyright
20 * notice, this list of conditions and the following disclaimer;
21 * redistributions in binary form must reproduce the above copyright
22 * notice, this list of conditions and the following disclaimer in the
23 * documentation and/or other materials provided with the distribution;
24 * neither the name of the copyright holders nor the names of its
25 * contributors may be used to endorse or promote products derived from
26 * this software without specific prior written permission.
28 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 * Authors: Erik Hallnor
46 * Declaration of a common base class for cache tagstore objects.
49 #ifndef __MEM_CACHE_TAGS_BASE_HH__
50 #define __MEM_CACHE_TAGS_BASE_HH__
#include <cassert>
#include <functional>
#include <memory>
#include <string>
#include <vector>

#include "base/callback.hh"
#include "base/logging.hh"
#include "base/statistics.hh"
#include "base/types.hh"
#include "mem/cache/blk.hh"
#include "mem/packet.hh"
#include "params/BaseTags.hh"
#include "sim/clocked_object.hh"
66 class ReplaceableEntry;
69 * A common base class of Cache tagstore objects.
71 class BaseTags : public ClockedObject
74 /** The block size of the cache. */
75 const unsigned blkSize;
76 /** Mask out all bits that aren't part of the block offset. */
78 /** The size of the cache. */
80 /** The tag lookup latency of the cache. */
81 const Cycles lookupLatency;
83 * The total access latency of the cache. This latency
84 * is different depending on the cache access mode
85 * (parallel or sequential)
87 const Cycles accessLatency;
88 /** Pointer to the parent cache. */
92 * The number of tags that need to be touched to meet the warmup
95 const unsigned warmupBound;
96 /** Marked true when the cache is warmed up. */
99 /** the number of blocks in the cache */
100 const unsigned numBlocks;
102 /** The data blocks, 1 per cache block. */
103 std::unique_ptr<uint8_t[]> dataBlks;
107 * TODO: It would be good if these stats were acquired after warmup.
108 * @addtogroup CacheStatistics
112 /** Per cycle average of the number of tags that hold valid data. */
113 Stats::Average tagsInUse;
115 /** The total number of references to a block before it is replaced. */
116 Stats::Scalar totalRefs;
119 * The number of reference counts sampled. This is different from
120 * replacements because we sample all the valid blocks when the simulator
123 Stats::Scalar sampledRefs;
126 * Average number of references to a block before is was replaced.
127 * @todo This should change to an average stat once we have them.
129 Stats::Formula avgRefs;
131 /** The cycle that the warmup percentage was hit. 0 on failure. */
132 Stats::Scalar warmupCycle;
134 /** Average occupancy of each requestor using the cache */
135 Stats::AverageVector occupancies;
137 /** Average occ % of each requestor using the cache */
138 Stats::Formula avgOccs;
140 /** Occupancy of each context/cpu using the cache */
141 Stats::Vector occupanciesTaskId;
143 /** Occupancy of each context/cpu using the cache */
144 Stats::Vector2d ageTaskId;
146 /** Occ % of each context/cpu using the cache */
147 Stats::Formula percentOccsTaskId;
149 /** Number of tags consulted over all accesses. */
150 Stats::Scalar tagAccesses;
151 /** Number of data blocks consulted over all accesses. */
152 Stats::Scalar dataAccesses;
159 typedef BaseTagsParams Params;
160 BaseTags(const Params *p);
165 virtual ~BaseTags() {}
168 * Set the parent cache back pointer.
169 * @param _cache Pointer to parent cache.
171 void setCache(BaseCache *_cache);
174 * Register local statistics.
179 * Average in the reference count for valid blocks when the simulation
185 * Computes stats just prior to dump event
190 * Print all tags used
195 * Find a block using the memory address
197 virtual CacheBlk * findBlock(Addr addr, bool is_secure) const = 0;
200 * Find a block given set and way.
202 * @param set The set of the block.
203 * @param way The way of the block.
206 virtual ReplaceableEntry* findBlockBySetAndWay(int set, int way) const = 0;
209 * Align an address to the block size.
210 * @param addr the address to align.
211 * @return The block address.
213 Addr blkAlign(Addr addr) const
215 return addr & ~blkMask;
219 * Calculate the block offset of an address.
220 * @param addr the address to get the offset of.
221 * @return the block offset.
223 int extractBlkOffset(Addr addr) const
225 return (addr & blkMask);
229 * Limit the allocation for the cache ways.
230 * @param ways The maximum number of ways available for replacement.
232 virtual void setWayAllocationMax(int ways)
234 panic("This tag class does not implement way allocation limit!\n");
238 * Get the way allocation mask limit.
239 * @return The maximum number of ways available for replacement.
241 virtual int getWayAllocationMax() const
243 panic("This tag class does not implement way allocation limit!\n");
248 * This function updates the tags when a block is invalidated
250 * @param blk A valid block to invalidate.
252 virtual void invalidate(CacheBlk *blk)
255 assert(blk->isValid());
257 occupancies[blk->srcMasterId]--;
258 totalRefs += blk->refCount;
265 * Find replacement victim based on address. If the address requires
266 * blocks to be evicted, their locations are listed for eviction. If a
267 * conventional cache is being used, the list only contains the victim.
268 * However, if using sector or compressed caches, the victim is one of
269 * the blocks to be evicted, but its location is the only one that will
270 * be assigned to the newly allocated block associated to this address.
273 * @param addr Address to find a victim for.
274 * @param is_secure True if the target memory space is secure.
275 * @param evict_blks Cache blocks to be evicted.
276 * @return Cache block to be replaced.
278 virtual CacheBlk* findVictim(Addr addr, const bool is_secure,
279 std::vector<CacheBlk*>& evict_blks) const = 0;
281 virtual CacheBlk* accessBlock(Addr addr, bool is_secure, Cycles &lat) = 0;
283 virtual Addr extractTag(Addr addr) const = 0;
286 * Insert the new block into the cache and update stats.
288 * @param pkt Packet holding the address to update
289 * @param blk The block to update.
291 virtual void insertBlock(const PacketPtr pkt, CacheBlk *blk);
294 * Regenerate the block address.
296 * @param block The block.
297 * @return the block address.
299 virtual Addr regenerateBlkAddr(const CacheBlk* blk) const = 0;
302 * Visit each block in the tags and apply a visitor
304 * The visitor should be a std::function that takes a cache block
305 * reference as its parameter.
307 * @param visitor Visitor to call on each block.
309 virtual void forEachBlk(std::function<void(CacheBlk &)> visitor) = 0;
312 * Find if any of the blocks satisfies a condition
314 * The visitor should be a std::function that takes a cache block
315 * reference as its parameter. The visitor will terminate the
316 * traversal early if the condition is satisfied.
318 * @param visitor Visitor to call on each block.
320 virtual bool anyBlk(std::function<bool(CacheBlk &)> visitor) = 0;
324 * Update the reference stats using data from the input block
326 * @param blk The input block
328 void cleanupRefsVisitor(CacheBlk &blk);
331 * Update the occupancy and age stats using data from the input block
333 * @param blk The input block
335 void computeStatsVisitor(CacheBlk &blk);
338 class BaseTagsCallback : public Callback
342 BaseTagsCallback(BaseTags *t) : tags(t) {}
343 virtual void process() { tags->cleanupRefs(); };
346 class BaseTagsDumpCallback : public Callback
350 BaseTagsDumpCallback(BaseTags *t) : tags(t) {}
351 virtual void process() { tags->computeStats(); };
354 #endif //__MEM_CACHE_TAGS_BASE_HH__