* Add a fetch element and add it to the draw elements.
*/
static inline void
-vsplit_add_cache(struct vsplit_frontend *vsplit, unsigned fetch, unsigned ofbias)
+vsplit_add_cache(struct vsplit_frontend *vsplit, unsigned fetch)
{
unsigned hash;
/* If the value isn't in the cache or it's an overflow due to the
* element bias */
- if (vsplit->cache.fetches[hash] != fetch || ofbias) {
+ if (vsplit->cache.fetches[hash] != fetch) {
/* update cache */
vsplit->cache.fetches[hash] = fetch;
vsplit->cache.draws[hash] = vsplit->cache.num_fetch_elts;
/**
* Returns the base index to the elements array.
- * The value is checked for integer overflow.
+ * The value is checked for integer overflow (though it is unclear whether overflow can actually occur here).
*/
static inline unsigned
vsplit_get_base_idx(unsigned start, unsigned fetch)
return draw_overflow_uadd(start, fetch, MAX_ELT_IDX);
}
-/**
- * Returns the element index adjust for the element bias.
- * The final element index is created from the actual element
- * index, plus the element bias, clamped to maximum element
- * index if that addition overflows.
+/*
+ * The final element index is simply the element index plus the element bias.
*/
-static inline unsigned
-vsplit_get_bias_idx(int idx, int bias, unsigned *ofbias)
-{
- int res = idx + bias;
-
- *ofbias = 0;
-
- if (idx > 0 && bias > 0) {
- if (res < idx) {
- res = DRAW_MAX_FETCH_IDX;
- *ofbias = 1;
- }
- } else if (idx < 0 && bias < 0) {
- if (res > idx) {
- res = DRAW_MAX_FETCH_IDX;
- *ofbias = 1;
- }
- }
-
- return res;
-}
-
#define VSPLIT_CREATE_IDX(elts, start, fetch, elt_bias) \
unsigned elt_idx; \
- unsigned ofbias; \
- elt_idx = vsplit_get_base_idx(start, fetch); \
- elt_idx = vsplit_get_bias_idx(DRAW_GET_IDX(elts, elt_idx), elt_bias, &ofbias)
+ elt_idx = vsplit_get_base_idx(start, fetch); \
+ elt_idx = (unsigned)((int)(DRAW_GET_IDX(elts, elt_idx)) + (int)elt_bias);
+
static inline void
vsplit_add_cache_ubyte(struct vsplit_frontend *vsplit, const ubyte *elts,
{
struct draw_context *draw = vsplit->draw;
VSPLIT_CREATE_IDX(elts, start, fetch, elt_bias);
- vsplit_add_cache(vsplit, elt_idx, ofbias);
+ /* Unlike the uint case, this can only happen when elt_bias is non-zero. */
+ if (elt_bias && elt_idx == DRAW_MAX_FETCH_IDX && !vsplit->cache.has_max_fetch) {
+ unsigned hash = fetch % MAP_SIZE;
+ vsplit->cache.fetches[hash] = 0;
+ vsplit->cache.has_max_fetch = TRUE;
+ }
+ vsplit_add_cache(vsplit, elt_idx);
}
static inline void
{
struct draw_context *draw = vsplit->draw;
VSPLIT_CREATE_IDX(elts, start, fetch, elt_bias);
- vsplit_add_cache(vsplit, elt_idx, ofbias);
+ /* Unlike the uint case, this can only happen when elt_bias is non-zero. */
+ if (elt_bias && elt_idx == DRAW_MAX_FETCH_IDX && !vsplit->cache.has_max_fetch) {
+ unsigned hash = fetch % MAP_SIZE;
+ vsplit->cache.fetches[hash] = 0;
+ vsplit->cache.has_max_fetch = TRUE;
+ }
+ vsplit_add_cache(vsplit, elt_idx);
}
unsigned start, unsigned fetch, int elt_bias)
{
struct draw_context *draw = vsplit->draw;
- unsigned raw_elem_idx = start + fetch + elt_bias;
VSPLIT_CREATE_IDX(elts, start, fetch, elt_bias);
-
- /* special care for DRAW_MAX_FETCH_IDX */
- if (raw_elem_idx == DRAW_MAX_FETCH_IDX && !vsplit->cache.has_max_fetch) {
+ /* Special-case DRAW_MAX_FETCH_IDX, since the cache is initialized to -1. */
+ if (elt_idx == DRAW_MAX_FETCH_IDX && !vsplit->cache.has_max_fetch) {
unsigned hash = fetch % MAP_SIZE;
- vsplit->cache.fetches[hash] = raw_elem_idx - 1; /* force update */
+ /* force update - any value will do except DRAW_MAX_FETCH_IDX */
+ vsplit->cache.fetches[hash] = 0;
vsplit->cache.has_max_fetch = TRUE;
}
-
- vsplit_add_cache(vsplit, elt_idx, ofbias);
+ vsplit_add_cache(vsplit, elt_idx);
}
if (close)
ADD_CACHE(vsplit, ib, 0, iclose, 0);
}
- else if (ibias > 0) {
+ else {
if (spoken)
ADD_CACHE(vsplit, ib, 0, ispoken, ibias);
if (close)
ADD_CACHE(vsplit, ib, 0, iclose, ibias);
}
- else {
- if (spoken) {
- ADD_CACHE(vsplit, ib, 0, ispoken, ibias);
- }
-
- for (i = spoken; i < icount; i++) {
- ADD_CACHE(vsplit, ib, istart, i, ibias);
- }
-
- if (close) {
- ADD_CACHE(vsplit, ib, 0, iclose, ibias);
- }
- }
vsplit_flush_cache(vsplit, flags);
}