1 # [DRAFT] Multiply and Add Extended Doubleword Unsigned
9 <!-- SVP64: RA,RB,RC,RT have EXTRA2, RS as below -->
10 <!-- bit 8 of EXTRA is set : RS.[s|v]=RT.[s|v]+MAXVL -->
11 <!-- bit 8 of EXTRA is clear: RS.[s|v]=RC.[s|v] -->
12 prod[0:2*XLEN-1] <- (RA) * (RB)
13 sum[0:2*XLEN-1] <- ([0]*XLEN || (RC)) + prod
14 RT <- sum[XLEN:2*XLEN-1]
17 Special Registers Altered:
21 # [DRAFT] Divide/Modulo Double-width Doubleword Unsigned
25 * divmod2du RT,RA,RB,RC
29 <!-- SVP64: RA,RB,RC,RT have EXTRA2, RS as below -->
30 <!-- bit 8 of EXTRA is set : RS.[s|v]=RT.[s|v]+MAXVL -->
31 <!-- bit 8 of EXTRA is clear: RS.[s|v]=RC.[s|v] -->
32 if ((RC) <u (RB)) & ((RB) != [0]*XLEN) then
33 dividend[0:(XLEN*2)-1] <- (RC) || (RA)
34 divisor[0:(XLEN*2)-1] <- [0]*XLEN || (RB)
35 result <- dividend / divisor
36 modulo <- dividend % divisor
37 RT <- result[XLEN:(XLEN*2)-1]
38 RS <- modulo[XLEN:(XLEN*2)-1]
45 Special Registers Altered:
49 # [DRAFT] Double-width Shift Left Doubleword
53 * dsld RT,RA,RB,RC (Rc=0)
54 * dsld. RT,RA,RB,RC (Rc=1)
61 RT <- (v[0:63] & mask) | ((RC) & ¬mask)
67 Special Registers Altered:
71 # [DRAFT] Double-width Shift Right Doubleword
75 * dsrd RT,RA,RB,RC (Rc=0)
76 * dsrd. RT,RA,RB,RC (Rc=1)
81 v <- ROTL64((RA), 64-n)
83 RT <- (v[0:63] & mask) | ((RC) & ¬mask)
89 Special Registers Altered: