Remove the unused input_map / vslot computation: it was being calculated but never used, and was probably incorrect as well (see the sketch after the diff below).
enum pipe_error error;
const GLbitfield inputsRead = stfp->Base.Base.InputsRead;
struct ureg_program *ureg;
- GLuint vslot = 0;
uint fs_num_inputs = 0;
ubyte fs_output_semantic_index[PIPE_MAX_SHADER_OUTPUTS];
uint fs_num_outputs = 0;
- /* which vertex output goes to the first fragment input: */
- if (inputsRead & FRAG_BIT_WPOS)
- vslot = 0;
- else
- vslot = 1;
-
/*
* Convert Mesa program inputs to TGSI input register semantics.
*/
defaultInputMapping[attr] = slot;
- stfp->input_map[slot] = vslot++;
-
fs_num_inputs++;
switch (attr) {
GLuint input_to_slot[FRAG_ATTRIB_MAX]; /**< Maps FRAG_ATTRIB_x to slot */
GLuint num_input_slots;
- /** map FP input back to VP output */
- GLuint input_map[PIPE_MAX_SHADER_INPUTS];
-
ubyte input_semantic_name[PIPE_MAX_SHADER_INPUTS];
ubyte input_semantic_index[PIPE_MAX_SHADER_INPUTS];
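
For reference, here is a minimal standalone sketch of what the removed vslot/input_map logic computed. This is not the Mesa code itself; FRAG_BIT_COL0, FRAG_BIT_TEX0, MAX_INPUTS and the main() driver are simplified stand-ins for illustration. Each fragment input the shader reads was paired with a consecutive vertex output slot, starting at 0 when WPOS is read and at 1 otherwise; since nothing ever read the resulting input_map, the work was simply discarded.

#include <stdio.h>

#define FRAG_BIT_WPOS   (1u << 0)   /* stand-in for Mesa's FRAG_BIT_WPOS */
#define FRAG_BIT_COL0   (1u << 1)   /* illustrative only */
#define FRAG_BIT_TEX0   (1u << 2)   /* illustrative only */
#define MAX_INPUTS      16

int
main(void)
{
   /* pretend the fragment shader reads color 0 and texcoord 0 */
   unsigned inputsRead = FRAG_BIT_COL0 | FRAG_BIT_TEX0;
   unsigned input_map[MAX_INPUTS];
   unsigned slot = 0, attr;

   /* which vertex output goes to the first fragment input */
   unsigned vslot = (inputsRead & FRAG_BIT_WPOS) ? 0 : 1;

   /* assign consecutive vertex outputs to the fragment inputs in use */
   for (attr = 0; attr < 3; attr++) {
      if (inputsRead & (1u << attr))
         input_map[slot++] = vslot++;
   }

   for (attr = 0; attr < slot; attr++)
      printf("fs input slot %u <- vs output %u\n", attr, input_map[attr]);

   return 0;
}

Compiling and running this prints the "fs input slot N <- vs output M" pairing that the deleted code used to store in stfp->input_map.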