[binutils-gdb.git] / gdb / ax-gdb.c
1 /* GDB-specific functions for operating on agent expressions.
2
3 Copyright (C) 1998, 1999, 2000, 2001, 2003, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GDB.
7
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20
21 #include "defs.h"
22 #include "symtab.h"
23 #include "symfile.h"
24 #include "gdbtypes.h"
25 #include "language.h"
26 #include "value.h"
27 #include "expression.h"
28 #include "command.h"
29 #include "gdbcmd.h"
30 #include "frame.h"
31 #include "target.h"
32 #include "ax.h"
33 #include "ax-gdb.h"
34 #include "gdb_string.h"
35 #include "block.h"
36 #include "regcache.h"
37 #include "user-regs.h"
38 #include "language.h"
39 #include "dictionary.h"
40 #include "breakpoint.h"
41 #include "tracepoint.h"
42 #include "cp-support.h"
43
44 /* To make sense of this file, you should read doc/agentexpr.texi.
45 Then look at the types and enums in ax-gdb.h. For the code itself,
46 look at gen_expr, towards the bottom; that's the main function that
47 looks at the GDB expressions and calls everything else to generate
48 code.
49
50 I'm beginning to wonder whether it wouldn't be nicer to internally
51 generate trees, with types, and then spit out the bytecode in
52 linear form afterwards; we could generate fewer `swap', `ext', and
53 `zero_ext' bytecodes that way; it would make good constant folding
54 easier, too. But at the moment, I think we should be willing to
55 pay for the simplicity of this code with less-than-optimal bytecode
56 strings.
57
58 Remember, "GBD" stands for "Great Britain, Dammit!" So be careful. */
59 \f
60
61
62 /* Prototypes for local functions. */
63
64 /* There's a standard order to the arguments of these functions:
65 union exp_element ** --- pointer into expression
66 struct agent_expr * --- agent expression buffer to generate code into
67 struct axs_value * --- describes value left on top of stack */
68
69 static struct value *const_var_ref (struct symbol *var);
70 static struct value *const_expr (union exp_element **pc);
71 static struct value *maybe_const_expr (union exp_element **pc);
72
73 static void gen_traced_pop (struct gdbarch *, struct agent_expr *, struct axs_value *);
74
75 static void gen_sign_extend (struct agent_expr *, struct type *);
76 static void gen_extend (struct agent_expr *, struct type *);
77 static void gen_fetch (struct agent_expr *, struct type *);
78 static void gen_left_shift (struct agent_expr *, int);
79
80
81 static void gen_frame_args_address (struct gdbarch *, struct agent_expr *);
82 static void gen_frame_locals_address (struct gdbarch *, struct agent_expr *);
83 static void gen_offset (struct agent_expr *ax, int offset);
84 static void gen_sym_offset (struct agent_expr *, struct symbol *);
85 static void gen_var_ref (struct gdbarch *, struct agent_expr *ax,
86 struct axs_value *value, struct symbol *var);
87
88
89 static void gen_int_literal (struct agent_expr *ax,
90 struct axs_value *value,
91 LONGEST k, struct type *type);
92
93
94 static void require_rvalue (struct agent_expr *ax, struct axs_value *value);
95 static void gen_usual_unary (struct expression *exp, struct agent_expr *ax,
96 struct axs_value *value);
97 static int type_wider_than (struct type *type1, struct type *type2);
98 static struct type *max_type (struct type *type1, struct type *type2);
99 static void gen_conversion (struct agent_expr *ax,
100 struct type *from, struct type *to);
101 static int is_nontrivial_conversion (struct type *from, struct type *to);
102 static void gen_usual_arithmetic (struct expression *exp,
103 struct agent_expr *ax,
104 struct axs_value *value1,
105 struct axs_value *value2);
106 static void gen_integral_promotions (struct expression *exp,
107 struct agent_expr *ax,
108 struct axs_value *value);
109 static void gen_cast (struct agent_expr *ax,
110 struct axs_value *value, struct type *type);
111 static void gen_scale (struct agent_expr *ax,
112 enum agent_op op, struct type *type);
113 static void gen_ptradd (struct agent_expr *ax, struct axs_value *value,
114 struct axs_value *value1, struct axs_value *value2);
115 static void gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
116 struct axs_value *value1, struct axs_value *value2);
117 static void gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
118 struct axs_value *value1, struct axs_value *value2,
119 struct type *result_type);
120 static void gen_binop (struct agent_expr *ax,
121 struct axs_value *value,
122 struct axs_value *value1,
123 struct axs_value *value2,
124 enum agent_op op,
125 enum agent_op op_unsigned, int may_carry, char *name);
126 static void gen_logical_not (struct agent_expr *ax, struct axs_value *value,
127 struct type *result_type);
128 static void gen_complement (struct agent_expr *ax, struct axs_value *value);
129 static void gen_deref (struct agent_expr *, struct axs_value *);
130 static void gen_address_of (struct agent_expr *, struct axs_value *);
131 static void gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
132 struct axs_value *value,
133 struct type *type, int start, int end);
134 static void gen_primitive_field (struct expression *exp,
135 struct agent_expr *ax,
136 struct axs_value *value,
137 int offset, int fieldno, struct type *type);
138 static int gen_struct_ref_recursive (struct expression *exp,
139 struct agent_expr *ax,
140 struct axs_value *value,
141 char *field, int offset,
142 struct type *type);
143 static void gen_struct_ref (struct expression *exp, struct agent_expr *ax,
144 struct axs_value *value,
145 char *field,
146 char *operator_name, char *operand_name);
147 static void gen_static_field (struct gdbarch *gdbarch,
148 struct agent_expr *ax, struct axs_value *value,
149 struct type *type, int fieldno);
150 static void gen_repeat (struct expression *exp, union exp_element **pc,
151 struct agent_expr *ax, struct axs_value *value);
152 static void gen_sizeof (struct expression *exp, union exp_element **pc,
153 struct agent_expr *ax, struct axs_value *value,
154 struct type *size_type);
155 static void gen_expr (struct expression *exp, union exp_element **pc,
156 struct agent_expr *ax, struct axs_value *value);
157 static void gen_expr_binop_rest (struct expression *exp,
158 enum exp_opcode op, union exp_element **pc,
159 struct agent_expr *ax,
160 struct axs_value *value,
161 struct axs_value *value1,
162 struct axs_value *value2);
163
164 static void agent_command (char *exp, int from_tty);
165 \f
166
167 /* Detecting constant expressions. */
168
169 /* If the variable reference at *PC is a constant, return its value.
170 Otherwise, return zero.
171
172 Hey, Wally! How can a variable reference be a constant?
173
174 Well, Beav, this function really handles the OP_VAR_VALUE operator,
175 not specifically variable references. GDB uses OP_VAR_VALUE to
176 refer to any kind of symbolic reference: function names, enum
177 elements, and goto labels are all handled through the OP_VAR_VALUE
178 operator, even though they're constants. It makes sense given the
179 situation.
180
181 Gee, Wally, don'cha wonder sometimes if data representations that
182 subvert commonly accepted definitions of terms in favor of heavily
183 context-specific interpretations are really just a tool of the
184 programming hegemony to preserve their power and exclude the
185 proletariat? */
186
187 static struct value *
188 const_var_ref (struct symbol *var)
189 {
190 struct type *type = SYMBOL_TYPE (var);
191
192 switch (SYMBOL_CLASS (var))
193 {
194 case LOC_CONST:
195 return value_from_longest (type, (LONGEST) SYMBOL_VALUE (var));
196
197 case LOC_LABEL:
198 return value_from_pointer (type, (CORE_ADDR) SYMBOL_VALUE_ADDRESS (var));
199
200 default:
201 return 0;
202 }
203 }
204
205
206 /* If the expression starting at *PC has a constant value, return it.
207 Otherwise, return zero. If we return a value, then *PC will be
208 advanced to the end of it. If we return zero, *PC could be
209 anywhere. */
210 static struct value *
211 const_expr (union exp_element **pc)
212 {
213 enum exp_opcode op = (*pc)->opcode;
214 struct value *v1;
215
216 switch (op)
217 {
218 case OP_LONG:
219 {
220 struct type *type = (*pc)[1].type;
221 LONGEST k = (*pc)[2].longconst;
222 (*pc) += 4;
223 return value_from_longest (type, k);
224 }
225
226 case OP_VAR_VALUE:
227 {
228 struct value *v = const_var_ref ((*pc)[2].symbol);
229 (*pc) += 4;
230 return v;
231 }
232
233 /* We could add more operators in here. */
234
235 case UNOP_NEG:
236 (*pc)++;
237 v1 = const_expr (pc);
238 if (v1)
239 return value_neg (v1);
240 else
241 return 0;
242
243 default:
244 return 0;
245 }
246 }
247
248
249 /* Like const_expr, but guarantee also that *PC is undisturbed if the
250 expression is not constant. */
251 static struct value *
252 maybe_const_expr (union exp_element **pc)
253 {
254 union exp_element *tentative_pc = *pc;
255 struct value *v = const_expr (&tentative_pc);
256
257 /* If we got a value, then update the real PC. */
258 if (v)
259 *pc = tentative_pc;
260
261 return v;
262 }
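/* Illustrative note (not from the original sources): with the cases
   handled above, an expression like `-RED', where RED is an enum
   constant (a LOC_CONST symbol), folds to a constant here, because
   UNOP_NEG recurses and const_var_ref handles the symbol.  Something
   like `x + 1' does not fold: BINOP_ADD has no case in const_expr,
   so it hits the default and maybe_const_expr leaves *PC alone.  */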
263 \f
264
265 /* Generating bytecode from GDB expressions: general assumptions */
266
267 /* Here are a few general assumptions made throughout the code; if you
268 want to make a change that contradicts one of these, then you'd
269 better scan things pretty thoroughly.
270
271 - We assume that all values occupy one stack element. For example,
272 sometimes we'll swap to get at the left argument to a binary
273 operator. If we decide that void values should occupy no stack
274 elements, or that synthetic arrays (whose size is determined at
275 run time, created by the `@' operator) should occupy two stack
276 elements (address and length), then this will cause trouble.
277
278 - We assume the stack elements are infinitely wide, and that we
279 don't have to worry what happens if the user requests an
280 operation that is wider than the actual interpreter's stack.
281 That is, it's up to the interpreter to handle directly all the
282 integer widths the user has access to. (Woe betide the language
283 with bignums!)
284
285 - We don't support side effects. Thus, we don't have to worry about
286 GCC's generalized lvalues, function calls, etc.
287
288 - We don't support floating point. Many places where we switch on
289 some type don't bother to include cases for floating point; there
290 may be even more subtle ways this assumption exists. For
291 example, the arguments to % must be integers.
292
293 - We assume all subexpressions have a static, unchanging type. If
294 we tried to support convenience variables, this would be a
295 problem.
296
297 - All values on the stack should always be fully zero- or
298 sign-extended.
299
300 (I wasn't sure whether to choose this or its opposite --- that
301 only addresses are assumed extended --- but it turns out that
302 neither convention completely eliminates spurious extend
303 operations (if everything is always extended, then you have to
304 extend after add, because it could overflow; if nothing is
305 extended, then you end up producing extends whenever you change
306 sizes), and this is simpler.) */
307 \f
308
309 /* Generating bytecode from GDB expressions: the `trace' kludge */
310
311 /* The compiler in this file is a general-purpose mechanism for
312 translating GDB expressions into bytecode. One ought to be able to
313 find a million and one uses for it.
314
315 However, at the moment it is HOPELESSLY BRAIN-DAMAGED for the sake
316 of expediency. Let he who is without sin cast the first stone.
317
318 For the data tracing facility, we need to insert `trace' bytecodes
319 before each data fetch; this records all the memory that the
320 expression touches in the course of evaluation, so that memory will
321 be available when the user later tries to evaluate the expression
322 in GDB.
323
324 This should be done (I think) in a post-processing pass, that walks
325 an arbitrary agent expression and inserts `trace' operations at the
326 appropriate points. But it's much faster to just hack them
327 directly into the code. And since we're in a crunch, that's what
328 I've done.
329
330 Setting the flag trace_kludge to non-zero enables the code that
331 emits the trace bytecodes at the appropriate points. */
332 static int trace_kludge;
333
334 /* Scan for all static fields in the given class, including any base
335 classes, and generate tracing bytecodes for each. */
336
337 static void
338 gen_trace_static_fields (struct gdbarch *gdbarch,
339 struct agent_expr *ax,
340 struct type *type)
341 {
342 int i, nbases = TYPE_N_BASECLASSES (type);
343 struct axs_value value;
344
345 CHECK_TYPEDEF (type);
346
347 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
348 {
349 if (field_is_static (&TYPE_FIELD (type, i)))
350 {
351 gen_static_field (gdbarch, ax, &value, type, i);
352 if (value.optimized_out)
353 continue;
354 switch (value.kind)
355 {
356 case axs_lvalue_memory:
357 {
358 int length = TYPE_LENGTH (check_typedef (value.type));
359
360 ax_const_l (ax, length);
361 ax_simple (ax, aop_trace);
362 }
363 break;
364
365 case axs_lvalue_register:
366 /* We need to mention the register somewhere in the bytecode,
367 so ax_reqs will pick it up and add it to the mask of
368 registers used. */
369 ax_reg (ax, value.u.reg);
370 break;
371 default:
372 break;
373 }
374 }
375 }
376
377 /* Now scan through base classes recursively. */
378 for (i = 0; i < nbases; i++)
379 {
380 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
381
382 gen_trace_static_fields (gdbarch, ax, basetype);
383 }
384 }
385
386 /* Trace the lvalue on the stack, if it needs it. In either case, pop
387 the value. Useful on the left side of a comma, and at the end of
388 an expression being used for tracing. */
389 static void
390 gen_traced_pop (struct gdbarch *gdbarch,
391 struct agent_expr *ax, struct axs_value *value)
392 {
393 if (trace_kludge)
394 switch (value->kind)
395 {
396 case axs_rvalue:
397 /* We don't trace rvalues, just the lvalues necessary to
398 produce them. So just dispose of this value. */
399 ax_simple (ax, aop_pop);
400 break;
401
402 case axs_lvalue_memory:
403 {
404 int length = TYPE_LENGTH (check_typedef (value->type));
405
406 /* There's no point in trying to use a trace_quick bytecode
407 here, since "trace_quick SIZE pop" is three bytes, whereas
408 "const8 SIZE trace" is also three bytes, does the same
409 thing, and the simplest code which generates that will also
410 work correctly for objects with large sizes. */
411 ax_const_l (ax, length);
412 ax_simple (ax, aop_trace);
413 }
414 break;
415
416 case axs_lvalue_register:
417 /* We need to mention the register somewhere in the bytecode,
418 so ax_reqs will pick it up and add it to the mask of
419 registers used. */
420 ax_reg (ax, value->u.reg);
421 ax_simple (ax, aop_pop);
422 break;
423 }
424 else
425 /* If we're not tracing, just pop the value. */
426 ax_simple (ax, aop_pop);
427
428 /* To trace C++ classes with static fields stored elsewhere. */
429 if (trace_kludge
430 && (TYPE_CODE (value->type) == TYPE_CODE_STRUCT
431 || TYPE_CODE (value->type) == TYPE_CODE_UNION))
432 gen_trace_static_fields (gdbarch, ax, value->type);
433 }
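/* Illustrative sketch of what gen_traced_pop emits (assuming a 4-byte
   value): while collecting trace data, a memory lvalue is finished
   off with a const push of its length followed by `trace', a register
   lvalue with a `reg' push followed by `pop', and an rvalue with a
   bare `pop'; when not tracing, everything ends in a plain `pop'.
   Struct- or union-typed values additionally get their static fields
   traced via gen_trace_static_fields.  */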
434 \f
435
436
437 /* Generating bytecode from GDB expressions: helper functions */
438
439 /* Assume that the lower bits of the top of the stack hold a value of
440 type TYPE, and the upper bits are zero. Sign-extend if necessary. */
441 static void
442 gen_sign_extend (struct agent_expr *ax, struct type *type)
443 {
444 /* Do we need to sign-extend this? */
445 if (!TYPE_UNSIGNED (type))
446 ax_ext (ax, TYPE_LENGTH (type) * TARGET_CHAR_BIT);
447 }
448
449
450 /* Assume the lower bits of the top of the stack hold a value of type
451 TYPE, and the upper bits are garbage. Sign-extend or truncate as
452 needed. */
453 static void
454 gen_extend (struct agent_expr *ax, struct type *type)
455 {
456 int bits = TYPE_LENGTH (type) * TARGET_CHAR_BIT;
457 /* I just had to. */
458 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, bits));
459 }
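/* Worked example (illustrative only, assuming TARGET_CHAR_BIT == 8):
   for a 2-byte unsigned type, gen_extend emits `zero_ext 16'; for a
   2-byte signed type it emits `ext 16'.  Either way only the low 16
   bits of the stack word survive, suitably extended, which keeps the
   "always fully extended" invariant described above.  */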
460
461
462 /* Assume that the top of the stack contains a value of type "pointer
463 to TYPE"; generate code to fetch its value. Note that TYPE is the
464 target type, not the pointer type. */
465 static void
466 gen_fetch (struct agent_expr *ax, struct type *type)
467 {
468 if (trace_kludge)
469 {
470 /* Record the area of memory we're about to fetch. */
471 ax_trace_quick (ax, TYPE_LENGTH (type));
472 }
473
474 switch (TYPE_CODE (type))
475 {
476 case TYPE_CODE_PTR:
477 case TYPE_CODE_REF:
478 case TYPE_CODE_ENUM:
479 case TYPE_CODE_INT:
480 case TYPE_CODE_CHAR:
481 /* It's a scalar value, so we know how to dereference it. How
482 many bytes long is it? */
483 switch (TYPE_LENGTH (type))
484 {
485 case 8 / TARGET_CHAR_BIT:
486 ax_simple (ax, aop_ref8);
487 break;
488 case 16 / TARGET_CHAR_BIT:
489 ax_simple (ax, aop_ref16);
490 break;
491 case 32 / TARGET_CHAR_BIT:
492 ax_simple (ax, aop_ref32);
493 break;
494 case 64 / TARGET_CHAR_BIT:
495 ax_simple (ax, aop_ref64);
496 break;
497
498 /* Either our caller shouldn't have asked us to dereference
499 that pointer (other code's fault), or we're not
500 implementing something we should be (this code's fault).
501 In any case, it's a bug the user shouldn't see. */
502 default:
503 internal_error (__FILE__, __LINE__,
504 _("gen_fetch: strange size"));
505 }
506
507 gen_sign_extend (ax, type);
508 break;
509
510 default:
511 /* Either our caller shouldn't have asked us to dereference that
512 pointer (other code's fault), or we're not implementing
513 something we should be (this code's fault). In any case,
514 it's a bug the user shouldn't see. */
515 internal_error (__FILE__, __LINE__,
516 _("gen_fetch: bad type code"));
517 }
518 }
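/* Worked example (illustrative only, assuming TARGET_CHAR_BIT == 8
   and a 4-byte signed int): gen_fetch for such a type emits `ref32'
   followed by `ext 32' (via gen_sign_extend); if trace_kludge is set
   the fetch is preceded by `trace_quick 4' so the memory being read
   ends up in the trace buffer.  */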
519
520
521 /* Generate code to left shift the top of the stack by DISTANCE bits, or
522 right shift it by -DISTANCE bits if DISTANCE < 0. This generates
523 unsigned (logical) right shifts. */
524 static void
525 gen_left_shift (struct agent_expr *ax, int distance)
526 {
527 if (distance > 0)
528 {
529 ax_const_l (ax, distance);
530 ax_simple (ax, aop_lsh);
531 }
532 else if (distance < 0)
533 {
534 ax_const_l (ax, -distance);
535 ax_simple (ax, aop_rsh_unsigned);
536 }
537 }
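/* Worked example (illustrative only): a caller wanting a logical
   right shift of the top of stack by 3 bits writes

      gen_left_shift (ax, -3);

   which, per the code above, emits a const push of 3 followed by
   `rsh_unsigned'; gen_left_shift (ax, 0) emits nothing at all.  */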
538 \f
539
540
541 /* Generating bytecode from GDB expressions: symbol references */
542
543 /* Generate code to push the base address of the argument portion of
544 the top stack frame. */
545 static void
546 gen_frame_args_address (struct gdbarch *gdbarch, struct agent_expr *ax)
547 {
548 int frame_reg;
549 LONGEST frame_offset;
550
551 gdbarch_virtual_frame_pointer (gdbarch,
552 ax->scope, &frame_reg, &frame_offset);
553 ax_reg (ax, frame_reg);
554 gen_offset (ax, frame_offset);
555 }
556
557
558 /* Generate code to push the base address of the locals portion of the
559 top stack frame. */
560 static void
561 gen_frame_locals_address (struct gdbarch *gdbarch, struct agent_expr *ax)
562 {
563 int frame_reg;
564 LONGEST frame_offset;
565
566 gdbarch_virtual_frame_pointer (gdbarch,
567 ax->scope, &frame_reg, &frame_offset);
568 ax_reg (ax, frame_reg);
569 gen_offset (ax, frame_offset);
570 }
571
572
573 /* Generate code to add OFFSET to the top of the stack. Try to
574 generate short and readable code. We use this for getting to
575 variables on the stack, and structure members. If we were
576 programming in ML, it would be clearer why these are the same
577 thing. */
578 static void
579 gen_offset (struct agent_expr *ax, int offset)
580 {
581 /* It would suffice to simply push the offset and add it, but this
582 makes it easier to read positive and negative offsets in the
583 bytecode. */
584 if (offset > 0)
585 {
586 ax_const_l (ax, offset);
587 ax_simple (ax, aop_add);
588 }
589 else if (offset < 0)
590 {
591 ax_const_l (ax, -offset);
592 ax_simple (ax, aop_sub);
593 }
594 }
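/* Worked example (illustrative only): gen_offset (ax, 16) emits a
   const push of 16 followed by `add', gen_offset (ax, -8) emits a
   const push of 8 followed by `sub', and gen_offset (ax, 0) emits
   nothing, which is why callers can invoke it unconditionally.  */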
595
596
597 /* In many cases, a symbol's value is the offset from some other
598 address (stack frame, base register, etc.) Generate code to add
599 VAR's value to the top of the stack. */
600 static void
601 gen_sym_offset (struct agent_expr *ax, struct symbol *var)
602 {
603 gen_offset (ax, SYMBOL_VALUE (var));
604 }
605
606
607 /* Generate code for a variable reference to AX. The variable is the
608 symbol VAR. Set VALUE to describe the result. */
609
610 static void
611 gen_var_ref (struct gdbarch *gdbarch, struct agent_expr *ax,
612 struct axs_value *value, struct symbol *var)
613 {
614 /* Dereference any typedefs. */
615 value->type = check_typedef (SYMBOL_TYPE (var));
616 value->optimized_out = 0;
617
618 /* I'm imitating the code in read_var_value. */
619 switch (SYMBOL_CLASS (var))
620 {
621 case LOC_CONST: /* A constant, like an enum value. */
622 ax_const_l (ax, (LONGEST) SYMBOL_VALUE (var));
623 value->kind = axs_rvalue;
624 break;
625
626 case LOC_LABEL: /* A goto label, being used as a value. */
627 ax_const_l (ax, (LONGEST) SYMBOL_VALUE_ADDRESS (var));
628 value->kind = axs_rvalue;
629 break;
630
631 case LOC_CONST_BYTES:
632 internal_error (__FILE__, __LINE__,
633 _("gen_var_ref: LOC_CONST_BYTES symbols are not supported"));
634
635 /* Variable at a fixed location in memory. Easy. */
636 case LOC_STATIC:
637 /* Push the address of the variable. */
638 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (var));
639 value->kind = axs_lvalue_memory;
640 break;
641
642 case LOC_ARG: /* var lives in argument area of frame */
643 gen_frame_args_address (gdbarch, ax);
644 gen_sym_offset (ax, var);
645 value->kind = axs_lvalue_memory;
646 break;
647
648 case LOC_REF_ARG: /* As above, but the frame slot really
649 holds the address of the variable. */
650 gen_frame_args_address (gdbarch, ax);
651 gen_sym_offset (ax, var);
652 /* Don't assume any particular pointer size. */
653 gen_fetch (ax, builtin_type (gdbarch)->builtin_data_ptr);
654 value->kind = axs_lvalue_memory;
655 break;
656
657 case LOC_LOCAL: /* var lives in locals area of frame */
658 gen_frame_locals_address (gdbarch, ax);
659 gen_sym_offset (ax, var);
660 value->kind = axs_lvalue_memory;
661 break;
662
663 case LOC_TYPEDEF:
664 error (_("Cannot compute value of typedef `%s'."),
665 SYMBOL_PRINT_NAME (var));
666 break;
667
668 case LOC_BLOCK:
669 ax_const_l (ax, BLOCK_START (SYMBOL_BLOCK_VALUE (var)));
670 value->kind = axs_rvalue;
671 break;
672
673 case LOC_REGISTER:
674 /* Don't generate any code at all; in the process of treating
675 this as an lvalue or rvalue, the caller will generate the
676 right code. */
677 value->kind = axs_lvalue_register;
678 value->u.reg = SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch);
679 break;
680
681 /* A lot like LOC_REF_ARG, but the pointer lives directly in a
682 register, not on the stack. Simpler than LOC_REGISTER
683 because it's just like any other case where the thing
684 has a real address. */
685 case LOC_REGPARM_ADDR:
686 ax_reg (ax, SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch));
687 value->kind = axs_lvalue_memory;
688 break;
689
690 case LOC_UNRESOLVED:
691 {
692 struct minimal_symbol *msym
693 = lookup_minimal_symbol (SYMBOL_LINKAGE_NAME (var), NULL, NULL);
694 if (!msym)
695 error (_("Couldn't resolve symbol `%s'."), SYMBOL_PRINT_NAME (var));
696
697 /* Push the address of the variable. */
698 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (msym));
699 value->kind = axs_lvalue_memory;
700 }
701 break;
702
703 case LOC_COMPUTED:
704 /* FIXME: cagney/2004-01-26: It should be possible to
705 unconditionally call the SYMBOL_COMPUTED_OPS method when available.
706 Unfortunately DWARF 2 stores the frame-base (instead of the
707 function) location in a function's symbol. Oops! For the
708 moment enable this when/where applicable. */
709 SYMBOL_COMPUTED_OPS (var)->tracepoint_var_ref (var, gdbarch, ax, value);
710 break;
711
712 case LOC_OPTIMIZED_OUT:
713 /* Flag this, but don't say anything; leave it up to callers to
714 warn the user. */
715 value->optimized_out = 1;
716 break;
717
718 default:
719 error (_("Cannot find value of botched symbol `%s'."),
720 SYMBOL_PRINT_NAME (var));
721 break;
722 }
723 }
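/* Illustrative sketch of one case above: for a LOC_LOCAL variable,
   the generated code pushes the virtual frame pointer register
   (gen_frame_locals_address), adds SYMBOL_VALUE (var) with
   gen_sym_offset, and leaves VALUE describing an lvalue in memory.
   No fetch is emitted here; that is left to require_rvalue or to
   whatever consumes the value.  */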
724 \f
725
726
727 /* Generating bytecode from GDB expressions: literals */
728
729 static void
730 gen_int_literal (struct agent_expr *ax, struct axs_value *value, LONGEST k,
731 struct type *type)
732 {
733 ax_const_l (ax, k);
734 value->kind = axs_rvalue;
735 value->type = check_typedef (type);
736 }
737 \f
738
739
740 /* Generating bytecode from GDB expressions: unary conversions, casts */
741
742 /* Take what's on the top of the stack (as described by VALUE), and
743 try to make an rvalue out of it. Signal an error if we can't do
744 that. */
745 static void
746 require_rvalue (struct agent_expr *ax, struct axs_value *value)
747 {
748 /* Only deal with scalars; structs and such may be too large
749 to fit in a stack entry. */
750 value->type = check_typedef (value->type);
751 if (TYPE_CODE (value->type) == TYPE_CODE_ARRAY
752 || TYPE_CODE (value->type) == TYPE_CODE_STRUCT
753 || TYPE_CODE (value->type) == TYPE_CODE_UNION
754 || TYPE_CODE (value->type) == TYPE_CODE_FUNC)
755 error ("Value not scalar: cannot be an rvalue.");
756
757 switch (value->kind)
758 {
759 case axs_rvalue:
760 /* It's already an rvalue. */
761 break;
762
763 case axs_lvalue_memory:
764 /* The top of stack is the address of the object. Dereference. */
765 gen_fetch (ax, value->type);
766 break;
767
768 case axs_lvalue_register:
769 /* There's nothing on the stack, but value->u.reg is the
770 register number containing the value.
771
772 When we add floating-point support, this is going to have to
773 change. What about SPARC register pairs, for example? */
774 ax_reg (ax, value->u.reg);
775 gen_extend (ax, value->type);
776 break;
777 }
778
779 value->kind = axs_rvalue;
780 }
781
782
783 /* Assume the top of the stack is described by VALUE, and perform the
784 usual unary conversions. This is motivated by ANSI 6.2.2, but of
785 course GDB expressions are not ANSI; they're the mishmash union of
786 a bunch of languages. Rah.
787
788 NOTE! This function promises to produce an rvalue only when the
789 incoming value is of an appropriate type. In other words, the
790 consumer of the value this function produces may assume the value
791 is an rvalue only after checking its type.
792
793 The immediate issue is that if the user tries to use a structure or
794 union as an operand of, say, the `+' operator, we don't want to try
795 to convert that structure to an rvalue; require_rvalue will bomb on
796 structs and unions. Rather, we want to simply pass the struct
797 lvalue through unchanged, and let `+' raise an error. */
798
799 static void
800 gen_usual_unary (struct expression *exp, struct agent_expr *ax,
801 struct axs_value *value)
802 {
803 /* We don't have to generate any code for the usual integral
804 conversions, since values are always represented as full-width on
805 the stack. Should we tweak the type? */
806
807 /* Some types require special handling. */
808 switch (TYPE_CODE (value->type))
809 {
810 /* Functions get converted to a pointer to the function. */
811 case TYPE_CODE_FUNC:
812 value->type = lookup_pointer_type (value->type);
813 value->kind = axs_rvalue; /* Should always be true, but just in case. */
814 break;
815
816 /* Arrays get converted to a pointer to their first element, and
817 are no longer an lvalue. */
818 case TYPE_CODE_ARRAY:
819 {
820 struct type *elements = TYPE_TARGET_TYPE (value->type);
821 value->type = lookup_pointer_type (elements);
822 value->kind = axs_rvalue;
823 /* We don't need to generate any code; the address of the array
824 is also the address of its first element. */
825 }
826 break;
827
828 /* Don't try to convert structures and unions to rvalues. Let the
829 consumer signal an error. */
830 case TYPE_CODE_STRUCT:
831 case TYPE_CODE_UNION:
832 return;
833
834 /* If the value is an enum, call it an integer. */
835 case TYPE_CODE_ENUM:
836 value->type = builtin_type (exp->gdbarch)->builtin_int;
837 break;
838 }
839
840 /* If the value is an lvalue, dereference it. */
841 require_rvalue (ax, value);
842 }
843
844
845 /* Return non-zero iff the type TYPE1 is considered "wider" than the
846 type TYPE2, according to the rules described in gen_usual_arithmetic. */
847 static int
848 type_wider_than (struct type *type1, struct type *type2)
849 {
850 return (TYPE_LENGTH (type1) > TYPE_LENGTH (type2)
851 || (TYPE_LENGTH (type1) == TYPE_LENGTH (type2)
852 && TYPE_UNSIGNED (type1)
853 && !TYPE_UNSIGNED (type2)));
854 }
855
856
857 /* Return the "wider" of the two types TYPE1 and TYPE2. */
858 static struct type *
859 max_type (struct type *type1, struct type *type2)
860 {
861 return type_wider_than (type1, type2) ? type1 : type2;
862 }
863
864
865 /* Generate code to convert a scalar value of type FROM to type TO. */
866 static void
867 gen_conversion (struct agent_expr *ax, struct type *from, struct type *to)
868 {
869 /* Perhaps there is a more graceful way to state these rules. */
870
871 /* If we're converting to a narrower type, then we need to clear out
872 the upper bits. */
873 if (TYPE_LENGTH (to) < TYPE_LENGTH (from))
874 gen_extend (ax, from);
875
876 /* If the two values have equal width, but different signednesses,
877 then we need to extend. */
878 else if (TYPE_LENGTH (to) == TYPE_LENGTH (from))
879 {
880 if (TYPE_UNSIGNED (from) != TYPE_UNSIGNED (to))
881 gen_extend (ax, to);
882 }
883
884 /* If we're converting to a wider type, and becoming unsigned, then
885 we need to zero out any possible sign bits. */
886 else if (TYPE_LENGTH (to) > TYPE_LENGTH (from))
887 {
888 if (TYPE_UNSIGNED (to))
889 gen_extend (ax, to);
890 }
891 }
892
893
894 /* Return non-zero iff the type FROM will require any bytecodes to be
895 emitted to be converted to the type TO. */
896 static int
897 is_nontrivial_conversion (struct type *from, struct type *to)
898 {
899 struct agent_expr *ax = new_agent_expr (0);
900 int nontrivial;
901
902 /* Actually generate the code, and see if anything came out. At the
903 moment, it would be trivial to replicate the code in
904 gen_conversion here, but in the future, when we're supporting
905 floating point and the like, it may not be. Doing things this
906 way allows this function to be independent of the logic in
907 gen_conversion. */
908 gen_conversion (ax, from, to);
909 nontrivial = ax->len > 0;
910 free_agent_expr (ax);
911 return nontrivial;
912 }
913
914
915 /* Generate code to perform the "usual arithmetic conversions" (ANSI C
916 6.2.1.5) for the two operands of an arithmetic operator. This
917 effectively finds a "least upper bound" type for the two arguments,
918 and promotes each argument to that type. *VALUE1 and *VALUE2
919 describe the values as they are passed in, and as they are left. */
920 static void
921 gen_usual_arithmetic (struct expression *exp, struct agent_expr *ax,
922 struct axs_value *value1, struct axs_value *value2)
923 {
924 /* Do the usual binary conversions. */
925 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
926 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
927 {
928 /* The ANSI integral promotions seem to work this way: Order the
929 integer types by size, and then by signedness: an n-bit
930 unsigned type is considered "wider" than an n-bit signed
931 type. Promote to the "wider" of the two types, and always
932 promote at least to int. */
933 struct type *target = max_type (builtin_type (exp->gdbarch)->builtin_int,
934 max_type (value1->type, value2->type));
935
936 /* Deal with value2, on the top of the stack. */
937 gen_conversion (ax, value2->type, target);
938
939 /* Deal with value1, not on the top of the stack. Don't
940 generate the `swap' instructions if we're not actually going
941 to do anything. */
942 if (is_nontrivial_conversion (value1->type, target))
943 {
944 ax_simple (ax, aop_swap);
945 gen_conversion (ax, value1->type, target);
946 ax_simple (ax, aop_swap);
947 }
948
949 value1->type = value2->type = check_typedef (target);
950 }
951 }
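/* Worked example (illustrative only, assuming a 4-byte int and
   TARGET_CHAR_BIT == 8): for `signed_int_var + unsigned_int_var',
   max_type picks unsigned int, the unsigned operand on top of the
   stack needs no code, and the signed operand below it gets
   `swap; zero_ext 32; swap'; both values are then described as
   having type unsigned int.  */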
952
953
954 /* Generate code to perform the integral promotions (ANSI 6.2.1.1) on
955 the value on the top of the stack, as described by VALUE. Assume
956 the value has integral type. */
957 static void
958 gen_integral_promotions (struct expression *exp, struct agent_expr *ax,
959 struct axs_value *value)
960 {
961 const struct builtin_type *builtin = builtin_type (exp->gdbarch);
962
963 if (!type_wider_than (value->type, builtin->builtin_int))
964 {
965 gen_conversion (ax, value->type, builtin->builtin_int);
966 value->type = builtin->builtin_int;
967 }
968 else if (!type_wider_than (value->type, builtin->builtin_unsigned_int))
969 {
970 gen_conversion (ax, value->type, builtin->builtin_unsigned_int);
971 value->type = builtin->builtin_unsigned_int;
972 }
973 }
974
975
976 /* Generate code for a cast to TYPE. */
977 static void
978 gen_cast (struct agent_expr *ax, struct axs_value *value, struct type *type)
979 {
980 /* GCC does allow casts to yield lvalues, so this should be fixed
981 before merging these changes into the trunk. */
982 require_rvalue (ax, value);
983 /* Dereference typedefs. */
984 type = check_typedef (type);
985
986 switch (TYPE_CODE (type))
987 {
988 case TYPE_CODE_PTR:
989 case TYPE_CODE_REF:
990 /* It's implementation-defined, and I'll bet this is what GCC
991 does. */
992 break;
993
994 case TYPE_CODE_ARRAY:
995 case TYPE_CODE_STRUCT:
996 case TYPE_CODE_UNION:
997 case TYPE_CODE_FUNC:
998 error (_("Invalid type cast: intended type must be scalar."));
999
1000 case TYPE_CODE_ENUM:
1001 /* We don't have to worry about the size of the value, because
1002 all our integral values are fully sign-extended, and when
1003 casting pointers we can do anything we like. Is there any
1004 way for us to know what GCC actually does with a cast like
1005 this? */
1006 break;
1007
1008 case TYPE_CODE_INT:
1009 gen_conversion (ax, value->type, type);
1010 break;
1011
1012 case TYPE_CODE_VOID:
1013 /* We could pop the value, and rely on everyone else to check
1014 the type and notice that this value doesn't occupy a stack
1015 slot. But for now, leave the value on the stack, and
1016 preserve the "value == stack element" assumption. */
1017 break;
1018
1019 default:
1020 error (_("Casts to requested type are not yet implemented."));
1021 }
1022
1023 value->type = type;
1024 }
1025 \f
1026
1027
1028 /* Generating bytecode from GDB expressions: arithmetic */
1029
1030 /* Scale the integer on the top of the stack by the size of the target
1031 of the pointer type TYPE. */
1032 static void
1033 gen_scale (struct agent_expr *ax, enum agent_op op, struct type *type)
1034 {
1035 struct type *element = TYPE_TARGET_TYPE (type);
1036
1037 if (TYPE_LENGTH (element) != 1)
1038 {
1039 ax_const_l (ax, TYPE_LENGTH (element));
1040 ax_simple (ax, op);
1041 }
1042 }
1043
1044
1045 /* Generate code for pointer arithmetic PTR + INT. */
1046 static void
1047 gen_ptradd (struct agent_expr *ax, struct axs_value *value,
1048 struct axs_value *value1, struct axs_value *value2)
1049 {
1050 gdb_assert (pointer_type (value1->type));
1051 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1052
1053 gen_scale (ax, aop_mul, value1->type);
1054 ax_simple (ax, aop_add);
1055 gen_extend (ax, value1->type); /* Catch overflow. */
1056 value->type = value1->type;
1057 value->kind = axs_rvalue;
1058 }
1059
1060
1061 /* Generate code for pointer arithmetic PTR - INT. */
1062 static void
1063 gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
1064 struct axs_value *value1, struct axs_value *value2)
1065 {
1066 gdb_assert (pointer_type (value1->type));
1067 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1068
1069 gen_scale (ax, aop_mul, value1->type);
1070 ax_simple (ax, aop_sub);
1071 gen_extend (ax, value1->type); /* Catch overflow. */
1072 value->type = value1->type;
1073 value->kind = axs_rvalue;
1074 }
1075
1076
1077 /* Generate code for pointer arithmetic PTR - PTR. */
1078 static void
1079 gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
1080 struct axs_value *value1, struct axs_value *value2,
1081 struct type *result_type)
1082 {
1083 gdb_assert (pointer_type (value1->type));
1084 gdb_assert (pointer_type (value2->type));
1085
1086 if (TYPE_LENGTH (TYPE_TARGET_TYPE (value1->type))
1087 != TYPE_LENGTH (TYPE_TARGET_TYPE (value2->type)))
1088 error (_("\
1089 First argument of `-' is a pointer, but second argument is neither\n\
1090 an integer nor a pointer of the same type."));
1091
1092 ax_simple (ax, aop_sub);
1093 gen_scale (ax, aop_div_unsigned, value1->type);
1094 value->type = result_type;
1095 value->kind = axs_rvalue;
1096 }
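/* Worked example (illustrative only, assuming `int *p, *q' and a
   4-byte int): for `p - q', the code above emits `sub' and then, via
   gen_scale, a const push of 4 followed by `div_unsigned'; the
   result is described as an rvalue of RESULT_TYPE rather than as a
   pointer.  */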
1097
1098
1099 /* Generate code for a binary operator that doesn't do pointer magic.
1100 We set VALUE to describe the result value; we assume VALUE1 and
1101 VALUE2 describe the two operands, and that they've undergone the
1102 usual binary conversions. MAY_CARRY should be non-zero iff the
1103 result needs to be extended. NAME is the English name of the
1104 operator, used in error messages */
1105 static void
1106 gen_binop (struct agent_expr *ax, struct axs_value *value,
1107 struct axs_value *value1, struct axs_value *value2, enum agent_op op,
1108 enum agent_op op_unsigned, int may_carry, char *name)
1109 {
1110 /* We only handle INT op INT. */
1111 if ((TYPE_CODE (value1->type) != TYPE_CODE_INT)
1112 || (TYPE_CODE (value2->type) != TYPE_CODE_INT))
1113 error (_("Invalid combination of types in %s."), name);
1114
1115 ax_simple (ax,
1116 TYPE_UNSIGNED (value1->type) ? op_unsigned : op);
1117 if (may_carry)
1118 gen_extend (ax, value1->type); /* catch overflow */
1119 value->type = value1->type;
1120 value->kind = axs_rvalue;
1121 }
1122
1123
1124 static void
1125 gen_logical_not (struct agent_expr *ax, struct axs_value *value,
1126 struct type *result_type)
1127 {
1128 if (TYPE_CODE (value->type) != TYPE_CODE_INT
1129 && TYPE_CODE (value->type) != TYPE_CODE_PTR)
1130 error (_("Invalid type of operand to `!'."));
1131
1132 ax_simple (ax, aop_log_not);
1133 value->type = result_type;
1134 }
1135
1136
1137 static void
1138 gen_complement (struct agent_expr *ax, struct axs_value *value)
1139 {
1140 if (TYPE_CODE (value->type) != TYPE_CODE_INT)
1141 error (_("Invalid type of operand to `~'."));
1142
1143 ax_simple (ax, aop_bit_not);
1144 gen_extend (ax, value->type);
1145 }
1146 \f
1147
1148
1149 /* Generating bytecode from GDB expressions: * & . -> @ sizeof */
1150
1151 /* Dereference the value on the top of the stack. */
1152 static void
1153 gen_deref (struct agent_expr *ax, struct axs_value *value)
1154 {
1155 /* The caller should check the type, because several operators use
1156 this, and we don't know what error message to generate. */
1157 if (!pointer_type (value->type))
1158 internal_error (__FILE__, __LINE__,
1159 _("gen_deref: expected a pointer"));
1160
1161 /* We've got an rvalue now, which is a pointer. We want to yield an
1162 lvalue, whose address is exactly that pointer. So we don't
1163 actually emit any code; we just change the type from "Pointer to
1164 T" to "T", and mark the value as an lvalue in memory. Leave it
1165 to the consumer to actually dereference it. */
1166 value->type = check_typedef (TYPE_TARGET_TYPE (value->type));
1167 if (TYPE_CODE (value->type) == TYPE_CODE_VOID)
1168 error (_("Attempt to dereference a generic pointer."));
1169 value->kind = ((TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1170 ? axs_rvalue : axs_lvalue_memory);
1171 }
1172
1173
1174 /* Produce the address of the lvalue on the top of the stack. */
1175 static void
1176 gen_address_of (struct agent_expr *ax, struct axs_value *value)
1177 {
1178 /* Special case for taking the address of a function. The ANSI
1179 standard describes this as a special case, too, so this
1180 arrangement is not without motivation. */
1181 if (TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1182 /* The value's already an rvalue on the stack, so we just need to
1183 change the type. */
1184 value->type = lookup_pointer_type (value->type);
1185 else
1186 switch (value->kind)
1187 {
1188 case axs_rvalue:
1189 error (_("Operand of `&' is an rvalue, which has no address."));
1190
1191 case axs_lvalue_register:
1192 error (_("Operand of `&' is in a register, and has no address."));
1193
1194 case axs_lvalue_memory:
1195 value->kind = axs_rvalue;
1196 value->type = lookup_pointer_type (value->type);
1197 break;
1198 }
1199 }
1200
1201 /* Generate code to push the value of a bitfield of a structure whose
1202 address is on the top of the stack. START and END give the
1203 starting and one-past-ending *bit* numbers of the field within the
1204 structure. */
1205 static void
1206 gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
1207 struct axs_value *value, struct type *type,
1208 int start, int end)
1209 {
1210 /* Note that ops[i] fetches 8 << i bits. */
1211 static enum agent_op ops[]
1212 =
1213 {aop_ref8, aop_ref16, aop_ref32, aop_ref64};
1214 static int num_ops = (sizeof (ops) / sizeof (ops[0]));
1215
1216 /* We don't want to touch any byte that the bitfield doesn't
1217 actually occupy; we shouldn't make any accesses we're not
1218 explicitly permitted to. We rely here on the fact that the
1219 bytecode `ref' operators work on unaligned addresses.
1220
1221 It takes some fancy footwork to get the stack to work the way
1222 we'd like. Say we're retrieving a bitfield that requires three
1223 fetches. Initially, the stack just contains the address:
1224 addr
1225 For the first fetch, we duplicate the address
1226 addr addr
1227 then add the byte offset, do the fetch, and shift and mask as
1228 needed, yielding a fragment of the value, properly aligned for
1229 the final bitwise or:
1230 addr frag1
1231 then we swap, and repeat the process:
1232 frag1 addr --- address on top
1233 frag1 addr addr --- duplicate it
1234 frag1 addr frag2 --- get second fragment
1235 frag1 frag2 addr --- swap again
1236 frag1 frag2 frag3 --- get third fragment
1237 Notice that, since the third fragment is the last one, we don't
1238 bother duplicating the address this time. Now we have all the
1239 fragments on the stack, and we can simply `or' them together,
1240 yielding the final value of the bitfield. */
1241
1242 /* The first and one-after-last bits in the field, but rounded down
1243 and up to byte boundaries. */
1244 int bound_start = (start / TARGET_CHAR_BIT) * TARGET_CHAR_BIT;
1245 int bound_end = (((end + TARGET_CHAR_BIT - 1)
1246 / TARGET_CHAR_BIT)
1247 * TARGET_CHAR_BIT);
1248
1249 /* current bit offset within the structure */
1250 int offset;
1251
1252 /* The index in ops of the opcode we're considering. */
1253 int op;
1254
1255 /* The number of fragments we generated in the process. Probably
1256 equal to the number of `one' bits in bytesize, but who cares? */
1257 int fragment_count;
1258
1259 /* Dereference any typedefs. */
1260 type = check_typedef (type);
1261
1262 /* Can we fetch the number of bits requested at all? */
1263 if ((end - start) > ((1 << num_ops) * 8))
1264 internal_error (__FILE__, __LINE__,
1265 _("gen_bitfield_ref: bitfield too wide"));
1266
1267 /* Note that we know here that we only need to try each opcode once.
1268 That may not be true on machines with weird byte sizes. */
1269 offset = bound_start;
1270 fragment_count = 0;
1271 for (op = num_ops - 1; op >= 0; op--)
1272 {
1273 /* number of bits that ops[op] would fetch */
1274 int op_size = 8 << op;
1275
1276 /* The stack at this point, from bottom to top, contains zero or
1277 more fragments, then the address. */
1278
1279 /* Does this fetch fit within the bitfield? */
1280 if (offset + op_size <= bound_end)
1281 {
1282 /* Is this the last fragment? */
1283 int last_frag = (offset + op_size == bound_end);
1284
1285 if (!last_frag)
1286 ax_simple (ax, aop_dup); /* keep a copy of the address */
1287
1288 /* Add the offset. */
1289 gen_offset (ax, offset / TARGET_CHAR_BIT);
1290
1291 if (trace_kludge)
1292 {
1293 /* Record the area of memory we're about to fetch. */
1294 ax_trace_quick (ax, op_size / TARGET_CHAR_BIT);
1295 }
1296
1297 /* Perform the fetch. */
1298 ax_simple (ax, ops[op]);
1299
1300 /* Shift the bits we have to their proper position.
1301 gen_left_shift will generate right shifts when the operand
1302 is negative.
1303
1304 A big-endian field diagram to ponder:
1305 byte 0 byte 1 byte 2 byte 3 byte 4 byte 5 byte 6 byte 7
1306 +------++------++------++------++------++------++------++------+
1307 xxxxAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCxxxxxxxxxxx
1308 ^ ^ ^ ^
1309 bit number 16 32 48 53
1310 These are bit numbers as supplied by GDB. Note that the
1311 bit numbers run from right to left once you've fetched the
1312 value!
1313
1314 A little-endian field diagram to ponder:
1315 byte 7 byte 6 byte 5 byte 4 byte 3 byte 2 byte 1 byte 0
1316 +------++------++------++------++------++------++------++------+
1317 xxxxxxxxxxxAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCxxxx
1318 ^ ^ ^ ^ ^
1319 bit number 48 32 16 4 0
1320
1321 In both cases, the most significant end is on the left
1322 (i.e. normal numeric writing order), which means that you
1323 don't go crazy thinking about `left' and `right' shifts.
1324
1325 We don't have to worry about masking yet:
1326 - If they contain garbage off the least significant end, then we
1327 must be looking at the low end of the field, and the right
1328 shift will wipe them out.
1329 - If they contain garbage off the most significant end, then we
1330 must be looking at the most significant end of the word, and
1331 the sign/zero extension will wipe them out.
1332 - If we're in the interior of the word, then there is no garbage
1333 on either end, because the ref operators zero-extend. */
1334 if (gdbarch_byte_order (exp->gdbarch) == BFD_ENDIAN_BIG)
1335 gen_left_shift (ax, end - (offset + op_size));
1336 else
1337 gen_left_shift (ax, offset - start);
1338
1339 if (!last_frag)
1340 /* Bring the copy of the address up to the top. */
1341 ax_simple (ax, aop_swap);
1342
1343 offset += op_size;
1344 fragment_count++;
1345 }
1346 }
1347
1348 /* Generate enough bitwise `or' operations to combine all the
1349 fragments we left on the stack. */
1350 while (fragment_count-- > 1)
1351 ax_simple (ax, aop_bit_or);
1352
1353 /* Sign- or zero-extend the value as appropriate. */
1354 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, end - start));
1355
1356 /* This is *not* an lvalue. Ugh. */
1357 value->kind = axs_rvalue;
1358 value->type = type;
1359 }
1360
1361 /* Generate bytecodes for field number FIELDNO of type TYPE. OFFSET
1362 is an accumulated offset (in bytes); it will be nonzero for objects
1363 embedded in other objects, like C++ base classes. Behavior should
1364 generally follow value_primitive_field. */
1365
1366 static void
1367 gen_primitive_field (struct expression *exp,
1368 struct agent_expr *ax, struct axs_value *value,
1369 int offset, int fieldno, struct type *type)
1370 {
1371 /* Is this a bitfield? */
1372 if (TYPE_FIELD_PACKED (type, fieldno))
1373 gen_bitfield_ref (exp, ax, value, TYPE_FIELD_TYPE (type, fieldno),
1374 (offset * TARGET_CHAR_BIT
1375 + TYPE_FIELD_BITPOS (type, fieldno)),
1376 (offset * TARGET_CHAR_BIT
1377 + TYPE_FIELD_BITPOS (type, fieldno)
1378 + TYPE_FIELD_BITSIZE (type, fieldno)));
1379 else
1380 {
1381 gen_offset (ax, offset
1382 + TYPE_FIELD_BITPOS (type, fieldno) / TARGET_CHAR_BIT);
1383 value->kind = axs_lvalue_memory;
1384 value->type = TYPE_FIELD_TYPE (type, fieldno);
1385 }
1386 }
1387
1388 /* Search for the given field in either the given type or one of its
1389 base classes. Return 1 if found, 0 if not. */
1390
1391 static int
1392 gen_struct_ref_recursive (struct expression *exp, struct agent_expr *ax,
1393 struct axs_value *value,
1394 char *field, int offset, struct type *type)
1395 {
1396 int i, rslt;
1397 int nbases = TYPE_N_BASECLASSES (type);
1398
1399 CHECK_TYPEDEF (type);
1400
1401 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
1402 {
1403 char *this_name = TYPE_FIELD_NAME (type, i);
1404
1405 if (this_name)
1406 {
1407 if (strcmp (field, this_name) == 0)
1408 {
1409 /* Note that bytecodes for the struct's base (aka
1410 "this") will have been generated already, which will
1411 be unnecessary but not harmful if the static field is
1412 being handled as a global. */
1413 if (field_is_static (&TYPE_FIELD (type, i)))
1414 {
1415 gen_static_field (exp->gdbarch, ax, value, type, i);
1416 if (value->optimized_out)
1417 error (_("static field `%s' has been optimized out, cannot use"),
1418 field);
1419 return 1;
1420 }
1421
1422 gen_primitive_field (exp, ax, value, offset, i, type);
1423 return 1;
1424 }
1425 #if 0 /* is this right? */
1426 if (this_name[0] == '\0')
1427 internal_error (__FILE__, __LINE__,
1428 _("find_field: anonymous unions not supported"));
1429 #endif
1430 }
1431 }
1432
1433 /* Now scan through base classes recursively. */
1434 for (i = 0; i < nbases; i++)
1435 {
1436 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
1437
1438 rslt = gen_struct_ref_recursive (exp, ax, value, field,
1439 offset + TYPE_BASECLASS_BITPOS (type, i) / TARGET_CHAR_BIT,
1440 basetype);
1441 if (rslt)
1442 return 1;
1443 }
1444
1445 /* Not found anywhere, flag so caller can complain. */
1446 return 0;
1447 }
1448
1449 /* Generate code to reference the member named FIELD of a structure or
1450 union. The top of the stack, as described by VALUE, should have
1451 type (pointer to a)* struct/union. OPERATOR_NAME is the name of
1452 the operator being compiled, and OPERAND_NAME is the kind of thing
1453 it operates on; we use them in error messages. */
1454 static void
1455 gen_struct_ref (struct expression *exp, struct agent_expr *ax,
1456 struct axs_value *value, char *field,
1457 char *operator_name, char *operand_name)
1458 {
1459 struct type *type;
1460 int found;
1461
1462 /* Follow pointers until we reach a non-pointer. These aren't the C
1463 semantics, but they're what the normal GDB evaluator does, so we
1464 should at least be consistent. */
1465 while (pointer_type (value->type))
1466 {
1467 require_rvalue (ax, value);
1468 gen_deref (ax, value);
1469 }
1470 type = check_typedef (value->type);
1471
1472 /* This must yield a structure or a union. */
1473 if (TYPE_CODE (type) != TYPE_CODE_STRUCT
1474 && TYPE_CODE (type) != TYPE_CODE_UNION)
1475 error (_("The left operand of `%s' is not a %s."),
1476 operator_name, operand_name);
1477
1478 /* And it must be in memory; we don't deal with structure rvalues,
1479 or structures living in registers. */
1480 if (value->kind != axs_lvalue_memory)
1481 error (_("Structure does not live in memory."));
1482
1483 /* Search through fields and base classes recursively. */
1484 found = gen_struct_ref_recursive (exp, ax, value, field, 0, type);
1485
1486 if (!found)
1487 error (_("Couldn't find member named `%s' in struct/union/class `%s'"),
1488 field, TYPE_TAG_NAME (type));
1489 }
1490
1491 static int
1492 gen_namespace_elt (struct expression *exp,
1493 struct agent_expr *ax, struct axs_value *value,
1494 const struct type *curtype, char *name);
1495 static int
1496 gen_maybe_namespace_elt (struct expression *exp,
1497 struct agent_expr *ax, struct axs_value *value,
1498 const struct type *curtype, char *name);
1499
1500 static void
1501 gen_static_field (struct gdbarch *gdbarch,
1502 struct agent_expr *ax, struct axs_value *value,
1503 struct type *type, int fieldno)
1504 {
1505 if (TYPE_FIELD_LOC_KIND (type, fieldno) == FIELD_LOC_KIND_PHYSADDR)
1506 {
1507 ax_const_l (ax, TYPE_FIELD_STATIC_PHYSADDR (type, fieldno));
1508 value->kind = axs_lvalue_memory;
1509 value->type = TYPE_FIELD_TYPE (type, fieldno);
1510 value->optimized_out = 0;
1511 }
1512 else
1513 {
1514 char *phys_name = TYPE_FIELD_STATIC_PHYSNAME (type, fieldno);
1515 struct symbol *sym = lookup_symbol (phys_name, 0, VAR_DOMAIN, 0);
1516
1517 if (sym)
1518 {
1519 gen_var_ref (gdbarch, ax, value, sym);
1520
1521 /* Don't error if the value was optimized out, we may be
1522 scanning all static fields and just want to pass over this
1523 and continue with the rest. */
1524 }
1525 else
1526 {
1527 /* Silently assume this was optimized out; class printing
1528 will let the user know why the data is missing. */
1529 value->optimized_out = 1;
1530 }
1531 }
1532 }
1533
1534 static int
1535 gen_struct_elt_for_reference (struct expression *exp,
1536 struct agent_expr *ax, struct axs_value *value,
1537 struct type *type, char *fieldname)
1538 {
1539 struct type *t = type;
1540 int i;
1541 struct value *v, *result;
1542
1543 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
1544 && TYPE_CODE (t) != TYPE_CODE_UNION)
1545 internal_error (__FILE__, __LINE__,
1546 _("non-aggregate type to gen_struct_elt_for_reference"));
1547
1548 for (i = TYPE_NFIELDS (t) - 1; i >= TYPE_N_BASECLASSES (t); i--)
1549 {
1550 char *t_field_name = TYPE_FIELD_NAME (t, i);
1551
1552 if (t_field_name && strcmp (t_field_name, fieldname) == 0)
1553 {
1554 if (field_is_static (&TYPE_FIELD (t, i)))
1555 {
1556 gen_static_field (exp->gdbarch, ax, value, t, i);
1557 if (value->optimized_out)
1558 error (_("static field `%s' has been optimized out, cannot use"),
1559 fieldname);
1560 return 1;
1561 }
1562 if (TYPE_FIELD_PACKED (t, i))
1563 error (_("pointers to bitfield members not allowed"));
1564
1565 /* FIXME we need a way to do "want_address" equivalent */
1566
1567 error (_("Cannot reference non-static field \"%s\""), fieldname);
1568 }
1569 }
1570
1571 /* FIXME add other scoped-reference cases here */
1572
1573 /* Do a last-ditch lookup. */
1574 return gen_maybe_namespace_elt (exp, ax, value, type, fieldname);
1575 }
1576
1577 /* C++: Return the member NAME of the namespace given by the type
1578 CURTYPE. */
1579
1580 static int
1581 gen_namespace_elt (struct expression *exp,
1582 struct agent_expr *ax, struct axs_value *value,
1583 const struct type *curtype, char *name)
1584 {
1585 int found = gen_maybe_namespace_elt (exp, ax, value, curtype, name);
1586
1587 if (!found)
1588 error (_("No symbol \"%s\" in namespace \"%s\"."),
1589 name, TYPE_TAG_NAME (curtype));
1590
1591 return found;
1592 }
1593
1594 /* A helper function used by gen_namespace_elt and
1595 gen_struct_elt_for_reference. It looks up NAME inside the
1596 context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE
1597 is a class and NAME refers to a type in CURTYPE itself (as opposed
1598 to, say, some base class of CURTYPE). */
1599
1600 static int
1601 gen_maybe_namespace_elt (struct expression *exp,
1602 struct agent_expr *ax, struct axs_value *value,
1603 const struct type *curtype, char *name)
1604 {
1605 const char *namespace_name = TYPE_TAG_NAME (curtype);
1606 struct symbol *sym;
1607
1608 sym = cp_lookup_symbol_namespace (namespace_name, name,
1609 block_for_pc (ax->scope),
1610 VAR_DOMAIN);
1611
1612 if (sym == NULL)
1613 return 0;
1614
1615 gen_var_ref (exp->gdbarch, ax, value, sym);
1616
1617 if (value->optimized_out)
1618 error (_("`%s' has been optimized out, cannot use"),
1619 SYMBOL_PRINT_NAME (sym));
1620
1621 return 1;
1622 }
1623
1624
1625 static int
1626 gen_aggregate_elt_ref (struct expression *exp,
1627 struct agent_expr *ax, struct axs_value *value,
1628 struct type *type, char *field,
1629 char *operator_name, char *operand_name)
1630 {
1631 switch (TYPE_CODE (type))
1632 {
1633 case TYPE_CODE_STRUCT:
1634 case TYPE_CODE_UNION:
1635 return gen_struct_elt_for_reference (exp, ax, value, type, field);
1636 break;
1637 case TYPE_CODE_NAMESPACE:
1638 return gen_namespace_elt (exp, ax, value, type, field);
1639 break;
1640 default:
1641 internal_error (__FILE__, __LINE__,
1642 _("non-aggregate type in gen_aggregate_elt_ref"));
1643 }
1644
1645 return 0;
1646 }
1647
1648 /* Generate code for GDB's magical `repeat' operator.
1649 LVALUE @ INT creates an array INT elements long whose elements
1650 have the same type as LVALUE, located in memory so that LVALUE is
1651 its first element. For example, argv[0]@argc gives you the array
1652 of command-line arguments.
1653
1654 Unfortunately, because we have to know the types before we actually
1655 have a value for the expression, we can't implement this perfectly
1656 without changing the type system, having values that occupy two
1657 stack slots, doing weird things with sizeof, etc. So we require
1658 the right operand to be a constant expression. */
1659 static void
1660 gen_repeat (struct expression *exp, union exp_element **pc,
1661 struct agent_expr *ax, struct axs_value *value)
1662 {
1663 struct axs_value value1;
1664 /* We don't want to turn this into an rvalue, so no conversions
1665 here. */
1666 gen_expr (exp, pc, ax, &value1);
1667 if (value1.kind != axs_lvalue_memory)
1668 error (_("Left operand of `@' must be an object in memory."));
1669
1670 /* Evaluate the length; it had better be a constant. */
1671 {
1672 struct value *v = const_expr (pc);
1673 int length;
1674
1675 if (!v)
1676 error (_("Right operand of `@' must be a constant in agent expressions."));
1677 if (TYPE_CODE (value_type (v)) != TYPE_CODE_INT)
1678 error (_("Right operand of `@' must be an integer."));
1679 length = value_as_long (v);
1680 if (length <= 0)
1681 error (_("Right operand of `@' must be positive."));
1682
1683 /* The top of the stack is already the address of the object, so
1684 all we need to do is frob the type of the lvalue. */
1685 {
1686 /* FIXME-type-allocation: need a way to free this type when we are
1687 done with it. */
1688 struct type *array
1689 = lookup_array_range_type (value1.type, 0, length - 1);
1690
1691 value->kind = axs_lvalue_memory;
1692 value->type = array;
1693 }
1694 }
1695 }
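
/* As an illustration of the above (the expression is hypothetical):
   for `buf[0]@16', where `buf' is a `char' array in target memory,
   the left operand leaves the address of buf[0] on the stack as an
   axs_lvalue_memory, the right operand must fold to the constant 16,
   and the resulting axs_value is given type `char [16]' by
   lookup_array_range_type (value1.type, 0, 15).  */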
1696
1697
1698 /* Emit code for the `sizeof' operator.
1699 *PC should point at the start of the operand expression; we advance it
1700 to the first instruction after the operand. */
1701 static void
1702 gen_sizeof (struct expression *exp, union exp_element **pc,
1703 struct agent_expr *ax, struct axs_value *value,
1704 struct type *size_type)
1705 {
1706 /* We don't care about the value of the operand expression; we only
1707 care about its type. However, in the current arrangement, the
1708 only way to find an expression's type is to generate code for it.
1709 So we generate code for the operand, and then throw it away,
1710 replacing it with code that simply pushes its size. */
1711 int start = ax->len;
1712 gen_expr (exp, pc, ax, value);
1713
1714 /* Throw away the code we just generated. */
1715 ax->len = start;
1716
1717 ax_const_l (ax, TYPE_LENGTH (value->type));
1718 value->kind = axs_rvalue;
1719 value->type = size_type;
1720 }
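
/* As an illustration (the operand and its size are hypothetical):
   for `sizeof (x)', where `x' is a 24-byte struct, the code above
   first emits the bytecodes that would fetch `x', rewinds ax->len
   back to `start' to discard them, and then emits a single constant
   push of 24, typed as SIZE_TYPE.  */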
1721 \f
1722
1723 /* Generating bytecode from GDB expressions: general recursive thingy */
1724
1725 /* XXX: i18n */
1726 /* A gen_expr function written by a Gen-X'er guy.
1727 Append code for the subexpression of EXP starting at *PC to AX. */
1728 static void
1729 gen_expr (struct expression *exp, union exp_element **pc,
1730 struct agent_expr *ax, struct axs_value *value)
1731 {
1732 /* Used to hold the descriptions of operand expressions. */
1733 struct axs_value value1, value2, value3;
1734 enum exp_opcode op = (*pc)[0].opcode, op2;
1735 int if1, go1, if2, go2, end;
1736
1737 /* If we're looking at a constant expression, just push its value. */
1738 {
1739 struct value *v = maybe_const_expr (pc);
1740
1741 if (v)
1742 {
1743 ax_const_l (ax, value_as_long (v));
1744 value->kind = axs_rvalue;
1745 value->type = check_typedef (value_type (v));
1746 return;
1747 }
1748 }
1749
1750 /* Otherwise, go ahead and generate code for it. */
1751 switch (op)
1752 {
1753 /* Binary arithmetic operators. */
1754 case BINOP_ADD:
1755 case BINOP_SUB:
1756 case BINOP_MUL:
1757 case BINOP_DIV:
1758 case BINOP_REM:
1759 case BINOP_LSH:
1760 case BINOP_RSH:
1761 case BINOP_SUBSCRIPT:
1762 case BINOP_BITWISE_AND:
1763 case BINOP_BITWISE_IOR:
1764 case BINOP_BITWISE_XOR:
1765 case BINOP_EQUAL:
1766 case BINOP_NOTEQUAL:
1767 case BINOP_LESS:
1768 case BINOP_GTR:
1769 case BINOP_LEQ:
1770 case BINOP_GEQ:
1771 (*pc)++;
1772 gen_expr (exp, pc, ax, &value1);
1773 gen_usual_unary (exp, ax, &value1);
1774 gen_expr_binop_rest (exp, op, pc, ax, value, &value1, &value2);
1775 break;
1776
1777 case BINOP_LOGICAL_AND:
1778 (*pc)++;
1779 /* Generate the obvious sequence of tests and jumps. */
1780 gen_expr (exp, pc, ax, &value1);
1781 gen_usual_unary (exp, ax, &value1);
1782 if1 = ax_goto (ax, aop_if_goto);
1783 go1 = ax_goto (ax, aop_goto);
1784 ax_label (ax, if1, ax->len);
1785 gen_expr (exp, pc, ax, &value2);
1786 gen_usual_unary (exp, ax, &value2);
1787 if2 = ax_goto (ax, aop_if_goto);
1788 go2 = ax_goto (ax, aop_goto);
1789 ax_label (ax, if2, ax->len);
1790 ax_const_l (ax, 1);
1791 end = ax_goto (ax, aop_goto);
1792 ax_label (ax, go1, ax->len);
1793 ax_label (ax, go2, ax->len);
1794 ax_const_l (ax, 0);
1795 ax_label (ax, end, ax->len);
1796 value->kind = axs_rvalue;
1797 value->type = language_bool_type (exp->language_defn, exp->gdbarch);
1798 break;
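      /* A sketch of the layout generated above for `A && B' (labels
         stand for the offsets patched in by ax_label):

             <code for A>
             if_goto L1        A nonzero: go evaluate B
             goto Lfalse
         L1: <code for B>
             if_goto L2        B nonzero: result is 1
             goto Lfalse
         L2: const 1
             goto Lend
         Lfalse:
             const 0
         Lend:  */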
1799
1800 case BINOP_LOGICAL_OR:
1801 (*pc)++;
1802 /* Generate the obvious sequence of tests and jumps. */
1803 gen_expr (exp, pc, ax, &value1);
1804 gen_usual_unary (exp, ax, &value1);
1805 if1 = ax_goto (ax, aop_if_goto);
1806 gen_expr (exp, pc, ax, &value2);
1807 gen_usual_unary (exp, ax, &value2);
1808 if2 = ax_goto (ax, aop_if_goto);
1809 ax_const_l (ax, 0);
1810 end = ax_goto (ax, aop_goto);
1811 ax_label (ax, if1, ax->len);
1812 ax_label (ax, if2, ax->len);
1813 ax_const_l (ax, 1);
1814 ax_label (ax, end, ax->len);
1815 value->kind = axs_rvalue;
1816 value->type = language_bool_type (exp->language_defn, exp->gdbarch);
1817 break;
1818
1819 case TERNOP_COND:
1820 (*pc)++;
1821 gen_expr (exp, pc, ax, &value1);
1822 gen_usual_unary (exp, ax, &value1);
1823 /* For (A ? B : C), it's easiest to generate subexpression
1824 bytecodes in order, but if_goto jumps on true, so we invert
1825 the sense of A. Then we can do B by dropping through, and
1826 jump to do C. */
1827 gen_logical_not (ax, &value1,
1828 language_bool_type (exp->language_defn, exp->gdbarch));
1829 if1 = ax_goto (ax, aop_if_goto);
1830 gen_expr (exp, pc, ax, &value2);
1831 gen_usual_unary (exp, ax, &value2);
1832 end = ax_goto (ax, aop_goto);
1833 ax_label (ax, if1, ax->len);
1834 gen_expr (exp, pc, ax, &value3);
1835 gen_usual_unary (exp, ax, &value3);
1836 ax_label (ax, end, ax->len);
1837 /* This is arbitrary - what if B and C are incompatible types? */
1838 value->type = value2.type;
1839 value->kind = value2.kind;
1840 break;
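      /* A sketch of the layout generated above for `A ? B : C',
         using the inverted test described in the comment:

             <code for A>
             <logical not>
             if_goto Lelse     A was false: evaluate C
             <code for B>
             goto Lend
         Lelse:
             <code for C>
         Lend:  */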
1841
1842 case BINOP_ASSIGN:
1843 (*pc)++;
1844 if ((*pc)[0].opcode == OP_INTERNALVAR)
1845 {
1846 char *name = internalvar_name ((*pc)[1].internalvar);
1847 struct trace_state_variable *tsv;
1848 (*pc) += 3;
1849 gen_expr (exp, pc, ax, value);
1850 tsv = find_trace_state_variable (name);
1851 if (tsv)
1852 {
1853 ax_tsv (ax, aop_setv, tsv->number);
1854 if (trace_kludge)
1855 ax_tsv (ax, aop_tracev, tsv->number);
1856 }
1857 else
1858 error (_("$%s is not a trace state variable; cannot assign to it"), name);
1859 }
1860 else
1861 error (_("May only assign to trace state variables"));
1862 break;
1863
1864 case BINOP_ASSIGN_MODIFY:
1865 (*pc)++;
1866 op2 = (*pc)[0].opcode;
1867 (*pc)++;
1868 (*pc)++;
1869 if ((*pc)[0].opcode == OP_INTERNALVAR)
1870 {
1871 char *name = internalvar_name ((*pc)[1].internalvar);
1872 struct trace_state_variable *tsv;
1873 (*pc) += 3;
1874 tsv = find_trace_state_variable (name);
1875 if (tsv)
1876 {
1877 /* The tsv will be the left half of the binary operation. */
1878 ax_tsv (ax, aop_getv, tsv->number);
1879 if (trace_kludge)
1880 ax_tsv (ax, aop_tracev, tsv->number);
1881 /* Trace state variables are always 64-bit integers. */
1882 value1.kind = axs_rvalue;
1883 value1.type = builtin_type (exp->gdbarch)->builtin_long_long;
1884 /* Now do right half of expression. */
1885 gen_expr_binop_rest (exp, op2, pc, ax, value, &value1, &value2);
1886 /* We have a result of the binary op, set the tsv. */
1887 ax_tsv (ax, aop_setv, tsv->number);
1888 if (trace_kludge)
1889 ax_tsv (ax, aop_tracev, tsv->number);
1890 }
1891 else
1892 error (_("$%s is not a trace state variable; cannot assign to it"), name);
1893 }
1894 else
1895 error (_("May only assign to trace state variables"));
1896 break;
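      /* A sketch, assuming a trace state variable `$count' with
         number N (the name is hypothetical): `$count += 1' compiles
         to roughly

             getv N            push the current value of $count
             [tracev N]        only when trace_kludge is set
             const 1
             add
             setv N            write the sum back into $count
             [tracev N]

         leaving the sum on the stack as the value of the whole
         expression.  */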
1897
1898 /* Note that we need to be a little subtle about generating code
1899 for comma. In C, we can do some optimizations here because
1900 we know the left operand is only being evaluated for effect.
1901 However, if the tracing kludge is in effect, then we always
1902 need to evaluate the left hand side fully, so that all the
1903 variables it mentions get traced. */
1904 case BINOP_COMMA:
1905 (*pc)++;
1906 gen_expr (exp, pc, ax, &value1);
1907 /* Don't just dispose of the left operand. We might be tracing,
1908 in which case we want to emit code to trace it if it's an
1909 lvalue. */
1910 gen_traced_pop (exp->gdbarch, ax, &value1);
1911 gen_expr (exp, pc, ax, value);
1912 /* It's the consumer's responsibility to trace the right operand. */
1913 break;
1914
1915 case OP_LONG: /* some integer constant */
1916 {
1917 struct type *type = (*pc)[1].type;
1918 LONGEST k = (*pc)[2].longconst;
1919 (*pc) += 4;
1920 gen_int_literal (ax, value, k, type);
1921 }
1922 break;
1923
1924 case OP_VAR_VALUE:
1925 gen_var_ref (exp->gdbarch, ax, value, (*pc)[2].symbol);
1926
1927 if (value->optimized_out)
1928 error (_("`%s' has been optimized out, cannot use"),
1929 SYMBOL_PRINT_NAME ((*pc)[2].symbol));
1930
1931 (*pc) += 4;
1932 break;
1933
1934 case OP_REGISTER:
1935 {
1936 const char *name = &(*pc)[2].string;
1937 int reg;
1938 (*pc) += 4 + BYTES_TO_EXP_ELEM ((*pc)[1].longconst + 1);
1939 reg = user_reg_map_name_to_regnum (exp->gdbarch, name, strlen (name));
1940 if (reg == -1)
1941 internal_error (__FILE__, __LINE__,
1942 _("Register $%s not available"), name);
1943 if (reg >= gdbarch_num_regs (exp->gdbarch))
1944 error (_("'%s' is a pseudo-register; "
1945 "GDB cannot yet trace pseudo-register contents."),
1946 name);
1947 value->kind = axs_lvalue_register;
1948 value->u.reg = reg;
1949 value->type = register_type (exp->gdbarch, reg);
1950 }
1951 break;
1952
1953 case OP_INTERNALVAR:
1954 {
1955 const char *name = internalvar_name ((*pc)[1].internalvar);
1956 struct trace_state_variable *tsv;
1957 (*pc) += 3;
1958 tsv = find_trace_state_variable (name);
1959 if (tsv)
1960 {
1961 ax_tsv (ax, aop_getv, tsv->number);
1962 if (trace_kludge)
1963 ax_tsv (ax, aop_tracev, tsv->number);
1964 /* Trace state variables are always 64-bit integers. */
1965 value->kind = axs_rvalue;
1966 value->type = builtin_type (exp->gdbarch)->builtin_long_long;
1967 }
1968 else
1969 error (_("$%s is not a trace state variable; GDB agent expressions cannot use convenience variables."), name);
1970 }
1971 break;
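      /* For illustration (the name is hypothetical): after the user
         does `tvariable $count', the expression `$count' compiles to
         `getv' with $count's number, plus `tracev' when collecting,
         and is typed as builtin_long_long; any other convenience
         variable is rejected with the error above.  */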
1972
1973 /* Weirdo operator: see comments for gen_repeat for details. */
1974 case BINOP_REPEAT:
1975 /* Note that gen_repeat handles its own argument evaluation. */
1976 (*pc)++;
1977 gen_repeat (exp, pc, ax, value);
1978 break;
1979
1980 case UNOP_CAST:
1981 {
1982 struct type *type = (*pc)[1].type;
1983 (*pc) += 3;
1984 gen_expr (exp, pc, ax, value);
1985 gen_cast (ax, value, type);
1986 }
1987 break;
1988
1989 case UNOP_MEMVAL:
1990 {
1991 struct type *type = check_typedef ((*pc)[1].type);
1992 (*pc) += 3;
1993 gen_expr (exp, pc, ax, value);
1994 /* I'm not sure I understand UNOP_MEMVAL entirely. I think
1995 it's just a hack for dealing with minsyms; you take some
1996 integer constant, pretend it's the address of an lvalue of
1997 the given type, and dereference it. */
1998 if (value->kind != axs_rvalue)
1999 /* This would be weird. */
2000 internal_error (__FILE__, __LINE__,
2001 _("gen_expr: UNOP_MEMVAL operand isn't an rvalue"));
2002 value->type = type;
2003 value->kind = axs_lvalue_memory;
2004 }
2005 break;
2006
2007 case UNOP_PLUS:
2008 (*pc)++;
2009 /* + FOO is equivalent to 0 + FOO, which can be optimized. */
2010 gen_expr (exp, pc, ax, value);
2011 gen_usual_unary (exp, ax, value);
2012 break;
2013
2014 case UNOP_NEG:
2015 (*pc)++;
2016 /* -FOO is equivalent to 0 - FOO. */
2017 gen_int_literal (ax, &value1, 0,
2018 builtin_type (exp->gdbarch)->builtin_int);
2019 gen_usual_unary (exp, ax, &value1); /* shouldn't do much */
2020 gen_expr (exp, pc, ax, &value2);
2021 gen_usual_unary (exp, ax, &value2);
2022 gen_usual_arithmetic (exp, ax, &value1, &value2);
2023 gen_binop (ax, value, &value1, &value2, aop_sub, aop_sub, 1, "negation");
2024 break;
2025
2026 case UNOP_LOGICAL_NOT:
2027 (*pc)++;
2028 gen_expr (exp, pc, ax, value);
2029 gen_usual_unary (exp, ax, value);
2030 gen_logical_not (ax, value,
2031 language_bool_type (exp->language_defn, exp->gdbarch));
2032 break;
2033
2034 case UNOP_COMPLEMENT:
2035 (*pc)++;
2036 gen_expr (exp, pc, ax, value);
2037 gen_usual_unary (exp, ax, value);
2038 gen_integral_promotions (exp, ax, value);
2039 gen_complement (ax, value);
2040 break;
2041
2042 case UNOP_IND:
2043 (*pc)++;
2044 gen_expr (exp, pc, ax, value);
2045 gen_usual_unary (exp, ax, value);
2046 if (!pointer_type (value->type))
2047 error (_("Argument of unary `*' is not a pointer."));
2048 gen_deref (ax, value);
2049 break;
2050
2051 case UNOP_ADDR:
2052 (*pc)++;
2053 gen_expr (exp, pc, ax, value);
2054 gen_address_of (ax, value);
2055 break;
2056
2057 case UNOP_SIZEOF:
2058 (*pc)++;
2059 /* Notice that gen_sizeof handles its own operand, unlike most
2060 of the other unary operator functions. This is because we
2061 have to throw away the code we generate. */
2062 gen_sizeof (exp, pc, ax, value,
2063 builtin_type (exp->gdbarch)->builtin_int);
2064 break;
2065
2066 case STRUCTOP_STRUCT:
2067 case STRUCTOP_PTR:
2068 {
2069 int length = (*pc)[1].longconst;
2070 char *name = &(*pc)[2].string;
2071
2072 (*pc) += 4 + BYTES_TO_EXP_ELEM (length + 1);
2073 gen_expr (exp, pc, ax, value);
2074 if (op == STRUCTOP_STRUCT)
2075 gen_struct_ref (exp, ax, value, name, ".", "structure or union");
2076 else if (op == STRUCTOP_PTR)
2077 gen_struct_ref (exp, ax, value, name, "->",
2078 "pointer to a structure or union");
2079 else
2080 /* If this `if' chain doesn't handle it, then the case list
2081 shouldn't mention it, and we shouldn't be here. */
2082 internal_error (__FILE__, __LINE__,
2083 _("gen_expr: unhandled struct case"));
2084 }
2085 break;
2086
2087 case OP_THIS:
2088 {
2089 char *this_name;
2090 struct symbol *func, *sym;
2091 struct block *b;
2092
2093 func = block_linkage_function (block_for_pc (ax->scope));
2094 this_name = language_def (SYMBOL_LANGUAGE (func))->la_name_of_this;
2095 b = SYMBOL_BLOCK_VALUE (func);
2096
2097 /* Calling lookup_block_symbol is necessary to get the LOC_REGISTER
2098 symbol instead of the LOC_ARG one (if both exist). */
2099 sym = lookup_block_symbol (b, this_name, VAR_DOMAIN);
2100 if (!sym)
2101 error (_("no `%s' found"), this_name);
2102
2103 gen_var_ref (exp->gdbarch, ax, value, sym);
2104
2105 if (value->optimized_out)
2106 error (_("`%s' has been optimized out, cannot use"),
2107 SYMBOL_PRINT_NAME (sym));
2108
2109 (*pc) += 2;
2110 }
2111 break;
2112
2113 case OP_SCOPE:
2114 {
2115 struct type *type = (*pc)[1].type;
2116 int length = longest_to_int ((*pc)[2].longconst);
2117 char *name = &(*pc)[3].string;
2118 int found;
2119
2120 found = gen_aggregate_elt_ref (exp, ax, value, type, name,
2121 "?", "??");
2122 if (!found)
2123 error (_("There is no field named %s"), name);
2124 (*pc) += 5 + BYTES_TO_EXP_ELEM (length + 1);
2125 }
2126 break;
2127
2128 case OP_TYPE:
2129 error (_("Attempt to use a type name as an expression."));
2130
2131 default:
2132 error (_("Unsupported operator %s (%d) in expression."),
2133 op_string (op), op);
2134 }
2135 }
2136
2137 /* This handles the middle-to-right-side of code generation for binary
2138 expressions, which is shared between regular binary operations and
2139 assign-modify (+= and friends) expressions. */
2140
2141 static void
2142 gen_expr_binop_rest (struct expression *exp,
2143 enum exp_opcode op, union exp_element **pc,
2144 struct agent_expr *ax, struct axs_value *value,
2145 struct axs_value *value1, struct axs_value *value2)
2146 {
2147 gen_expr (exp, pc, ax, value2);
2148 gen_usual_unary (exp, ax, value2);
2149 gen_usual_arithmetic (exp, ax, value1, value2);
2150 switch (op)
2151 {
2152 case BINOP_ADD:
2153 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
2154 && pointer_type (value2->type))
2155 {
2156 /* Swap the values and proceed normally. */
2157 ax_simple (ax, aop_swap);
2158 gen_ptradd (ax, value, value2, value1);
2159 }
2160 else if (pointer_type (value1->type)
2161 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2162 gen_ptradd (ax, value, value1, value2);
2163 else
2164 gen_binop (ax, value, value1, value2,
2165 aop_add, aop_add, 1, "addition");
2166 break;
2167 case BINOP_SUB:
2168 if (pointer_type (value1->type)
2169 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2170 gen_ptrsub (ax, value, value1, value2);
2171 else if (pointer_type (value1->type)
2172 && pointer_type (value2->type))
2173 /* FIXME --- result type should be ptrdiff_t */
2174 gen_ptrdiff (ax, value, value1, value2,
2175 builtin_type (exp->gdbarch)->builtin_long);
2176 else
2177 gen_binop (ax, value, value1, value2,
2178 aop_sub, aop_sub, 1, "subtraction");
2179 break;
2180 case BINOP_MUL:
2181 gen_binop (ax, value, value1, value2,
2182 aop_mul, aop_mul, 1, "multiplication");
2183 break;
2184 case BINOP_DIV:
2185 gen_binop (ax, value, value1, value2,
2186 aop_div_signed, aop_div_unsigned, 1, "division");
2187 break;
2188 case BINOP_REM:
2189 gen_binop (ax, value, value1, value2,
2190 aop_rem_signed, aop_rem_unsigned, 1, "remainder");
2191 break;
2192 case BINOP_LSH:
2193 gen_binop (ax, value, value1, value2,
2194 aop_lsh, aop_lsh, 1, "left shift");
2195 break;
2196 case BINOP_RSH:
2197 gen_binop (ax, value, value1, value2,
2198 aop_rsh_signed, aop_rsh_unsigned, 1, "right shift");
2199 break;
2200 case BINOP_SUBSCRIPT:
2201 {
2202 struct type *type;
2203
2204 if (binop_types_user_defined_p (op, value1->type, value2->type))
2205 {
2206 error (_("\
2207 cannot subscript requested type: cannot call user defined functions"));
2208 }
2209 else
2210 {
2211 /* If the user attempts to subscript something that is not
2212 an array or pointer type (like a plain int variable for
2213 example), then report this as an error. */
2214 type = check_typedef (value1->type);
2215 if (TYPE_CODE (type) != TYPE_CODE_ARRAY
2216 && TYPE_CODE (type) != TYPE_CODE_PTR)
2217 {
2218 if (TYPE_NAME (type))
2219 error (_("cannot subscript something of type `%s'"),
2220 TYPE_NAME (type));
2221 else
2222 error (_("cannot subscript requested type"));
2223 }
2224 }
2225
2226 if (!is_integral_type (value2->type))
2227 error (_("Argument to arithmetic operation is not a number or boolean."));
2228
2229 gen_ptradd (ax, value, value1, value2);
2230 gen_deref (ax, value);
2231 break;
2232 }
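    /* A note on the subscript case above: for `p[i]' the two calls
       amount to computing `*(p + i)': gen_ptradd scales the index by
       the size of the element type and adds it to the pointer, and
       gen_deref then treats the resulting address as an lvalue of
       the element type.  */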
2233 case BINOP_BITWISE_AND:
2234 gen_binop (ax, value, value1, value2,
2235 aop_bit_and, aop_bit_and, 0, "bitwise and");
2236 break;
2237
2238 case BINOP_BITWISE_IOR:
2239 gen_binop (ax, value, value1, value2,
2240 aop_bit_or, aop_bit_or, 0, "bitwise or");
2241 break;
2242
2243 case BINOP_BITWISE_XOR:
2244 gen_binop (ax, value, value1, value2,
2245 aop_bit_xor, aop_bit_xor, 0, "bitwise exclusive-or");
2246 break;
2247
2248 case BINOP_EQUAL:
2249 gen_binop (ax, value, value1, value2,
2250 aop_equal, aop_equal, 0, "equal");
2251 break;
2252
2253 case BINOP_NOTEQUAL:
2254 gen_binop (ax, value, value1, value2,
2255 aop_equal, aop_equal, 0, "equal");
2256 gen_logical_not (ax, value,
2257 language_bool_type (exp->language_defn,
2258 exp->gdbarch));
2259 break;
2260
2261 case BINOP_LESS:
2262 gen_binop (ax, value, value1, value2,
2263 aop_less_signed, aop_less_unsigned, 0, "less than");
2264 break;
2265
2266 case BINOP_GTR:
2267 ax_simple (ax, aop_swap);
2268 gen_binop (ax, value, value1, value2,
2269 aop_less_signed, aop_less_unsigned, 0, "less than");
2270 break;
2271
2272 case BINOP_LEQ:
2273 ax_simple (ax, aop_swap);
2274 gen_binop (ax, value, value1, value2,
2275 aop_less_signed, aop_less_unsigned, 0, "less than");
2276 gen_logical_not (ax, value,
2277 language_bool_type (exp->language_defn,
2278 exp->gdbarch));
2279 break;
2280
2281 case BINOP_GEQ:
2282 gen_binop (ax, value, value1, value2,
2283 aop_less_signed, aop_less_unsigned, 0, "less than");
2284 gen_logical_not (ax, value,
2285 language_bool_type (exp->language_defn,
2286 exp->gdbarch));
2287 break;
2288
2289 default:
2290 /* We should only list operators in the outer case statement
2291 that we actually handle in the inner case statement. */
2292 internal_error (__FILE__, __LINE__,
2293 _("gen_expr: op case sets don't match"));
2294 }
2295 }
2296 \f
2297
2298 /* Given a single variable and a scope, generate bytecodes to trace
2299 its value. This is for use in situations where we have only a
2300 variable's name, and no parsed expression; for instance, when the
2301 name comes from a list of local variables of a function. */
2302
2303 struct agent_expr *
2304 gen_trace_for_var (CORE_ADDR scope, struct gdbarch *gdbarch,
2305 struct symbol *var)
2306 {
2307 struct cleanup *old_chain = 0;
2308 struct agent_expr *ax = new_agent_expr (scope);
2309 struct axs_value value;
2310
2311 old_chain = make_cleanup_free_agent_expr (ax);
2312
2313 trace_kludge = 1;
2314 gen_var_ref (gdbarch, ax, &value, var);
2315
2316 /* If there is no actual variable to trace, flag it by returning
2317 an empty agent expression. */
2318 if (value.optimized_out)
2319 {
2320 do_cleanups (old_chain);
2321 return NULL;
2322 }
2323
2324 /* Make sure we record the final object, and get rid of it. */
2325 gen_traced_pop (gdbarch, ax, &value);
2326
2327 /* Oh, and terminate. */
2328 ax_simple (ax, aop_end);
2329
2330 /* We have successfully built the agent expr, so cancel the cleanup
2331 request. If we add more cleanups that we always want done, this
2332 will have to get more complicated. */
2333 discard_cleanups (old_chain);
2334 return ax;
2335 }
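
/* A usage sketch (the caller shown here is hypothetical; the
   functions and the buf/len members are the real ones):

       struct agent_expr *ax = gen_trace_for_var (scope, gdbarch, var);

       if (ax != NULL)
         {
           ... download the ax->len bytes at ax->buf to the target ...
           free_agent_expr (ax);
         }

   A NULL return means VAR was optimized out and there is nothing to
   collect.  */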
2336
2337 /* Generating bytecode from GDB expressions: driver */
2338
2339 /* Given a GDB expression EXPR, return bytecode to trace its value.
2340 The result will use the `trace' and `trace_quick' bytecodes to
2341 record the value of all memory touched by the expression. The
2342 caller can then use the ax_reqs function to discover which
2343 registers it relies upon. */
2344 struct agent_expr *
2345 gen_trace_for_expr (CORE_ADDR scope, struct expression *expr)
2346 {
2347 struct cleanup *old_chain = 0;
2348 struct agent_expr *ax = new_agent_expr (scope);
2349 union exp_element *pc;
2350 struct axs_value value;
2351
2352 old_chain = make_cleanup_free_agent_expr (ax);
2353
2354 pc = expr->elts;
2355 trace_kludge = 1;
2356 gen_expr (expr, &pc, ax, &value);
2357
2358 /* Make sure we record the final object, and get rid of it. */
2359 gen_traced_pop (expr->gdbarch, ax, &value);
2360
2361 /* Oh, and terminate. */
2362 ax_simple (ax, aop_end);
2363
2364 /* We have successfully built the agent expr, so cancel the cleanup
2365 request. If we add more cleanups that we always want done, this
2366 will have to get more complicated. */
2367 discard_cleanups (old_chain);
2368 return ax;
2369 }
2370
2371 /* Given a GDB expression EXPR, return a bytecode sequence that will
2372 evaluate and return a result. The bytecodes will do a direct
2373 evaluation, using the current data on the target, rather than
2374 recording blocks of memory and registers for later use, as
2375 gen_trace_for_expr does. The generated bytecode sequence leaves
2376 the result of expression evaluation on the top of the stack. */
2377
2378 struct agent_expr *
2379 gen_eval_for_expr (CORE_ADDR scope, struct expression *expr)
2380 {
2381 struct cleanup *old_chain = 0;
2382 struct agent_expr *ax = new_agent_expr (scope);
2383 union exp_element *pc;
2384 struct axs_value value;
2385
2386 old_chain = make_cleanup_free_agent_expr (ax);
2387
2388 pc = expr->elts;
2389 trace_kludge = 0;
2390 gen_expr (expr, &pc, ax, &value);
2391
2392 /* Oh, and terminate. */
2393 ax_simple (ax, aop_end);
2394
2395 /* We have successfully built the agent expr, so cancel the cleanup
2396 request. If we add more cleanups that we always want done, this
2397 will have to get more complicated. */
2398 discard_cleanups (old_chain);
2399 return ax;
2400 }
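
/* A sketch of how the two generators differ in use (the calling
   context is hypothetical):

       struct expression *expr = parse_expression ("i + 1");
       struct agent_expr *ax = gen_eval_for_expr (scope, expr);

   yields bytecodes that compute `i + 1' directly on the target and
   end with the sum on top of the agent's stack, whereas
   gen_trace_for_expr for the same expression would also emit
   `trace'/`trace_quick' operations to record the memory holding
   `i'.  */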
2401
2402 static void
2403 agent_command (char *exp, int from_tty)
2404 {
2405 struct cleanup *old_chain = 0;
2406 struct expression *expr;
2407 struct agent_expr *agent;
2408 struct frame_info *fi = get_current_frame (); /* need current scope */
2409
2410 /* We don't deal with overlay debugging at the moment. We need to
2411 think more carefully about this. If you copy this code into
2412 another command, change the error message; the user shouldn't
2413 have to know anything about agent expressions. */
2414 if (overlay_debugging)
2415 error (_("GDB can't do agent expression translation with overlays."));
2416
2417 if (exp == 0)
2418 error_no_arg (_("expression to translate"));
2419
2420 expr = parse_expression (exp);
2421 old_chain = make_cleanup (free_current_contents, &expr);
2422 agent = gen_trace_for_expr (get_frame_pc (fi), expr);
2423 make_cleanup_free_agent_expr (agent);
2424 ax_print (gdb_stdout, agent);
2425
2426 /* It would be nice to call ax_reqs here to gather some general info
2427 about the expression, and then print out the result. */
2428
2429 do_cleanups (old_chain);
2430 dont_repeat ();
2431 }
2432
2433 /* Parse the given expression, compile it into an agent expression
2434 that does direct evaluation, and display the resulting
2435 expression. */
2436
2437 static void
2438 agent_eval_command (char *exp, int from_tty)
2439 {
2440 struct cleanup *old_chain = 0;
2441 struct expression *expr;
2442 struct agent_expr *agent;
2443 struct frame_info *fi = get_current_frame (); /* need current scope */
2444
2445 /* We don't deal with overlay debugging at the moment. We need to
2446 think more carefully about this. If you copy this code into
2447 another command, change the error message; the user shouldn't
2448 have to know anything about agent expressions. */
2449 if (overlay_debugging)
2450 error (_("GDB can't do agent expression translation with overlays."));
2451
2452 if (exp == 0)
2453 error_no_arg (_("expression to translate"));
2454
2455 expr = parse_expression (exp);
2456 old_chain = make_cleanup (free_current_contents, &expr);
2457 agent = gen_eval_for_expr (get_frame_pc (fi), expr);
2458 make_cleanup_free_agent_expr (agent);
2459 ax_print (gdb_stdout, agent);
2460
2461 /* It would be nice to call ax_reqs here to gather some general info
2462 about the expression, and then print out the result. */
2463
2464 do_cleanups (old_chain);
2465 dont_repeat ();
2466 }
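
/* Usage note: the two commands registered in _initialize_ax_gdb below
   are reached as, for example,

       (gdb) maint agent some_global + 1
       (gdb) maint agent-eval some_global + 1

   The first prints the tracing bytecodes from gen_trace_for_expr, the
   second the direct-evaluation bytecodes from gen_eval_for_expr, both
   produced at the current frame's PC and printed with ax_print.  */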
2467 \f
2468
2469 /* Initialization code. */
2470
2471 void _initialize_ax_gdb (void);
2472 void
2473 _initialize_ax_gdb (void)
2474 {
2475 add_cmd ("agent", class_maintenance, agent_command,
2476 _("Translate an expression into remote agent bytecode for tracing."),
2477 &maintenancelist);
2478
2479 add_cmd ("agent-eval", class_maintenance, agent_eval_command,
2480 _("Translate an expression into remote agent bytecode for evaluation."),
2481 &maintenancelist);
2482 }