1 /* DWARF 2 Expression Evaluator.
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 Contributed by Daniel Berlin (dan@dberlin.org)
8 This file is part of GDB.
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
32 /* Local prototypes. */
34 static void execute_stack_op (struct dwarf_expr_context
*,
35 const gdb_byte
*, const gdb_byte
*);
37 /* Cookie for gdbarch data. */
39 static struct gdbarch_data
*dwarf_arch_cookie
;
/* This holds gdbarch-specific types used by the DWARF expression
   evaluator.  See comments in execute_stack_op.  */

struct dwarf_gdbarch_types
{
  /* Signed address types, created on demand, one slot per supported
     address size (2, 4 and 8 bytes).  */
  struct type *dw_types[3];
};
49 /* Allocate and fill in dwarf_gdbarch_types for an arch. */
52 dwarf_gdbarch_types_init (struct gdbarch
*gdbarch
)
54 struct dwarf_gdbarch_types
*types
55 = GDBARCH_OBSTACK_ZALLOC (gdbarch
, struct dwarf_gdbarch_types
);
57 /* The types themselves are lazily initialized. */
62 /* Return the type used for DWARF operations where the type is
63 unspecified in the DWARF spec. Only certain sizes are
67 dwarf_expr_address_type (struct dwarf_expr_context
*ctx
)
69 struct dwarf_gdbarch_types
*types
= gdbarch_data (ctx
->gdbarch
,
73 if (ctx
->addr_size
== 2)
75 else if (ctx
->addr_size
== 4)
77 else if (ctx
->addr_size
== 8)
80 error (_("Unsupported address size in DWARF expressions: %d bits"),
83 if (types
->dw_types
[ndx
] == NULL
)
85 = arch_integer_type (ctx
->gdbarch
,
87 0, "<signed DWARF address type>");
89 return types
->dw_types
[ndx
];
92 /* Create a new context for the expression evaluator. */
94 struct dwarf_expr_context
*
95 new_dwarf_expr_context (void)
97 struct dwarf_expr_context
*retval
;
99 retval
= xcalloc (1, sizeof (struct dwarf_expr_context
));
100 retval
->stack_len
= 0;
101 retval
->stack_allocated
= 10;
102 retval
->stack
= xmalloc (retval
->stack_allocated
103 * sizeof (struct dwarf_stack_value
));
104 retval
->num_pieces
= 0;
106 retval
->max_recursion_depth
= 0x100;
110 /* Release the memory allocated to CTX. */
113 free_dwarf_expr_context (struct dwarf_expr_context
*ctx
)
/* Helper for make_cleanup_free_dwarf_expr_context: adapts the typed
   destructor to the void* signature cleanups require.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
128 /* Return a cleanup that calls free_dwarf_expr_context. */
131 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context
*ctx
)
133 return make_cleanup (free_dwarf_expr_context_cleanup
, ctx
);
136 /* Expand the memory allocated to CTX's stack to contain at least
137 NEED more elements than are currently used. */
140 dwarf_expr_grow_stack (struct dwarf_expr_context
*ctx
, size_t need
)
142 if (ctx
->stack_len
+ need
> ctx
->stack_allocated
)
144 size_t newlen
= ctx
->stack_len
+ need
+ 10;
146 ctx
->stack
= xrealloc (ctx
->stack
,
147 newlen
* sizeof (struct dwarf_stack_value
));
148 ctx
->stack_allocated
= newlen
;
152 /* Push VALUE onto CTX's stack. */
155 dwarf_expr_push (struct dwarf_expr_context
*ctx
, struct value
*value
,
158 struct dwarf_stack_value
*v
;
160 dwarf_expr_grow_stack (ctx
, 1);
161 v
= &ctx
->stack
[ctx
->stack_len
++];
163 v
->in_stack_memory
= in_stack_memory
;
166 /* Push VALUE onto CTX's stack. */
169 dwarf_expr_push_address (struct dwarf_expr_context
*ctx
, CORE_ADDR value
,
172 dwarf_expr_push (ctx
,
173 value_from_ulongest (dwarf_expr_address_type (ctx
), value
),
177 /* Pop the top item off of CTX's stack. */
180 dwarf_expr_pop (struct dwarf_expr_context
*ctx
)
182 if (ctx
->stack_len
<= 0)
183 error (_("dwarf expression stack underflow"));
187 /* Retrieve the N'th item on CTX's stack. */
190 dwarf_expr_fetch (struct dwarf_expr_context
*ctx
, int n
)
192 if (ctx
->stack_len
<= n
)
193 error (_("Asked for position %d of stack, "
194 "stack only has %d elements on it."),
196 return ctx
->stack
[ctx
->stack_len
- (1 + n
)].value
;
199 /* Require that TYPE be an integral type; throw an exception if not. */
202 dwarf_require_integral (struct type
*type
)
204 if (TYPE_CODE (type
) != TYPE_CODE_INT
205 && TYPE_CODE (type
) != TYPE_CODE_CHAR
206 && TYPE_CODE (type
) != TYPE_CODE_BOOL
)
207 error (_("integral type expected in DWARF expression"));
210 /* Return the unsigned form of TYPE. TYPE is necessarily an integral
214 get_unsigned_type (struct gdbarch
*gdbarch
, struct type
*type
)
216 switch (TYPE_LENGTH (type
))
219 return builtin_type (gdbarch
)->builtin_uint8
;
221 return builtin_type (gdbarch
)->builtin_uint16
;
223 return builtin_type (gdbarch
)->builtin_uint32
;
225 return builtin_type (gdbarch
)->builtin_uint64
;
227 error (_("no unsigned variant found for type, while evaluating "
228 "DWARF expression"));
232 /* Return the signed form of TYPE. TYPE is necessarily an integral
236 get_signed_type (struct gdbarch
*gdbarch
, struct type
*type
)
238 switch (TYPE_LENGTH (type
))
241 return builtin_type (gdbarch
)->builtin_int8
;
243 return builtin_type (gdbarch
)->builtin_int16
;
245 return builtin_type (gdbarch
)->builtin_int32
;
247 return builtin_type (gdbarch
)->builtin_int64
;
249 error (_("no signed variant found for type, while evaluating "
250 "DWARF expression"));
254 /* Retrieve the N'th item on CTX's stack, converted to an address. */
257 dwarf_expr_fetch_address (struct dwarf_expr_context
*ctx
, int n
)
259 struct value
*result_val
= dwarf_expr_fetch (ctx
, n
);
260 enum bfd_endian byte_order
= gdbarch_byte_order (ctx
->gdbarch
);
263 dwarf_require_integral (value_type (result_val
));
264 result
= extract_unsigned_integer (value_contents (result_val
),
265 TYPE_LENGTH (value_type (result_val
)),
268 /* For most architectures, calling extract_unsigned_integer() alone
269 is sufficient for extracting an address. However, some
270 architectures (e.g. MIPS) use signed addresses and using
271 extract_unsigned_integer() will not produce a correct
272 result. Make sure we invoke gdbarch_integer_to_address()
273 for those architectures which require it. */
274 if (gdbarch_integer_to_address_p (ctx
->gdbarch
))
276 gdb_byte
*buf
= alloca (ctx
->addr_size
);
277 struct type
*int_type
= get_unsigned_type (ctx
->gdbarch
,
278 value_type (result_val
));
280 store_unsigned_integer (buf
, ctx
->addr_size
, byte_order
, result
);
281 return gdbarch_integer_to_address (ctx
->gdbarch
, int_type
, buf
);
284 return (CORE_ADDR
) result
;
287 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
290 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context
*ctx
, int n
)
292 if (ctx
->stack_len
<= n
)
293 error (_("Asked for position %d of stack, "
294 "stack only has %d elements on it."),
296 return ctx
->stack
[ctx
->stack_len
- (1 + n
)].in_stack_memory
;
299 /* Return true if the expression stack is empty. */
302 dwarf_expr_stack_empty_p (struct dwarf_expr_context
*ctx
)
304 return ctx
->stack_len
== 0;
307 /* Add a new piece to CTX's piece list. */
309 add_piece (struct dwarf_expr_context
*ctx
, ULONGEST size
, ULONGEST offset
)
311 struct dwarf_expr_piece
*p
;
315 ctx
->pieces
= xrealloc (ctx
->pieces
,
317 * sizeof (struct dwarf_expr_piece
)));
319 p
= &ctx
->pieces
[ctx
->num_pieces
- 1];
320 p
->location
= ctx
->location
;
324 if (p
->location
== DWARF_VALUE_LITERAL
)
326 p
->v
.literal
.data
= ctx
->data
;
327 p
->v
.literal
.length
= ctx
->len
;
329 else if (dwarf_expr_stack_empty_p (ctx
))
331 p
->location
= DWARF_VALUE_OPTIMIZED_OUT
;
332 /* Also reset the context's location, for our callers. This is
333 a somewhat strange approach, but this lets us avoid setting
334 the location to DWARF_VALUE_MEMORY in all the individual
335 cases in the evaluator. */
336 ctx
->location
= DWARF_VALUE_OPTIMIZED_OUT
;
338 else if (p
->location
== DWARF_VALUE_MEMORY
)
340 p
->v
.mem
.addr
= dwarf_expr_fetch_address (ctx
, 0);
341 p
->v
.mem
.in_stack_memory
= dwarf_expr_fetch_in_stack_memory (ctx
, 0);
343 else if (p
->location
== DWARF_VALUE_IMPLICIT_POINTER
)
345 p
->v
.ptr
.die
= ctx
->len
;
346 p
->v
.ptr
.offset
= value_as_long (dwarf_expr_fetch (ctx
, 0));
348 else if (p
->location
== DWARF_VALUE_REGISTER
)
349 p
->v
.regno
= value_as_long (dwarf_expr_fetch (ctx
, 0));
352 p
->v
.value
= dwarf_expr_fetch (ctx
, 0);
356 /* Evaluate the expression at ADDR (LEN bytes long) using the context
360 dwarf_expr_eval (struct dwarf_expr_context
*ctx
, const gdb_byte
*addr
,
363 int old_recursion_depth
= ctx
->recursion_depth
;
365 execute_stack_op (ctx
, addr
, addr
+ len
);
367 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
369 gdb_assert (ctx
->recursion_depth
== old_recursion_depth
);
372 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
373 by R, and return the new value of BUF. Verify that it doesn't extend
377 read_uleb128 (const gdb_byte
*buf
, const gdb_byte
*buf_end
, ULONGEST
* r
)
386 error (_("read_uleb128: Corrupted DWARF expression."));
389 result
|= ((ULONGEST
) (byte
& 0x7f)) << shift
;
390 if ((byte
& 0x80) == 0)
398 /* Decode the signed LEB128 constant at BUF into the variable pointed to
399 by R, and return the new value of BUF. Verify that it doesn't extend
403 read_sleb128 (const gdb_byte
*buf
, const gdb_byte
*buf_end
, LONGEST
* r
)
412 error (_("read_sleb128: Corrupted DWARF expression."));
415 result
|= ((ULONGEST
) (byte
& 0x7f)) << shift
;
417 if ((byte
& 0x80) == 0)
420 if (shift
< (sizeof (*r
) * 8) && (byte
& 0x40) != 0)
421 result
|= -(1 << shift
);
428 /* Check that the current operator is either at the end of an
429 expression, or that it is followed by a composition operator. */
432 dwarf_expr_require_composition (const gdb_byte
*op_ptr
, const gdb_byte
*op_end
,
435 /* It seems like DW_OP_GNU_uninit should be handled here. However,
436 it doesn't seem to make sense for DW_OP_*_value, and it was not
437 checked at the other place that this function is called. */
438 if (op_ptr
!= op_end
&& *op_ptr
!= DW_OP_piece
&& *op_ptr
!= DW_OP_bit_piece
)
439 error (_("DWARF-2 expression error: `%s' operations must be "
440 "used either alone or in conjuction with DW_OP_piece "
441 "or DW_OP_bit_piece."),
/* Return true iff the types T1 and T2 are "the same".  This only does
   checks that might reasonably be needed to compare DWARF base
   types: type code, signedness, and length.  */

static int
base_types_equal_p (struct type *t1, struct type *t2)
{
  if (TYPE_CODE (t1) != TYPE_CODE (t2))
    return 0;
  if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
    return 0;
  return TYPE_LENGTH (t1) == TYPE_LENGTH (t2);
}
459 /* A convenience function to call get_base_type on CTX and return the
460 result. DIE is the DIE whose type we need. SIZE is non-zero if
461 this function should verify that the resulting type has the correct
465 dwarf_get_base_type (struct dwarf_expr_context
*ctx
, ULONGEST die
, int size
)
469 if (ctx
->get_base_type
)
471 result
= ctx
->get_base_type (ctx
, die
);
473 error (_("Could not find type for DW_OP_GNU_const_type"));
474 if (size
!= 0 && TYPE_LENGTH (result
) != size
)
475 error (_("DW_OP_GNU_const_type has different sizes for type and data"));
478 /* Anything will do. */
479 result
= builtin_type (ctx
->gdbarch
)->builtin_int
;
484 /* The engine for the expression evaluator. Using the context in CTX,
485 evaluate the expression between OP_PTR and OP_END. */
488 execute_stack_op (struct dwarf_expr_context
*ctx
,
489 const gdb_byte
*op_ptr
, const gdb_byte
*op_end
)
491 enum bfd_endian byte_order
= gdbarch_byte_order (ctx
->gdbarch
);
492 /* Old-style "untyped" DWARF values need special treatment in a
493 couple of places, specifically DW_OP_mod and DW_OP_shr. We need
494 a special type for these values so we can distinguish them from
495 values that have an explicit type, because explicitly-typed
496 values do not need special treatment. This special type must be
497 different (in the `==' sense) from any base type coming from the
499 struct type
*address_type
= dwarf_expr_address_type (ctx
);
501 ctx
->location
= DWARF_VALUE_MEMORY
;
502 ctx
->initialized
= 1; /* Default is initialized. */
504 if (ctx
->recursion_depth
> ctx
->max_recursion_depth
)
505 error (_("DWARF-2 expression error: Loop detected (%d)."),
506 ctx
->recursion_depth
);
507 ctx
->recursion_depth
++;
509 while (op_ptr
< op_end
)
511 enum dwarf_location_atom op
= *op_ptr
++;
513 /* Assume the value is not in stack memory.
514 Code that knows otherwise sets this to 1.
515 Some arithmetic on stack addresses can probably be assumed to still
516 be a stack address, but we skip this complication for now.
517 This is just an optimization, so it's always ok to punt
518 and leave this as 0. */
519 int in_stack_memory
= 0;
520 ULONGEST uoffset
, reg
;
522 struct value
*result_val
= NULL
;
558 result
= op
- DW_OP_lit0
;
559 result_val
= value_from_ulongest (address_type
, result
);
563 result
= extract_unsigned_integer (op_ptr
,
564 ctx
->addr_size
, byte_order
);
565 op_ptr
+= ctx
->addr_size
;
566 /* Some versions of GCC emit DW_OP_addr before
567 DW_OP_GNU_push_tls_address. In this case the value is an
568 index, not an address. We don't support things like
569 branching between the address and the TLS op. */
570 if (op_ptr
>= op_end
|| *op_ptr
!= DW_OP_GNU_push_tls_address
)
571 result
+= ctx
->offset
;
572 result_val
= value_from_ulongest (address_type
, result
);
576 result
= extract_unsigned_integer (op_ptr
, 1, byte_order
);
577 result_val
= value_from_ulongest (address_type
, result
);
581 result
= extract_signed_integer (op_ptr
, 1, byte_order
);
582 result_val
= value_from_ulongest (address_type
, result
);
586 result
= extract_unsigned_integer (op_ptr
, 2, byte_order
);
587 result_val
= value_from_ulongest (address_type
, result
);
591 result
= extract_signed_integer (op_ptr
, 2, byte_order
);
592 result_val
= value_from_ulongest (address_type
, result
);
596 result
= extract_unsigned_integer (op_ptr
, 4, byte_order
);
597 result_val
= value_from_ulongest (address_type
, result
);
601 result
= extract_signed_integer (op_ptr
, 4, byte_order
);
602 result_val
= value_from_ulongest (address_type
, result
);
606 result
= extract_unsigned_integer (op_ptr
, 8, byte_order
);
607 result_val
= value_from_ulongest (address_type
, result
);
611 result
= extract_signed_integer (op_ptr
, 8, byte_order
);
612 result_val
= value_from_ulongest (address_type
, result
);
616 op_ptr
= read_uleb128 (op_ptr
, op_end
, &uoffset
);
618 result_val
= value_from_ulongest (address_type
, result
);
621 op_ptr
= read_sleb128 (op_ptr
, op_end
, &offset
);
623 result_val
= value_from_ulongest (address_type
, result
);
626 /* The DW_OP_reg operations are required to occur alone in
627 location expressions. */
661 && *op_ptr
!= DW_OP_piece
662 && *op_ptr
!= DW_OP_bit_piece
663 && *op_ptr
!= DW_OP_GNU_uninit
)
664 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
665 "used either alone or in conjuction with DW_OP_piece "
666 "or DW_OP_bit_piece."));
668 result
= op
- DW_OP_reg0
;
669 result_val
= value_from_ulongest (address_type
, result
);
670 ctx
->location
= DWARF_VALUE_REGISTER
;
674 op_ptr
= read_uleb128 (op_ptr
, op_end
, ®
);
675 dwarf_expr_require_composition (op_ptr
, op_end
, "DW_OP_regx");
678 result_val
= value_from_ulongest (address_type
, result
);
679 ctx
->location
= DWARF_VALUE_REGISTER
;
682 case DW_OP_implicit_value
:
686 op_ptr
= read_uleb128 (op_ptr
, op_end
, &len
);
687 if (op_ptr
+ len
> op_end
)
688 error (_("DW_OP_implicit_value: too few bytes available."));
691 ctx
->location
= DWARF_VALUE_LITERAL
;
693 dwarf_expr_require_composition (op_ptr
, op_end
,
694 "DW_OP_implicit_value");
698 case DW_OP_stack_value
:
699 ctx
->location
= DWARF_VALUE_STACK
;
700 dwarf_expr_require_composition (op_ptr
, op_end
, "DW_OP_stack_value");
703 case DW_OP_GNU_implicit_pointer
:
708 /* The referred-to DIE. */
709 ctx
->len
= extract_unsigned_integer (op_ptr
, ctx
->addr_size
,
711 op_ptr
+= ctx
->addr_size
;
713 /* The byte offset into the data. */
714 op_ptr
= read_sleb128 (op_ptr
, op_end
, &len
);
715 result
= (ULONGEST
) len
;
716 result_val
= value_from_ulongest (address_type
, result
);
718 ctx
->location
= DWARF_VALUE_IMPLICIT_POINTER
;
719 dwarf_expr_require_composition (op_ptr
, op_end
,
720 "DW_OP_GNU_implicit_pointer");
757 op_ptr
= read_sleb128 (op_ptr
, op_end
, &offset
);
758 result
= (ctx
->read_reg
) (ctx
->baton
, op
- DW_OP_breg0
);
760 result_val
= value_from_ulongest (address_type
, result
);
765 op_ptr
= read_uleb128 (op_ptr
, op_end
, ®
);
766 op_ptr
= read_sleb128 (op_ptr
, op_end
, &offset
);
767 result
= (ctx
->read_reg
) (ctx
->baton
, reg
);
769 result_val
= value_from_ulongest (address_type
, result
);
774 const gdb_byte
*datastart
;
776 unsigned int before_stack_len
;
778 op_ptr
= read_sleb128 (op_ptr
, op_end
, &offset
);
779 /* Rather than create a whole new context, we simply
780 record the stack length before execution, then reset it
781 afterwards, effectively erasing whatever the recursive
783 before_stack_len
= ctx
->stack_len
;
784 /* FIXME: cagney/2003-03-26: This code should be using
785 get_frame_base_address(), and then implement a dwarf2
786 specific this_base method. */
787 (ctx
->get_frame_base
) (ctx
->baton
, &datastart
, &datalen
);
788 dwarf_expr_eval (ctx
, datastart
, datalen
);
789 if (ctx
->location
== DWARF_VALUE_MEMORY
)
790 result
= dwarf_expr_fetch_address (ctx
, 0);
791 else if (ctx
->location
== DWARF_VALUE_REGISTER
)
793 = (ctx
->read_reg
) (ctx
->baton
,
794 value_as_long (dwarf_expr_fetch (ctx
, 0)));
796 error (_("Not implemented: computing frame "
797 "base using explicit value operator"));
798 result
= result
+ offset
;
799 result_val
= value_from_ulongest (address_type
, result
);
801 ctx
->stack_len
= before_stack_len
;
802 ctx
->location
= DWARF_VALUE_MEMORY
;
807 result_val
= dwarf_expr_fetch (ctx
, 0);
808 in_stack_memory
= dwarf_expr_fetch_in_stack_memory (ctx
, 0);
812 dwarf_expr_pop (ctx
);
817 result_val
= dwarf_expr_fetch (ctx
, offset
);
818 in_stack_memory
= dwarf_expr_fetch_in_stack_memory (ctx
, offset
);
823 struct dwarf_stack_value t1
, t2
;
825 if (ctx
->stack_len
< 2)
826 error (_("Not enough elements for "
827 "DW_OP_swap. Need 2, have %d."),
829 t1
= ctx
->stack
[ctx
->stack_len
- 1];
830 t2
= ctx
->stack
[ctx
->stack_len
- 2];
831 ctx
->stack
[ctx
->stack_len
- 1] = t2
;
832 ctx
->stack
[ctx
->stack_len
- 2] = t1
;
837 result_val
= dwarf_expr_fetch (ctx
, 1);
838 in_stack_memory
= dwarf_expr_fetch_in_stack_memory (ctx
, 1);
843 struct dwarf_stack_value t1
, t2
, t3
;
845 if (ctx
->stack_len
< 3)
846 error (_("Not enough elements for "
847 "DW_OP_rot. Need 3, have %d."),
849 t1
= ctx
->stack
[ctx
->stack_len
- 1];
850 t2
= ctx
->stack
[ctx
->stack_len
- 2];
851 t3
= ctx
->stack
[ctx
->stack_len
- 3];
852 ctx
->stack
[ctx
->stack_len
- 1] = t2
;
853 ctx
->stack
[ctx
->stack_len
- 2] = t3
;
854 ctx
->stack
[ctx
->stack_len
- 3] = t1
;
859 case DW_OP_deref_size
:
860 case DW_OP_GNU_deref_type
:
862 int addr_size
= (op
== DW_OP_deref
? ctx
->addr_size
: *op_ptr
++);
863 gdb_byte
*buf
= alloca (addr_size
);
864 CORE_ADDR addr
= dwarf_expr_fetch_address (ctx
, 0);
867 dwarf_expr_pop (ctx
);
869 if (op
== DW_OP_GNU_deref_type
)
873 op_ptr
= read_uleb128 (op_ptr
, op_end
, &type_die
);
874 type
= dwarf_get_base_type (ctx
, type_die
, 0);
879 (ctx
->read_mem
) (ctx
->baton
, buf
, addr
, addr_size
);
881 /* If the size of the object read from memory is different
882 from the type length, we need to zero-extend it. */
883 if (TYPE_LENGTH (type
) != addr_size
)
886 extract_unsigned_integer (buf
, addr_size
, byte_order
);
888 buf
= alloca (TYPE_LENGTH (type
));
889 store_unsigned_integer (buf
, TYPE_LENGTH (type
),
893 result_val
= value_from_contents_and_address (type
, buf
, addr
);
900 case DW_OP_plus_uconst
:
902 /* Unary operations. */
903 result_val
= dwarf_expr_fetch (ctx
, 0);
904 dwarf_expr_pop (ctx
);
909 if (value_less (result_val
,
910 value_zero (value_type (result_val
), not_lval
)))
911 result_val
= value_neg (result_val
);
914 result_val
= value_neg (result_val
);
917 dwarf_require_integral (value_type (result_val
));
918 result_val
= value_complement (result_val
);
920 case DW_OP_plus_uconst
:
921 dwarf_require_integral (value_type (result_val
));
922 result
= value_as_long (result_val
);
923 op_ptr
= read_uleb128 (op_ptr
, op_end
, ®
);
925 result_val
= value_from_ulongest (address_type
, result
);
949 /* Binary operations. */
950 struct value
*first
, *second
;
952 second
= dwarf_expr_fetch (ctx
, 0);
953 dwarf_expr_pop (ctx
);
955 first
= dwarf_expr_fetch (ctx
, 0);
956 dwarf_expr_pop (ctx
);
958 if (! base_types_equal_p (value_type (first
), value_type (second
)))
959 error (_("Incompatible types on DWARF stack"));
964 dwarf_require_integral (value_type (first
));
965 dwarf_require_integral (value_type (second
));
966 result_val
= value_binop (first
, second
, BINOP_BITWISE_AND
);
969 result_val
= value_binop (first
, second
, BINOP_DIV
);
972 result_val
= value_binop (first
, second
, BINOP_SUB
);
977 struct type
*orig_type
= value_type (first
);
979 /* We have to special-case "old-style" untyped values
980 -- these must have mod computed using unsigned
982 if (orig_type
== address_type
)
985 = get_unsigned_type (ctx
->gdbarch
, orig_type
);
988 first
= value_cast (utype
, first
);
989 second
= value_cast (utype
, second
);
991 /* Note that value_binop doesn't handle float or
992 decimal float here. This seems unimportant. */
993 result_val
= value_binop (first
, second
, BINOP_MOD
);
995 result_val
= value_cast (orig_type
, result_val
);
999 result_val
= value_binop (first
, second
, BINOP_MUL
);
1002 dwarf_require_integral (value_type (first
));
1003 dwarf_require_integral (value_type (second
));
1004 result_val
= value_binop (first
, second
, BINOP_BITWISE_IOR
);
1007 result_val
= value_binop (first
, second
, BINOP_ADD
);
1010 dwarf_require_integral (value_type (first
));
1011 dwarf_require_integral (value_type (second
));
1012 result_val
= value_binop (first
, second
, BINOP_LSH
);
1015 dwarf_require_integral (value_type (first
));
1016 dwarf_require_integral (value_type (second
));
1017 if (!TYPE_UNSIGNED (value_type (first
)))
1020 = get_unsigned_type (ctx
->gdbarch
, value_type (first
));
1022 first
= value_cast (utype
, first
);
1025 result_val
= value_binop (first
, second
, BINOP_RSH
);
1026 /* Make sure we wind up with the same type we started
1028 if (value_type (result_val
) != value_type (second
))
1029 result_val
= value_cast (value_type (second
), result_val
);
1032 dwarf_require_integral (value_type (first
));
1033 dwarf_require_integral (value_type (second
));
1034 if (TYPE_UNSIGNED (value_type (first
)))
1037 = get_signed_type (ctx
->gdbarch
, value_type (first
));
1039 first
= value_cast (stype
, first
);
1042 result_val
= value_binop (first
, second
, BINOP_RSH
);
1043 /* Make sure we wind up with the same type we started
1045 if (value_type (result_val
) != value_type (second
))
1046 result_val
= value_cast (value_type (second
), result_val
);
1049 dwarf_require_integral (value_type (first
));
1050 dwarf_require_integral (value_type (second
));
1051 result_val
= value_binop (first
, second
, BINOP_BITWISE_XOR
);
1054 /* A <= B is !(B < A). */
1055 result
= ! value_less (second
, first
);
1056 result_val
= value_from_ulongest (address_type
, result
);
1059 /* A >= B is !(A < B). */
1060 result
= ! value_less (first
, second
);
1061 result_val
= value_from_ulongest (address_type
, result
);
1064 result
= value_equal (first
, second
);
1065 result_val
= value_from_ulongest (address_type
, result
);
1068 result
= value_less (first
, second
);
1069 result_val
= value_from_ulongest (address_type
, result
);
1072 /* A > B is B < A. */
1073 result
= value_less (second
, first
);
1074 result_val
= value_from_ulongest (address_type
, result
);
1077 result
= ! value_equal (first
, second
);
1078 result_val
= value_from_ulongest (address_type
, result
);
1081 internal_error (__FILE__
, __LINE__
,
1082 _("Can't be reached."));
1087 case DW_OP_call_frame_cfa
:
1088 result
= (ctx
->get_frame_cfa
) (ctx
->baton
);
1089 result_val
= value_from_ulongest (address_type
, result
);
1090 in_stack_memory
= 1;
1093 case DW_OP_GNU_push_tls_address
:
1094 /* Variable is at a constant offset in the thread-local
1095 storage block into the objfile for the current thread and
1096 the dynamic linker module containing this expression. Here
1097 we return returns the offset from that base. The top of the
1098 stack has the offset from the beginning of the thread
1099 control block at which the variable is located. Nothing
1100 should follow this operator, so the top of stack would be
1102 result
= value_as_long (dwarf_expr_fetch (ctx
, 0));
1103 dwarf_expr_pop (ctx
);
1104 result
= (ctx
->get_tls_address
) (ctx
->baton
, result
);
1105 result_val
= value_from_ulongest (address_type
, result
);
1109 offset
= extract_signed_integer (op_ptr
, 2, byte_order
);
1118 offset
= extract_signed_integer (op_ptr
, 2, byte_order
);
1120 val
= dwarf_expr_fetch (ctx
, 0);
1121 dwarf_require_integral (value_type (val
));
1122 if (value_as_long (val
) != 0)
1124 dwarf_expr_pop (ctx
);
1135 /* Record the piece. */
1136 op_ptr
= read_uleb128 (op_ptr
, op_end
, &size
);
1137 add_piece (ctx
, 8 * size
, 0);
1139 /* Pop off the address/regnum, and reset the location
1141 if (ctx
->location
!= DWARF_VALUE_LITERAL
1142 && ctx
->location
!= DWARF_VALUE_OPTIMIZED_OUT
)
1143 dwarf_expr_pop (ctx
);
1144 ctx
->location
= DWARF_VALUE_MEMORY
;
1148 case DW_OP_bit_piece
:
1150 ULONGEST size
, offset
;
1152 /* Record the piece. */
1153 op_ptr
= read_uleb128 (op_ptr
, op_end
, &size
);
1154 op_ptr
= read_uleb128 (op_ptr
, op_end
, &offset
);
1155 add_piece (ctx
, size
, offset
);
1157 /* Pop off the address/regnum, and reset the location
1159 if (ctx
->location
!= DWARF_VALUE_LITERAL
1160 && ctx
->location
!= DWARF_VALUE_OPTIMIZED_OUT
)
1161 dwarf_expr_pop (ctx
);
1162 ctx
->location
= DWARF_VALUE_MEMORY
;
1166 case DW_OP_GNU_uninit
:
1167 if (op_ptr
!= op_end
)
1168 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
1169 "be the very last op."));
1171 ctx
->initialized
= 0;
1175 result
= extract_unsigned_integer (op_ptr
, 2, byte_order
);
1177 ctx
->dwarf_call (ctx
, result
);
1181 result
= extract_unsigned_integer (op_ptr
, 4, byte_order
);
1183 ctx
->dwarf_call (ctx
, result
);
1186 case DW_OP_GNU_entry_value
:
1187 /* This operation is not yet supported by GDB. */
1188 ctx
->location
= DWARF_VALUE_OPTIMIZED_OUT
;
1190 ctx
->num_pieces
= 0;
1191 goto abort_expression
;
1193 case DW_OP_GNU_const_type
:
1197 const gdb_byte
*data
;
1200 op_ptr
= read_uleb128 (op_ptr
, op_end
, &type_die
);
1205 type
= dwarf_get_base_type (ctx
, type_die
, n
);
1206 result_val
= value_from_contents (type
, data
);
1210 case DW_OP_GNU_regval_type
:
1215 op_ptr
= read_uleb128 (op_ptr
, op_end
, ®
);
1216 op_ptr
= read_uleb128 (op_ptr
, op_end
, &type_die
);
1218 type
= dwarf_get_base_type (ctx
, type_die
, 0);
1219 result
= (ctx
->read_reg
) (ctx
->baton
, reg
);
1220 result_val
= value_from_ulongest (type
, result
);
1224 case DW_OP_GNU_convert
:
1225 case DW_OP_GNU_reinterpret
:
1230 op_ptr
= read_uleb128 (op_ptr
, op_end
, &type_die
);
1232 type
= dwarf_get_base_type (ctx
, type_die
, 0);
1234 result_val
= dwarf_expr_fetch (ctx
, 0);
1235 dwarf_expr_pop (ctx
);
1237 if (op
== DW_OP_GNU_convert
)
1238 result_val
= value_cast (type
, result_val
);
1239 else if (type
== value_type (result_val
))
1243 else if (TYPE_LENGTH (type
)
1244 != TYPE_LENGTH (value_type (result_val
)))
1245 error (_("DW_OP_GNU_reinterpret has wrong size"));
1248 = value_from_contents (type
,
1249 value_contents_all (result_val
));
1254 error (_("Unhandled dwarf expression opcode 0x%x"), op
);
1257 /* Most things push a result value. */
1258 gdb_assert (result_val
!= NULL
);
1259 dwarf_expr_push (ctx
, result_val
, in_stack_memory
);
1264 /* To simplify our main caller, if the result is an implicit
1265 pointer, then make a pieced value. This is ok because we can't
1266 have implicit pointers in contexts where pieces are invalid. */
1267 if (ctx
->location
== DWARF_VALUE_IMPLICIT_POINTER
)
1268 add_piece (ctx
, 8 * ctx
->addr_size
, 0);
1271 ctx
->recursion_depth
--;
1272 gdb_assert (ctx
->recursion_depth
>= 0);
1276 _initialize_dwarf2expr (void)
1279 = gdbarch_data_register_post_init (dwarf_gdbarch_types_init
);