2010-07-02 Ulrich Weigand <uweigand@de.ibm.com>
[binutils-gdb.git] / gdb / dwarf2expr.c
1 /* DWARF 2 Expression Evaluator.
2
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 Contributed by Daniel Berlin (dan@dberlin.org)
7
8 This file is part of GDB.
9
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
14
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22
23 #include "defs.h"
24 #include "symtab.h"
25 #include "gdbtypes.h"
26 #include "value.h"
27 #include "gdbcore.h"
28 #include "dwarf2.h"
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
31
32 /* Local prototypes. */
33
34 static void execute_stack_op (struct dwarf_expr_context *,
35 const gdb_byte *, const gdb_byte *);
36
37 /* Create a new context for the expression evaluator. */
38
39 struct dwarf_expr_context *
40 new_dwarf_expr_context (void)
41 {
42 struct dwarf_expr_context *retval;
43
44 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
45 retval->stack_len = 0;
46 retval->stack_allocated = 10;
47 retval->stack = xmalloc (retval->stack_allocated
48 * sizeof (struct dwarf_stack_value));
49 retval->num_pieces = 0;
50 retval->pieces = 0;
51 retval->max_recursion_depth = 0x100;
52 return retval;
53 }
54
55 /* Release the memory allocated to CTX. */
56
57 void
58 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
59 {
60 xfree (ctx->stack);
61 xfree (ctx->pieces);
62 xfree (ctx);
63 }
64
/* Cleanup callback for make_cleanup_free_dwarf_expr_context; ARG is
   really a struct dwarf_expr_context.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  struct dwarf_expr_context *ctx = arg;

  free_dwarf_expr_context (ctx);
}
72
73 /* Return a cleanup that calls free_dwarf_expr_context. */
74
75 struct cleanup *
76 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
77 {
78 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
79 }
80
81 /* Expand the memory allocated to CTX's stack to contain at least
82 NEED more elements than are currently used. */
83
84 static void
85 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
86 {
87 if (ctx->stack_len + need > ctx->stack_allocated)
88 {
89 size_t newlen = ctx->stack_len + need + 10;
90
91 ctx->stack = xrealloc (ctx->stack,
92 newlen * sizeof (struct dwarf_stack_value));
93 ctx->stack_allocated = newlen;
94 }
95 }
96
97 /* Push VALUE onto CTX's stack. */
98
99 void
100 dwarf_expr_push (struct dwarf_expr_context *ctx, ULONGEST value,
101 int in_stack_memory)
102 {
103 struct dwarf_stack_value *v;
104
105 /* We keep all stack elements within the range defined by the
106 DWARF address size. */
107 if (ctx->addr_size < sizeof (ULONGEST))
108 value &= ((ULONGEST) 1 << (ctx->addr_size * HOST_CHAR_BIT)) - 1;
109
110 dwarf_expr_grow_stack (ctx, 1);
111 v = &ctx->stack[ctx->stack_len++];
112 v->value = value;
113 v->in_stack_memory = in_stack_memory;
114 }
115
116 /* Pop the top item off of CTX's stack. */
117
118 void
119 dwarf_expr_pop (struct dwarf_expr_context *ctx)
120 {
121 if (ctx->stack_len <= 0)
122 error (_("dwarf expression stack underflow"));
123 ctx->stack_len--;
124 }
125
126 /* Retrieve the N'th item on CTX's stack. */
127
128 ULONGEST
129 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
130 {
131 if (ctx->stack_len <= n)
132 error (_("Asked for position %d of stack, stack only has %d elements on it."),
133 n, ctx->stack_len);
134 return ctx->stack[ctx->stack_len - (1 + n)].value;
135
136 }
137
138 /* Retrieve the N'th item on CTX's stack, converted to an address. */
139
140 CORE_ADDR
141 dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
142 {
143 ULONGEST result = dwarf_expr_fetch (ctx, n);
144
145 /* For most architectures, calling extract_unsigned_integer() alone
146 is sufficient for extracting an address. However, some
147 architectures (e.g. MIPS) use signed addresses and using
148 extract_unsigned_integer() will not produce a correct
149 result. Make sure we invoke gdbarch_integer_to_address()
150 for those architectures which require it. */
151 if (gdbarch_integer_to_address_p (ctx->gdbarch))
152 {
153 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
154 gdb_byte *buf = alloca (ctx->addr_size);
155 struct type *int_type;
156
157 switch (ctx->addr_size)
158 {
159 case 2:
160 int_type = builtin_type (ctx->gdbarch)->builtin_uint16;
161 break;
162 case 4:
163 int_type = builtin_type (ctx->gdbarch)->builtin_uint32;
164 break;
165 case 8:
166 int_type = builtin_type (ctx->gdbarch)->builtin_uint64;
167 break;
168 default:
169 internal_error (__FILE__, __LINE__,
170 _("Unsupported address size.\n"));
171 }
172
173 store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
174 return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
175 }
176
177 return (CORE_ADDR) result;
178 }
179
180 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
181
182 int
183 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
184 {
185 if (ctx->stack_len <= n)
186 error (_("Asked for position %d of stack, stack only has %d elements on it."),
187 n, ctx->stack_len);
188 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
189
190 }
191
192 /* Return true if the expression stack is empty. */
193
194 static int
195 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
196 {
197 return ctx->stack_len == 0;
198 }
199
200 /* Add a new piece to CTX's piece list. */
201 static void
202 add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
203 {
204 struct dwarf_expr_piece *p;
205
206 ctx->num_pieces++;
207
208 ctx->pieces = xrealloc (ctx->pieces,
209 (ctx->num_pieces
210 * sizeof (struct dwarf_expr_piece)));
211
212 p = &ctx->pieces[ctx->num_pieces - 1];
213 p->location = ctx->location;
214 p->size = size;
215 p->offset = offset;
216
217 if (p->location == DWARF_VALUE_LITERAL)
218 {
219 p->v.literal.data = ctx->data;
220 p->v.literal.length = ctx->len;
221 }
222 else if (dwarf_expr_stack_empty_p (ctx))
223 {
224 p->location = DWARF_VALUE_OPTIMIZED_OUT;
225 /* Also reset the context's location, for our callers. This is
226 a somewhat strange approach, but this lets us avoid setting
227 the location to DWARF_VALUE_MEMORY in all the individual
228 cases in the evaluator. */
229 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
230 }
231 else if (p->location == DWARF_VALUE_MEMORY)
232 {
233 p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
234 p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
235 }
236 else
237 {
238 p->v.value = dwarf_expr_fetch (ctx, 0);
239 }
240 }
241
242 /* Evaluate the expression at ADDR (LEN bytes long) using the context
243 CTX. */
244
245 void
246 dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
247 size_t len)
248 {
249 int old_recursion_depth = ctx->recursion_depth;
250
251 execute_stack_op (ctx, addr, addr + len);
252
253 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
254
255 gdb_assert (ctx->recursion_depth == old_recursion_depth);
256 }
257
258 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
259 by R, and return the new value of BUF. Verify that it doesn't extend
260 past BUF_END. */
261
262 const gdb_byte *
263 read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r)
264 {
265 unsigned shift = 0;
266 ULONGEST result = 0;
267 gdb_byte byte;
268
269 while (1)
270 {
271 if (buf >= buf_end)
272 error (_("read_uleb128: Corrupted DWARF expression."));
273
274 byte = *buf++;
275 result |= (byte & 0x7f) << shift;
276 if ((byte & 0x80) == 0)
277 break;
278 shift += 7;
279 }
280 *r = result;
281 return buf;
282 }
283
284 /* Decode the signed LEB128 constant at BUF into the variable pointed to
285 by R, and return the new value of BUF. Verify that it doesn't extend
286 past BUF_END. */
287
288 const gdb_byte *
289 read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r)
290 {
291 unsigned shift = 0;
292 LONGEST result = 0;
293 gdb_byte byte;
294
295 while (1)
296 {
297 if (buf >= buf_end)
298 error (_("read_sleb128: Corrupted DWARF expression."));
299
300 byte = *buf++;
301 result |= (byte & 0x7f) << shift;
302 shift += 7;
303 if ((byte & 0x80) == 0)
304 break;
305 }
306 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
307 result |= -(1 << shift);
308
309 *r = result;
310 return buf;
311 }
312 \f
313
314 /* Check that the current operator is either at the end of an
315 expression, or that it is followed by a composition operator. */
316
317 void
318 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
319 const char *op_name)
320 {
321 /* It seems like DW_OP_GNU_uninit should be handled here. However,
322 it doesn't seem to make sense for DW_OP_*_value, and it was not
323 checked at the other place that this function is called. */
324 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
325 error (_("DWARF-2 expression error: `%s' operations must be "
326 "used either alone or in conjuction with DW_OP_piece "
327 "or DW_OP_bit_piece."),
328 op_name);
329 }
330
331 /* The engine for the expression evaluator. Using the context in CTX,
332 evaluate the expression between OP_PTR and OP_END. */
333
334 static void
335 execute_stack_op (struct dwarf_expr_context *ctx,
336 const gdb_byte *op_ptr, const gdb_byte *op_end)
337 {
338 #define sign_ext(x) ((LONGEST) (((x) ^ sign_bit) - sign_bit))
339 ULONGEST sign_bit = (ctx->addr_size >= sizeof (ULONGEST) ? 0
340 : ((ULONGEST) 1) << (ctx->addr_size * 8 - 1));
341 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
342
343 ctx->location = DWARF_VALUE_MEMORY;
344 ctx->initialized = 1; /* Default is initialized. */
345
346 if (ctx->recursion_depth > ctx->max_recursion_depth)
347 error (_("DWARF-2 expression error: Loop detected (%d)."),
348 ctx->recursion_depth);
349 ctx->recursion_depth++;
350
351 while (op_ptr < op_end)
352 {
353 enum dwarf_location_atom op = *op_ptr++;
354 ULONGEST result;
355 /* Assume the value is not in stack memory.
356 Code that knows otherwise sets this to 1.
357 Some arithmetic on stack addresses can probably be assumed to still
358 be a stack address, but we skip this complication for now.
359 This is just an optimization, so it's always ok to punt
360 and leave this as 0. */
361 int in_stack_memory = 0;
362 ULONGEST uoffset, reg;
363 LONGEST offset;
364
365 switch (op)
366 {
367 case DW_OP_lit0:
368 case DW_OP_lit1:
369 case DW_OP_lit2:
370 case DW_OP_lit3:
371 case DW_OP_lit4:
372 case DW_OP_lit5:
373 case DW_OP_lit6:
374 case DW_OP_lit7:
375 case DW_OP_lit8:
376 case DW_OP_lit9:
377 case DW_OP_lit10:
378 case DW_OP_lit11:
379 case DW_OP_lit12:
380 case DW_OP_lit13:
381 case DW_OP_lit14:
382 case DW_OP_lit15:
383 case DW_OP_lit16:
384 case DW_OP_lit17:
385 case DW_OP_lit18:
386 case DW_OP_lit19:
387 case DW_OP_lit20:
388 case DW_OP_lit21:
389 case DW_OP_lit22:
390 case DW_OP_lit23:
391 case DW_OP_lit24:
392 case DW_OP_lit25:
393 case DW_OP_lit26:
394 case DW_OP_lit27:
395 case DW_OP_lit28:
396 case DW_OP_lit29:
397 case DW_OP_lit30:
398 case DW_OP_lit31:
399 result = op - DW_OP_lit0;
400 break;
401
402 case DW_OP_addr:
403 result = extract_unsigned_integer (op_ptr,
404 ctx->addr_size, byte_order);
405 op_ptr += ctx->addr_size;
406 break;
407
408 case DW_OP_const1u:
409 result = extract_unsigned_integer (op_ptr, 1, byte_order);
410 op_ptr += 1;
411 break;
412 case DW_OP_const1s:
413 result = extract_signed_integer (op_ptr, 1, byte_order);
414 op_ptr += 1;
415 break;
416 case DW_OP_const2u:
417 result = extract_unsigned_integer (op_ptr, 2, byte_order);
418 op_ptr += 2;
419 break;
420 case DW_OP_const2s:
421 result = extract_signed_integer (op_ptr, 2, byte_order);
422 op_ptr += 2;
423 break;
424 case DW_OP_const4u:
425 result = extract_unsigned_integer (op_ptr, 4, byte_order);
426 op_ptr += 4;
427 break;
428 case DW_OP_const4s:
429 result = extract_signed_integer (op_ptr, 4, byte_order);
430 op_ptr += 4;
431 break;
432 case DW_OP_const8u:
433 result = extract_unsigned_integer (op_ptr, 8, byte_order);
434 op_ptr += 8;
435 break;
436 case DW_OP_const8s:
437 result = extract_signed_integer (op_ptr, 8, byte_order);
438 op_ptr += 8;
439 break;
440 case DW_OP_constu:
441 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
442 result = uoffset;
443 break;
444 case DW_OP_consts:
445 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
446 result = offset;
447 break;
448
449 /* The DW_OP_reg operations are required to occur alone in
450 location expressions. */
451 case DW_OP_reg0:
452 case DW_OP_reg1:
453 case DW_OP_reg2:
454 case DW_OP_reg3:
455 case DW_OP_reg4:
456 case DW_OP_reg5:
457 case DW_OP_reg6:
458 case DW_OP_reg7:
459 case DW_OP_reg8:
460 case DW_OP_reg9:
461 case DW_OP_reg10:
462 case DW_OP_reg11:
463 case DW_OP_reg12:
464 case DW_OP_reg13:
465 case DW_OP_reg14:
466 case DW_OP_reg15:
467 case DW_OP_reg16:
468 case DW_OP_reg17:
469 case DW_OP_reg18:
470 case DW_OP_reg19:
471 case DW_OP_reg20:
472 case DW_OP_reg21:
473 case DW_OP_reg22:
474 case DW_OP_reg23:
475 case DW_OP_reg24:
476 case DW_OP_reg25:
477 case DW_OP_reg26:
478 case DW_OP_reg27:
479 case DW_OP_reg28:
480 case DW_OP_reg29:
481 case DW_OP_reg30:
482 case DW_OP_reg31:
483 if (op_ptr != op_end
484 && *op_ptr != DW_OP_piece
485 && *op_ptr != DW_OP_bit_piece
486 && *op_ptr != DW_OP_GNU_uninit)
487 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
488 "used either alone or in conjuction with DW_OP_piece "
489 "or DW_OP_bit_piece."));
490
491 result = op - DW_OP_reg0;
492 ctx->location = DWARF_VALUE_REGISTER;
493 break;
494
495 case DW_OP_regx:
496 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
497 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
498
499 result = reg;
500 ctx->location = DWARF_VALUE_REGISTER;
501 break;
502
503 case DW_OP_implicit_value:
504 {
505 ULONGEST len;
506
507 op_ptr = read_uleb128 (op_ptr, op_end, &len);
508 if (op_ptr + len > op_end)
509 error (_("DW_OP_implicit_value: too few bytes available."));
510 ctx->len = len;
511 ctx->data = op_ptr;
512 ctx->location = DWARF_VALUE_LITERAL;
513 op_ptr += len;
514 dwarf_expr_require_composition (op_ptr, op_end,
515 "DW_OP_implicit_value");
516 }
517 goto no_push;
518
519 case DW_OP_stack_value:
520 ctx->location = DWARF_VALUE_STACK;
521 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
522 goto no_push;
523
524 case DW_OP_breg0:
525 case DW_OP_breg1:
526 case DW_OP_breg2:
527 case DW_OP_breg3:
528 case DW_OP_breg4:
529 case DW_OP_breg5:
530 case DW_OP_breg6:
531 case DW_OP_breg7:
532 case DW_OP_breg8:
533 case DW_OP_breg9:
534 case DW_OP_breg10:
535 case DW_OP_breg11:
536 case DW_OP_breg12:
537 case DW_OP_breg13:
538 case DW_OP_breg14:
539 case DW_OP_breg15:
540 case DW_OP_breg16:
541 case DW_OP_breg17:
542 case DW_OP_breg18:
543 case DW_OP_breg19:
544 case DW_OP_breg20:
545 case DW_OP_breg21:
546 case DW_OP_breg22:
547 case DW_OP_breg23:
548 case DW_OP_breg24:
549 case DW_OP_breg25:
550 case DW_OP_breg26:
551 case DW_OP_breg27:
552 case DW_OP_breg28:
553 case DW_OP_breg29:
554 case DW_OP_breg30:
555 case DW_OP_breg31:
556 {
557 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
558 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
559 result += offset;
560 }
561 break;
562 case DW_OP_bregx:
563 {
564 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
565 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
566 result = (ctx->read_reg) (ctx->baton, reg);
567 result += offset;
568 }
569 break;
570 case DW_OP_fbreg:
571 {
572 const gdb_byte *datastart;
573 size_t datalen;
574 unsigned int before_stack_len;
575
576 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
577 /* Rather than create a whole new context, we simply
578 record the stack length before execution, then reset it
579 afterwards, effectively erasing whatever the recursive
580 call put there. */
581 before_stack_len = ctx->stack_len;
582 /* FIXME: cagney/2003-03-26: This code should be using
583 get_frame_base_address(), and then implement a dwarf2
584 specific this_base method. */
585 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
586 dwarf_expr_eval (ctx, datastart, datalen);
587 if (ctx->location == DWARF_VALUE_MEMORY)
588 result = dwarf_expr_fetch_address (ctx, 0);
589 else if (ctx->location == DWARF_VALUE_REGISTER)
590 result = (ctx->read_reg) (ctx->baton, dwarf_expr_fetch (ctx, 0));
591 else
592 error (_("Not implemented: computing frame base using explicit value operator"));
593 result = result + offset;
594 in_stack_memory = 1;
595 ctx->stack_len = before_stack_len;
596 ctx->location = DWARF_VALUE_MEMORY;
597 }
598 break;
599
600 case DW_OP_dup:
601 result = dwarf_expr_fetch (ctx, 0);
602 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
603 break;
604
605 case DW_OP_drop:
606 dwarf_expr_pop (ctx);
607 goto no_push;
608
609 case DW_OP_pick:
610 offset = *op_ptr++;
611 result = dwarf_expr_fetch (ctx, offset);
612 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
613 break;
614
615 case DW_OP_swap:
616 {
617 struct dwarf_stack_value t1, t2;
618
619 if (ctx->stack_len < 2)
620 error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
621 ctx->stack_len);
622 t1 = ctx->stack[ctx->stack_len - 1];
623 t2 = ctx->stack[ctx->stack_len - 2];
624 ctx->stack[ctx->stack_len - 1] = t2;
625 ctx->stack[ctx->stack_len - 2] = t1;
626 goto no_push;
627 }
628
629 case DW_OP_over:
630 result = dwarf_expr_fetch (ctx, 1);
631 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
632 break;
633
634 case DW_OP_rot:
635 {
636 struct dwarf_stack_value t1, t2, t3;
637
638 if (ctx->stack_len < 3)
639 error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
640 ctx->stack_len);
641 t1 = ctx->stack[ctx->stack_len - 1];
642 t2 = ctx->stack[ctx->stack_len - 2];
643 t3 = ctx->stack[ctx->stack_len - 3];
644 ctx->stack[ctx->stack_len - 1] = t2;
645 ctx->stack[ctx->stack_len - 2] = t3;
646 ctx->stack[ctx->stack_len - 3] = t1;
647 goto no_push;
648 }
649
650 case DW_OP_deref:
651 case DW_OP_deref_size:
652 {
653 int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++);
654 gdb_byte *buf = alloca (addr_size);
655 CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0);
656 dwarf_expr_pop (ctx);
657
658 (ctx->read_mem) (ctx->baton, buf, addr, addr_size);
659 result = extract_unsigned_integer (buf, addr_size, byte_order);
660 break;
661 }
662
663 case DW_OP_abs:
664 case DW_OP_neg:
665 case DW_OP_not:
666 case DW_OP_plus_uconst:
667 /* Unary operations. */
668 result = dwarf_expr_fetch (ctx, 0);
669 dwarf_expr_pop (ctx);
670
671 switch (op)
672 {
673 case DW_OP_abs:
674 if (sign_ext (result) < 0)
675 result = -result;
676 break;
677 case DW_OP_neg:
678 result = -result;
679 break;
680 case DW_OP_not:
681 result = ~result;
682 break;
683 case DW_OP_plus_uconst:
684 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
685 result += reg;
686 break;
687 }
688 break;
689
690 case DW_OP_and:
691 case DW_OP_div:
692 case DW_OP_minus:
693 case DW_OP_mod:
694 case DW_OP_mul:
695 case DW_OP_or:
696 case DW_OP_plus:
697 case DW_OP_shl:
698 case DW_OP_shr:
699 case DW_OP_shra:
700 case DW_OP_xor:
701 case DW_OP_le:
702 case DW_OP_ge:
703 case DW_OP_eq:
704 case DW_OP_lt:
705 case DW_OP_gt:
706 case DW_OP_ne:
707 {
708 /* Binary operations. */
709 ULONGEST first, second;
710
711 second = dwarf_expr_fetch (ctx, 0);
712 dwarf_expr_pop (ctx);
713
714 first = dwarf_expr_fetch (ctx, 0);
715 dwarf_expr_pop (ctx);
716
717 switch (op)
718 {
719 case DW_OP_and:
720 result = first & second;
721 break;
722 case DW_OP_div:
723 if (!second)
724 error (_("Division by zero"));
725 result = sign_ext (first) / sign_ext (second);
726 break;
727 case DW_OP_minus:
728 result = first - second;
729 break;
730 case DW_OP_mod:
731 if (!second)
732 error (_("Division by zero"));
733 result = first % second;
734 break;
735 case DW_OP_mul:
736 result = first * second;
737 break;
738 case DW_OP_or:
739 result = first | second;
740 break;
741 case DW_OP_plus:
742 result = first + second;
743 break;
744 case DW_OP_shl:
745 result = first << second;
746 break;
747 case DW_OP_shr:
748 result = first >> second;
749 break;
750 case DW_OP_shra:
751 result = sign_ext (first) >> second;
752 break;
753 case DW_OP_xor:
754 result = first ^ second;
755 break;
756 case DW_OP_le:
757 result = sign_ext (first) <= sign_ext (second);
758 break;
759 case DW_OP_ge:
760 result = sign_ext (first) >= sign_ext (second);
761 break;
762 case DW_OP_eq:
763 result = sign_ext (first) == sign_ext (second);
764 break;
765 case DW_OP_lt:
766 result = sign_ext (first) < sign_ext (second);
767 break;
768 case DW_OP_gt:
769 result = sign_ext (first) > sign_ext (second);
770 break;
771 case DW_OP_ne:
772 result = sign_ext (first) != sign_ext (second);
773 break;
774 default:
775 internal_error (__FILE__, __LINE__,
776 _("Can't be reached."));
777 }
778 }
779 break;
780
781 case DW_OP_call_frame_cfa:
782 result = (ctx->get_frame_cfa) (ctx->baton);
783 in_stack_memory = 1;
784 break;
785
786 case DW_OP_GNU_push_tls_address:
787 /* Variable is at a constant offset in the thread-local
788 storage block into the objfile for the current thread and
789 the dynamic linker module containing this expression. Here
790 we return returns the offset from that base. The top of the
791 stack has the offset from the beginning of the thread
792 control block at which the variable is located. Nothing
793 should follow this operator, so the top of stack would be
794 returned. */
795 result = dwarf_expr_fetch (ctx, 0);
796 dwarf_expr_pop (ctx);
797 result = (ctx->get_tls_address) (ctx->baton, result);
798 break;
799
800 case DW_OP_skip:
801 offset = extract_signed_integer (op_ptr, 2, byte_order);
802 op_ptr += 2;
803 op_ptr += offset;
804 goto no_push;
805
806 case DW_OP_bra:
807 offset = extract_signed_integer (op_ptr, 2, byte_order);
808 op_ptr += 2;
809 if (dwarf_expr_fetch (ctx, 0) != 0)
810 op_ptr += offset;
811 dwarf_expr_pop (ctx);
812 goto no_push;
813
814 case DW_OP_nop:
815 goto no_push;
816
817 case DW_OP_piece:
818 {
819 ULONGEST size;
820
821 /* Record the piece. */
822 op_ptr = read_uleb128 (op_ptr, op_end, &size);
823 add_piece (ctx, 8 * size, 0);
824
825 /* Pop off the address/regnum, and reset the location
826 type. */
827 if (ctx->location != DWARF_VALUE_LITERAL
828 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
829 dwarf_expr_pop (ctx);
830 ctx->location = DWARF_VALUE_MEMORY;
831 }
832 goto no_push;
833
834 case DW_OP_bit_piece:
835 {
836 ULONGEST size, offset;
837
838 /* Record the piece. */
839 op_ptr = read_uleb128 (op_ptr, op_end, &size);
840 op_ptr = read_uleb128 (op_ptr, op_end, &offset);
841 add_piece (ctx, size, offset);
842
843 /* Pop off the address/regnum, and reset the location
844 type. */
845 if (ctx->location != DWARF_VALUE_LITERAL
846 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
847 dwarf_expr_pop (ctx);
848 ctx->location = DWARF_VALUE_MEMORY;
849 }
850 goto no_push;
851
852 case DW_OP_GNU_uninit:
853 if (op_ptr != op_end)
854 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
855 "be the very last op."));
856
857 ctx->initialized = 0;
858 goto no_push;
859
860 case DW_OP_call2:
861 result = extract_unsigned_integer (op_ptr, 2, byte_order);
862 op_ptr += 2;
863 ctx->dwarf_call (ctx, result);
864 goto no_push;
865
866 case DW_OP_call4:
867 result = extract_unsigned_integer (op_ptr, 4, byte_order);
868 op_ptr += 4;
869 ctx->dwarf_call (ctx, result);
870 goto no_push;
871
872 default:
873 error (_("Unhandled dwarf expression opcode 0x%x"), op);
874 }
875
876 /* Most things push a result value. */
877 dwarf_expr_push (ctx, result, in_stack_memory);
878 no_push:;
879 }
880
881 ctx->recursion_depth--;
882 gdb_assert (ctx->recursion_depth >= 0);
883 #undef sign_ext
884 }