* [PATCH 1/3] Write CodeView information about local static variables
From: Mark Harmstone @ 2024-08-13 0:24 UTC (permalink / raw)
To: gcc-patches; +Cc: Mark Harmstone
Outputs CodeView S_LDATA32 symbols for static variables within
functions, along with S_BLOCK32 and S_END symbols marking the beginning
and end of lexical blocks.
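For illustration only (this example is not part of the patch, and the
function and variable names are invented): compiled unoptimized for a
CodeView target, a function like the one below would be expected to get
an S_LDATA32 record for "calls", plus an S_BLOCK32 ... S_END pair
wrapping the S_LDATA32 for "block_calls", mirroring the
DW_TAG_lexical_block in the DWARF input.

/* Hypothetical source; the exact records depend on target and options.  */
int
counter (int x)
{
  static int calls;             /* S_LDATA32 at function scope */

  calls++;

  if (x > 0)
    {
      static int block_calls;   /* S_LDATA32 inside an S_BLOCK32/S_END pair */

      block_calls += x;
      return block_calls;
    }

  return calls;
}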
gcc/
* dwarf2codeview.cc (enum cv_sym_type): Add S_END and S_BLOCK32.
(write_local_s_ldata32): New function.
(write_unoptimized_local_variable): New function.
(write_s_block32): New function.
(write_s_end): New function.
(write_unoptimized_function_vars): New function.
(write_function): Call write_unoptimized_function_vars.
---
gcc/dwarf2codeview.cc | 258 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 258 insertions(+)
diff --git a/gcc/dwarf2codeview.cc b/gcc/dwarf2codeview.cc
index 7e4faaa9388..cb2d64bfcc6 100644
--- a/gcc/dwarf2codeview.cc
+++ b/gcc/dwarf2codeview.cc
@@ -70,6 +70,8 @@ along with GCC; see the file COPYING3. If not see
/* This is enum SYM_ENUM_e in Microsoft's cvinfo.h. */
enum cv_sym_type {
+ S_END = 0x0006,
+ S_BLOCK32 = 0x1103,
S_LDATA32 = 0x110c,
S_GDATA32 = 0x110d,
S_COMPILE3 = 0x113c,
@@ -986,6 +988,260 @@ end:
free (s->data_symbol.name);
}
+/* Write an S_LDATA32 symbol, representing a static variable within a function.
+ This symbol can also appear outside of a function block - see
+ write_data_symbol. */
+
+static void
+write_local_s_ldata32 (dw_die_ref die, dw_loc_descr_ref loc_ref)
+{
+ unsigned int label_num = ++sym_label_num;
+ const char *name = get_AT_string (die, DW_AT_name);
+ uint32_t type;
+
+ /* This is struct datasym in binutils:
+
+ struct datasym
+ {
+ uint16_t size;
+ uint16_t kind;
+ uint32_t type;
+ uint32_t offset;
+ uint16_t section;
+ char name[];
+ } ATTRIBUTE_PACKED;
+ */
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ asm_fprintf (asm_out_file,
+ "%L" SYMBOL_END_LABEL "%u - %L" SYMBOL_START_LABEL "%u\n",
+ label_num, label_num);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_START_LABEL, label_num);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, S_LDATA32);
+ putc ('\n', asm_out_file);
+
+ type = get_type_num (get_AT_ref (die, DW_AT_type), false, false);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, type);
+ putc ('\n', asm_out_file);
+
+ asm_fprintf (asm_out_file, "\t.secrel32 ");
+ output_addr_const (asm_out_file, loc_ref->dw_loc_oprnd1.v.val_addr);
+ fputc ('\n', asm_out_file);
+
+ asm_fprintf (asm_out_file, "\t.secidx ");
+ output_addr_const (asm_out_file, loc_ref->dw_loc_oprnd1.v.val_addr);
+ fputc ('\n', asm_out_file);
+
+ ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name) + 1);
+
+ ASM_OUTPUT_ALIGN (asm_out_file, 2);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+}
+
+/* Write a symbol representing an unoptimized variable within a function, if
+ we're able to translate the DIE's DW_AT_location into its CodeView
+ equivalent. */
+
+static void
+write_unoptimized_local_variable (dw_die_ref die)
+{
+ dw_attr_node *loc;
+ dw_loc_descr_ref loc_ref;
+
+ loc = get_AT (die, DW_AT_location);
+ if (!loc)
+ return;
+
+ if (loc->dw_attr_val.val_class != dw_val_class_loc)
+ return;
+
+ loc_ref = loc->dw_attr_val.v.val_loc;
+ if (!loc_ref)
+ return;
+
+ switch (loc_ref->dw_loc_opc)
+ {
+ case DW_OP_addr:
+ write_local_s_ldata32 (die, loc_ref);
+ break;
+
+ default:
+ break;
+ }
+}
+
+/* Translate a DW_TAG_lexical_block DIE into an S_BLOCK32 symbol, representing
+ a block within an unoptimized function. Returns false if we're not able
+ to resolve the location, which will prevent the caller from issuing an
+ unneeded S_END. */
+
+static bool
+write_s_block32 (dw_die_ref die)
+{
+ unsigned int label_num = ++sym_label_num;
+ dw_attr_node *loc_low, *loc_high;
+ const char *label_low, *label_high;
+ rtx rtx_low, rtx_high;
+
+ /* This is struct blocksym in binutils and BLOCKSYM32 in Microsoft's
+ cvinfo.h:
+
+ struct blocksym
+ {
+ uint16_t size;
+ uint16_t kind;
+ uint32_t parent;
+ uint32_t end;
+ uint32_t len;
+ uint32_t offset;
+ uint16_t section;
+ char name[];
+ } ATTRIBUTE_PACKED;
+ */
+
+ loc_low = get_AT (die, DW_AT_low_pc);
+ if (!loc_low)
+ return false;
+
+ if (loc_low->dw_attr_val.val_class != dw_val_class_lbl_id)
+ return false;
+
+ label_low = loc_low->dw_attr_val.v.val_lbl_id;
+ if (!label_low)
+ return false;
+
+ rtx_low = gen_rtx_SYMBOL_REF (Pmode, label_low);
+
+ loc_high = get_AT (die, DW_AT_high_pc);
+ if (!loc_high)
+ return false;
+
+ if (loc_high->dw_attr_val.val_class != dw_val_class_high_pc)
+ return false;
+
+ label_high = loc_high->dw_attr_val.v.val_lbl_id;
+ if (!label_high)
+ return false;
+
+ rtx_high = gen_rtx_SYMBOL_REF (Pmode, label_high);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ asm_fprintf (asm_out_file,
+ "%L" SYMBOL_END_LABEL "%u - %L" SYMBOL_START_LABEL "%u\n",
+ label_num, label_num);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_START_LABEL, label_num);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, S_BLOCK32);
+ putc ('\n', asm_out_file);
+
+ /* The parent and end fields get filled in by the linker. */
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, 0);
+ putc ('\n', asm_out_file);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, 0);
+ putc ('\n', asm_out_file);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ output_addr_const (asm_out_file, rtx_high);
+ fputs (" - ", asm_out_file);
+ output_addr_const (asm_out_file, rtx_low);
+ putc ('\n', asm_out_file);
+
+ asm_fprintf (asm_out_file, "\t.secrel32 ");
+ output_addr_const (asm_out_file, rtx_low);
+ fputc ('\n', asm_out_file);
+
+ asm_fprintf (asm_out_file, "\t.secidx ");
+ output_addr_const (asm_out_file, rtx_low);
+ fputc ('\n', asm_out_file);
+
+ ASM_OUTPUT_ASCII (asm_out_file, "", 1);
+
+ ASM_OUTPUT_ALIGN (asm_out_file, 2);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+
+ return true;
+}
+
+/* Write an S_END symbol, which is used to finish off a number of different
+ symbol types. Here we use it to mark the S_BLOCK32 as finished. */
+
+static void
+write_s_end (void)
+{
+ unsigned int label_num = ++sym_label_num;
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ asm_fprintf (asm_out_file,
+ "%L" SYMBOL_END_LABEL "%u - %L" SYMBOL_START_LABEL "%u\n",
+ label_num, label_num);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_START_LABEL, label_num);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, S_END);
+ putc ('\n', asm_out_file);
+
+ ASM_OUTPUT_ALIGN (asm_out_file, 2);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+}
+
+/* Loop through the DIEs in an unoptimized function, writing out any variables
+ or blocks that we encounter. */
+
+static void
+write_unoptimized_function_vars (dw_die_ref die)
+{
+ dw_die_ref first_child, c;
+
+ first_child = dw_get_die_child (die);
+
+ if (!first_child)
+ return;
+
+ c = first_child;
+ do
+ {
+ c = dw_get_die_sib (c);
+
+ switch (dw_get_die_tag (c))
+ {
+ case DW_TAG_variable:
+ write_unoptimized_local_variable (c);
+ break;
+
+ case DW_TAG_lexical_block:
+ {
+ bool block_started = write_s_block32 (c);
+
+ write_unoptimized_function_vars (c);
+
+ if (block_started)
+ write_s_end ();
+
+ break;
+ }
+
+ default:
+ break;
+ }
+ }
+ while (c != first_child);
+}
+
/* Write an S_GPROC32_ID symbol, representing a global function, or an
S_LPROC32_ID symbol, for a static function. */
@@ -1111,6 +1367,8 @@ write_function (codeview_symbol *s)
targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+ write_unoptimized_function_vars (s->function.die);
+
/* Output the S_PROC_ID_END record. */
label_num = ++sym_label_num;
--
2.44.2
* [PATCH 2/3] Write CodeView information about enregistered variables
From: Mark Harmstone @ 2024-08-13 0:24 UTC (permalink / raw)
To: gcc-patches; +Cc: Mark Harmstone
Outputs CodeView S_REGISTER symbols for local variables or parameters
that are held in a register.
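For illustration only (not part of the patch; the names are invented):
whether an unoptimized variable is given a plain DW_OP_reg* location,
and therefore an S_REGISTER record, is entirely up to the compiler and
target, but a register-qualified local is one plausible candidate.

/* Hypothetical source; "r" may or may not actually receive a DW_OP_regN
   location, so treat this purely as a sketch.  */
int
scale (int x)
{
  register int r = x * 2;       /* S_REGISTER if enregistered */

  return r + 1;
}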
gcc/
* dwarf2codeview.cc (enum cv_sym_type): Add S_REGISTER.
(enum cv_x86_register): New type.
(enum cv_amd64_register): New type.
(dwarf_reg_to_cv): New function.
(write_s_register): New function.
(write_unoptimized_local_variable): Handle parameters and DW_OP_reg*
location types.
---
gcc/dwarf2codeview.cc | 1188 +++++++++++++++++++++++++++++++++++++++++
1 file changed, 1188 insertions(+)
diff --git a/gcc/dwarf2codeview.cc b/gcc/dwarf2codeview.cc
index cb2d64bfcc6..4596408f2bb 100644
--- a/gcc/dwarf2codeview.cc
+++ b/gcc/dwarf2codeview.cc
@@ -72,6 +72,7 @@ along with GCC; see the file COPYING3. If not see
enum cv_sym_type {
S_END = 0x0006,
S_BLOCK32 = 0x1103,
+ S_REGISTER = 0x1106,
S_LDATA32 = 0x110c,
S_GDATA32 = 0x110d,
S_COMPILE3 = 0x113c,
@@ -110,6 +111,962 @@ enum cv_leaf_type {
LF_UQUADWORD = 0x800a
};
+/* These come from enum CV_HREG_e in Microsoft's cvconst.h. */
+
+enum cv_x86_register {
+ CV_REG_NONE = 0,
+ CV_REG_AL = 1,
+ CV_REG_CL = 2,
+ CV_REG_DL = 3,
+ CV_REG_BL = 4,
+ CV_REG_AH = 5,
+ CV_REG_CH = 6,
+ CV_REG_DH = 7,
+ CV_REG_BH = 8,
+ CV_REG_AX = 9,
+ CV_REG_CX = 10,
+ CV_REG_DX = 11,
+ CV_REG_BX = 12,
+ CV_REG_SP = 13,
+ CV_REG_BP = 14,
+ CV_REG_SI = 15,
+ CV_REG_DI = 16,
+ CV_REG_EAX = 17,
+ CV_REG_ECX = 18,
+ CV_REG_EDX = 19,
+ CV_REG_EBX = 20,
+ CV_REG_ESP = 21,
+ CV_REG_EBP = 22,
+ CV_REG_ESI = 23,
+ CV_REG_EDI = 24,
+ CV_REG_ES = 25,
+ CV_REG_CS = 26,
+ CV_REG_SS = 27,
+ CV_REG_DS = 28,
+ CV_REG_FS = 29,
+ CV_REG_GS = 30,
+ CV_REG_IP = 31,
+ CV_REG_FLAGS = 32,
+ CV_REG_EIP = 33,
+ CV_REG_EFLAGS = 34,
+ CV_REG_TEMP = 40,
+ CV_REG_TEMPH = 41,
+ CV_REG_QUOTE = 42,
+ CV_REG_PCDR3 = 43,
+ CV_REG_PCDR4 = 44,
+ CV_REG_PCDR5 = 45,
+ CV_REG_PCDR6 = 46,
+ CV_REG_PCDR7 = 47,
+ CV_REG_CR0 = 80,
+ CV_REG_CR1 = 81,
+ CV_REG_CR2 = 82,
+ CV_REG_CR3 = 83,
+ CV_REG_CR4 = 84,
+ CV_REG_DR0 = 90,
+ CV_REG_DR1 = 91,
+ CV_REG_DR2 = 92,
+ CV_REG_DR3 = 93,
+ CV_REG_DR4 = 94,
+ CV_REG_DR5 = 95,
+ CV_REG_DR6 = 96,
+ CV_REG_DR7 = 97,
+ CV_REG_GDTR = 110,
+ CV_REG_GDTL = 111,
+ CV_REG_IDTR = 112,
+ CV_REG_IDTL = 113,
+ CV_REG_LDTR = 114,
+ CV_REG_TR = 115,
+ CV_REG_PSEUDO1 = 116,
+ CV_REG_PSEUDO2 = 117,
+ CV_REG_PSEUDO3 = 118,
+ CV_REG_PSEUDO4 = 119,
+ CV_REG_PSEUDO5 = 120,
+ CV_REG_PSEUDO6 = 121,
+ CV_REG_PSEUDO7 = 122,
+ CV_REG_PSEUDO8 = 123,
+ CV_REG_PSEUDO9 = 124,
+ CV_REG_ST0 = 128,
+ CV_REG_ST1 = 129,
+ CV_REG_ST2 = 130,
+ CV_REG_ST3 = 131,
+ CV_REG_ST4 = 132,
+ CV_REG_ST5 = 133,
+ CV_REG_ST6 = 134,
+ CV_REG_ST7 = 135,
+ CV_REG_CTRL = 136,
+ CV_REG_STAT = 137,
+ CV_REG_TAG = 138,
+ CV_REG_FPIP = 139,
+ CV_REG_FPCS = 140,
+ CV_REG_FPDO = 141,
+ CV_REG_FPDS = 142,
+ CV_REG_ISEM = 143,
+ CV_REG_FPEIP = 144,
+ CV_REG_FPEDO = 145,
+ CV_REG_MM0 = 146,
+ CV_REG_MM1 = 147,
+ CV_REG_MM2 = 148,
+ CV_REG_MM3 = 149,
+ CV_REG_MM4 = 150,
+ CV_REG_MM5 = 151,
+ CV_REG_MM6 = 152,
+ CV_REG_MM7 = 153,
+ CV_REG_XMM0 = 154,
+ CV_REG_XMM1 = 155,
+ CV_REG_XMM2 = 156,
+ CV_REG_XMM3 = 157,
+ CV_REG_XMM4 = 158,
+ CV_REG_XMM5 = 159,
+ CV_REG_XMM6 = 160,
+ CV_REG_XMM7 = 161,
+ CV_REG_XMM00 = 162,
+ CV_REG_XMM01 = 163,
+ CV_REG_XMM02 = 164,
+ CV_REG_XMM03 = 165,
+ CV_REG_XMM10 = 166,
+ CV_REG_XMM11 = 167,
+ CV_REG_XMM12 = 168,
+ CV_REG_XMM13 = 169,
+ CV_REG_XMM20 = 170,
+ CV_REG_XMM21 = 171,
+ CV_REG_XMM22 = 172,
+ CV_REG_XMM23 = 173,
+ CV_REG_XMM30 = 174,
+ CV_REG_XMM31 = 175,
+ CV_REG_XMM32 = 176,
+ CV_REG_XMM33 = 177,
+ CV_REG_XMM40 = 178,
+ CV_REG_XMM41 = 179,
+ CV_REG_XMM42 = 180,
+ CV_REG_XMM43 = 181,
+ CV_REG_XMM50 = 182,
+ CV_REG_XMM51 = 183,
+ CV_REG_XMM52 = 184,
+ CV_REG_XMM53 = 185,
+ CV_REG_XMM60 = 186,
+ CV_REG_XMM61 = 187,
+ CV_REG_XMM62 = 188,
+ CV_REG_XMM63 = 189,
+ CV_REG_XMM70 = 190,
+ CV_REG_XMM71 = 191,
+ CV_REG_XMM72 = 192,
+ CV_REG_XMM73 = 193,
+ CV_REG_XMM0L = 194,
+ CV_REG_XMM1L = 195,
+ CV_REG_XMM2L = 196,
+ CV_REG_XMM3L = 197,
+ CV_REG_XMM4L = 198,
+ CV_REG_XMM5L = 199,
+ CV_REG_XMM6L = 200,
+ CV_REG_XMM7L = 201,
+ CV_REG_XMM0H = 202,
+ CV_REG_XMM1H = 203,
+ CV_REG_XMM2H = 204,
+ CV_REG_XMM3H = 205,
+ CV_REG_XMM4H = 206,
+ CV_REG_XMM5H = 207,
+ CV_REG_XMM6H = 208,
+ CV_REG_XMM7H = 209,
+ CV_REG_MXCSR = 211,
+ CV_REG_EDXEAX = 212,
+ CV_REG_EMM0L = 220,
+ CV_REG_EMM1L = 221,
+ CV_REG_EMM2L = 222,
+ CV_REG_EMM3L = 223,
+ CV_REG_EMM4L = 224,
+ CV_REG_EMM5L = 225,
+ CV_REG_EMM6L = 226,
+ CV_REG_EMM7L = 227,
+ CV_REG_EMM0H = 228,
+ CV_REG_EMM1H = 229,
+ CV_REG_EMM2H = 230,
+ CV_REG_EMM3H = 231,
+ CV_REG_EMM4H = 232,
+ CV_REG_EMM5H = 233,
+ CV_REG_EMM6H = 234,
+ CV_REG_EMM7H = 235,
+ CV_REG_MM00 = 236,
+ CV_REG_MM01 = 237,
+ CV_REG_MM10 = 238,
+ CV_REG_MM11 = 239,
+ CV_REG_MM20 = 240,
+ CV_REG_MM21 = 241,
+ CV_REG_MM30 = 242,
+ CV_REG_MM31 = 243,
+ CV_REG_MM40 = 244,
+ CV_REG_MM41 = 245,
+ CV_REG_MM50 = 246,
+ CV_REG_MM51 = 247,
+ CV_REG_MM60 = 248,
+ CV_REG_MM61 = 249,
+ CV_REG_MM70 = 250,
+ CV_REG_MM71 = 251,
+ CV_REG_YMM0 = 252,
+ CV_REG_YMM1 = 253,
+ CV_REG_YMM2 = 254,
+ CV_REG_YMM3 = 255,
+ CV_REG_YMM4 = 256,
+ CV_REG_YMM5 = 257,
+ CV_REG_YMM6 = 258,
+ CV_REG_YMM7 = 259,
+ CV_REG_YMM0H = 260,
+ CV_REG_YMM1H = 261,
+ CV_REG_YMM2H = 262,
+ CV_REG_YMM3H = 263,
+ CV_REG_YMM4H = 264,
+ CV_REG_YMM5H = 265,
+ CV_REG_YMM6H = 266,
+ CV_REG_YMM7H = 267,
+ CV_REG_YMM0I0 = 268,
+ CV_REG_YMM0I1 = 269,
+ CV_REG_YMM0I2 = 270,
+ CV_REG_YMM0I3 = 271,
+ CV_REG_YMM1I0 = 272,
+ CV_REG_YMM1I1 = 273,
+ CV_REG_YMM1I2 = 274,
+ CV_REG_YMM1I3 = 275,
+ CV_REG_YMM2I0 = 276,
+ CV_REG_YMM2I1 = 277,
+ CV_REG_YMM2I2 = 278,
+ CV_REG_YMM2I3 = 279,
+ CV_REG_YMM3I0 = 280,
+ CV_REG_YMM3I1 = 281,
+ CV_REG_YMM3I2 = 282,
+ CV_REG_YMM3I3 = 283,
+ CV_REG_YMM4I0 = 284,
+ CV_REG_YMM4I1 = 285,
+ CV_REG_YMM4I2 = 286,
+ CV_REG_YMM4I3 = 287,
+ CV_REG_YMM5I0 = 288,
+ CV_REG_YMM5I1 = 289,
+ CV_REG_YMM5I2 = 290,
+ CV_REG_YMM5I3 = 291,
+ CV_REG_YMM6I0 = 292,
+ CV_REG_YMM6I1 = 293,
+ CV_REG_YMM6I2 = 294,
+ CV_REG_YMM6I3 = 295,
+ CV_REG_YMM7I0 = 296,
+ CV_REG_YMM7I1 = 297,
+ CV_REG_YMM7I2 = 298,
+ CV_REG_YMM7I3 = 299,
+ CV_REG_YMM0F0 = 300,
+ CV_REG_YMM0F1 = 301,
+ CV_REG_YMM0F2 = 302,
+ CV_REG_YMM0F3 = 303,
+ CV_REG_YMM0F4 = 304,
+ CV_REG_YMM0F5 = 305,
+ CV_REG_YMM0F6 = 306,
+ CV_REG_YMM0F7 = 307,
+ CV_REG_YMM1F0 = 308,
+ CV_REG_YMM1F1 = 309,
+ CV_REG_YMM1F2 = 310,
+ CV_REG_YMM1F3 = 311,
+ CV_REG_YMM1F4 = 312,
+ CV_REG_YMM1F5 = 313,
+ CV_REG_YMM1F6 = 314,
+ CV_REG_YMM1F7 = 315,
+ CV_REG_YMM2F0 = 316,
+ CV_REG_YMM2F1 = 317,
+ CV_REG_YMM2F2 = 318,
+ CV_REG_YMM2F3 = 319,
+ CV_REG_YMM2F4 = 320,
+ CV_REG_YMM2F5 = 321,
+ CV_REG_YMM2F6 = 322,
+ CV_REG_YMM2F7 = 323,
+ CV_REG_YMM3F0 = 324,
+ CV_REG_YMM3F1 = 325,
+ CV_REG_YMM3F2 = 326,
+ CV_REG_YMM3F3 = 327,
+ CV_REG_YMM3F4 = 328,
+ CV_REG_YMM3F5 = 329,
+ CV_REG_YMM3F6 = 330,
+ CV_REG_YMM3F7 = 331,
+ CV_REG_YMM4F0 = 332,
+ CV_REG_YMM4F1 = 333,
+ CV_REG_YMM4F2 = 334,
+ CV_REG_YMM4F3 = 335,
+ CV_REG_YMM4F4 = 336,
+ CV_REG_YMM4F5 = 337,
+ CV_REG_YMM4F6 = 338,
+ CV_REG_YMM4F7 = 339,
+ CV_REG_YMM5F0 = 340,
+ CV_REG_YMM5F1 = 341,
+ CV_REG_YMM5F2 = 342,
+ CV_REG_YMM5F3 = 343,
+ CV_REG_YMM5F4 = 344,
+ CV_REG_YMM5F5 = 345,
+ CV_REG_YMM5F6 = 346,
+ CV_REG_YMM5F7 = 347,
+ CV_REG_YMM6F0 = 348,
+ CV_REG_YMM6F1 = 349,
+ CV_REG_YMM6F2 = 350,
+ CV_REG_YMM6F3 = 351,
+ CV_REG_YMM6F4 = 352,
+ CV_REG_YMM6F5 = 353,
+ CV_REG_YMM6F6 = 354,
+ CV_REG_YMM6F7 = 355,
+ CV_REG_YMM7F0 = 356,
+ CV_REG_YMM7F1 = 357,
+ CV_REG_YMM7F2 = 358,
+ CV_REG_YMM7F3 = 359,
+ CV_REG_YMM7F4 = 360,
+ CV_REG_YMM7F5 = 361,
+ CV_REG_YMM7F6 = 362,
+ CV_REG_YMM7F7 = 363,
+ CV_REG_YMM0D0 = 364,
+ CV_REG_YMM0D1 = 365,
+ CV_REG_YMM0D2 = 366,
+ CV_REG_YMM0D3 = 367,
+ CV_REG_YMM1D0 = 368,
+ CV_REG_YMM1D1 = 369,
+ CV_REG_YMM1D2 = 370,
+ CV_REG_YMM1D3 = 371,
+ CV_REG_YMM2D0 = 372,
+ CV_REG_YMM2D1 = 373,
+ CV_REG_YMM2D2 = 374,
+ CV_REG_YMM2D3 = 375,
+ CV_REG_YMM3D0 = 376,
+ CV_REG_YMM3D1 = 377,
+ CV_REG_YMM3D2 = 378,
+ CV_REG_YMM3D3 = 379,
+ CV_REG_YMM4D0 = 380,
+ CV_REG_YMM4D1 = 381,
+ CV_REG_YMM4D2 = 382,
+ CV_REG_YMM4D3 = 383,
+ CV_REG_YMM5D0 = 384,
+ CV_REG_YMM5D1 = 385,
+ CV_REG_YMM5D2 = 386,
+ CV_REG_YMM5D3 = 387,
+ CV_REG_YMM6D0 = 388,
+ CV_REG_YMM6D1 = 389,
+ CV_REG_YMM6D2 = 390,
+ CV_REG_YMM6D3 = 391,
+ CV_REG_YMM7D0 = 392,
+ CV_REG_YMM7D1 = 393,
+ CV_REG_YMM7D2 = 394,
+ CV_REG_YMM7D3 = 395,
+ CV_REG_BND0 = 396,
+ CV_REG_BND1 = 397,
+ CV_REG_BND2 = 398,
+ CV_REG_BND3 = 399
+};
+
+enum cv_amd64_register {
+ CV_AMD64_NOREG = 0,
+ CV_AMD64_AL = 1,
+ CV_AMD64_CL = 2,
+ CV_AMD64_DL = 3,
+ CV_AMD64_BL = 4,
+ CV_AMD64_AH = 5,
+ CV_AMD64_CH = 6,
+ CV_AMD64_DH = 7,
+ CV_AMD64_BH = 8,
+ CV_AMD64_AX = 9,
+ CV_AMD64_CX = 10,
+ CV_AMD64_DX = 11,
+ CV_AMD64_BX = 12,
+ CV_AMD64_SP = 13,
+ CV_AMD64_BP = 14,
+ CV_AMD64_SI = 15,
+ CV_AMD64_DI = 16,
+ CV_AMD64_EAX = 17,
+ CV_AMD64_ECX = 18,
+ CV_AMD64_EDX = 19,
+ CV_AMD64_EBX = 20,
+ CV_AMD64_ESP = 21,
+ CV_AMD64_EBP = 22,
+ CV_AMD64_ESI = 23,
+ CV_AMD64_EDI = 24,
+ CV_AMD64_ES = 25,
+ CV_AMD64_CS = 26,
+ CV_AMD64_SS = 27,
+ CV_AMD64_DS = 28,
+ CV_AMD64_FS = 29,
+ CV_AMD64_GS = 30,
+ CV_AMD64_FLAGS = 32,
+ CV_AMD64_RIP = 33,
+ CV_AMD64_EFLAGS = 34,
+ CV_AMD64_CR0 = 80,
+ CV_AMD64_CR1 = 81,
+ CV_AMD64_CR2 = 82,
+ CV_AMD64_CR3 = 83,
+ CV_AMD64_CR4 = 84,
+ CV_AMD64_CR8 = 88,
+ CV_AMD64_DR0 = 90,
+ CV_AMD64_DR1 = 91,
+ CV_AMD64_DR2 = 92,
+ CV_AMD64_DR3 = 93,
+ CV_AMD64_DR4 = 94,
+ CV_AMD64_DR5 = 95,
+ CV_AMD64_DR6 = 96,
+ CV_AMD64_DR7 = 97,
+ CV_AMD64_DR8 = 98,
+ CV_AMD64_DR9 = 99,
+ CV_AMD64_DR10 = 100,
+ CV_AMD64_DR11 = 101,
+ CV_AMD64_DR12 = 102,
+ CV_AMD64_DR13 = 103,
+ CV_AMD64_DR14 = 104,
+ CV_AMD64_DR15 = 105,
+ CV_AMD64_GDTR = 110,
+ CV_AMD64_GDTL = 111,
+ CV_AMD64_IDTR = 112,
+ CV_AMD64_IDTL = 113,
+ CV_AMD64_LDTR = 114,
+ CV_AMD64_TR = 115,
+ CV_AMD64_ST0 = 128,
+ CV_AMD64_ST1 = 129,
+ CV_AMD64_ST2 = 130,
+ CV_AMD64_ST3 = 131,
+ CV_AMD64_ST4 = 132,
+ CV_AMD64_ST5 = 133,
+ CV_AMD64_ST6 = 134,
+ CV_AMD64_ST7 = 135,
+ CV_AMD64_CTRL = 136,
+ CV_AMD64_STAT = 137,
+ CV_AMD64_TAG = 138,
+ CV_AMD64_FPIP = 139,
+ CV_AMD64_FPCS = 140,
+ CV_AMD64_FPDO = 141,
+ CV_AMD64_FPDS = 142,
+ CV_AMD64_ISEM = 143,
+ CV_AMD64_FPEIP = 144,
+ CV_AMD64_FPEDO = 145,
+ CV_AMD64_MM0 = 146,
+ CV_AMD64_MM1 = 147,
+ CV_AMD64_MM2 = 148,
+ CV_AMD64_MM3 = 149,
+ CV_AMD64_MM4 = 150,
+ CV_AMD64_MM5 = 151,
+ CV_AMD64_MM6 = 152,
+ CV_AMD64_MM7 = 153,
+ CV_AMD64_XMM0 = 154,
+ CV_AMD64_XMM1 = 155,
+ CV_AMD64_XMM2 = 156,
+ CV_AMD64_XMM3 = 157,
+ CV_AMD64_XMM4 = 158,
+ CV_AMD64_XMM5 = 159,
+ CV_AMD64_XMM6 = 160,
+ CV_AMD64_XMM7 = 161,
+ CV_AMD64_XMM0_0 = 162,
+ CV_AMD64_XMM0_1 = 163,
+ CV_AMD64_XMM0_2 = 164,
+ CV_AMD64_XMM0_3 = 165,
+ CV_AMD64_XMM1_0 = 166,
+ CV_AMD64_XMM1_1 = 167,
+ CV_AMD64_XMM1_2 = 168,
+ CV_AMD64_XMM1_3 = 169,
+ CV_AMD64_XMM2_0 = 170,
+ CV_AMD64_XMM2_1 = 171,
+ CV_AMD64_XMM2_2 = 172,
+ CV_AMD64_XMM2_3 = 173,
+ CV_AMD64_XMM3_0 = 174,
+ CV_AMD64_XMM3_1 = 175,
+ CV_AMD64_XMM3_2 = 176,
+ CV_AMD64_XMM3_3 = 177,
+ CV_AMD64_XMM4_0 = 178,
+ CV_AMD64_XMM4_1 = 179,
+ CV_AMD64_XMM4_2 = 180,
+ CV_AMD64_XMM4_3 = 181,
+ CV_AMD64_XMM5_0 = 182,
+ CV_AMD64_XMM5_1 = 183,
+ CV_AMD64_XMM5_2 = 184,
+ CV_AMD64_XMM5_3 = 185,
+ CV_AMD64_XMM6_0 = 186,
+ CV_AMD64_XMM6_1 = 187,
+ CV_AMD64_XMM6_2 = 188,
+ CV_AMD64_XMM6_3 = 189,
+ CV_AMD64_XMM7_0 = 190,
+ CV_AMD64_XMM7_1 = 191,
+ CV_AMD64_XMM7_2 = 192,
+ CV_AMD64_XMM7_3 = 193,
+ CV_AMD64_XMM0L = 194,
+ CV_AMD64_XMM1L = 195,
+ CV_AMD64_XMM2L = 196,
+ CV_AMD64_XMM3L = 197,
+ CV_AMD64_XMM4L = 198,
+ CV_AMD64_XMM5L = 199,
+ CV_AMD64_XMM6L = 200,
+ CV_AMD64_XMM7L = 201,
+ CV_AMD64_XMM0H = 202,
+ CV_AMD64_XMM1H = 203,
+ CV_AMD64_XMM2H = 204,
+ CV_AMD64_XMM3H = 205,
+ CV_AMD64_XMM4H = 206,
+ CV_AMD64_XMM5H = 207,
+ CV_AMD64_XMM6H = 208,
+ CV_AMD64_XMM7H = 209,
+ CV_AMD64_MXCSR = 211,
+ CV_AMD64_EMM0L = 220,
+ CV_AMD64_EMM1L = 221,
+ CV_AMD64_EMM2L = 222,
+ CV_AMD64_EMM3L = 223,
+ CV_AMD64_EMM4L = 224,
+ CV_AMD64_EMM5L = 225,
+ CV_AMD64_EMM6L = 226,
+ CV_AMD64_EMM7L = 227,
+ CV_AMD64_EMM0H = 228,
+ CV_AMD64_EMM1H = 229,
+ CV_AMD64_EMM2H = 230,
+ CV_AMD64_EMM3H = 231,
+ CV_AMD64_EMM4H = 232,
+ CV_AMD64_EMM5H = 233,
+ CV_AMD64_EMM6H = 234,
+ CV_AMD64_EMM7H = 235,
+ CV_AMD64_MM00 = 236,
+ CV_AMD64_MM01 = 237,
+ CV_AMD64_MM10 = 238,
+ CV_AMD64_MM11 = 239,
+ CV_AMD64_MM20 = 240,
+ CV_AMD64_MM21 = 241,
+ CV_AMD64_MM30 = 242,
+ CV_AMD64_MM31 = 243,
+ CV_AMD64_MM40 = 244,
+ CV_AMD64_MM41 = 245,
+ CV_AMD64_MM50 = 246,
+ CV_AMD64_MM51 = 247,
+ CV_AMD64_MM60 = 248,
+ CV_AMD64_MM61 = 249,
+ CV_AMD64_MM70 = 250,
+ CV_AMD64_MM71 = 251,
+ CV_AMD64_XMM8 = 252,
+ CV_AMD64_XMM9 = 253,
+ CV_AMD64_XMM10 = 254,
+ CV_AMD64_XMM11 = 255,
+ CV_AMD64_XMM12 = 256,
+ CV_AMD64_XMM13 = 257,
+ CV_AMD64_XMM14 = 258,
+ CV_AMD64_XMM15 = 259,
+ CV_AMD64_XMM8_0 = 260,
+ CV_AMD64_XMM8_1 = 261,
+ CV_AMD64_XMM8_2 = 262,
+ CV_AMD64_XMM8_3 = 263,
+ CV_AMD64_XMM9_0 = 264,
+ CV_AMD64_XMM9_1 = 265,
+ CV_AMD64_XMM9_2 = 266,
+ CV_AMD64_XMM9_3 = 267,
+ CV_AMD64_XMM10_0 = 268,
+ CV_AMD64_XMM10_1 = 269,
+ CV_AMD64_XMM10_2 = 270,
+ CV_AMD64_XMM10_3 = 271,
+ CV_AMD64_XMM11_0 = 272,
+ CV_AMD64_XMM11_1 = 273,
+ CV_AMD64_XMM11_2 = 274,
+ CV_AMD64_XMM11_3 = 275,
+ CV_AMD64_XMM12_0 = 276,
+ CV_AMD64_XMM12_1 = 277,
+ CV_AMD64_XMM12_2 = 278,
+ CV_AMD64_XMM12_3 = 279,
+ CV_AMD64_XMM13_0 = 280,
+ CV_AMD64_XMM13_1 = 281,
+ CV_AMD64_XMM13_2 = 282,
+ CV_AMD64_XMM13_3 = 283,
+ CV_AMD64_XMM14_0 = 284,
+ CV_AMD64_XMM14_1 = 285,
+ CV_AMD64_XMM14_2 = 286,
+ CV_AMD64_XMM14_3 = 287,
+ CV_AMD64_XMM15_0 = 288,
+ CV_AMD64_XMM15_1 = 289,
+ CV_AMD64_XMM15_2 = 290,
+ CV_AMD64_XMM15_3 = 291,
+ CV_AMD64_XMM8L = 292,
+ CV_AMD64_XMM9L = 293,
+ CV_AMD64_XMM10L = 294,
+ CV_AMD64_XMM11L = 295,
+ CV_AMD64_XMM12L = 296,
+ CV_AMD64_XMM13L = 297,
+ CV_AMD64_XMM14L = 298,
+ CV_AMD64_XMM15L = 299,
+ CV_AMD64_XMM8H = 300,
+ CV_AMD64_XMM9H = 301,
+ CV_AMD64_XMM10H = 302,
+ CV_AMD64_XMM11H = 303,
+ CV_AMD64_XMM12H = 304,
+ CV_AMD64_XMM13H = 305,
+ CV_AMD64_XMM14H = 306,
+ CV_AMD64_XMM15H = 307,
+ CV_AMD64_EMM8L = 308,
+ CV_AMD64_EMM9L = 309,
+ CV_AMD64_EMM10L = 310,
+ CV_AMD64_EMM11L = 311,
+ CV_AMD64_EMM12L = 312,
+ CV_AMD64_EMM13L = 313,
+ CV_AMD64_EMM14L = 314,
+ CV_AMD64_EMM15L = 315,
+ CV_AMD64_EMM8H = 316,
+ CV_AMD64_EMM9H = 317,
+ CV_AMD64_EMM10H = 318,
+ CV_AMD64_EMM11H = 319,
+ CV_AMD64_EMM12H = 320,
+ CV_AMD64_EMM13H = 321,
+ CV_AMD64_EMM14H = 322,
+ CV_AMD64_EMM15H = 323,
+ CV_AMD64_SIL = 324,
+ CV_AMD64_DIL = 325,
+ CV_AMD64_BPL = 326,
+ CV_AMD64_SPL = 327,
+ CV_AMD64_RAX = 328,
+ CV_AMD64_RBX = 329,
+ CV_AMD64_RCX = 330,
+ CV_AMD64_RDX = 331,
+ CV_AMD64_RSI = 332,
+ CV_AMD64_RDI = 333,
+ CV_AMD64_RBP = 334,
+ CV_AMD64_RSP = 335,
+ CV_AMD64_R8 = 336,
+ CV_AMD64_R9 = 337,
+ CV_AMD64_R10 = 338,
+ CV_AMD64_R11 = 339,
+ CV_AMD64_R12 = 340,
+ CV_AMD64_R13 = 341,
+ CV_AMD64_R14 = 342,
+ CV_AMD64_R15 = 343,
+ CV_AMD64_R8B = 344,
+ CV_AMD64_R9B = 345,
+ CV_AMD64_R10B = 346,
+ CV_AMD64_R11B = 347,
+ CV_AMD64_R12B = 348,
+ CV_AMD64_R13B = 349,
+ CV_AMD64_R14B = 350,
+ CV_AMD64_R15B = 351,
+ CV_AMD64_R8W = 352,
+ CV_AMD64_R9W = 353,
+ CV_AMD64_R10W = 354,
+ CV_AMD64_R11W = 355,
+ CV_AMD64_R12W = 356,
+ CV_AMD64_R13W = 357,
+ CV_AMD64_R14W = 358,
+ CV_AMD64_R15W = 359,
+ CV_AMD64_R8D = 360,
+ CV_AMD64_R9D = 361,
+ CV_AMD64_R10D = 362,
+ CV_AMD64_R11D = 363,
+ CV_AMD64_R12D = 364,
+ CV_AMD64_R13D = 365,
+ CV_AMD64_R14D = 366,
+ CV_AMD64_R15D = 367,
+ CV_AMD64_YMM0 = 368,
+ CV_AMD64_YMM1 = 369,
+ CV_AMD64_YMM2 = 370,
+ CV_AMD64_YMM3 = 371,
+ CV_AMD64_YMM4 = 372,
+ CV_AMD64_YMM5 = 373,
+ CV_AMD64_YMM6 = 374,
+ CV_AMD64_YMM7 = 375,
+ CV_AMD64_YMM8 = 376,
+ CV_AMD64_YMM9 = 377,
+ CV_AMD64_YMM10 = 378,
+ CV_AMD64_YMM11 = 379,
+ CV_AMD64_YMM12 = 380,
+ CV_AMD64_YMM13 = 381,
+ CV_AMD64_YMM14 = 382,
+ CV_AMD64_YMM15 = 383,
+ CV_AMD64_YMM0H = 384,
+ CV_AMD64_YMM1H = 385,
+ CV_AMD64_YMM2H = 386,
+ CV_AMD64_YMM3H = 387,
+ CV_AMD64_YMM4H = 388,
+ CV_AMD64_YMM5H = 389,
+ CV_AMD64_YMM6H = 390,
+ CV_AMD64_YMM7H = 391,
+ CV_AMD64_YMM8H = 392,
+ CV_AMD64_YMM9H = 393,
+ CV_AMD64_YMM10H = 394,
+ CV_AMD64_YMM11H = 395,
+ CV_AMD64_YMM12H = 396,
+ CV_AMD64_YMM13H = 397,
+ CV_AMD64_YMM14H = 398,
+ CV_AMD64_YMM15H = 399,
+ CV_AMD64_XMM0IL = 400,
+ CV_AMD64_XMM1IL = 401,
+ CV_AMD64_XMM2IL = 402,
+ CV_AMD64_XMM3IL = 403,
+ CV_AMD64_XMM4IL = 404,
+ CV_AMD64_XMM5IL = 405,
+ CV_AMD64_XMM6IL = 406,
+ CV_AMD64_XMM7IL = 407,
+ CV_AMD64_XMM8IL = 408,
+ CV_AMD64_XMM9IL = 409,
+ CV_AMD64_XMM10IL = 410,
+ CV_AMD64_XMM11IL = 411,
+ CV_AMD64_XMM12IL = 412,
+ CV_AMD64_XMM13IL = 413,
+ CV_AMD64_XMM14IL = 414,
+ CV_AMD64_XMM15IL = 415,
+ CV_AMD64_XMM0IH = 416,
+ CV_AMD64_XMM1IH = 417,
+ CV_AMD64_XMM2IH = 418,
+ CV_AMD64_XMM3IH = 419,
+ CV_AMD64_XMM4IH = 420,
+ CV_AMD64_XMM5IH = 421,
+ CV_AMD64_XMM6IH = 422,
+ CV_AMD64_XMM7IH = 423,
+ CV_AMD64_XMM8IH = 424,
+ CV_AMD64_XMM9IH = 425,
+ CV_AMD64_XMM10IH = 426,
+ CV_AMD64_XMM11IH = 427,
+ CV_AMD64_XMM12IH = 428,
+ CV_AMD64_XMM13IH = 429,
+ CV_AMD64_XMM14IH = 430,
+ CV_AMD64_XMM15IH = 431,
+ CV_AMD64_YMM0I0 = 432,
+ CV_AMD64_YMM0I1 = 433,
+ CV_AMD64_YMM0I2 = 434,
+ CV_AMD64_YMM0I3 = 435,
+ CV_AMD64_YMM1I0 = 436,
+ CV_AMD64_YMM1I1 = 437,
+ CV_AMD64_YMM1I2 = 438,
+ CV_AMD64_YMM1I3 = 439,
+ CV_AMD64_YMM2I0 = 440,
+ CV_AMD64_YMM2I1 = 441,
+ CV_AMD64_YMM2I2 = 442,
+ CV_AMD64_YMM2I3 = 443,
+ CV_AMD64_YMM3I0 = 444,
+ CV_AMD64_YMM3I1 = 445,
+ CV_AMD64_YMM3I2 = 446,
+ CV_AMD64_YMM3I3 = 447,
+ CV_AMD64_YMM4I0 = 448,
+ CV_AMD64_YMM4I1 = 449,
+ CV_AMD64_YMM4I2 = 450,
+ CV_AMD64_YMM4I3 = 451,
+ CV_AMD64_YMM5I0 = 452,
+ CV_AMD64_YMM5I1 = 453,
+ CV_AMD64_YMM5I2 = 454,
+ CV_AMD64_YMM5I3 = 455,
+ CV_AMD64_YMM6I0 = 456,
+ CV_AMD64_YMM6I1 = 457,
+ CV_AMD64_YMM6I2 = 458,
+ CV_AMD64_YMM6I3 = 459,
+ CV_AMD64_YMM7I0 = 460,
+ CV_AMD64_YMM7I1 = 461,
+ CV_AMD64_YMM7I2 = 462,
+ CV_AMD64_YMM7I3 = 463,
+ CV_AMD64_YMM8I0 = 464,
+ CV_AMD64_YMM8I1 = 465,
+ CV_AMD64_YMM8I2 = 466,
+ CV_AMD64_YMM8I3 = 467,
+ CV_AMD64_YMM9I0 = 468,
+ CV_AMD64_YMM9I1 = 469,
+ CV_AMD64_YMM9I2 = 470,
+ CV_AMD64_YMM9I3 = 471,
+ CV_AMD64_YMM10I0 = 472,
+ CV_AMD64_YMM10I1 = 473,
+ CV_AMD64_YMM10I2 = 474,
+ CV_AMD64_YMM10I3 = 475,
+ CV_AMD64_YMM11I0 = 476,
+ CV_AMD64_YMM11I1 = 477,
+ CV_AMD64_YMM11I2 = 478,
+ CV_AMD64_YMM11I3 = 479,
+ CV_AMD64_YMM12I0 = 480,
+ CV_AMD64_YMM12I1 = 481,
+ CV_AMD64_YMM12I2 = 482,
+ CV_AMD64_YMM12I3 = 483,
+ CV_AMD64_YMM13I0 = 484,
+ CV_AMD64_YMM13I1 = 485,
+ CV_AMD64_YMM13I2 = 486,
+ CV_AMD64_YMM13I3 = 487,
+ CV_AMD64_YMM14I0 = 488,
+ CV_AMD64_YMM14I1 = 489,
+ CV_AMD64_YMM14I2 = 490,
+ CV_AMD64_YMM14I3 = 491,
+ CV_AMD64_YMM15I0 = 492,
+ CV_AMD64_YMM15I1 = 493,
+ CV_AMD64_YMM15I2 = 494,
+ CV_AMD64_YMM15I3 = 495,
+ CV_AMD64_YMM0F0 = 496,
+ CV_AMD64_YMM0F1 = 497,
+ CV_AMD64_YMM0F2 = 498,
+ CV_AMD64_YMM0F3 = 499,
+ CV_AMD64_YMM0F4 = 500,
+ CV_AMD64_YMM0F5 = 501,
+ CV_AMD64_YMM0F6 = 502,
+ CV_AMD64_YMM0F7 = 503,
+ CV_AMD64_YMM1F0 = 504,
+ CV_AMD64_YMM1F1 = 505,
+ CV_AMD64_YMM1F2 = 506,
+ CV_AMD64_YMM1F3 = 507,
+ CV_AMD64_YMM1F4 = 508,
+ CV_AMD64_YMM1F5 = 509,
+ CV_AMD64_YMM1F6 = 510,
+ CV_AMD64_YMM1F7 = 511,
+ CV_AMD64_YMM2F0 = 512,
+ CV_AMD64_YMM2F1 = 513,
+ CV_AMD64_YMM2F2 = 514,
+ CV_AMD64_YMM2F3 = 515,
+ CV_AMD64_YMM2F4 = 516,
+ CV_AMD64_YMM2F5 = 517,
+ CV_AMD64_YMM2F6 = 518,
+ CV_AMD64_YMM2F7 = 519,
+ CV_AMD64_YMM3F0 = 520,
+ CV_AMD64_YMM3F1 = 521,
+ CV_AMD64_YMM3F2 = 522,
+ CV_AMD64_YMM3F3 = 523,
+ CV_AMD64_YMM3F4 = 524,
+ CV_AMD64_YMM3F5 = 525,
+ CV_AMD64_YMM3F6 = 526,
+ CV_AMD64_YMM3F7 = 527,
+ CV_AMD64_YMM4F0 = 528,
+ CV_AMD64_YMM4F1 = 529,
+ CV_AMD64_YMM4F2 = 530,
+ CV_AMD64_YMM4F3 = 531,
+ CV_AMD64_YMM4F4 = 532,
+ CV_AMD64_YMM4F5 = 533,
+ CV_AMD64_YMM4F6 = 534,
+ CV_AMD64_YMM4F7 = 535,
+ CV_AMD64_YMM5F0 = 536,
+ CV_AMD64_YMM5F1 = 537,
+ CV_AMD64_YMM5F2 = 538,
+ CV_AMD64_YMM5F3 = 539,
+ CV_AMD64_YMM5F4 = 540,
+ CV_AMD64_YMM5F5 = 541,
+ CV_AMD64_YMM5F6 = 542,
+ CV_AMD64_YMM5F7 = 543,
+ CV_AMD64_YMM6F0 = 544,
+ CV_AMD64_YMM6F1 = 545,
+ CV_AMD64_YMM6F2 = 546,
+ CV_AMD64_YMM6F3 = 547,
+ CV_AMD64_YMM6F4 = 548,
+ CV_AMD64_YMM6F5 = 549,
+ CV_AMD64_YMM6F6 = 550,
+ CV_AMD64_YMM6F7 = 551,
+ CV_AMD64_YMM7F0 = 552,
+ CV_AMD64_YMM7F1 = 553,
+ CV_AMD64_YMM7F2 = 554,
+ CV_AMD64_YMM7F3 = 555,
+ CV_AMD64_YMM7F4 = 556,
+ CV_AMD64_YMM7F5 = 557,
+ CV_AMD64_YMM7F6 = 558,
+ CV_AMD64_YMM7F7 = 559,
+ CV_AMD64_YMM8F0 = 560,
+ CV_AMD64_YMM8F1 = 561,
+ CV_AMD64_YMM8F2 = 562,
+ CV_AMD64_YMM8F3 = 563,
+ CV_AMD64_YMM8F4 = 564,
+ CV_AMD64_YMM8F5 = 565,
+ CV_AMD64_YMM8F6 = 566,
+ CV_AMD64_YMM8F7 = 567,
+ CV_AMD64_YMM9F0 = 568,
+ CV_AMD64_YMM9F1 = 569,
+ CV_AMD64_YMM9F2 = 570,
+ CV_AMD64_YMM9F3 = 571,
+ CV_AMD64_YMM9F4 = 572,
+ CV_AMD64_YMM9F5 = 573,
+ CV_AMD64_YMM9F6 = 574,
+ CV_AMD64_YMM9F7 = 575,
+ CV_AMD64_YMM10F0 = 576,
+ CV_AMD64_YMM10F1 = 577,
+ CV_AMD64_YMM10F2 = 578,
+ CV_AMD64_YMM10F3 = 579,
+ CV_AMD64_YMM10F4 = 580,
+ CV_AMD64_YMM10F5 = 581,
+ CV_AMD64_YMM10F6 = 582,
+ CV_AMD64_YMM10F7 = 583,
+ CV_AMD64_YMM11F0 = 584,
+ CV_AMD64_YMM11F1 = 585,
+ CV_AMD64_YMM11F2 = 586,
+ CV_AMD64_YMM11F3 = 587,
+ CV_AMD64_YMM11F4 = 588,
+ CV_AMD64_YMM11F5 = 589,
+ CV_AMD64_YMM11F6 = 590,
+ CV_AMD64_YMM11F7 = 591,
+ CV_AMD64_YMM12F0 = 592,
+ CV_AMD64_YMM12F1 = 593,
+ CV_AMD64_YMM12F2 = 594,
+ CV_AMD64_YMM12F3 = 595,
+ CV_AMD64_YMM12F4 = 596,
+ CV_AMD64_YMM12F5 = 597,
+ CV_AMD64_YMM12F6 = 598,
+ CV_AMD64_YMM12F7 = 599,
+ CV_AMD64_YMM13F0 = 600,
+ CV_AMD64_YMM13F1 = 601,
+ CV_AMD64_YMM13F2 = 602,
+ CV_AMD64_YMM13F3 = 603,
+ CV_AMD64_YMM13F4 = 604,
+ CV_AMD64_YMM13F5 = 605,
+ CV_AMD64_YMM13F6 = 606,
+ CV_AMD64_YMM13F7 = 607,
+ CV_AMD64_YMM14F0 = 608,
+ CV_AMD64_YMM14F1 = 609,
+ CV_AMD64_YMM14F2 = 610,
+ CV_AMD64_YMM14F3 = 611,
+ CV_AMD64_YMM14F4 = 612,
+ CV_AMD64_YMM14F5 = 613,
+ CV_AMD64_YMM14F6 = 614,
+ CV_AMD64_YMM14F7 = 615,
+ CV_AMD64_YMM15F0 = 616,
+ CV_AMD64_YMM15F1 = 617,
+ CV_AMD64_YMM15F2 = 618,
+ CV_AMD64_YMM15F3 = 619,
+ CV_AMD64_YMM15F4 = 620,
+ CV_AMD64_YMM15F5 = 621,
+ CV_AMD64_YMM15F6 = 622,
+ CV_AMD64_YMM15F7 = 623,
+ CV_AMD64_YMM0D0 = 624,
+ CV_AMD64_YMM0D1 = 625,
+ CV_AMD64_YMM0D2 = 626,
+ CV_AMD64_YMM0D3 = 627,
+ CV_AMD64_YMM1D0 = 628,
+ CV_AMD64_YMM1D1 = 629,
+ CV_AMD64_YMM1D2 = 630,
+ CV_AMD64_YMM1D3 = 631,
+ CV_AMD64_YMM2D0 = 632,
+ CV_AMD64_YMM2D1 = 633,
+ CV_AMD64_YMM2D2 = 634,
+ CV_AMD64_YMM2D3 = 635,
+ CV_AMD64_YMM3D0 = 636,
+ CV_AMD64_YMM3D1 = 637,
+ CV_AMD64_YMM3D2 = 638,
+ CV_AMD64_YMM3D3 = 639,
+ CV_AMD64_YMM4D0 = 640,
+ CV_AMD64_YMM4D1 = 641,
+ CV_AMD64_YMM4D2 = 642,
+ CV_AMD64_YMM4D3 = 643,
+ CV_AMD64_YMM5D0 = 644,
+ CV_AMD64_YMM5D1 = 645,
+ CV_AMD64_YMM5D2 = 646,
+ CV_AMD64_YMM5D3 = 647,
+ CV_AMD64_YMM6D0 = 648,
+ CV_AMD64_YMM6D1 = 649,
+ CV_AMD64_YMM6D2 = 650,
+ CV_AMD64_YMM6D3 = 651,
+ CV_AMD64_YMM7D0 = 652,
+ CV_AMD64_YMM7D1 = 653,
+ CV_AMD64_YMM7D2 = 654,
+ CV_AMD64_YMM7D3 = 655,
+ CV_AMD64_YMM8D0 = 656,
+ CV_AMD64_YMM8D1 = 657,
+ CV_AMD64_YMM8D2 = 658,
+ CV_AMD64_YMM8D3 = 659,
+ CV_AMD64_YMM9D0 = 660,
+ CV_AMD64_YMM9D1 = 661,
+ CV_AMD64_YMM9D2 = 662,
+ CV_AMD64_YMM9D3 = 663,
+ CV_AMD64_YMM10D0 = 664,
+ CV_AMD64_YMM10D1 = 665,
+ CV_AMD64_YMM10D2 = 666,
+ CV_AMD64_YMM10D3 = 667,
+ CV_AMD64_YMM11D0 = 668,
+ CV_AMD64_YMM11D1 = 669,
+ CV_AMD64_YMM11D2 = 670,
+ CV_AMD64_YMM11D3 = 671,
+ CV_AMD64_YMM12D0 = 672,
+ CV_AMD64_YMM12D1 = 673,
+ CV_AMD64_YMM12D2 = 674,
+ CV_AMD64_YMM12D3 = 675,
+ CV_AMD64_YMM13D0 = 676,
+ CV_AMD64_YMM13D1 = 677,
+ CV_AMD64_YMM13D2 = 678,
+ CV_AMD64_YMM13D3 = 679,
+ CV_AMD64_YMM14D0 = 680,
+ CV_AMD64_YMM14D1 = 681,
+ CV_AMD64_YMM14D2 = 682,
+ CV_AMD64_YMM14D3 = 683,
+ CV_AMD64_YMM15D0 = 684,
+ CV_AMD64_YMM15D1 = 685,
+ CV_AMD64_YMM15D2 = 686,
+ CV_AMD64_YMM15D3 = 687
+};
+
struct codeview_string
{
codeview_string *next;
@@ -1044,6 +2001,200 @@ write_local_s_ldata32 (dw_die_ref die, dw_loc_descr_ref loc_ref)
targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
}
+/* Try to translate a DWARF register number into its CodeView equivalent. */
+
+static uint16_t
+dwarf_reg_to_cv (unsigned int regno)
+{
+ static const cv_amd64_register amd64_reg_mapping[] = {
+ CV_AMD64_RAX,
+ CV_AMD64_RDX,
+ CV_AMD64_RCX,
+ CV_AMD64_RBX,
+ CV_AMD64_RSI,
+ CV_AMD64_RDI,
+ CV_AMD64_RBP,
+ CV_AMD64_RSP,
+ CV_AMD64_R8,
+ CV_AMD64_R9,
+ CV_AMD64_R10,
+ CV_AMD64_R11,
+ CV_AMD64_R12,
+ CV_AMD64_R13,
+ CV_AMD64_R14,
+ CV_AMD64_R15,
+ CV_AMD64_RIP,
+ CV_AMD64_XMM0,
+ CV_AMD64_XMM1,
+ CV_AMD64_XMM2,
+ CV_AMD64_XMM3,
+ CV_AMD64_XMM4,
+ CV_AMD64_XMM5,
+ CV_AMD64_XMM6,
+ CV_AMD64_XMM7,
+ CV_AMD64_XMM8,
+ CV_AMD64_XMM9,
+ CV_AMD64_XMM10,
+ CV_AMD64_XMM11,
+ CV_AMD64_XMM12,
+ CV_AMD64_XMM13,
+ CV_AMD64_XMM14,
+ CV_AMD64_XMM15,
+ CV_AMD64_ST0,
+ CV_AMD64_ST1,
+ CV_AMD64_ST2,
+ CV_AMD64_ST3,
+ CV_AMD64_ST4,
+ CV_AMD64_ST5,
+ CV_AMD64_ST6,
+ CV_AMD64_ST7,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_EFLAGS,
+ CV_AMD64_ES,
+ CV_AMD64_CS,
+ CV_AMD64_SS,
+ CV_AMD64_DS,
+ CV_AMD64_FS,
+ CV_AMD64_GS,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_NOREG,
+ CV_AMD64_TR,
+ CV_AMD64_LDTR,
+ CV_AMD64_MXCSR,
+ CV_AMD64_CTRL,
+ CV_AMD64_STAT
+ };
+
+ static const cv_x86_register x86_reg_mapping[] = {
+ CV_REG_EAX,
+ CV_REG_ECX,
+ CV_REG_EDX,
+ CV_REG_EBX,
+ CV_REG_EBP,
+ CV_REG_ESP,
+ CV_REG_ESI,
+ CV_REG_EDI,
+ CV_REG_EIP,
+ CV_REG_EFLAGS,
+ CV_REG_CS,
+ CV_REG_SS,
+ CV_REG_DS,
+ CV_REG_ES,
+ CV_REG_FS,
+ CV_REG_GS,
+ CV_REG_ST0,
+ CV_REG_ST1,
+ CV_REG_ST2,
+ CV_REG_ST3,
+ CV_REG_ST4,
+ CV_REG_ST5,
+ CV_REG_ST6,
+ CV_REG_ST7,
+ CV_REG_CTRL,
+ CV_REG_STAT,
+ CV_REG_TAG,
+ CV_REG_FPCS,
+ CV_REG_FPIP,
+ CV_REG_FPDS,
+ CV_REG_FPDO,
+ CV_REG_NONE,
+ CV_REG_XMM0,
+ CV_REG_XMM1,
+ CV_REG_XMM2,
+ CV_REG_XMM3,
+ CV_REG_XMM4,
+ CV_REG_XMM5,
+ CV_REG_XMM6,
+ CV_REG_XMM7,
+ CV_REG_MXCSR
+ };
+
+ if (TARGET_64BIT)
+ {
+ if (regno < sizeof (amd64_reg_mapping) / sizeof (*amd64_reg_mapping))
+ return amd64_reg_mapping[regno];
+
+ return CV_AMD64_NOREG;
+ }
+ else
+ {
+ if (regno < sizeof (x86_reg_mapping) / sizeof (*x86_reg_mapping))
+ return x86_reg_mapping[regno];
+
+ return CV_REG_NONE;
+ }
+}
+
+/* Write an S_REGISTER symbol, representing an unoptimized variable that has
+ been assigned to a register. */
+
+static void
+write_s_register (dw_die_ref die, dw_loc_descr_ref loc_ref)
+{
+ unsigned int label_num = ++sym_label_num;
+ const char *name = get_AT_string (die, DW_AT_name);
+ uint16_t regno;
+ uint32_t type;
+
+ /* This is struct regsym in binutils and REGSYM in Microsoft's cvinfo.h:
+
+ struct regsym
+ {
+ uint16_t size;
+ uint16_t kind;
+ uint32_t type;
+ uint16_t reg;
+ char name[];
+ } ATTRIBUTE_PACKED;
+ */
+
+ if (loc_ref->dw_loc_opc == DW_OP_regx)
+ regno = dwarf_reg_to_cv (loc_ref->dw_loc_oprnd1.v.val_int);
+ else
+ regno = dwarf_reg_to_cv (loc_ref->dw_loc_opc - DW_OP_reg0);
+
+ if (regno == 0)
+ return;
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ asm_fprintf (asm_out_file,
+ "%L" SYMBOL_END_LABEL "%u - %L" SYMBOL_START_LABEL "%u\n",
+ label_num, label_num);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_START_LABEL, label_num);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, S_REGISTER);
+ putc ('\n', asm_out_file);
+
+ type = get_type_num (get_AT_ref (die, DW_AT_type), false, false);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, type);
+ putc ('\n', asm_out_file);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, regno);
+ putc ('\n', asm_out_file);
+
+ ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name) + 1);
+
+ ASM_OUTPUT_ALIGN (asm_out_file, 2);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+}
+
/* Write a symbol representing an unoptimized variable within a function, if
we're able to translate the DIE's DW_AT_location into its CodeView
equivalent. */
@@ -1071,6 +2222,42 @@ write_unoptimized_local_variable (dw_die_ref die)
write_local_s_ldata32 (die, loc_ref);
break;
+ case DW_OP_reg0:
+ case DW_OP_reg1:
+ case DW_OP_reg2:
+ case DW_OP_reg3:
+ case DW_OP_reg4:
+ case DW_OP_reg5:
+ case DW_OP_reg6:
+ case DW_OP_reg7:
+ case DW_OP_reg8:
+ case DW_OP_reg9:
+ case DW_OP_reg10:
+ case DW_OP_reg11:
+ case DW_OP_reg12:
+ case DW_OP_reg13:
+ case DW_OP_reg14:
+ case DW_OP_reg15:
+ case DW_OP_reg16:
+ case DW_OP_reg17:
+ case DW_OP_reg18:
+ case DW_OP_reg19:
+ case DW_OP_reg20:
+ case DW_OP_reg21:
+ case DW_OP_reg22:
+ case DW_OP_reg23:
+ case DW_OP_reg24:
+ case DW_OP_reg25:
+ case DW_OP_reg26:
+ case DW_OP_reg27:
+ case DW_OP_reg28:
+ case DW_OP_reg29:
+ case DW_OP_reg30:
+ case DW_OP_reg31:
+ case DW_OP_regx:
+ write_s_register (die, loc_ref);
+ break;
+
default:
break;
}
@@ -1219,6 +2406,7 @@ write_unoptimized_function_vars (dw_die_ref die)
switch (dw_get_die_tag (c))
{
+ case DW_TAG_formal_parameter:
case DW_TAG_variable:
write_unoptimized_local_variable (c);
break;
--
2.44.2
* [PATCH 3/3] Write CodeView information about stack variables
From: Mark Harmstone @ 2024-08-13 0:24 UTC (permalink / raw)
To: gcc-patches; +Cc: Mark Harmstone
Outputs CodeView S_REGREL32 symbols for unoptimized local variables that
are stored on the stack. This includes a change to dwarf2out.cc to make
it easier to extract the function frame base without having to worry
about the function prologue or epilogue.
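For illustration only (not part of the patch; the names are invented):
at -O0 the locals below would typically be described as
DW_OP_fbreg <offset>, i.e. an offset from the function's
DW_AT_frame_base, which is the combination this patch emits as an
S_REGREL32 (register plus offset) record.

/* Hypothetical source; the offsets and the frame-base register depend
   on the target ABI.  */
int
sum_squares (int n)
{
  int total = 0;        /* expected S_REGREL32: frame base + offset */
  char buf[16];         /* likewise */

  for (int i = 0; i < n; i++)
    total += i * i;

  buf[0] = (char) (total & 0xff);
  return total;
}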
gcc/
* dwarf2codeview.cc (enum cv_sym_type): Add S_REGREL32.
(write_fbreg_variable): New function.
(write_unoptimized_local_variable): Add fblock parameter, and handle
DW_OP_fbreg locations.
(write_unoptimized_function_vars): Add fbloc parameter.
(write_function): Extract frame base from DWARF.
* dwarf2out.cc (convert_cfa_to_fb_loc_list): Output simplified frame
base information for CodeView.
---
gcc/dwarf2codeview.cc | 105 +++++++++++++++++++++++++++++++++++++++---
gcc/dwarf2out.cc | 23 +++++++++
2 files changed, 122 insertions(+), 6 deletions(-)
diff --git a/gcc/dwarf2codeview.cc b/gcc/dwarf2codeview.cc
index 4596408f2bb..e01515a0ec4 100644
--- a/gcc/dwarf2codeview.cc
+++ b/gcc/dwarf2codeview.cc
@@ -75,6 +75,7 @@ enum cv_sym_type {
S_REGISTER = 0x1106,
S_LDATA32 = 0x110c,
S_GDATA32 = 0x110d,
+ S_REGREL32 = 0x1111,
S_COMPILE3 = 0x113c,
S_LPROC32_ID = 0x1146,
S_GPROC32_ID = 0x1147,
@@ -2195,12 +2196,94 @@ write_s_register (dw_die_ref die, dw_loc_descr_ref loc_ref)
targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
}
+/* Write an S_REGREL32 symbol in order to represent an unoptimized stack
+ variable. The memory address is given by a register value plus an offset,
+ so we need to parse the function's DW_AT_frame_base attribute for this. */
+
+static void
+write_fbreg_variable (dw_die_ref die, dw_loc_descr_ref loc_ref,
+ dw_loc_descr_ref fbloc)
+{
+ unsigned int label_num = ++sym_label_num;
+ const char *name = get_AT_string (die, DW_AT_name);
+ uint32_t type;
+ uint16_t regno;
+ int offset;
+
+ /* This is struct regrel in binutils and REGREL32 in Microsoft's cvinfo.h:
+
+ struct regrel
+ {
+ uint16_t size;
+ uint16_t kind;
+ uint32_t offset;
+ uint32_t type;
+ uint16_t reg;
+ char name[];
+ } ATTRIBUTE_PACKED;
+ */
+
+ if (!fbloc)
+ return;
+
+ if (fbloc->dw_loc_opc >= DW_OP_breg0 && fbloc->dw_loc_opc <= DW_OP_breg31)
+ {
+ regno = dwarf_reg_to_cv (fbloc->dw_loc_opc - DW_OP_breg0);
+ offset = fbloc->dw_loc_oprnd1.v.val_int;
+ }
+ else if (fbloc->dw_loc_opc == DW_OP_bregx)
+ {
+ regno = dwarf_reg_to_cv (fbloc->dw_loc_oprnd1.v.val_int);
+ offset = fbloc->dw_loc_oprnd2.v.val_int;
+ }
+ else
+ {
+ return;
+ }
+
+ if (loc_ref->dw_loc_oprnd1.val_class != dw_val_class_unsigned_const)
+ return;
+
+ offset += loc_ref->dw_loc_oprnd1.v.val_int;
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ asm_fprintf (asm_out_file,
+ "%L" SYMBOL_END_LABEL "%u - %L" SYMBOL_START_LABEL "%u\n",
+ label_num, label_num);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_START_LABEL, label_num);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, S_REGREL32);
+ putc ('\n', asm_out_file);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, offset);
+ putc ('\n', asm_out_file);
+
+ type = get_type_num (get_AT_ref (die, DW_AT_type), false, false);
+
+ fputs (integer_asm_op (4, false), asm_out_file);
+ fprint_whex (asm_out_file, type);
+ putc ('\n', asm_out_file);
+
+ fputs (integer_asm_op (2, false), asm_out_file);
+ fprint_whex (asm_out_file, regno);
+ putc ('\n', asm_out_file);
+
+ ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name) + 1);
+
+ ASM_OUTPUT_ALIGN (asm_out_file, 2);
+
+ targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
+}
+
/* Write a symbol representing an unoptimized variable within a function, if
we're able to translate the DIE's DW_AT_location into its CodeView
equivalent. */
static void
-write_unoptimized_local_variable (dw_die_ref die)
+write_unoptimized_local_variable (dw_die_ref die, dw_loc_descr_ref fbloc)
{
dw_attr_node *loc;
dw_loc_descr_ref loc_ref;
@@ -2258,6 +2341,10 @@ write_unoptimized_local_variable (dw_die_ref die)
write_s_register (die, loc_ref);
break;
+ case DW_OP_fbreg:
+ write_fbreg_variable (die, loc_ref, fbloc);
+ break;
+
default:
break;
}
@@ -2390,7 +2477,7 @@ write_s_end (void)
or blocks that we encounter. */
static void
-write_unoptimized_function_vars (dw_die_ref die)
+write_unoptimized_function_vars (dw_die_ref die, dw_loc_descr_ref fbloc)
{
dw_die_ref first_child, c;
@@ -2408,14 +2495,14 @@ write_unoptimized_function_vars (dw_die_ref die)
{
case DW_TAG_formal_parameter:
case DW_TAG_variable:
- write_unoptimized_local_variable (c);
+ write_unoptimized_local_variable (c, fbloc);
break;
case DW_TAG_lexical_block:
{
bool block_started = write_s_block32 (c);
- write_unoptimized_function_vars (c);
+ write_unoptimized_function_vars (c, fbloc);
if (block_started)
write_s_end ();
@@ -2437,9 +2524,10 @@ static void
write_function (codeview_symbol *s)
{
unsigned int label_num = ++sym_label_num;
- dw_attr_node *loc_low, *loc_high;
+ dw_attr_node *loc_low, *loc_high, *frame_base;
const char *label_low, *label_high;
rtx rtx_low, rtx_high;
+ dw_loc_descr_ref fbloc = NULL;
/* This is struct procsym in binutils and PROCSYM32 in Microsoft's cvinfo.h:
@@ -2555,7 +2643,12 @@ write_function (codeview_symbol *s)
targetm.asm_out.internal_label (asm_out_file, SYMBOL_END_LABEL, label_num);
- write_unoptimized_function_vars (s->function.die);
+ frame_base = get_AT (s->function.die, DW_AT_frame_base);
+
+ if (frame_base && frame_base->dw_attr_val.val_class == dw_val_class_loc)
+ fbloc = frame_base->dw_attr_val.v.val_loc;
+
+ write_unoptimized_function_vars (s->function.die, fbloc);
/* Output the S_PROC_ID_END record. */
diff --git a/gcc/dwarf2out.cc b/gcc/dwarf2out.cc
index 357efaa5990..d5144714c6e 100644
--- a/gcc/dwarf2out.cc
+++ b/gcc/dwarf2out.cc
@@ -21183,6 +21183,29 @@ convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
list = NULL;
memset (&next_cfa, 0, sizeof (next_cfa));
+
+#ifdef CODEVIEW_DEBUGGING_INFO
+ /* We can write simplified frame base information for CodeView, as we're
+ not using it for rewinding. */
+ if (codeview_debuginfo_p ())
+ {
+ int dwreg = DEBUGGER_REGNO (cfun->machine->fs.cfa_reg->u.reg.regno);
+
+ next_cfa.reg.set_by_dwreg (dwreg);
+ next_cfa.offset = cfun->machine->fs.fp_valid
+ ? cfun->machine->fs.fp_offset : cfun->machine->fs.sp_offset;
+
+ *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
+ fde->dw_fde_begin, 0,
+ fde->dw_fde_second_begin
+ ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
+ section);
+ maybe_gen_llsym (list);
+
+ return list;
+ }
+#endif
+
next_cfa.reg.set_by_dwreg (INVALID_REGNUM);
remember = next_cfa;
--
2.44.2
* Re: [PATCH 1/3] Write CodeView information about local static variables
From: Jeff Law @ 2024-08-14 4:09 UTC (permalink / raw)
To: Mark Harmstone, gcc-patches
On 8/12/24 6:24 PM, Mark Harmstone wrote:
> Outputs CodeView S_LDATA32 symbols, for static variables within
> functions, along with S_BLOCK32 and S_END for the beginning and end of
> lexical blocks.
>
> gcc/
> * dwarf2codeview.cc (enum cv_sym_type): Add S_END and S_BLOCK32.
> (write_local_s_ldata32): New function.
> (write_unoptimized_local_variable): New function.
> (write_s_block32): New function.
> (write_s_end): New function.
> (write_unoptimized_function_vars): New function.
> (write_function): Call write_unoptimized_function_vars.
This series is fine.  I'm not particularly jazzed about how much
target-specific data shows up in patch #2.  It's probably safe to assume
the mapping of the register number to the codeview number doesn't match
the dwarf map.  It's probably also safe to assume we're not supporting
codeview on any targets other than x86 and ix86?
jeff
* Re: [PATCH 1/3] Write CodeView information about local static variables
From: Mark Harmstone @ 2024-08-16 20:50 UTC (permalink / raw)
To: Jeff Law, gcc-patches
Thanks Jeff. No, CodeView is effectively Windows-specific - it relies on PE for reporting the PDB filename, and COFF for the .secidx relocation. I might look into moving these bits into the config once I get down to plumbing it for aarch64-w64-mingw32.
Mark
On 14/08/2024 05:09, Jeff Law wrote:
>
>
> On 8/12/24 6:24 PM, Mark Harmstone wrote:
>> Outputs CodeView S_LDATA32 symbols, for static variables within
>> functions, along with S_BLOCK32 and S_END for the beginning and end of
>> lexical blocks.
>>
>> gcc/
>> * dwarf2codeview.cc (enum cv_sym_type): Add S_END and S_BLOCK32.
>> (write_local_s_ldata32): New function.
>> (write_unoptimized_local_variable): New function.
>> (write_s_block32): New function.
>> (write_s_end): New function.
>> (write_unoptimized_function_vars): New function.
>> (write_function): Call write_unoptimized_function_vars.
> This series is fine.  I'm not particularly jazzed about how much target-specific data shows up in patch #2.  It's probably safe to assume the mapping of the register number to the codeview number doesn't match the dwarf map.  It's probably also safe to assume we're not supporting codeview on any targets other than x86 and ix86?
>
> jeff