/* Type information for GCC.
Copyright (C) 2004-2022 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
/* This file is machine generated. Do not edit. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "rtl.h"
#include "gimple.h"
#include "fold-const.h"
#include "insn-codes.h"
#include "splay-tree.h"
#include "alias.h"
#include "insn-config.h"
#include "flags.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "alloc-pool.h"
#include "cselib.h"
#include "insn-addr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "debug.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "value-range.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-vrp.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "reload.h"
#include "cpplib.h"
#include "tree-chrec.h"
#include "except.h"
#include "output.h"
#include "cfgloop.h"
#include "target.h"
#include "lto-streamer.h"
#include "target-globals.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dwarf2out.h"
#include "omp-general.h"
#include "omp-offload.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "symtab-thunks.h"
#include "symtab-clones.h"
#include "diagnostic-spec.h"
#include "ctfc.h"
/* See definition in function.h. */
#undef cfun
/* Types with a "gcc::" namespace have it stripped
during gengtype parsing. Provide a "using" directive
to ensure that the fully-qualified types are found. */
using namespace gcc;
void
gt_ggc_mx_line_maps (void *x_p)
{
struct line_maps * const x = (struct line_maps *)x_p;
if (ggc_test_and_set_mark (x))
{
{
size_t l0 = (size_t)(((*x).info_ordinary).used);
if ((*x).info_ordinary.maps != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_ggc_m_S ((*x).info_ordinary.maps[i0].to_file);
}
ggc_mark ((*x).info_ordinary.maps);
}
}
{
size_t l1 = (size_t)(((*x).info_macro).used);
if ((*x).info_macro.maps != NULL) {
size_t i1;
for (i1 = 0; i1 != (size_t)(l1); i1++) {
{
union tree_node * const x2 =
((*x).info_macro.maps[i1].macro) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).info_macro.maps[i1].macro))) : NULL;
gt_ggc_m_9tree_node (x2);
}
if ((*x).info_macro.maps[i1].macro_locations != NULL) {
ggc_mark ((*x).info_macro.maps[i1].macro_locations);
}
}
ggc_mark ((*x).info_macro.maps);
}
}
{
size_t l3 = (size_t)(((*x).location_adhoc_data_map).allocated);
if ((*x).location_adhoc_data_map.data != NULL) {
size_t i3;
for (i3 = 0; i3 != (size_t)(l3); i3++) {
}
ggc_mark ((*x).location_adhoc_data_map.data);
}
}
}
}
void
gt_ggc_mx_cpp_token (void *x_p)
{
struct cpp_token * const x = (struct cpp_token *)x_p;
if (ggc_test_and_set_mark (x))
{
switch ((int) (cpp_token_val_index (&((*x)))))
{
case CPP_TOKEN_FLD_NODE:
{
union tree_node * const x0 =
((*x).val.node.node) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.node.node))) : NULL;
gt_ggc_m_9tree_node (x0);
}
{
union tree_node * const x1 =
((*x).val.node.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.node.spelling))) : NULL;
gt_ggc_m_9tree_node (x1);
}
break;
case CPP_TOKEN_FLD_SOURCE:
gt_ggc_m_9cpp_token ((*x).val.source);
break;
case CPP_TOKEN_FLD_STR:
gt_ggc_m_S ((*x).val.str.text);
break;
case CPP_TOKEN_FLD_ARG_NO:
{
union tree_node * const x2 =
((*x).val.macro_arg.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.macro_arg.spelling))) : NULL;
gt_ggc_m_9tree_node (x2);
}
break;
case CPP_TOKEN_FLD_TOKEN_NO:
break;
case CPP_TOKEN_FLD_PRAGMA:
break;
default:
break;
}
}
}
void
gt_ggc_mx_cpp_macro (void *x_p)
{
struct cpp_macro * const x = (struct cpp_macro *)x_p;
if (ggc_test_and_set_mark (x))
{
switch ((int) (((*x)).kind == cmk_assert))
{
case false:
if ((*x).parm.params != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(((*x)).paramc); i0++) {
{
union tree_node * const x1 =
((*x).parm.params[i0]) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).parm.params[i0]))) : NULL;
gt_ggc_m_9tree_node (x1);
}
}
ggc_mark ((*x).parm.params);
}
break;
case true:
gt_ggc_m_9cpp_macro ((*x).parm.next);
break;
default:
break;
}
switch ((int) (((*x)).kind == cmk_traditional))
{
case false:
{
size_t i2;
size_t l2 = (size_t)(((*x)).count);
for (i2 = 0; i2 != l2; i2++) {
switch ((int) (cpp_token_val_index (&((*x).exp.tokens[i2]))))
{
case CPP_TOKEN_FLD_NODE:
{
union tree_node * const x3 =
((*x).exp.tokens[i2].val.node.node) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.node.node))) : NULL;
gt_ggc_m_9tree_node (x3);
}
{
union tree_node * const x4 =
((*x).exp.tokens[i2].val.node.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.node.spelling))) : NULL;
gt_ggc_m_9tree_node (x4);
}
break;
case CPP_TOKEN_FLD_SOURCE:
gt_ggc_m_9cpp_token ((*x).exp.tokens[i2].val.source);
break;
case CPP_TOKEN_FLD_STR:
gt_ggc_m_S ((*x).exp.tokens[i2].val.str.text);
break;
case CPP_TOKEN_FLD_ARG_NO:
{
union tree_node * const x5 =
((*x).exp.tokens[i2].val.macro_arg.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.macro_arg.spelling))) : NULL;
gt_ggc_m_9tree_node (x5);
}
break;
case CPP_TOKEN_FLD_TOKEN_NO:
break;
case CPP_TOKEN_FLD_PRAGMA:
break;
default:
break;
}
}
}
break;
case true:
gt_ggc_m_S ((*x).exp.text);
break;
default:
break;
}
}
}
void
gt_ggc_mx_string_concat (void *x_p)
{
struct string_concat * const x = (struct string_concat *)x_p;
if (ggc_test_and_set_mark (x))
{
if ((*x).m_locs != NULL) {
ggc_mark ((*x).m_locs);
}
}
}
void
gt_ggc_mx_string_concat_db (void *x_p)
{
struct string_concat_db * const x = (struct string_concat_db *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_38hash_map_location_hash_string_concat__ ((*x).m_table);
}
}
void
gt_ggc_mx_hash_map_location_hash_string_concat__ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct string_concat *& x)
{
if (x)
gt_ggc_mx_string_concat ((void *) x);
}
void
gt_ggc_mx_bitmap_head (void *x_p)
{
struct bitmap_head * const x = (struct bitmap_head *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_14bitmap_element ((*x).first);
}
}
void
gt_ggc_mx_rtx_def (void *x_p)
{
struct rtx_def * x = (struct rtx_def *)x_p;
struct rtx_def * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = (RTX_NEXT (&(*xlimit)));
if (x != xlimit)
for (;;)
{
struct rtx_def * const xprev = (RTX_PREV (&(*x)));
if (xprev == NULL) break;
x = xprev;
(void) ggc_test_and_set_mark (xprev);
}
while (x != xlimit)
{
switch ((int) (0))
{
case 0:
switch ((int) (GET_CODE (&(*x))))
{
case DEBUG_MARKER:
break;
case DEBUG_PARAMETER_REF:
gt_ggc_m_9tree_node ((*x).u.fld[0].rt_tree);
break;
case ENTRY_VALUE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_IMPLICIT_PTR:
gt_ggc_m_9tree_node ((*x).u.fld[0].rt_tree);
break;
case VAR_LOCATION:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_9tree_node ((*x).u.fld[0].rt_tree);
break;
case FMA:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_TRUNCATE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_TRUNCATE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_MINUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_ASHIFT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_ASHIFT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_ABS:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_NEG:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_NEG:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_MINUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_PLUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_PLUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_SERIES:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_DUPLICATE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_CONCAT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_SELECT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_MERGE:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LO_SUM:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case HIGH:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ZERO_EXTRACT:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SIGN_EXTRACT:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PARITY:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POPCOUNT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CTZ:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLZ:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLRSB:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FFS:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case BSWAP:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SQRT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ABS:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_SAT_FRACT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SAT_FRACT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FRACT_CONVERT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FRACT_CONVERT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FIX:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FLOAT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FIX:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT_TRUNCATE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT_EXTEND:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case TRUNCATE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ZERO_EXTEND:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SIGN_EXTEND:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNLT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNLE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNGT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNGE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNEQ:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ORDERED:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNORDERED:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LTU:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LEU:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GTU:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GEU:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LTGT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EQ:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_MODIFY:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_MODIFY:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_INC:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_DEC:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_INC:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_DEC:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMAX:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMIN:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMAX:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMIN:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ROTATERT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LSHIFTRT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ASHIFTRT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ROTATE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ASHIFT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NOT:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case XOR:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case IOR:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case AND:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMOD:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UDIV:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MOD:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_DIV:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_DIV:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DIV:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMUL_HIGHPART:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMUL_HIGHPART:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_MULT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_MULT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MULT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NEG:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MINUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PLUS:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case COMPARE:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case IF_THEN_ELSE:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SYMBOL_REF:
switch ((int) (SYMBOL_REF_HAS_BLOCK_INFO_P (&(*x))))
{
case 1:
gt_ggc_m_12object_block ((*x).u.block_sym.block);
break;
default:
break;
}
switch ((int) (CONSTANT_POOL_ADDRESS_P (&(*x))))
{
case 1:
gt_ggc_m_23constant_descriptor_rtx ((*x).u.fld[1].rt_constant);
break;
default:
gt_ggc_m_9tree_node ((*x).u.fld[1].rt_tree);
break;
}
gt_ggc_m_S ((*x).u.fld[0].rt_str);
break;
case LABEL_REF:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MEM:
gt_ggc_m_9mem_attrs ((*x).u.fld[1].rt_mem);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CONCATN:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case CONCAT:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case STRICT_LOW_PART:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SUBREG:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SCRATCH:
break;
case REG:
gt_ggc_m_9reg_attrs ((*x).u.reg.attrs);
break;
case PC:
break;
case CONST:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CONST_STRING:
gt_ggc_m_S ((*x).u.fld[0].rt_str);
break;
case CONST_VECTOR:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case CONST_DOUBLE:
break;
case CONST_FIXED:
break;
case CONST_POLY_INT:
break;
case CONST_WIDE_INT:
break;
case CONST_INT:
break;
case TRAP_IF:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EH_RETURN:
break;
case SIMPLE_RETURN:
break;
case RETURN:
break;
case CALL:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLOBBER:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case USE:
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SET:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PREFETCH:
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDR_DIFF_VEC:
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_ggc_m_9rtvec_def ((*x).u.fld[1].rt_rtvec);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDR_VEC:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case UNSPEC_VOLATILE:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case UNSPEC:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case ASM_OPERANDS:
gt_ggc_m_9rtvec_def ((*x).u.fld[5].rt_rtvec);
gt_ggc_m_9rtvec_def ((*x).u.fld[4].rt_rtvec);
gt_ggc_m_9rtvec_def ((*x).u.fld[3].rt_rtvec);
gt_ggc_m_S ((*x).u.fld[1].rt_str);
gt_ggc_m_S ((*x).u.fld[0].rt_str);
break;
case ASM_INPUT:
gt_ggc_m_S ((*x).u.fld[0].rt_str);
break;
case PARALLEL:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case COND_EXEC:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NOTE:
switch ((int) (NOTE_KIND (&(*x))))
{
default:
gt_ggc_m_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
break;
case NOTE_INSN_CFI_LABEL:
break;
case NOTE_INSN_CFI:
break;
case NOTE_INSN_SWITCH_TEXT_SECTIONS:
break;
case NOTE_INSN_BASIC_BLOCK:
break;
case NOTE_INSN_INLINE_ENTRY:
break;
case NOTE_INSN_BEGIN_STMT:
break;
case NOTE_INSN_VAR_LOCATION:
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
break;
case NOTE_INSN_EH_REGION_END:
break;
case NOTE_INSN_EH_REGION_BEG:
break;
case NOTE_INSN_EPILOGUE_BEG:
break;
case NOTE_INSN_PROLOGUE_END:
break;
case NOTE_INSN_FUNCTION_BEG:
break;
case NOTE_INSN_BLOCK_END:
gt_ggc_m_9tree_node ((*x).u.fld[3].rt_tree);
break;
case NOTE_INSN_BLOCK_BEG:
gt_ggc_m_9tree_node ((*x).u.fld[3].rt_tree);
break;
case NOTE_INSN_DELETED_DEBUG_LABEL:
gt_ggc_m_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_DELETED_LABEL:
gt_ggc_m_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_DELETED:
break;
}
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CODE_LABEL:
gt_ggc_m_S ((*x).u.fld[6].rt_str);
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case BARRIER:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case JUMP_TABLE_DATA:
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CALL_INSN:
gt_ggc_m_7rtx_def ((*x).u.fld[7].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case JUMP_INSN:
gt_ggc_m_7rtx_def ((*x).u.fld[7].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case INSN:
gt_ggc_m_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_INSN:
gt_ggc_m_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_ggc_m_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDRESS:
break;
case SEQUENCE:
gt_ggc_m_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case INT_LIST:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
break;
case INSN_LIST:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EXPR_LIST:
gt_ggc_m_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_ggc_m_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_EXPR:
gt_ggc_m_9tree_node ((*x).u.fld[0].rt_tree);
break;
case VALUE:
break;
case UNKNOWN:
break;
default:
break;
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = (RTX_NEXT (&(*x)));
}
}
void
gt_ggc_mx_rtvec_def (void *x_p)
{
struct rtvec_def * const x = (struct rtvec_def *)x_p;
if (ggc_test_and_set_mark (x))
{
{
size_t l0 = (size_t)(((*x)).num_elem);
{
size_t i0;
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_7rtx_def ((*x).elem[i0]);
}
}
}
}
}
void
gt_ggc_mx_gimple (void *x_p)
{
struct gimple * x = (struct gimple *)x_p;
struct gimple * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
switch ((int) (gimple_statement_structure (&((*x)))))
{
case GSS_BASE:
gt_ggc_m_15basic_block_def ((*x).bb);
gt_ggc_m_6gimple ((*x).next);
break;
case GSS_WCE:
{
gimple_statement_wce *sub = static_cast (x);
gt_ggc_m_6gimple ((*sub).cleanup);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP:
{
gimple_statement_omp *sub = static_cast (x);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_SECTIONS:
{
gomp_sections *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).clauses);
gt_ggc_m_9tree_node ((*sub).control);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_PARALLEL_LAYOUT:
{
gimple_statement_omp_parallel_layout *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).clauses);
gt_ggc_m_9tree_node ((*sub).child_fn);
gt_ggc_m_9tree_node ((*sub).data_arg);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_TASK:
{
gomp_task *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).copy_fn);
gt_ggc_m_9tree_node ((*sub).arg_size);
gt_ggc_m_9tree_node ((*sub).arg_align);
gt_ggc_m_9tree_node ((*sub).clauses);
gt_ggc_m_9tree_node ((*sub).child_fn);
gt_ggc_m_9tree_node ((*sub).data_arg);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_FOR:
{
gomp_for *sub = static_cast (x);
{
size_t l0 = (size_t)(((*sub)).collapse);
gt_ggc_m_9tree_node ((*sub).clauses);
if ((*sub).iter != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_ggc_m_9tree_node ((*sub).iter[i0].index);
gt_ggc_m_9tree_node ((*sub).iter[i0].initial);
gt_ggc_m_9tree_node ((*sub).iter[i0].final);
gt_ggc_m_9tree_node ((*sub).iter[i0].incr);
}
ggc_mark ((*sub).iter);
}
gt_ggc_m_6gimple ((*sub).pre_body);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
case GSS_OMP_SINGLE_LAYOUT:
{
gimple_statement_omp_single_layout *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).clauses);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_CRITICAL:
{
gomp_critical *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).clauses);
gt_ggc_m_9tree_node ((*sub).name);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_CONTINUE:
{
gomp_continue *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).control_def);
gt_ggc_m_9tree_node ((*sub).control_use);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_ATOMIC_STORE_LAYOUT:
{
gimple_statement_omp_atomic_store_layout *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).val);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_OMP_ATOMIC_LOAD:
{
gomp_atomic_load *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).rhs);
gt_ggc_m_9tree_node ((*sub).lhs);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_TRY:
{
gtry *sub = static_cast (x);
gt_ggc_m_6gimple ((*sub).eval);
gt_ggc_m_6gimple ((*sub).cleanup);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_PHI:
{
gphi *sub = static_cast (x);
{
size_t l1 = (size_t)(((*sub)).nargs);
gt_ggc_m_9tree_node ((*sub).result);
{
size_t i1;
for (i1 = 0; i1 != l1; i1++) {
gt_ggc_m_9tree_node ((*sub).args[i1].def);
}
}
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
case GSS_EH_CTRL:
{
gimple_statement_eh_ctrl *sub = static_cast (x);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_EH_ELSE:
{
geh_else *sub = static_cast (x);
gt_ggc_m_6gimple ((*sub).n_body);
gt_ggc_m_6gimple ((*sub).e_body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_EH_MNT:
{
geh_mnt *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).fndecl);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_EH_FILTER:
{
geh_filter *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).types);
gt_ggc_m_6gimple ((*sub).failure);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_CATCH:
{
gcatch *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).types);
gt_ggc_m_6gimple ((*sub).handler);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_BIND:
{
gbind *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).vars);
gt_ggc_m_9tree_node ((*sub).block);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_WITH_MEM_OPS_BASE:
{
gimple_statement_with_memory_ops_base *sub = static_cast (x);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_TRANSACTION:
{
gtransaction *sub = static_cast (x);
gt_ggc_m_6gimple ((*sub).body);
gt_ggc_m_9tree_node ((*sub).label_norm);
gt_ggc_m_9tree_node ((*sub).label_uninst);
gt_ggc_m_9tree_node ((*sub).label_over);
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
break;
case GSS_CALL:
{
gcall *sub = static_cast (x);
{
size_t l2 = (size_t)(((*sub)).num_ops);
gt_ggc_m_11bitmap_head ((*sub).call_used.vars);
gt_ggc_m_11bitmap_head ((*sub).call_clobbered.vars);
switch ((int) (((*sub)).subcode & GF_CALL_INTERNAL))
{
case 0:
gt_ggc_m_9tree_node ((*sub).u.fntype);
break;
case GF_CALL_INTERNAL:
break;
default:
break;
}
{
size_t i2;
for (i2 = 0; i2 != l2; i2++) {
gt_ggc_m_9tree_node ((*sub).op[i2]);
}
}
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
case GSS_ASM:
{
gasm *sub = static_cast (x);
{
size_t l3 = (size_t)(((*sub)).num_ops);
gt_ggc_m_S ((*sub).string);
{
size_t i3;
for (i3 = 0; i3 != l3; i3++) {
gt_ggc_m_9tree_node ((*sub).op[i3]);
}
}
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
case GSS_WITH_MEM_OPS:
{
gimple_statement_with_memory_ops *sub = static_cast (x);
{
size_t l4 = (size_t)(((*sub)).num_ops);
{
size_t i4;
for (i4 = 0; i4 != l4; i4++) {
gt_ggc_m_9tree_node ((*sub).op[i4]);
}
}
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
case GSS_WITH_OPS:
{
gimple_statement_with_ops *sub = static_cast (x);
{
size_t l5 = (size_t)(((*sub)).num_ops);
{
size_t i5;
for (i5 = 0; i5 != l5; i5++) {
gt_ggc_m_9tree_node ((*sub).op[i5]);
}
}
gt_ggc_m_15basic_block_def ((*sub).bb);
gt_ggc_m_6gimple ((*sub).next);
}
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = ((*x).next);
}
}
void
gt_ggc_mx_symtab_node (void *x_p)
{
struct symtab_node * x = (struct symtab_node *)x_p;
struct symtab_node * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
if (x != xlimit)
for (;;)
{
struct symtab_node * const xprev = ((*x).previous);
if (xprev == NULL) break;
x = xprev;
(void) ggc_test_and_set_mark (xprev);
}
while (x != xlimit)
{
switch ((int) (((*x)).type))
{
case SYMTAB_SYMBOL:
gt_ggc_m_9tree_node ((*x).decl);
gt_ggc_m_11symtab_node ((*x).next);
gt_ggc_m_11symtab_node ((*x).previous);
gt_ggc_m_11symtab_node ((*x).next_sharing_asm_name);
gt_ggc_m_11symtab_node ((*x).previous_sharing_asm_name);
gt_ggc_m_11symtab_node ((*x).same_comdat_group);
gt_ggc_m_9tree_node ((*x).alias_target);
gt_ggc_m_18lto_file_decl_data ((*x).lto_file_data);
gt_ggc_m_9tree_node ((*x).x_comdat_group);
gt_ggc_m_18section_hash_entry ((*x).x_section);
break;
case SYMTAB_VARIABLE:
{
varpool_node *sub = static_cast (x);
gt_ggc_m_9tree_node ((*sub).decl);
gt_ggc_m_11symtab_node ((*sub).next);
gt_ggc_m_11symtab_node ((*sub).previous);
gt_ggc_m_11symtab_node ((*sub).next_sharing_asm_name);
gt_ggc_m_11symtab_node ((*sub).previous_sharing_asm_name);
gt_ggc_m_11symtab_node ((*sub).same_comdat_group);
gt_ggc_m_9tree_node ((*sub).alias_target);
gt_ggc_m_18lto_file_decl_data ((*sub).lto_file_data);
gt_ggc_m_9tree_node ((*sub).x_comdat_group);
gt_ggc_m_18section_hash_entry ((*sub).x_section);
}
break;
case SYMTAB_FUNCTION:
{
cgraph_node *sub = static_cast (x);
gt_ggc_m_11cgraph_edge ((*sub).callees);
gt_ggc_m_11cgraph_edge ((*sub).callers);
gt_ggc_m_11cgraph_edge ((*sub).indirect_calls);
gt_ggc_m_11symtab_node ((*sub).next_sibling_clone);
gt_ggc_m_11symtab_node ((*sub).prev_sibling_clone);
gt_ggc_m_11symtab_node ((*sub).clones);
gt_ggc_m_11symtab_node ((*sub).clone_of);
gt_ggc_m_30hash_table_cgraph_edge_hasher_ ((*sub).call_site_hash);
gt_ggc_m_9tree_node ((*sub).former_clone_of);
gt_ggc_m_17cgraph_simd_clone ((*sub).simdclone);
gt_ggc_m_11symtab_node ((*sub).simd_clones);
gt_ggc_m_11symtab_node ((*sub).inlined_to);
gt_ggc_m_15cgraph_rtl_info ((*sub).rtl);
gt_ggc_m_9tree_node ((*sub).decl);
gt_ggc_m_11symtab_node ((*sub).next);
gt_ggc_m_11symtab_node ((*sub).previous);
gt_ggc_m_11symtab_node ((*sub).next_sharing_asm_name);
gt_ggc_m_11symtab_node ((*sub).previous_sharing_asm_name);
gt_ggc_m_11symtab_node ((*sub).same_comdat_group);
gt_ggc_m_9tree_node ((*sub).alias_target);
gt_ggc_m_18lto_file_decl_data ((*sub).lto_file_data);
gt_ggc_m_9tree_node ((*sub).x_comdat_group);
gt_ggc_m_18section_hash_entry ((*sub).x_section);
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = ((*x).next);
}
}
/* GC marking routine for a chain of struct cgraph_edge (call-graph
   edges) linked through prev_caller/next_caller.  Machine generated
   by gengtype.  */
void
gt_ggc_mx_cgraph_edge (void *x_p)
{
struct cgraph_edge * x = (struct cgraph_edge *)x_p;
struct cgraph_edge * xlimit = x;
/* Mark forward along next_caller until an already-marked edge (or
   NULL) stops us; XLIMIT ends up one past the last newly marked node.  */
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next_caller);
/* If anything was newly marked, rewind to the head of the chain,
   marking backwards through prev_caller as we go.  */
if (x != xlimit)
for (;;)
{
struct cgraph_edge * const xprev = ((*x).prev_caller);
if (xprev == NULL) break;
x = xprev;
(void) ggc_test_and_set_mark (xprev);
}
/* Mark the objects referenced by every edge in [x, xlimit).  */
while (x != xlimit)
{
gt_ggc_m_11symtab_node ((*x).caller);
gt_ggc_m_11symtab_node ((*x).callee);
gt_ggc_m_11cgraph_edge ((*x).prev_caller);
gt_ggc_m_11cgraph_edge ((*x).next_caller);
gt_ggc_m_11cgraph_edge ((*x).prev_callee);
gt_ggc_m_11cgraph_edge ((*x).next_callee);
gt_ggc_m_6gimple ((*x).call_stmt);
gt_ggc_m_25cgraph_indirect_call_info ((*x).indirect_info);
x = ((*x).next_caller);
}
}
void
gt_ggc_mx (struct cgraph_edge& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_edge * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_11symtab_node ((*x).caller);
gt_ggc_m_11symtab_node ((*x).callee);
gt_ggc_m_11cgraph_edge ((*x).prev_caller);
gt_ggc_m_11cgraph_edge ((*x).next_caller);
gt_ggc_m_11cgraph_edge ((*x).prev_callee);
gt_ggc_m_11cgraph_edge ((*x).next_callee);
gt_ggc_m_6gimple ((*x).call_stmt);
gt_ggc_m_25cgraph_indirect_call_info ((*x).indirect_info);
}
void
gt_ggc_mx (struct cgraph_edge *& x)
{
if (x)
gt_ggc_mx_cgraph_edge ((void *) x);
}
/* GC marking routine for union section.  SECTION_STYLE selects which
   union member is live, so only the active variant's fields are
   marked.  Machine generated by gengtype.  */
void
gt_ggc_mx_section (void *x_p)
{
union section * const x = (union section *)x_p;
if (ggc_test_and_set_mark (x))
{
switch ((int) (SECTION_STYLE (&(((*x))))))
{
case SECTION_NAMED:
gt_ggc_m_S ((*x).named.name);
gt_ggc_m_9tree_node ((*x).named.decl);
break;
case SECTION_UNNAMED:
gt_ggc_m_S ((*x).unnamed.data);
gt_ggc_m_7section ((*x).unnamed.next);
break;
case SECTION_NOSWITCH:
/* No GC-managed fields in the noswitch variant.  */
break;
default:
break;
}
}
}
void
gt_ggc_mx (union section& x_r ATTRIBUTE_UNUSED)
{
union section * ATTRIBUTE_UNUSED x = &x_r;
switch ((int) (SECTION_STYLE (&(((*x))))))
{
case SECTION_NAMED:
gt_ggc_m_S ((*x).named.name);
gt_ggc_m_9tree_node ((*x).named.decl);
break;
case SECTION_UNNAMED:
gt_ggc_m_S ((*x).unnamed.data);
gt_ggc_m_7section ((*x).unnamed.next);
break;
case SECTION_NOSWITCH:
break;
default:
break;
}
}
void
gt_ggc_mx (union section *& x)
{
if (x)
gt_ggc_mx_section ((void *) x);
}
void
gt_ggc_mx_cl_target_option (void *x_p)
{
struct cl_target_option * const x = (struct cl_target_option *)x_p;
if (ggc_test_and_set_mark (x))
{
}
}
void
gt_ggc_mx_cl_optimization (void *x_p)
{
struct cl_optimization * const x = (struct cl_optimization *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_S ((*x).x_str_align_functions);
gt_ggc_m_S ((*x).x_str_align_jumps);
gt_ggc_m_S ((*x).x_str_align_labels);
gt_ggc_m_S ((*x).x_str_align_loops);
gt_ggc_m_S ((*x).x_flag_patchable_function_entry);
}
}
void
gt_ggc_mx_edge_def (void *x_p)
{
edge_def * const x = (edge_def *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GC marking routine for a chain of basic blocks linked through
   prev_bb/next_bb.  Machine generated by gengtype.  */
void
gt_ggc_mx_basic_block_def (void *x_p)
{
struct basic_block_def * x = (struct basic_block_def *)x_p;
struct basic_block_def * xlimit = x;
/* Mark forward along next_bb until an already-marked block stops us.  */
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next_bb);
/* Rewind to the head of the chain, marking through prev_bb.  */
if (x != xlimit)
for (;;)
{
struct basic_block_def * const xprev = ((*x).prev_bb);
if (xprev == NULL) break;
x = xprev;
(void) ggc_test_and_set_mark (xprev);
}
while (x != xlimit)
{
gt_ggc_m_15vec_edge_va_gc_ ((*x).preds);
gt_ggc_m_15vec_edge_va_gc_ ((*x).succs);
gt_ggc_m_4loop ((*x).loop_father);
gt_ggc_m_15basic_block_def ((*x).prev_bb);
gt_ggc_m_15basic_block_def ((*x).next_bb);
/* The il union holds gimple or RTL contents depending on BB_RTL.  */
switch ((int) (((((*x)).flags & BB_RTL) != 0)))
{
case 0:
gt_ggc_m_6gimple ((*x).il.gimple.seq);
gt_ggc_m_6gimple ((*x).il.gimple.phi_nodes);
break;
case 1:
gt_ggc_m_7rtx_def ((*x).il.x.head_);
gt_ggc_m_11rtl_bb_info ((*x).il.x.rtl);
break;
default:
break;
}
x = ((*x).next_bb);
}
}
void
gt_ggc_mx_bitmap_element (void *x_p)
{
struct bitmap_element * x = (struct bitmap_element *)x_p;
struct bitmap_element * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_ggc_m_14bitmap_element ((*x).next);
gt_ggc_m_14bitmap_element ((*x).prev);
x = ((*x).next);
}
}
void
gt_ggc_mx_generic_wide_int_wide_int_storage_ (void *x_p)
{
generic_wide_int * const x = (generic_wide_int *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct wide_int_storage& x_r ATTRIBUTE_UNUSED)
{
struct wide_int_storage * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_mem_attrs (void *x_p)
{
struct mem_attrs * const x = (struct mem_attrs *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).expr);
}
}
void
gt_ggc_mx_reg_attrs (void *x_p)
{
struct reg_attrs * const x = (struct reg_attrs *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).decl);
}
}
void
gt_ggc_mx (struct reg_attrs& x_r ATTRIBUTE_UNUSED)
{
struct reg_attrs * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).decl);
}
void
gt_ggc_mx (struct reg_attrs *& x)
{
if (x)
gt_ggc_mx_reg_attrs ((void *) x);
}
void
gt_ggc_mx_object_block (void *x_p)
{
struct object_block * const x = (struct object_block *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_7section ((*x).sect);
gt_ggc_m_14vec_rtx_va_gc_ ((*x).objects);
gt_ggc_m_14vec_rtx_va_gc_ ((*x).anchors);
}
}
void
gt_ggc_mx (struct object_block& x_r ATTRIBUTE_UNUSED)
{
struct object_block * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_7section ((*x).sect);
gt_ggc_m_14vec_rtx_va_gc_ ((*x).objects);
gt_ggc_m_14vec_rtx_va_gc_ ((*x).anchors);
}
void
gt_ggc_mx (struct object_block *& x)
{
if (x)
gt_ggc_mx_object_block ((void *) x);
}
void
gt_ggc_mx_vec_rtx_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct rtx_def *& x)
{
if (x)
gt_ggc_mx_rtx_def ((void *) x);
}
void
gt_ggc_mx_real_value (void *x_p)
{
struct real_value * const x = (struct real_value *)x_p;
if (ggc_test_and_set_mark (x))
{
}
}
void
gt_ggc_mx_fixed_value (void *x_p)
{
struct fixed_value * const x = (struct fixed_value *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (&((*x).mode));
}
}
void
gt_ggc_mx_function (void *x_p)
{
struct function * const x = (struct function *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9eh_status ((*x).eh);
gt_ggc_m_18control_flow_graph ((*x).cfg);
gt_ggc_m_6gimple ((*x).gimple_body);
gt_ggc_m_9gimple_df ((*x).gimple_df);
gt_ggc_m_5loops ((*x).x_current_loops);
gt_ggc_m_S ((*x).pass_startwith);
gt_ggc_m_11stack_usage ((*x).su);
gt_ggc_m_9tree_node ((*x).decl);
gt_ggc_m_9tree_node ((*x).static_chain_decl);
gt_ggc_m_9tree_node ((*x).nonlocal_goto_save_area);
gt_ggc_m_15vec_tree_va_gc_ ((*x).local_decls);
gt_ggc_m_16machine_function ((*x).machine);
gt_ggc_m_17language_function ((*x).language);
gt_ggc_m_14hash_set_tree_ ((*x).used_types_hash);
gt_ggc_m_11dw_fde_node ((*x).fde);
}
}
/* GC marking routine for struct target_rtl: marks every rtx held in
   the per-target RTL arrays.  Machine generated by gengtype.  */
void
gt_ggc_mx_target_rtl (void *x_p)
{
struct target_rtl * const x = (struct target_rtl *)x_p;
if (ggc_test_and_set_mark (x))
{
/* Global rtl objects, one slot per global_rtl_index value.  */
{
size_t i0;
size_t l0 = (size_t)(GR_MAX);
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_7rtx_def ((*x).x_global_rtl[i0]);
}
}
gt_ggc_m_7rtx_def ((*x).x_pic_offset_table_rtx);
gt_ggc_m_7rtx_def ((*x).x_return_address_pointer_rtx);
/* One initial REG rtx per hard register.  */
{
size_t i1;
size_t l1 = (size_t)(FIRST_PSEUDO_REGISTER);
for (i1 = 0; i1 != l1; i1++) {
gt_ggc_m_7rtx_def ((*x).x_initial_regno_reg_rtx[i1]);
}
}
/* Per-mode top-of-stack rtx cache.  */
{
size_t i2;
size_t l2 = (size_t)(MAX_MACHINE_MODE);
for (i2 = 0; i2 != l2; i2++) {
gt_ggc_m_7rtx_def ((*x).x_top_of_stack[i2]);
}
}
/* Per-hard-register base values used by alias analysis.  */
{
size_t i3;
size_t l3 = (size_t)(FIRST_PSEUDO_REGISTER);
for (i3 = 0; i3 != l3; i3++) {
gt_ggc_m_7rtx_def ((*x).x_static_reg_base_value[i3]);
}
}
/* Per-mode default mem_attrs.  */
{
size_t i4;
size_t l4 = (size_t)((int) MAX_MACHINE_MODE);
for (i4 = 0; i4 != l4; i4++) {
gt_ggc_m_9mem_attrs ((*x).x_mode_mem_attrs[i4]);
}
}
}
}
void
gt_ggc_mx_cgraph_rtl_info (void *x_p)
{
struct cgraph_rtl_info * const x = (struct cgraph_rtl_info *)x_p;
if (ggc_test_and_set_mark (x))
{
}
}
void
gt_ggc_mx_hash_map_tree_tree_decl_tree_cache_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct decl_tree_cache_traits& x_r ATTRIBUTE_UNUSED)
{
struct decl_tree_cache_traits * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx (union tree_node *& x)
{
if (x)
gt_ggc_mx_lang_tree_node ((void *) x);
}
void
gt_ggc_mx_hash_map_tree_tree_type_tree_cache_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct type_tree_cache_traits& x_r ATTRIBUTE_UNUSED)
{
struct type_tree_cache_traits * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_hash_map_tree_tree_decl_tree_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct decl_tree_traits& x_r ATTRIBUTE_UNUSED)
{
struct decl_tree_traits * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_ptr_info_def (void *x_p)
{
struct ptr_info_def * const x = (struct ptr_info_def *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11bitmap_head ((*x).pt.vars);
}
}
void
gt_ggc_mx_range_info_def (void *x_p)
{
struct range_info_def * const x = (struct range_info_def *)x_p;
if (ggc_test_and_set_mark (x))
{
}
}
void
gt_ggc_mx_vec_constructor_elt_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct constructor_elt& x_r ATTRIBUTE_UNUSED)
{
struct constructor_elt * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).index);
gt_ggc_m_9tree_node ((*x).value);
}
void
gt_ggc_mx_vec_tree_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_tree_statement_list_node (void *x_p)
{
struct tree_statement_list_node * x = (struct tree_statement_list_node *)x_p;
struct tree_statement_list_node * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
if (x != xlimit)
for (;;)
{
struct tree_statement_list_node * const xprev = ((*x).prev);
if (xprev == NULL) break;
x = xprev;
(void) ggc_test_and_set_mark (xprev);
}
while (x != xlimit)
{
gt_ggc_m_24tree_statement_list_node ((*x).prev);
gt_ggc_m_24tree_statement_list_node ((*x).next);
gt_ggc_m_9tree_node ((*x).stmt);
x = ((*x).next);
}
}
void
gt_ggc_mx_target_globals (void *x_p)
{
struct target_globals * const x = (struct target_globals *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_10target_rtl ((*x).rtl);
gt_ggc_m_15target_libfuncs ((*x).libfuncs);
}
}
void
gt_ggc_mx_tree_map (void *x_p)
{
struct tree_map * const x = (struct tree_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_9tree_node ((*x).to);
}
}
void
gt_ggc_mx (struct tree_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_map * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_9tree_node ((*x).to);
}
void
gt_ggc_mx (struct tree_map *& x)
{
if (x)
gt_ggc_mx_tree_map ((void *) x);
}
void
gt_ggc_mx_tree_decl_map (void *x_p)
{
struct tree_decl_map * const x = (struct tree_decl_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_9tree_node ((*x).to);
}
}
void
gt_ggc_mx (struct tree_decl_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_decl_map * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_9tree_node ((*x).to);
}
void
gt_ggc_mx (struct tree_decl_map *& x)
{
if (x)
gt_ggc_mx_tree_decl_map ((void *) x);
}
void
gt_ggc_mx_tree_int_map (void *x_p)
{
struct tree_int_map * const x = (struct tree_int_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).base.from);
}
}
void
gt_ggc_mx (struct tree_int_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_int_map * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).base.from);
}
void
gt_ggc_mx (struct tree_int_map *& x)
{
if (x)
gt_ggc_mx_tree_int_map ((void *) x);
}
void
gt_ggc_mx_tree_vec_map (void *x_p)
{
struct tree_vec_map * const x = (struct tree_vec_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_15vec_tree_va_gc_ ((*x).to);
}
}
void
gt_ggc_mx (struct tree_vec_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_vec_map * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).base.from);
gt_ggc_m_15vec_tree_va_gc_ ((*x).to);
}
void
gt_ggc_mx (struct tree_vec_map *& x)
{
if (x)
gt_ggc_mx_tree_vec_map ((void *) x);
}
void
gt_ggc_mx_vec_alias_pair_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct alias_pair& x_r ATTRIBUTE_UNUSED)
{
struct alias_pair * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).decl);
gt_ggc_m_9tree_node ((*x).target);
}
void
gt_ggc_mx_libfunc_entry (void *x_p)
{
struct libfunc_entry * const x = (struct libfunc_entry *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_7rtx_def ((*x).libfunc);
}
}
void
gt_ggc_mx (struct libfunc_entry& x_r ATTRIBUTE_UNUSED)
{
struct libfunc_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_7rtx_def ((*x).libfunc);
}
void
gt_ggc_mx (struct libfunc_entry *& x)
{
if (x)
gt_ggc_mx_libfunc_entry ((void *) x);
}
void
gt_ggc_mx_hash_table_libfunc_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct libfunc_hasher& x_r ATTRIBUTE_UNUSED)
{
struct libfunc_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_target_libfuncs (void *x_p)
{
struct target_libfuncs * const x = (struct target_libfuncs *)x_p;
if (ggc_test_and_set_mark (x))
{
{
size_t i0;
size_t l0 = (size_t)(LTI_MAX);
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_7rtx_def ((*x).x_libfunc_table[i0]);
}
}
gt_ggc_m_26hash_table_libfunc_hasher_ ((*x).x_libfunc_hash);
}
}
void
gt_ggc_mx_sequence_stack (void *x_p)
{
struct sequence_stack * const x = (struct sequence_stack *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_7rtx_def ((*x).first);
gt_ggc_m_7rtx_def ((*x).last);
gt_ggc_m_14sequence_stack ((*x).next);
}
}
void
gt_ggc_mx_vec_rtx_insn__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct rtx_insn *& x)
{
if (x)
gt_ggc_mx_rtx_def ((void *) x);
}
void
gt_ggc_mx_vec_uchar_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_vec_call_site_record_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct call_site_record_d *& x)
{
if (x)
gt_ggc_mx_call_site_record_d ((void *) x);
}
void
gt_ggc_mx_gimple_df (void *x_p)
{
struct gimple_df * const x = (struct gimple_df *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_15vec_tree_va_gc_ ((*x).ssa_names);
gt_ggc_m_9tree_node ((*x).vop);
gt_ggc_m_11bitmap_head ((*x).escaped.vars);
gt_ggc_m_15vec_tree_va_gc_ ((*x).free_ssanames);
gt_ggc_m_15vec_tree_va_gc_ ((*x).free_ssanames_queue);
gt_ggc_m_27hash_table_ssa_name_hasher_ ((*x).default_defs);
gt_ggc_m_20ssa_operand_memory_d ((*x).ssa_operands.operand_memory);
gt_ggc_m_29hash_table_tm_restart_hasher_ ((*x).tm_restart);
}
}
void
gt_ggc_mx_dw_fde_node (void *x_p)
{
struct dw_fde_node * const x = (struct dw_fde_node *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).decl);
gt_ggc_m_S ((*x).dw_fde_begin);
gt_ggc_m_S ((*x).dw_fde_current_label);
gt_ggc_m_S ((*x).dw_fde_end);
gt_ggc_m_S ((*x).dw_fde_vms_end_prologue);
gt_ggc_m_S ((*x).dw_fde_vms_begin_epilogue);
gt_ggc_m_S ((*x).dw_fde_second_begin);
gt_ggc_m_S ((*x).dw_fde_second_end);
gt_ggc_m_21vec_dw_cfi_ref_va_gc_ ((*x).dw_fde_cfi);
}
}
void
gt_ggc_mx_frame_space (void *x_p)
{
struct frame_space * const x = (struct frame_space *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11frame_space ((*x).next);
}
}
void
gt_ggc_mx_vec_callinfo_callee_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct callinfo_callee& x_r ATTRIBUTE_UNUSED)
{
struct callinfo_callee * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).decl);
}
void
gt_ggc_mx_vec_callinfo_dalloc_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct callinfo_dalloc& x_r ATTRIBUTE_UNUSED)
{
struct callinfo_dalloc * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_S ((*x).name);
}
void
gt_ggc_mx_stack_usage (void *x_p)
{
struct stack_usage * const x = (struct stack_usage *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_26vec_callinfo_callee_va_gc_ ((*x).callees);
gt_ggc_m_26vec_callinfo_dalloc_va_gc_ ((*x).dallocs);
}
}
/* GC marking routine for struct eh_status (per-function exception
   handling state).  Machine generated by gengtype.  */
void
gt_ggc_mx_eh_status (void *x_p)
{
struct eh_status * const x = (struct eh_status *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11eh_region_d ((*x).region_tree);
gt_ggc_m_20vec_eh_region_va_gc_ ((*x).region_array);
gt_ggc_m_25vec_eh_landing_pad_va_gc_ ((*x).lp_array);
gt_ggc_m_21hash_map_gimple__int_ ((*x).throw_stmt_table);
gt_ggc_m_15vec_tree_va_gc_ ((*x).ttype_data);
/* ehspec_data is a union: the live member depends on whether the
   target uses the ARM EABI unwinder.  */
switch ((int) (targetm.arm_eabi_unwinder))
{
case 1:
gt_ggc_m_15vec_tree_va_gc_ ((*x).ehspec_data.arm_eabi);
break;
case 0:
gt_ggc_m_16vec_uchar_va_gc_ ((*x).ehspec_data.other);
break;
default:
break;
}
}
}
void
gt_ggc_mx_control_flow_graph (void *x_p)
{
struct control_flow_graph * const x = (struct control_flow_graph *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_15basic_block_def ((*x).x_entry_block_ptr);
gt_ggc_m_15basic_block_def ((*x).x_exit_block_ptr);
gt_ggc_m_22vec_basic_block_va_gc_ ((*x).x_basic_block_info);
gt_ggc_m_22vec_basic_block_va_gc_ ((*x).x_label_to_block_map);
}
}
void
gt_ggc_mx_loops (void *x_p)
{
struct loops * const x = (struct loops *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_17vec_loop_p_va_gc_ ((*x).larray);
gt_ggc_m_28hash_table_loop_exit_hasher_ ((*x).exits);
gt_ggc_m_4loop ((*x).tree_root);
}
}
void
gt_ggc_mx_hash_set_tree_ (void *x_p)
{
hash_set * const x = (hash_set *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_types_used_by_vars_entry (void *x_p)
{
struct types_used_by_vars_entry * const x = (struct types_used_by_vars_entry *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).type);
gt_ggc_m_9tree_node ((*x).var_decl);
}
}
void
gt_ggc_mx (struct types_used_by_vars_entry& x_r ATTRIBUTE_UNUSED)
{
struct types_used_by_vars_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).type);
gt_ggc_m_9tree_node ((*x).var_decl);
}
void
gt_ggc_mx (struct types_used_by_vars_entry *& x)
{
if (x)
gt_ggc_mx_types_used_by_vars_entry ((void *) x);
}
void
gt_ggc_mx_hash_table_used_type_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct used_type_hasher& x_r ATTRIBUTE_UNUSED)
{
struct used_type_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_nb_iter_bound (void *x_p)
{
struct nb_iter_bound * x = (struct nb_iter_bound *)x_p;
struct nb_iter_bound * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_ggc_m_6gimple ((*x).stmt);
gt_ggc_m_13nb_iter_bound ((*x).next);
x = ((*x).next);
}
}
void
gt_ggc_mx_loop_exit (void *x_p)
{
struct loop_exit * const x = (struct loop_exit *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_8edge_def ((*x).e);
gt_ggc_m_9loop_exit ((*x).prev);
gt_ggc_m_9loop_exit ((*x).next);
gt_ggc_m_9loop_exit ((*x).next_e);
}
}
void
gt_ggc_mx (struct loop_exit& x_r ATTRIBUTE_UNUSED)
{
struct loop_exit * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_8edge_def ((*x).e);
gt_ggc_m_9loop_exit ((*x).prev);
gt_ggc_m_9loop_exit ((*x).next);
gt_ggc_m_9loop_exit ((*x).next_e);
}
void
gt_ggc_mx (struct loop_exit *& x)
{
if (x)
gt_ggc_mx_loop_exit ((void *) x);
}
void
gt_ggc_mx_loop (void *x_p)
{
struct loop * x = (struct loop *)x_p;
struct loop * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_ggc_m_15basic_block_def ((*x).header);
gt_ggc_m_15basic_block_def ((*x).latch);
gt_ggc_m_17vec_loop_p_va_gc_ ((*x).superloops);
gt_ggc_m_4loop ((*x).inner);
gt_ggc_m_4loop ((*x).next);
gt_ggc_m_9tree_node ((*x).nb_iterations);
gt_ggc_m_9tree_node ((*x).simduid);
gt_ggc_m_13nb_iter_bound ((*x).bounds);
gt_ggc_m_10control_iv ((*x).control_ivs);
gt_ggc_m_9loop_exit ((*x).exits);
gt_ggc_m_10niter_desc ((*x).simple_loop_desc);
gt_ggc_m_15basic_block_def ((*x).former_header);
x = ((*x).next);
}
}
void
gt_ggc_mx_control_iv (void *x_p)
{
struct control_iv * x = (struct control_iv *)x_p;
struct control_iv * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_ggc_m_9tree_node ((*x).base);
gt_ggc_m_9tree_node ((*x).step);
gt_ggc_m_10control_iv ((*x).next);
x = ((*x).next);
}
}
void
gt_ggc_mx_vec_loop_p_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct loop *& x)
{
if (x)
gt_ggc_mx_loop ((void *) x);
}
void
gt_ggc_mx_niter_desc (void *x_p)
{
struct niter_desc * const x = (struct niter_desc *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_8edge_def ((*x).out_edge);
gt_ggc_m_8edge_def ((*x).in_edge);
gt_ggc_m_7rtx_def ((*x).assumptions);
gt_ggc_m_7rtx_def ((*x).noloop_assumptions);
gt_ggc_m_7rtx_def ((*x).infinite);
gt_ggc_m_7rtx_def ((*x).niter_expr);
}
}
void
gt_ggc_mx_hash_table_loop_exit_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct loop_exit_hasher& x_r ATTRIBUTE_UNUSED)
{
struct loop_exit_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_vec_basic_block_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct basic_block_def *& x)
{
if (x)
gt_ggc_mx_basic_block_def ((void *) x);
}
void
gt_ggc_mx_rtl_bb_info (void *x_p)
{
struct rtl_bb_info * const x = (struct rtl_bb_info *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_7rtx_def ((*x).end_);
gt_ggc_m_7rtx_def ((*x).header_);
gt_ggc_m_7rtx_def ((*x).footer_);
}
}
void
gt_ggc_mx_vec_edge_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (edge_def *& x)
{
if (x)
gt_ggc_mx_edge_def ((void *) x);
}
void
gt_ggc_mx_section_hash_entry (void *x_p)
{
struct section_hash_entry * const x = (struct section_hash_entry *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_S ((*x).name);
}
}
void
gt_ggc_mx (struct section_hash_entry& x_r ATTRIBUTE_UNUSED)
{
struct section_hash_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_S ((*x).name);
}
void
gt_ggc_mx (struct section_hash_entry *& x)
{
if (x)
gt_ggc_mx_section_hash_entry ((void *) x);
}
void
gt_ggc_mx_lto_file_decl_data (void *x_p)
{
struct lto_file_decl_data * const x = (struct lto_file_decl_data *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_17lto_in_decl_state ((*x).current_decl_state);
gt_ggc_m_17lto_in_decl_state ((*x).global_decl_state);
gt_ggc_m_29hash_table_decl_state_hasher_ ((*x).function_decl_states);
gt_ggc_m_18lto_file_decl_data ((*x).next);
gt_ggc_m_S ((*x).mode_table);
}
}
void
gt_ggc_mx_ipa_replace_map (void *x_p)
{
struct ipa_replace_map * const x = (struct ipa_replace_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).new_tree);
}
}
/* GC marking routine for struct cgraph_simd_clone.  The trailing args
   array is length-prefixed by the nargs field.  Machine generated by
   gengtype.  */
void
gt_ggc_mx_cgraph_simd_clone (void *x_p)
{
struct cgraph_simd_clone * const x = (struct cgraph_simd_clone *)x_p;
if (ggc_test_and_set_mark (x))
{
{
/* Number of elements in the flexible args[] array.  */
size_t l0 = (size_t)(((*x)).nargs);
gt_ggc_m_11symtab_node ((*x).prev_clone);
gt_ggc_m_11symtab_node ((*x).next_clone);
gt_ggc_m_11symtab_node ((*x).origin);
{
size_t i0;
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_9tree_node ((*x).args[i0].orig_arg);
gt_ggc_m_9tree_node ((*x).args[i0].orig_type);
gt_ggc_m_9tree_node ((*x).args[i0].vector_arg);
gt_ggc_m_9tree_node ((*x).args[i0].vector_type);
gt_ggc_m_9tree_node ((*x).args[i0].simd_array);
}
}
}
}
}
void
gt_ggc_mx_cgraph_function_version_info (void *x_p)
{
struct cgraph_function_version_info * const x = (struct cgraph_function_version_info *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11symtab_node ((*x).this_node);
gt_ggc_m_28cgraph_function_version_info ((*x).prev);
gt_ggc_m_28cgraph_function_version_info ((*x).next);
gt_ggc_m_9tree_node ((*x).dispatcher_resolver);
}
}
void
gt_ggc_mx (struct cgraph_function_version_info& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_function_version_info * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_11symtab_node ((*x).this_node);
gt_ggc_m_28cgraph_function_version_info ((*x).prev);
gt_ggc_m_28cgraph_function_version_info ((*x).next);
gt_ggc_m_9tree_node ((*x).dispatcher_resolver);
}
void
gt_ggc_mx (struct cgraph_function_version_info *& x)
{
if (x)
gt_ggc_mx_cgraph_function_version_info ((void *) x);
}
void
gt_ggc_mx_hash_table_cgraph_edge_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct cgraph_edge_hasher& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_edge_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_cgraph_indirect_call_info (void *x_p)
{
struct cgraph_indirect_call_info * const x = (struct cgraph_indirect_call_info *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).context.outer_type);
gt_ggc_m_9tree_node ((*x).context.speculative_outer_type);
gt_ggc_m_9tree_node ((*x).otr_type);
}
}
void
gt_ggc_mx_asm_node (void *x_p)
{
struct asm_node * const x = (struct asm_node *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_8asm_node ((*x).next);
gt_ggc_m_9tree_node ((*x).asm_str);
}
}
void
gt_ggc_mx_thunk_info (void *x_p)
{
struct thunk_info * const x = (struct thunk_info *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_9tree_node ((*x).alias);
}
}
void
gt_ggc_mx_function_summary_thunk_info__ (void *x_p)
{
function_summary * const x = (function_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct thunk_info *& x)
{
if (x)
gt_ggc_mx_thunk_info ((void *) x);
}
void
gt_ggc_mx_clone_info (void *x_p)
{
struct clone_info * const x = (struct clone_info *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_27vec_ipa_replace_map__va_gc_ ((*x).tree_map);
gt_ggc_m_21ipa_param_adjustments ((*x).param_adjustments);
}
}
void
gt_ggc_mx_function_summary_clone_info__ (void *x_p)
{
function_summary * const x = (function_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct clone_info *& x)
{
if (x)
gt_ggc_mx_clone_info ((void *) x);
}
/* GC marking routine for struct symbol_table (the global symtab):
   marks the symbol/asm-node lists, the lookup hashes, and the thunk
   and clone summaries.  Machine generated by gengtype.  */
void
gt_ggc_mx_symbol_table (void *x_p)
{
struct symbol_table * const x = (struct symbol_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11symtab_node ((*x).nodes);
gt_ggc_m_8asm_node ((*x).asmnodes);
gt_ggc_m_8asm_node ((*x).asm_last_node);
gt_ggc_m_31hash_table_section_name_hasher_ ((*x).section_hash);
gt_ggc_m_26hash_table_asmname_hasher_ ((*x).assembler_name_hash);
gt_ggc_m_42hash_map_symtab_node__symbol_priority_map_ ((*x).init_priority_hash);
gt_ggc_m_29function_summary_thunk_info__ ((*x).m_thunks);
gt_ggc_m_29function_summary_clone_info__ ((*x).m_clones);
}
}
void
gt_ggc_mx_hash_table_section_name_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct section_name_hasher& x_r ATTRIBUTE_UNUSED)
{
struct section_name_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_hash_table_asmname_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct asmname_hasher& x_r ATTRIBUTE_UNUSED)
{
struct asmname_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_hash_map_symtab_node__symbol_priority_map_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct symbol_priority_map& x_r ATTRIBUTE_UNUSED)
{
struct symbol_priority_map * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx (struct symtab_node *& x)
{
if (x)
gt_ggc_mx_symtab_node ((void *) x);
}
void
gt_ggc_mx_constant_descriptor_tree (void *x_p)
{
struct constant_descriptor_tree * const x = (struct constant_descriptor_tree *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_7rtx_def ((*x).rtl);
gt_ggc_m_9tree_node ((*x).value);
}
}
void
gt_ggc_mx (struct constant_descriptor_tree& x_r ATTRIBUTE_UNUSED)
{
struct constant_descriptor_tree * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_7rtx_def ((*x).rtl);
gt_ggc_m_9tree_node ((*x).value);
}
void
gt_ggc_mx (struct constant_descriptor_tree *& x)
{
if (x)
gt_ggc_mx_constant_descriptor_tree ((void *) x);
}
void
gt_ggc_mx_vec_ipa_replace_map__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct ipa_replace_map *& x)
{
if (x)
gt_ggc_mx_ipa_replace_map ((void *) x);
}
void
gt_ggc_mx_ipa_param_adjustments (void *x_p)
{
struct ipa_param_adjustments * const x = (struct ipa_param_adjustments *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_29vec_ipa_adjusted_param_va_gc_ ((*x).m_adj_params);
}
}
void
gt_ggc_mx_lto_in_decl_state (void *x_p)
{
struct lto_in_decl_state * const x = (struct lto_in_decl_state *)x_p;
if (ggc_test_and_set_mark (x))
{
{
size_t i0;
size_t l0 = (size_t)(LTO_N_DECL_STREAMS);
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_15vec_tree_va_gc_ ((*x).streams[i0]);
}
}
gt_ggc_m_9tree_node ((*x).fn_decl);
}
}
void
gt_ggc_mx (struct lto_in_decl_state& x_r ATTRIBUTE_UNUSED)
{
struct lto_in_decl_state * ATTRIBUTE_UNUSED x = &x_r;
{
size_t i1;
size_t l1 = (size_t)(LTO_N_DECL_STREAMS);
for (i1 = 0; i1 != l1; i1++) {
gt_ggc_m_15vec_tree_va_gc_ ((*x).streams[i1]);
}
}
gt_ggc_m_9tree_node ((*x).fn_decl);
}
void
gt_ggc_mx (struct lto_in_decl_state *& x)
{
if (x)
gt_ggc_mx_lto_in_decl_state ((void *) x);
}
void
gt_ggc_mx_ipa_node_params (void *x_p)
{
struct ipa_node_params * const x = (struct ipa_node_params *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_31vec_ipa_param_descriptor_va_gc_ ((*x).descriptors);
}
}
void
gt_ggc_mx (struct ipa_node_params& x_r ATTRIBUTE_UNUSED)
{
struct ipa_node_params * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_31vec_ipa_param_descriptor_va_gc_ ((*x).descriptors);
}
void
gt_ggc_mx (struct ipa_node_params *& x)
{
if (x)
gt_ggc_mx_ipa_node_params ((void *) x);
}
void
gt_ggc_mx_ipa_edge_args (void *x_p)
{
struct ipa_edge_args * const x = (struct ipa_edge_args *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_24vec_ipa_jump_func_va_gc_ ((*x).jump_functions);
gt_ggc_m_39vec_ipa_polymorphic_call_context_va_gc_ ((*x).polymorphic_call_contexts);
}
}
void
gt_ggc_mx (struct ipa_edge_args& x_r ATTRIBUTE_UNUSED)
{
struct ipa_edge_args * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_24vec_ipa_jump_func_va_gc_ ((*x).jump_functions);
gt_ggc_m_39vec_ipa_polymorphic_call_context_va_gc_ ((*x).polymorphic_call_contexts);
}
void
gt_ggc_mx (struct ipa_edge_args *& x)
{
if (x)
gt_ggc_mx_ipa_edge_args ((void *) x);
}
void
gt_ggc_mx_ipa_agg_replacement_value (void *x_p)
{
struct ipa_agg_replacement_value * const x = (struct ipa_agg_replacement_value *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_25ipa_agg_replacement_value ((*x).next);
gt_ggc_m_9tree_node ((*x).value);
}
}
void
gt_ggc_mx_ipa_fn_summary (void *x_p)
{
struct ipa_fn_summary * const x = (struct ipa_fn_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_20vec_condition_va_gc_ ((*x).conds);
gt_ggc_m_37vec_ipa_freqcounting_predicate_va_gc_ ((*x).loop_iterations);
gt_ggc_m_37vec_ipa_freqcounting_predicate_va_gc_ ((*x).loop_strides);
}
}
void
gt_ggc_mx_vec_ipa_adjusted_param_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct ipa_adjusted_param& x_r ATTRIBUTE_UNUSED)
{
struct ipa_adjusted_param * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).type);
gt_ggc_m_9tree_node ((*x).alias_ptr_type);
}
void
gt_ggc_mx_modref_tree_alias_set_type_ (void *x_p)
{
modref_tree * const x = (modref_tree *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_modref_summary (void *x_p)
{
struct modref_summary * const x = (struct modref_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_27modref_tree_alias_set_type_ ((*x).loads);
gt_ggc_m_27modref_tree_alias_set_type_ ((*x).stores);
}
}
void
gt_ggc_mx_hash_map_location_hash_nowarn_spec_t_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx (struct nowarn_spec_t& x_r ATTRIBUTE_UNUSED)
{
struct nowarn_spec_t * ATTRIBUTE_UNUSED x = &x_r;
}
/* GC marking routine for struct dw_cfi_node (a DWARF CFI
   instruction).  Each of the two operands is a union whose live member
   is described by dw_cfi_oprnd1_desc/dw_cfi_oprnd2_desc for this
   opcode; only reg-num and offset operands carry no GC pointers.
   Machine generated by gengtype.  */
void
gt_ggc_mx_dw_cfi_node (void *x_p)
{
struct dw_cfi_node * const x = (struct dw_cfi_node *)x_p;
if (ggc_test_and_set_mark (x))
{
/* First operand: mark according to its operand class.  */
switch ((int) (dw_cfi_oprnd1_desc (((*x)).dw_cfi_opc)))
{
case dw_cfi_oprnd_reg_num:
break;
case dw_cfi_oprnd_offset:
break;
case dw_cfi_oprnd_addr:
gt_ggc_m_S ((*x).dw_cfi_oprnd1.dw_cfi_addr);
break;
case dw_cfi_oprnd_loc:
gt_ggc_m_17dw_loc_descr_node ((*x).dw_cfi_oprnd1.dw_cfi_loc);
break;
case dw_cfi_oprnd_cfa_loc:
gt_ggc_m_15dw_cfa_location ((*x).dw_cfi_oprnd1.dw_cfi_cfa_loc);
break;
default:
break;
}
/* Second operand: same treatment, via dw_cfi_oprnd2_desc.  */
switch ((int) (dw_cfi_oprnd2_desc (((*x)).dw_cfi_opc)))
{
case dw_cfi_oprnd_reg_num:
break;
case dw_cfi_oprnd_offset:
break;
case dw_cfi_oprnd_addr:
gt_ggc_m_S ((*x).dw_cfi_oprnd2.dw_cfi_addr);
break;
case dw_cfi_oprnd_loc:
gt_ggc_m_17dw_loc_descr_node ((*x).dw_cfi_oprnd2.dw_cfi_loc);
break;
case dw_cfi_oprnd_cfa_loc:
gt_ggc_m_15dw_cfa_location ((*x).dw_cfi_oprnd2.dw_cfi_cfa_loc);
break;
default:
break;
}
}
}
/* GGC: mark a chain of dw_loc_descr_node.  First walk dw_loc_next marking
   each node until one is found already marked (or the chain ends); then
   mark the other fields of every node visited.  Which union member of each
   operand is marked depends on its val_class.  */
void
gt_ggc_mx_dw_loc_descr_node (void *x_p)
{
struct dw_loc_descr_node * x = (struct dw_loc_descr_node *)x_p;
struct dw_loc_descr_node * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).dw_loc_next);
while (x != xlimit)
{
gt_ggc_m_17dw_loc_descr_node ((*x).dw_loc_next);
gt_ggc_m_16addr_table_entry ((*x).dw_loc_oprnd1.val_entry);
switch ((int) (((*x).dw_loc_oprnd1).val_class))
{
case dw_val_class_addr:
gt_ggc_m_7rtx_def ((*x).dw_loc_oprnd1.v.val_addr);
break;
case dw_val_class_offset:
break;
case dw_val_class_loc_list:
gt_ggc_m_18dw_loc_list_struct ((*x).dw_loc_oprnd1.v.val_loc_list);
break;
case dw_val_class_view_list:
gt_ggc_m_10die_struct ((*x).dw_loc_oprnd1.v.val_view_list);
break;
case dw_val_class_loc:
gt_ggc_m_17dw_loc_descr_node ((*x).dw_loc_oprnd1.v.val_loc);
break;
default:
break;
case dw_val_class_unsigned_const:
break;
case dw_val_class_const_double:
break;
case dw_val_class_wide_int:
gt_ggc_m_34generic_wide_int_wide_int_storage_ ((*x).dw_loc_oprnd1.v.val_wide);
break;
case dw_val_class_vec:
if ((*x).dw_loc_oprnd1.v.val_vec.array != NULL) {
ggc_mark ((*x).dw_loc_oprnd1.v.val_vec.array);
}
break;
case dw_val_class_die_ref:
gt_ggc_m_10die_struct ((*x).dw_loc_oprnd1.v.val_die_ref.die);
break;
case dw_val_class_fde_ref:
break;
case dw_val_class_str:
gt_ggc_m_20indirect_string_node ((*x).dw_loc_oprnd1.v.val_str);
break;
case dw_val_class_lbl_id:
gt_ggc_m_S ((*x).dw_loc_oprnd1.v.val_lbl_id);
break;
case dw_val_class_flag:
break;
case dw_val_class_file:
gt_ggc_m_15dwarf_file_data ((*x).dw_loc_oprnd1.v.val_file);
break;
case dw_val_class_file_implicit:
gt_ggc_m_15dwarf_file_data ((*x).dw_loc_oprnd1.v.val_file_implicit);
break;
case dw_val_class_data8:
break;
case dw_val_class_decl_ref:
gt_ggc_m_9tree_node ((*x).dw_loc_oprnd1.v.val_decl_ref);
break;
case dw_val_class_vms_delta:
gt_ggc_m_S ((*x).dw_loc_oprnd1.v.val_vms_delta.lbl1);
gt_ggc_m_S ((*x).dw_loc_oprnd1.v.val_vms_delta.lbl2);
break;
case dw_val_class_discr_value:
switch ((int) (((*x).dw_loc_oprnd1.v.val_discr_value).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
break;
case dw_val_class_discr_list:
gt_ggc_m_18dw_discr_list_node ((*x).dw_loc_oprnd1.v.val_discr_list);
break;
case dw_val_class_symview:
gt_ggc_m_S ((*x).dw_loc_oprnd1.v.val_symbolic_view);
break;
}
gt_ggc_m_16addr_table_entry ((*x).dw_loc_oprnd2.val_entry);
switch ((int) (((*x).dw_loc_oprnd2).val_class))
{
case dw_val_class_addr:
gt_ggc_m_7rtx_def ((*x).dw_loc_oprnd2.v.val_addr);
break;
case dw_val_class_offset:
break;
case dw_val_class_loc_list:
gt_ggc_m_18dw_loc_list_struct ((*x).dw_loc_oprnd2.v.val_loc_list);
break;
case dw_val_class_view_list:
gt_ggc_m_10die_struct ((*x).dw_loc_oprnd2.v.val_view_list);
break;
case dw_val_class_loc:
gt_ggc_m_17dw_loc_descr_node ((*x).dw_loc_oprnd2.v.val_loc);
break;
default:
break;
case dw_val_class_unsigned_const:
break;
case dw_val_class_const_double:
break;
case dw_val_class_wide_int:
gt_ggc_m_34generic_wide_int_wide_int_storage_ ((*x).dw_loc_oprnd2.v.val_wide);
break;
case dw_val_class_vec:
if ((*x).dw_loc_oprnd2.v.val_vec.array != NULL) {
ggc_mark ((*x).dw_loc_oprnd2.v.val_vec.array);
}
break;
case dw_val_class_die_ref:
gt_ggc_m_10die_struct ((*x).dw_loc_oprnd2.v.val_die_ref.die);
break;
case dw_val_class_fde_ref:
break;
case dw_val_class_str:
gt_ggc_m_20indirect_string_node ((*x).dw_loc_oprnd2.v.val_str);
break;
case dw_val_class_lbl_id:
gt_ggc_m_S ((*x).dw_loc_oprnd2.v.val_lbl_id);
break;
case dw_val_class_flag:
break;
case dw_val_class_file:
gt_ggc_m_15dwarf_file_data ((*x).dw_loc_oprnd2.v.val_file);
break;
case dw_val_class_file_implicit:
gt_ggc_m_15dwarf_file_data ((*x).dw_loc_oprnd2.v.val_file_implicit);
break;
case dw_val_class_data8:
break;
case dw_val_class_decl_ref:
gt_ggc_m_9tree_node ((*x).dw_loc_oprnd2.v.val_decl_ref);
break;
case dw_val_class_vms_delta:
gt_ggc_m_S ((*x).dw_loc_oprnd2.v.val_vms_delta.lbl1);
gt_ggc_m_S ((*x).dw_loc_oprnd2.v.val_vms_delta.lbl2);
break;
case dw_val_class_discr_value:
switch ((int) (((*x).dw_loc_oprnd2.v.val_discr_value).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
break;
case dw_val_class_discr_list:
gt_ggc_m_18dw_discr_list_node ((*x).dw_loc_oprnd2.v.val_discr_list);
break;
case dw_val_class_symview:
gt_ggc_m_S ((*x).dw_loc_oprnd2.v.val_symbolic_view);
break;
}
x = ((*x).dw_loc_next);
}
}
/* GGC: mark a dw_discr_list_node and its successor; the bound values hold
   no GC pointers, so their switches mark nothing.  */
void
gt_ggc_mx_dw_discr_list_node (void *x_p)
{
struct dw_discr_list_node * const x = (struct dw_discr_list_node *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_18dw_discr_list_node ((*x).dw_discr_next);
switch ((int) (((*x).dw_discr_lower_bound).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (((*x).dw_discr_upper_bound).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
}
}
void
gt_ggc_mx_vec_dw_cfi_ref_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_dw_cfi_node when X is
   non-null.  */
void
gt_ggc_mx (struct dw_cfi_node *& x)
{
if (x)
gt_ggc_mx_dw_cfi_node ((void *) x);
}
/* GGC: mark a dwarf_file_data (if not already marked) and its string
   fields.  */
void
gt_ggc_mx_dwarf_file_data (void *x_p)
{
struct dwarf_file_data * const x = (struct dwarf_file_data *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_S ((*x).key);
gt_ggc_m_S ((*x).filename);
}
}
/* GGC: by-reference overload — mark the string fields of a
   dwarf_file_data.  */
void
gt_ggc_mx (struct dwarf_file_data& x_r ATTRIBUTE_UNUSED)
{
struct dwarf_file_data * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_S ((*x).key);
gt_ggc_m_S ((*x).filename);
}
/* GGC: pointer overload — forward to gt_ggc_mx_dwarf_file_data when X is
   non-null.  */
void
gt_ggc_mx (struct dwarf_file_data *& x)
{
if (x)
gt_ggc_mx_dwarf_file_data ((void *) x);
}
/* GGC: mark a chain of ctf_string via cts_next, then mark each visited
   node's fields.  */
void
gt_ggc_mx_ctf_string (void *x_p)
{
struct ctf_string * x = (struct ctf_string *)x_p;
struct ctf_string * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).cts_next);
while (x != xlimit)
{
gt_ggc_m_S ((*x).cts_str);
gt_ggc_m_10ctf_string ((*x).cts_next);
x = ((*x).cts_next);
}
}
/* GGC: mark a chain of ctf_dmdef via dmd_next, then mark each visited
   node's fields.  */
void
gt_ggc_mx_ctf_dmdef (void *x_p)
{
struct ctf_dmdef * x = (struct ctf_dmdef *)x_p;
struct ctf_dmdef * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).dmd_next);
while (x != xlimit)
{
gt_ggc_m_S ((*x).dmd_name);
gt_ggc_m_9ctf_dmdef ((*x).dmd_next);
x = ((*x).dmd_next);
}
}
/* GGC: mark a ctf_func_arg (if not already marked) and its pointer
   fields.  */
void
gt_ggc_mx_ctf_func_arg (void *x_p)
{
struct ctf_func_arg * const x = (struct ctf_func_arg *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_S ((*x).farg_name);
gt_ggc_m_12ctf_func_arg ((*x).farg_next);
}
}
/* GGC: mark a ctf_dtdef.  The dtd_u union member marked is selected by
   ctf_dtu_d_union_selector.  */
void
gt_ggc_mx_ctf_dtdef (void *x_p)
{
struct ctf_dtdef * const x = (struct ctf_dtdef *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_10die_struct ((*x).dtd_key);
gt_ggc_m_S ((*x).dtd_name);
switch ((int) (0))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (ctf_dtu_d_union_selector (&((*x)))))
{
case CTF_DTU_D_MEMBERS:
gt_ggc_m_9ctf_dmdef ((*x).dtd_u.dtu_members);
break;
case CTF_DTU_D_ARRAY:
break;
case CTF_DTU_D_ENCODING:
break;
case CTF_DTU_D_ARGUMENTS:
gt_ggc_m_12ctf_func_arg ((*x).dtd_u.dtu_argv);
break;
case CTF_DTU_D_SLICE:
break;
default:
break;
}
}
}
/* GGC: by-reference overload for ctf_dtdef; same field marking as
   gt_ggc_mx_ctf_dtdef but without the mark-bit test.  */
void
gt_ggc_mx (struct ctf_dtdef& x_r ATTRIBUTE_UNUSED)
{
struct ctf_dtdef * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_10die_struct ((*x).dtd_key);
gt_ggc_m_S ((*x).dtd_name);
switch ((int) (0))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (ctf_dtu_d_union_selector (&((*x)))))
{
case CTF_DTU_D_MEMBERS:
gt_ggc_m_9ctf_dmdef ((*x).dtd_u.dtu_members);
break;
case CTF_DTU_D_ARRAY:
break;
case CTF_DTU_D_ENCODING:
break;
case CTF_DTU_D_ARGUMENTS:
gt_ggc_m_12ctf_func_arg ((*x).dtd_u.dtu_argv);
break;
case CTF_DTU_D_SLICE:
break;
default:
break;
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_ctf_dtdef when X is
   non-null.  */
void
gt_ggc_mx (struct ctf_dtdef *& x)
{
if (x)
gt_ggc_mx_ctf_dtdef ((void *) x);
}
/* GGC: mark a ctf_dvdef (if not already marked) and its pointer fields.  */
void
gt_ggc_mx_ctf_dvdef (void *x_p)
{
struct ctf_dvdef * const x = (struct ctf_dvdef *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_10die_struct ((*x).dvd_key);
gt_ggc_m_S ((*x).dvd_name);
}
}
/* GGC: by-reference overload — mark the pointer fields of a ctf_dvdef.  */
void
gt_ggc_mx (struct ctf_dvdef& x_r ATTRIBUTE_UNUSED)
{
struct ctf_dvdef * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_10die_struct ((*x).dvd_key);
gt_ggc_m_S ((*x).dvd_name);
}
/* GGC: pointer overload — forward to gt_ggc_mx_ctf_dvdef when X is
   non-null.  */
void
gt_ggc_mx (struct ctf_dvdef *& x)
{
if (x)
gt_ggc_mx_ctf_dvdef ((void *) x);
}
void
gt_ggc_mx_hash_table_ctfc_dtd_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ctfc_dtd_hasher — nothing to mark.  */
void
gt_ggc_mx (struct ctfc_dtd_hasher& x_r ATTRIBUTE_UNUSED)
{
struct ctfc_dtd_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_hash_table_ctfc_dvd_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ctfc_dvd_hasher — nothing to mark.  */
void
gt_ggc_mx (struct ctfc_dvd_hasher& x_r ATTRIBUTE_UNUSED)
{
struct ctfc_dvd_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* GGC: mark a ctf_container, its hash tables, string tables, and the four
   list arrays.
   NOTE(review): the array lengths l0..l3 are generated as 0 here, so the
   per-element marking loops below never run and only the arrays themselves
   are marked — verify against current gengtype output that the length
   expressions were not lost in this copy.  */
void
gt_ggc_mx_ctf_container (void *x_p)
{
struct ctf_container * const x = (struct ctf_container *)x_p;
if (ggc_test_and_set_mark (x))
{
{
size_t l0 = (size_t)(0);
size_t l1 = (size_t)(0);
size_t l2 = (size_t)(0);
size_t l3 = (size_t)(0);
gt_ggc_m_27hash_table_ctfc_dtd_hasher_ ((*x).ctfc_types);
gt_ggc_m_27hash_table_ctfc_dvd_hasher_ ((*x).ctfc_vars);
gt_ggc_m_27hash_table_ctfc_dvd_hasher_ ((*x).ctfc_ignore_vars);
gt_ggc_m_10ctf_string ((*x).ctfc_strtable.ctstab_head);
gt_ggc_m_10ctf_string ((*x).ctfc_strtable.ctstab_tail);
gt_ggc_m_S ((*x).ctfc_strtable.ctstab_estr);
gt_ggc_m_10ctf_string ((*x).ctfc_aux_strtable.ctstab_head);
gt_ggc_m_10ctf_string ((*x).ctfc_aux_strtable.ctstab_tail);
gt_ggc_m_S ((*x).ctfc_aux_strtable.ctstab_estr);
if ((*x).ctfc_vars_list != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_ggc_m_9ctf_dvdef ((*x).ctfc_vars_list[i0]);
}
ggc_mark ((*x).ctfc_vars_list);
}
if ((*x).ctfc_types_list != NULL) {
size_t i1;
for (i1 = 0; i1 != (size_t)(l1); i1++) {
gt_ggc_m_9ctf_dtdef ((*x).ctfc_types_list[i1]);
}
ggc_mark ((*x).ctfc_types_list);
}
if ((*x).ctfc_gfuncs_list != NULL) {
size_t i2;
for (i2 = 0; i2 != (size_t)(l2); i2++) {
gt_ggc_m_9ctf_dtdef ((*x).ctfc_gfuncs_list[i2]);
}
ggc_mark ((*x).ctfc_gfuncs_list);
}
if ((*x).ctfc_gobjts_list != NULL) {
size_t i3;
for (i3 = 0; i3 != (size_t)(l3); i3++) {
gt_ggc_m_9ctf_dvdef ((*x).ctfc_gobjts_list[i3]);
}
ggc_mark ((*x).ctfc_gobjts_list);
}
}
}
}
void
gt_ggc_mx_vec_temp_slot_p_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_temp_slot when X is
   non-null.  */
void
gt_ggc_mx (struct temp_slot *& x)
{
if (x)
gt_ggc_mx_temp_slot ((void *) x);
}
/* GGC: mark an eh_region_d, its tree links, and the union member selected
   by the region type.  */
void
gt_ggc_mx_eh_region_d (void *x_p)
{
struct eh_region_d * const x = (struct eh_region_d *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_11eh_region_d ((*x).outer);
gt_ggc_m_11eh_region_d ((*x).inner);
gt_ggc_m_11eh_region_d ((*x).next_peer);
switch ((int) ((*x).type))
{
case ERT_TRY:
gt_ggc_m_10eh_catch_d ((*x).u.eh_try.first_catch);
gt_ggc_m_10eh_catch_d ((*x).u.eh_try.last_catch);
break;
case ERT_ALLOWED_EXCEPTIONS:
gt_ggc_m_9tree_node ((*x).u.allowed.type_list);
gt_ggc_m_9tree_node ((*x).u.allowed.label);
break;
case ERT_MUST_NOT_THROW:
gt_ggc_m_9tree_node ((*x).u.must_not_throw.failure_decl);
break;
default:
break;
}
gt_ggc_m_16eh_landing_pad_d ((*x).landing_pads);
gt_ggc_m_7rtx_def ((*x).exc_ptr_reg);
gt_ggc_m_7rtx_def ((*x).filter_reg);
}
}
/* GGC: mark an eh_landing_pad_d (if not already marked) and its pointer
   fields.  */
void
gt_ggc_mx_eh_landing_pad_d (void *x_p)
{
struct eh_landing_pad_d * const x = (struct eh_landing_pad_d *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_16eh_landing_pad_d ((*x).next_lp);
gt_ggc_m_11eh_region_d ((*x).region);
gt_ggc_m_9tree_node ((*x).post_landing_pad);
gt_ggc_m_7rtx_def ((*x).landing_pad);
}
}
/* GGC: mark an eh_catch_d (if not already marked) and its pointer
   fields.  */
void
gt_ggc_mx_eh_catch_d (void *x_p)
{
struct eh_catch_d * const x = (struct eh_catch_d *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_10eh_catch_d ((*x).next_catch);
gt_ggc_m_10eh_catch_d ((*x).prev_catch);
gt_ggc_m_9tree_node ((*x).type_list);
gt_ggc_m_9tree_node ((*x).filter_list);
gt_ggc_m_9tree_node ((*x).label);
}
}
void
gt_ggc_mx_vec_eh_region_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_eh_region_d when X is
   non-null.  */
void
gt_ggc_mx (struct eh_region_d *& x)
{
if (x)
gt_ggc_mx_eh_region_d ((void *) x);
}
void
gt_ggc_mx_vec_eh_landing_pad_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_eh_landing_pad_d when X is
   non-null.  */
void
gt_ggc_mx (struct eh_landing_pad_d *& x)
{
if (x)
gt_ggc_mx_eh_landing_pad_d ((void *) x);
}
void
gt_ggc_mx_hash_map_gimple__int_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_gimple when X is
   non-null.  */
void
gt_ggc_mx (struct gimple *& x)
{
if (x)
gt_ggc_mx_gimple ((void *) x);
}
/* GGC: mark a tm_restart_node (if not already marked) and its pointer
   fields.  */
void
gt_ggc_mx_tm_restart_node (void *x_p)
{
struct tm_restart_node * const x = (struct tm_restart_node *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_6gimple ((*x).stmt);
gt_ggc_m_9tree_node ((*x).label_or_list);
}
}
/* GGC: by-reference overload — mark the pointer fields of a
   tm_restart_node.  */
void
gt_ggc_mx (struct tm_restart_node& x_r ATTRIBUTE_UNUSED)
{
struct tm_restart_node * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_6gimple ((*x).stmt);
gt_ggc_m_9tree_node ((*x).label_or_list);
}
/* GGC: pointer overload — forward to gt_ggc_mx_tm_restart_node when X is
   non-null.  */
void
gt_ggc_mx (struct tm_restart_node *& x)
{
if (x)
gt_ggc_mx_tm_restart_node ((void *) x);
}
void
gt_ggc_mx_hash_map_tree_tree_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_hash_table_ssa_name_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ssa_name_hasher — nothing to mark.  */
void
gt_ggc_mx (struct ssa_name_hasher& x_r ATTRIBUTE_UNUSED)
{
struct ssa_name_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_hash_table_tm_restart_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for tm_restart_hasher — nothing to mark.  */
void
gt_ggc_mx (struct tm_restart_hasher& x_r ATTRIBUTE_UNUSED)
{
struct tm_restart_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* GGC: mark a chain of ssa_operand_memory_d via the next pointer.  */
void
gt_ggc_mx_ssa_operand_memory_d (void *x_p)
{
struct ssa_operand_memory_d * x = (struct ssa_operand_memory_d *)x_p;
struct ssa_operand_memory_d * xlimit = x;
while (ggc_test_and_set_mark (xlimit))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_ggc_m_20ssa_operand_memory_d ((*x).next);
x = ((*x).next);
}
}
/* GGC marker for int_range<1>: mark the object and forward to the
   overloaded gt_ggc_mx.  */
void
gt_ggc_mx_int_range_1_ (void *x_p)
{
int_range<1> * const x = (int_range<1> *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_vec_ipa_agg_jf_item_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ipa_agg_jf_item; the value union member
   marked is selected by jftype.  */
void
gt_ggc_mx (struct ipa_agg_jf_item& x_r ATTRIBUTE_UNUSED)
{
struct ipa_agg_jf_item * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).type);
switch ((int) (((*x)).jftype))
{
case IPA_JF_CONST:
gt_ggc_m_9tree_node ((*x).value.constant);
break;
case IPA_JF_PASS_THROUGH:
gt_ggc_m_9tree_node ((*x).value.pass_through.operand);
break;
case IPA_JF_LOAD_AGG:
gt_ggc_m_9tree_node ((*x).value.load_agg.pass_through.operand);
gt_ggc_m_9tree_node ((*x).value.load_agg.type);
break;
default:
break;
}
}
/* GGC: mark an ipa_bits object itself; it has no GC pointer fields, so
   the body is empty.  */
void
gt_ggc_mx_ipa_bits (void *x_p)
{
struct ipa_bits * const x = (struct ipa_bits *)x_p;
if (ggc_test_and_set_mark (x))
{
}
}
void
gt_ggc_mx_vec_ipa_param_descriptor_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload — mark the tree member of an
   ipa_param_descriptor.  */
void
gt_ggc_mx (struct ipa_param_descriptor& x_r ATTRIBUTE_UNUSED)
{
struct ipa_param_descriptor * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).decl_or_type);
}
void
gt_ggc_mx_vec_ipa_bits__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_ipa_bits when X is
   non-null.  */
void
gt_ggc_mx (struct ipa_bits *& x)
{
if (x)
gt_ggc_mx_ipa_bits ((void *) x);
}
void
gt_ggc_mx_vec_ipa_vr_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload — mark the min/max members of an ipa_vr via
   their gt_ggc_mx overloads.  */
void
gt_ggc_mx (struct ipa_vr& x_r ATTRIBUTE_UNUSED)
{
struct ipa_vr * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_mx (&((*x).min));
gt_ggc_mx (&((*x).max));
}
/* GGC: mark an ipcp_transformation (if not already marked) and its
   pointer fields.  */
void
gt_ggc_mx_ipcp_transformation (void *x_p)
{
struct ipcp_transformation * const x = (struct ipcp_transformation *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_m_25ipa_agg_replacement_value ((*x).agg_values);
gt_ggc_m_20vec_ipa_bits__va_gc_ ((*x).bits);
gt_ggc_m_17vec_ipa_vr_va_gc_ ((*x).m_vr);
}
}
void
gt_ggc_mx_vec_ipa_jump_func_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ipa_jump_func; the value union member
   marked is selected by the jump-function type.  */
void
gt_ggc_mx (struct ipa_jump_func& x_r ATTRIBUTE_UNUSED)
{
struct ipa_jump_func * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_26vec_ipa_agg_jf_item_va_gc_ ((*x).agg.items);
gt_ggc_m_8ipa_bits ((*x).bits);
gt_ggc_m_12int_range_1_ ((*x).m_vr);
switch ((int) (((*x)).type))
{
case IPA_JF_CONST:
gt_ggc_m_9tree_node ((*x).value.constant.value);
break;
case IPA_JF_PASS_THROUGH:
gt_ggc_m_9tree_node ((*x).value.pass_through.operand);
break;
case IPA_JF_ANCESTOR:
break;
default:
break;
}
}
void
gt_ggc_mx_vec_ipa_polymorphic_call_context_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload — mark the tree members of an
   ipa_polymorphic_call_context.  */
void
gt_ggc_mx (struct ipa_polymorphic_call_context& x_r ATTRIBUTE_UNUSED)
{
struct ipa_polymorphic_call_context * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).outer_type);
gt_ggc_m_9tree_node ((*x).speculative_outer_type);
}
/* GGC marker for ipa_node_params_t: mark the summary object and forward
   to the overloaded gt_ggc_mx.  */
void
gt_ggc_mx_ipa_node_params_t (void *x_p)
{
ipa_node_params_t * const x = (ipa_node_params_t *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC marker for ipa_edge_args_sum_t: mark the summary object and forward
   to the overloaded gt_ggc_mx.  */
void
gt_ggc_mx_ipa_edge_args_sum_t (void *x_p)
{
ipa_edge_args_sum_t * const x = (ipa_edge_args_sum_t *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
void
gt_ggc_mx_function_summary_ipcp_transformation__ (void *x_p)
{
function_summary * const x = (function_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_ipcp_transformation when X
   is non-null.  */
void
gt_ggc_mx (struct ipcp_transformation *& x)
{
if (x)
gt_ggc_mx_ipcp_transformation ((void *) x);
}
void
gt_ggc_mx_hash_table_decl_state_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for decl_state_hasher — nothing to mark.  */
void
gt_ggc_mx (struct decl_state_hasher& x_r ATTRIBUTE_UNUSED)
{
struct decl_state_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_vec_expr_eval_op_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload — mark the type and the two-element val
   array of an expr_eval_op.  */
void
gt_ggc_mx (struct expr_eval_op& x_r ATTRIBUTE_UNUSED)
{
struct expr_eval_op * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).type);
{
size_t i0;
size_t l0 = (size_t)(2);
for (i0 = 0; i0 != l0; i0++) {
gt_ggc_m_9tree_node ((*x).val[i0]);
}
}
}
void
gt_ggc_mx_vec_condition_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload — mark the pointer members of a condition.  */
void
gt_ggc_mx (struct condition& x_r ATTRIBUTE_UNUSED)
{
struct condition * ATTRIBUTE_UNUSED x = &x_r;
gt_ggc_m_9tree_node ((*x).type);
gt_ggc_m_9tree_node ((*x).val);
gt_ggc_m_23vec_expr_eval_op_va_gc_ ((*x).param_ops);
}
void
gt_ggc_mx_vec_ipa_freqcounting_predicate_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: by-reference overload for ipa_freqcounting_predicate — nothing to
   mark.  */
void
gt_ggc_mx (struct ipa_freqcounting_predicate& x_r ATTRIBUTE_UNUSED)
{
struct ipa_freqcounting_predicate * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_ggc_mx_fast_function_summary_ipa_fn_summary__va_gc_ (void *x_p)
{
fast_function_summary * const x = (fast_function_summary *)x_p;
if (ggc_test_and_set_mark (x))
{
gt_ggc_mx (x);
}
}
/* GGC: pointer overload — forward to gt_ggc_mx_ipa_fn_summary when X is
   non-null.  */
void
gt_ggc_mx (struct ipa_fn_summary *& x)
{
if (x)
gt_ggc_mx_ipa_fn_summary ((void *) x);
}
/* PCH: note a line_maps object and its owned arrays (ordinary maps, macro
   maps with their macro locations, and the ad-hoc location data).  */
void
gt_pch_nx_line_maps (void *x_p)
{
struct line_maps * const x = (struct line_maps *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9line_maps))
{
{
size_t l0 = (size_t)(((*x).info_ordinary).used);
if ((*x).info_ordinary.maps != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_pch_n_S ((*x).info_ordinary.maps[i0].to_file);
}
gt_pch_note_object ((*x).info_ordinary.maps, x, gt_pch_p_9line_maps);
}
}
{
size_t l1 = (size_t)(((*x).info_macro).used);
if ((*x).info_macro.maps != NULL) {
size_t i1;
for (i1 = 0; i1 != (size_t)(l1); i1++) {
{
union tree_node * const x2 =
((*x).info_macro.maps[i1].macro) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).info_macro.maps[i1].macro))) : NULL;
gt_pch_n_9tree_node (x2);
}
if ((*x).info_macro.maps[i1].macro_locations != NULL) {
gt_pch_note_object ((*x).info_macro.maps[i1].macro_locations, x, gt_pch_p_9line_maps);
}
}
gt_pch_note_object ((*x).info_macro.maps, x, gt_pch_p_9line_maps);
}
}
{
size_t l3 = (size_t)(((*x).location_adhoc_data_map).allocated);
if ((*x).location_adhoc_data_map.data != NULL) {
size_t i3;
/* Entries contain nothing to note individually; only the array is
   registered below.  */
for (i3 = 0; i3 != (size_t)(l3); i3++) {
}
gt_pch_note_object ((*x).location_adhoc_data_map.data, x, gt_pch_p_9line_maps);
}
}
}
}
/* PCH: note a cpp_token; the val union member noted is selected by
   cpp_token_val_index.  Identifier nodes are translated to GCC identifier
   trees via HT_IDENT_TO_GCC_IDENT.  */
void
gt_pch_nx_cpp_token (void *x_p)
{
struct cpp_token * const x = (struct cpp_token *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9cpp_token))
{
switch ((int) (cpp_token_val_index (&((*x)))))
{
case CPP_TOKEN_FLD_NODE:
{
union tree_node * const x0 =
((*x).val.node.node) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.node.node))) : NULL;
gt_pch_n_9tree_node (x0);
}
{
union tree_node * const x1 =
((*x).val.node.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.node.spelling))) : NULL;
gt_pch_n_9tree_node (x1);
}
break;
case CPP_TOKEN_FLD_SOURCE:
gt_pch_n_9cpp_token ((*x).val.source);
break;
case CPP_TOKEN_FLD_STR:
gt_pch_n_S ((*x).val.str.text);
break;
case CPP_TOKEN_FLD_ARG_NO:
{
union tree_node * const x2 =
((*x).val.macro_arg.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).val.macro_arg.spelling))) : NULL;
gt_pch_n_9tree_node (x2);
}
break;
case CPP_TOKEN_FLD_TOKEN_NO:
break;
case CPP_TOKEN_FLD_PRAGMA:
break;
default:
break;
}
}
}
/* PCH: note a cpp_macro.  Parameters vs. assert chain is selected by
   kind == cmk_assert; expansion tokens vs. traditional text by
   kind == cmk_traditional.  */
void
gt_pch_nx_cpp_macro (void *x_p)
{
struct cpp_macro * const x = (struct cpp_macro *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9cpp_macro))
{
switch ((int) (((*x)).kind == cmk_assert))
{
case false:
if ((*x).parm.params != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(((*x)).paramc); i0++) {
{
union tree_node * const x1 =
((*x).parm.params[i0]) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).parm.params[i0]))) : NULL;
gt_pch_n_9tree_node (x1);
}
}
gt_pch_note_object ((*x).parm.params, x, gt_pch_p_9cpp_macro);
}
break;
case true:
gt_pch_n_9cpp_macro ((*x).parm.next);
break;
default:
break;
}
switch ((int) (((*x)).kind == cmk_traditional))
{
case false:
{
size_t i2;
size_t l2 = (size_t)(((*x)).count);
for (i2 = 0; i2 != l2; i2++) {
switch ((int) (cpp_token_val_index (&((*x).exp.tokens[i2]))))
{
case CPP_TOKEN_FLD_NODE:
{
union tree_node * const x3 =
((*x).exp.tokens[i2].val.node.node) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.node.node))) : NULL;
gt_pch_n_9tree_node (x3);
}
{
union tree_node * const x4 =
((*x).exp.tokens[i2].val.node.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.node.spelling))) : NULL;
gt_pch_n_9tree_node (x4);
}
break;
case CPP_TOKEN_FLD_SOURCE:
gt_pch_n_9cpp_token ((*x).exp.tokens[i2].val.source);
break;
case CPP_TOKEN_FLD_STR:
gt_pch_n_S ((*x).exp.tokens[i2].val.str.text);
break;
case CPP_TOKEN_FLD_ARG_NO:
{
union tree_node * const x5 =
((*x).exp.tokens[i2].val.macro_arg.spelling) ? HT_IDENT_TO_GCC_IDENT (HT_NODE (((*x).exp.tokens[i2].val.macro_arg.spelling))) : NULL;
gt_pch_n_9tree_node (x5);
}
break;
case CPP_TOKEN_FLD_TOKEN_NO:
break;
case CPP_TOKEN_FLD_PRAGMA:
break;
default:
break;
}
}
}
break;
case true:
gt_pch_n_S ((*x).exp.text);
break;
default:
break;
}
}
}
/* PCH: note a string_concat and its m_locs array.  */
void
gt_pch_nx_string_concat (void *x_p)
{
struct string_concat * const x = (struct string_concat *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_13string_concat))
{
if ((*x).m_locs != NULL) {
gt_pch_note_object ((*x).m_locs, x, gt_pch_p_13string_concat);
}
}
}
/* PCH: note a string_concat_db and its hash-map table.  */
void
gt_pch_nx_string_concat_db (void *x_p)
{
struct string_concat_db * const x = (struct string_concat_db *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_16string_concat_db))
{
gt_pch_n_38hash_map_location_hash_string_concat__ ((*x).m_table);
}
}
void
gt_pch_nx_hash_map_location_hash_string_concat__ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_38hash_map_location_hash_string_concat__))
{
gt_pch_nx (x);
}
}
/* PCH: pointer overload — forward to gt_pch_nx_string_concat when X is
   non-null.  */
void
gt_pch_nx (struct string_concat *& x)
{
if (x)
gt_pch_nx_string_concat ((void *) x);
}
/* PCH: note a bitmap_head and its first element.  */
void
gt_pch_nx_bitmap_head (void *x_p)
{
struct bitmap_head * const x = (struct bitmap_head *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11bitmap_head))
{
gt_pch_n_14bitmap_element ((*x).first);
}
}
void
gt_pch_nx_rtx_def (void *x_p)
{
struct rtx_def * x = (struct rtx_def *)x_p;
struct rtx_def * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_7rtx_def))
xlimit = (RTX_NEXT (&(*xlimit)));
if (x != xlimit)
for (;;)
{
struct rtx_def * const xprev = (RTX_PREV (&(*x)));
if (xprev == NULL) break;
x = xprev;
(void) gt_pch_note_object (xprev, xprev, gt_pch_p_7rtx_def);
}
while (x != xlimit)
{
switch ((int) (0))
{
case 0:
switch ((int) (GET_CODE (&(*x))))
{
case DEBUG_MARKER:
break;
case DEBUG_PARAMETER_REF:
gt_pch_n_9tree_node ((*x).u.fld[0].rt_tree);
break;
case ENTRY_VALUE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_IMPLICIT_PTR:
gt_pch_n_9tree_node ((*x).u.fld[0].rt_tree);
break;
case VAR_LOCATION:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_9tree_node ((*x).u.fld[0].rt_tree);
break;
case FMA:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_TRUNCATE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_TRUNCATE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_MINUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_ASHIFT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_ASHIFT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_ABS:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_NEG:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_NEG:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_MINUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_PLUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_PLUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_SERIES:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_DUPLICATE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_CONCAT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_SELECT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case VEC_MERGE:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LO_SUM:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case HIGH:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ZERO_EXTRACT:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SIGN_EXTRACT:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PARITY:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POPCOUNT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CTZ:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLZ:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLRSB:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FFS:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case BSWAP:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SQRT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ABS:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_SAT_FRACT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SAT_FRACT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FRACT_CONVERT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FRACT_CONVERT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FIX:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNSIGNED_FLOAT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FIX:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT_TRUNCATE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case FLOAT_EXTEND:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case TRUNCATE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ZERO_EXTEND:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SIGN_EXTEND:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNLT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNLE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNGT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNGE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNEQ:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ORDERED:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UNORDERED:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LTU:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LEU:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GTU:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GEU:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LTGT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case GE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EQ:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_MODIFY:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_MODIFY:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_INC:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case POST_DEC:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_INC:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PRE_DEC:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMAX:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMIN:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMAX:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMIN:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ROTATERT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case LSHIFTRT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ASHIFTRT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ROTATE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ASHIFT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NOT:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case XOR:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case IOR:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case AND:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMOD:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UDIV:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MOD:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_DIV:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_DIV:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DIV:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case UMUL_HIGHPART:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SMUL_HIGHPART:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case US_MULT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SS_MULT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MULT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NEG:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MINUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PLUS:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case COMPARE:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case IF_THEN_ELSE:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SYMBOL_REF:
switch ((int) (SYMBOL_REF_HAS_BLOCK_INFO_P (&(*x))))
{
case 1:
gt_pch_n_12object_block ((*x).u.block_sym.block);
break;
default:
break;
}
switch ((int) (CONSTANT_POOL_ADDRESS_P (&(*x))))
{
case 1:
gt_pch_n_23constant_descriptor_rtx ((*x).u.fld[1].rt_constant);
break;
default:
gt_pch_n_9tree_node ((*x).u.fld[1].rt_tree);
break;
}
gt_pch_n_S ((*x).u.fld[0].rt_str);
break;
case LABEL_REF:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case MEM:
gt_pch_n_9mem_attrs ((*x).u.fld[1].rt_mem);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CONCATN:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case CONCAT:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case STRICT_LOW_PART:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SUBREG:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SCRATCH:
break;
case REG:
gt_pch_n_9reg_attrs ((*x).u.reg.attrs);
break;
case PC:
break;
case CONST:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CONST_STRING:
gt_pch_n_S ((*x).u.fld[0].rt_str);
break;
case CONST_VECTOR:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case CONST_DOUBLE:
break;
case CONST_FIXED:
break;
case CONST_POLY_INT:
break;
case CONST_WIDE_INT:
break;
case CONST_INT:
break;
case TRAP_IF:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EH_RETURN:
break;
case SIMPLE_RETURN:
break;
case RETURN:
break;
case CALL:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CLOBBER:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case USE:
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case SET:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case PREFETCH:
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDR_DIFF_VEC:
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[2].rt_rtx);
gt_pch_n_9rtvec_def ((*x).u.fld[1].rt_rtvec);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDR_VEC:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case UNSPEC_VOLATILE:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case UNSPEC:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case ASM_OPERANDS:
gt_pch_n_9rtvec_def ((*x).u.fld[5].rt_rtvec);
gt_pch_n_9rtvec_def ((*x).u.fld[4].rt_rtvec);
gt_pch_n_9rtvec_def ((*x).u.fld[3].rt_rtvec);
gt_pch_n_S ((*x).u.fld[1].rt_str);
gt_pch_n_S ((*x).u.fld[0].rt_str);
break;
case ASM_INPUT:
gt_pch_n_S ((*x).u.fld[0].rt_str);
break;
case PARALLEL:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case COND_EXEC:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case NOTE:
switch ((int) (NOTE_KIND (&(*x))))
{
default:
gt_pch_n_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
break;
case NOTE_INSN_CFI_LABEL:
break;
case NOTE_INSN_CFI:
break;
case NOTE_INSN_SWITCH_TEXT_SECTIONS:
break;
case NOTE_INSN_BASIC_BLOCK:
break;
case NOTE_INSN_INLINE_ENTRY:
break;
case NOTE_INSN_BEGIN_STMT:
break;
case NOTE_INSN_VAR_LOCATION:
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
break;
case NOTE_INSN_EH_REGION_END:
break;
case NOTE_INSN_EH_REGION_BEG:
break;
case NOTE_INSN_EPILOGUE_BEG:
break;
case NOTE_INSN_PROLOGUE_END:
break;
case NOTE_INSN_FUNCTION_BEG:
break;
case NOTE_INSN_BLOCK_END:
gt_pch_n_9tree_node ((*x).u.fld[3].rt_tree);
break;
case NOTE_INSN_BLOCK_BEG:
gt_pch_n_9tree_node ((*x).u.fld[3].rt_tree);
break;
case NOTE_INSN_DELETED_DEBUG_LABEL:
gt_pch_n_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_DELETED_LABEL:
gt_pch_n_S ((*x).u.fld[3].rt_str);
break;
case NOTE_INSN_DELETED:
break;
}
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CODE_LABEL:
gt_pch_n_S ((*x).u.fld[6].rt_str);
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case BARRIER:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case JUMP_TABLE_DATA:
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case CALL_INSN:
gt_pch_n_7rtx_def ((*x).u.fld[7].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case JUMP_INSN:
gt_pch_n_7rtx_def ((*x).u.fld[7].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case INSN:
gt_pch_n_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_INSN:
gt_pch_n_7rtx_def ((*x).u.fld[6].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[3].rt_rtx);
gt_pch_n_15basic_block_def ((*x).u.fld[2].rt_bb);
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case ADDRESS:
break;
case SEQUENCE:
gt_pch_n_9rtvec_def ((*x).u.fld[0].rt_rtvec);
break;
case INT_LIST:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
break;
case INSN_LIST:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case EXPR_LIST:
gt_pch_n_7rtx_def ((*x).u.fld[1].rt_rtx);
gt_pch_n_7rtx_def ((*x).u.fld[0].rt_rtx);
break;
case DEBUG_EXPR:
gt_pch_n_9tree_node ((*x).u.fld[0].rt_tree);
break;
case VALUE:
break;
case UNKNOWN:
break;
default:
break;
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = (RTX_NEXT (&(*x)));
}
}
/* PCH note-pointers walker for an rtvec_def: register the vector object
   itself, then recursively note each of its NUM_ELEM rtx elements.  */
void
gt_pch_nx_rtvec_def (void *x_p)
{
struct rtvec_def * const x = (struct rtvec_def *)x_p;
/* gt_pch_note_object returns nonzero only the first time X is seen, so
   each vector's elements are walked exactly once.  */
if (gt_pch_note_object (x, x, gt_pch_p_9rtvec_def))
{
{
size_t l0 = (size_t)(((*x)).num_elem);
{
size_t i0;
for (i0 = 0; i0 != l0; i0++) {
gt_pch_n_7rtx_def ((*x).elem[i0]);
}
}
}
}
}
void
gt_pch_nx_gimple (void *x_p)
{
struct gimple * x = (struct gimple *)x_p;
struct gimple * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_6gimple))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
switch ((int) (gimple_statement_structure (&((*x)))))
{
case GSS_BASE:
gt_pch_n_15basic_block_def ((*x).bb);
gt_pch_n_6gimple ((*x).next);
break;
case GSS_WCE:
{
gimple_statement_wce *sub = static_cast (x);
gt_pch_n_6gimple ((*sub).cleanup);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP:
{
gimple_statement_omp *sub = static_cast (x);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_SECTIONS:
{
gomp_sections *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).clauses);
gt_pch_n_9tree_node ((*sub).control);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_PARALLEL_LAYOUT:
{
gimple_statement_omp_parallel_layout *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).clauses);
gt_pch_n_9tree_node ((*sub).child_fn);
gt_pch_n_9tree_node ((*sub).data_arg);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_TASK:
{
gomp_task *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).copy_fn);
gt_pch_n_9tree_node ((*sub).arg_size);
gt_pch_n_9tree_node ((*sub).arg_align);
gt_pch_n_9tree_node ((*sub).clauses);
gt_pch_n_9tree_node ((*sub).child_fn);
gt_pch_n_9tree_node ((*sub).data_arg);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_FOR:
{
gomp_for *sub = static_cast (x);
{
size_t l0 = (size_t)(((*sub)).collapse);
gt_pch_n_9tree_node ((*sub).clauses);
if ((*sub).iter != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_pch_n_9tree_node ((*sub).iter[i0].index);
gt_pch_n_9tree_node ((*sub).iter[i0].initial);
gt_pch_n_9tree_node ((*sub).iter[i0].final);
gt_pch_n_9tree_node ((*sub).iter[i0].incr);
}
gt_pch_note_object ((*sub).iter, x, gt_pch_p_6gimple);
}
gt_pch_n_6gimple ((*sub).pre_body);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
case GSS_OMP_SINGLE_LAYOUT:
{
gimple_statement_omp_single_layout *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).clauses);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_CRITICAL:
{
gomp_critical *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).clauses);
gt_pch_n_9tree_node ((*sub).name);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_CONTINUE:
{
gomp_continue *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).control_def);
gt_pch_n_9tree_node ((*sub).control_use);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_ATOMIC_STORE_LAYOUT:
{
gimple_statement_omp_atomic_store_layout *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).val);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_OMP_ATOMIC_LOAD:
{
gomp_atomic_load *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).rhs);
gt_pch_n_9tree_node ((*sub).lhs);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_TRY:
{
gtry *sub = static_cast (x);
gt_pch_n_6gimple ((*sub).eval);
gt_pch_n_6gimple ((*sub).cleanup);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_PHI:
{
gphi *sub = static_cast (x);
{
size_t l1 = (size_t)(((*sub)).nargs);
gt_pch_n_9tree_node ((*sub).result);
{
size_t i1;
for (i1 = 0; i1 != l1; i1++) {
gt_pch_n_9tree_node ((*sub).args[i1].def);
}
}
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
case GSS_EH_CTRL:
{
gimple_statement_eh_ctrl *sub = static_cast (x);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_EH_ELSE:
{
geh_else *sub = static_cast (x);
gt_pch_n_6gimple ((*sub).n_body);
gt_pch_n_6gimple ((*sub).e_body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_EH_MNT:
{
geh_mnt *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).fndecl);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_EH_FILTER:
{
geh_filter *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).types);
gt_pch_n_6gimple ((*sub).failure);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_CATCH:
{
gcatch *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).types);
gt_pch_n_6gimple ((*sub).handler);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_BIND:
{
gbind *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).vars);
gt_pch_n_9tree_node ((*sub).block);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_WITH_MEM_OPS_BASE:
{
gimple_statement_with_memory_ops_base *sub = static_cast (x);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_TRANSACTION:
{
gtransaction *sub = static_cast (x);
gt_pch_n_6gimple ((*sub).body);
gt_pch_n_9tree_node ((*sub).label_norm);
gt_pch_n_9tree_node ((*sub).label_uninst);
gt_pch_n_9tree_node ((*sub).label_over);
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
break;
case GSS_CALL:
{
gcall *sub = static_cast (x);
{
size_t l2 = (size_t)(((*sub)).num_ops);
gt_pch_n_11bitmap_head ((*sub).call_used.vars);
gt_pch_n_11bitmap_head ((*sub).call_clobbered.vars);
switch ((int) (((*sub)).subcode & GF_CALL_INTERNAL))
{
case 0:
gt_pch_n_9tree_node ((*sub).u.fntype);
break;
case GF_CALL_INTERNAL:
break;
default:
break;
}
{
size_t i2;
for (i2 = 0; i2 != l2; i2++) {
gt_pch_n_9tree_node ((*sub).op[i2]);
}
}
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
case GSS_ASM:
{
gasm *sub = static_cast (x);
{
size_t l3 = (size_t)(((*sub)).num_ops);
gt_pch_n_S ((*sub).string);
{
size_t i3;
for (i3 = 0; i3 != l3; i3++) {
gt_pch_n_9tree_node ((*sub).op[i3]);
}
}
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
case GSS_WITH_MEM_OPS:
{
gimple_statement_with_memory_ops *sub = static_cast (x);
{
size_t l4 = (size_t)(((*sub)).num_ops);
{
size_t i4;
for (i4 = 0; i4 != l4; i4++) {
gt_pch_n_9tree_node ((*sub).op[i4]);
}
}
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
case GSS_WITH_OPS:
{
gimple_statement_with_ops *sub = static_cast (x);
{
size_t l5 = (size_t)(((*sub)).num_ops);
{
size_t i5;
for (i5 = 0; i5 != l5; i5++) {
gt_pch_n_9tree_node ((*sub).op[i5]);
}
}
gt_pch_n_15basic_block_def ((*sub).bb);
gt_pch_n_6gimple ((*sub).next);
}
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = ((*x).next);
}
}
void
gt_pch_nx_symtab_node (void *x_p)
{
struct symtab_node * x = (struct symtab_node *)x_p;
struct symtab_node * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_11symtab_node))
xlimit = ((*xlimit).next);
if (x != xlimit)
for (;;)
{
struct symtab_node * const xprev = ((*x).previous);
if (xprev == NULL) break;
x = xprev;
(void) gt_pch_note_object (xprev, xprev, gt_pch_p_11symtab_node);
}
while (x != xlimit)
{
switch ((int) (((*x)).type))
{
case SYMTAB_SYMBOL:
gt_pch_n_9tree_node ((*x).decl);
gt_pch_n_11symtab_node ((*x).next);
gt_pch_n_11symtab_node ((*x).previous);
gt_pch_n_11symtab_node ((*x).next_sharing_asm_name);
gt_pch_n_11symtab_node ((*x).previous_sharing_asm_name);
gt_pch_n_11symtab_node ((*x).same_comdat_group);
gt_pch_n_9tree_node ((*x).alias_target);
gt_pch_n_18lto_file_decl_data ((*x).lto_file_data);
gt_pch_n_9tree_node ((*x).x_comdat_group);
gt_pch_n_18section_hash_entry ((*x).x_section);
break;
case SYMTAB_VARIABLE:
{
varpool_node *sub = static_cast (x);
gt_pch_n_9tree_node ((*sub).decl);
gt_pch_n_11symtab_node ((*sub).next);
gt_pch_n_11symtab_node ((*sub).previous);
gt_pch_n_11symtab_node ((*sub).next_sharing_asm_name);
gt_pch_n_11symtab_node ((*sub).previous_sharing_asm_name);
gt_pch_n_11symtab_node ((*sub).same_comdat_group);
gt_pch_n_9tree_node ((*sub).alias_target);
gt_pch_n_18lto_file_decl_data ((*sub).lto_file_data);
gt_pch_n_9tree_node ((*sub).x_comdat_group);
gt_pch_n_18section_hash_entry ((*sub).x_section);
}
break;
case SYMTAB_FUNCTION:
{
cgraph_node *sub = static_cast (x);
gt_pch_n_11cgraph_edge ((*sub).callees);
gt_pch_n_11cgraph_edge ((*sub).callers);
gt_pch_n_11cgraph_edge ((*sub).indirect_calls);
gt_pch_n_11symtab_node ((*sub).next_sibling_clone);
gt_pch_n_11symtab_node ((*sub).prev_sibling_clone);
gt_pch_n_11symtab_node ((*sub).clones);
gt_pch_n_11symtab_node ((*sub).clone_of);
gt_pch_n_30hash_table_cgraph_edge_hasher_ ((*sub).call_site_hash);
gt_pch_n_9tree_node ((*sub).former_clone_of);
gt_pch_n_17cgraph_simd_clone ((*sub).simdclone);
gt_pch_n_11symtab_node ((*sub).simd_clones);
gt_pch_n_11symtab_node ((*sub).inlined_to);
gt_pch_n_15cgraph_rtl_info ((*sub).rtl);
gt_pch_n_9tree_node ((*sub).decl);
gt_pch_n_11symtab_node ((*sub).next);
gt_pch_n_11symtab_node ((*sub).previous);
gt_pch_n_11symtab_node ((*sub).next_sharing_asm_name);
gt_pch_n_11symtab_node ((*sub).previous_sharing_asm_name);
gt_pch_n_11symtab_node ((*sub).same_comdat_group);
gt_pch_n_9tree_node ((*sub).alias_target);
gt_pch_n_18lto_file_decl_data ((*sub).lto_file_data);
gt_pch_n_9tree_node ((*sub).x_comdat_group);
gt_pch_n_18section_hash_entry ((*sub).x_section);
}
break;
/* Unrecognized tag value. */
default: gcc_unreachable ();
}
x = ((*x).next);
}
}
/* PCH note-pointers walker for a doubly-linked list of cgraph_edge
   nodes: note forward along next_caller, rewind to the head along
   prev_caller, then mark the fields of each edge up to XLIMIT.  */
void
gt_pch_nx_cgraph_edge (void *x_p)
{
struct cgraph_edge * x = (struct cgraph_edge *)x_p;
struct cgraph_edge * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_11cgraph_edge))
xlimit = ((*xlimit).next_caller);
if (x != xlimit)
for (;;)
{
struct cgraph_edge * const xprev = ((*x).prev_caller);
if (xprev == NULL) break;
x = xprev;
(void) gt_pch_note_object (xprev, xprev, gt_pch_p_11cgraph_edge);
}
while (x != xlimit)
{
gt_pch_n_11symtab_node ((*x).caller);
gt_pch_n_11symtab_node ((*x).callee);
gt_pch_n_11cgraph_edge ((*x).prev_caller);
gt_pch_n_11cgraph_edge ((*x).next_caller);
gt_pch_n_11cgraph_edge ((*x).prev_callee);
gt_pch_n_11cgraph_edge ((*x).next_callee);
gt_pch_n_6gimple ((*x).call_stmt);
gt_pch_n_25cgraph_indirect_call_info ((*x).indirect_info);
x = ((*x).next_caller);
}
}
/* Field-marking overload used by container templates holding
   cgraph_edge by value; walks one edge, no chain traversal.  */
void
gt_pch_nx (struct cgraph_edge& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_edge * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_11symtab_node ((*x).caller);
gt_pch_n_11symtab_node ((*x).callee);
gt_pch_n_11cgraph_edge ((*x).prev_caller);
gt_pch_n_11cgraph_edge ((*x).next_caller);
gt_pch_n_11cgraph_edge ((*x).prev_callee);
gt_pch_n_11cgraph_edge ((*x).next_callee);
gt_pch_n_6gimple ((*x).call_stmt);
gt_pch_n_25cgraph_indirect_call_info ((*x).indirect_info);
}
/* Pointer overload: delegate to the chain walker if non-null.  */
void
gt_pch_nx (struct cgraph_edge *& x)
{
if (x)
gt_pch_nx_cgraph_edge ((void *) x);
}
/* PCH note-pointers walker for a 'union section'; which arm of the
   union is live is determined by SECTION_STYLE.  */
void
gt_pch_nx_section (void *x_p)
{
union section * const x = (union section *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_7section))
{
switch ((int) (SECTION_STYLE (&(((*x))))))
{
case SECTION_NAMED:
gt_pch_n_S ((*x).named.name);
gt_pch_n_9tree_node ((*x).named.decl);
break;
case SECTION_UNNAMED:
gt_pch_n_S ((*x).unnamed.data);
gt_pch_n_7section ((*x).unnamed.next);
break;
case SECTION_NOSWITCH:
/* No GC-visible pointers in this arm.  */
break;
default:
break;
}
}
}
/* Reference overload used by container templates holding 'section'
   by value; same per-style field walk, no object registration.  */
void
gt_pch_nx (union section& x_r ATTRIBUTE_UNUSED)
{
union section * ATTRIBUTE_UNUSED x = &x_r;
switch ((int) (SECTION_STYLE (&(((*x))))))
{
case SECTION_NAMED:
gt_pch_n_S ((*x).named.name);
gt_pch_n_9tree_node ((*x).named.decl);
break;
case SECTION_UNNAMED:
gt_pch_n_S ((*x).unnamed.data);
gt_pch_n_7section ((*x).unnamed.next);
break;
case SECTION_NOSWITCH:
break;
default:
break;
}
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (union section *& x)
{
if (x)
gt_pch_nx_section ((void *) x);
}
/* PCH walker for cl_target_option: the struct contains no GC-visible
   pointers, so only the object itself is registered.  */
void
gt_pch_nx_cl_target_option (void *x_p)
{
struct cl_target_option * const x = (struct cl_target_option *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_16cl_target_option))
{
}
}
/* PCH walker for cl_optimization: register the object and note its
   string-valued option fields (marked with gt_pch_n_S).  */
void
gt_pch_nx_cl_optimization (void *x_p)
{
struct cl_optimization * const x = (struct cl_optimization *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15cl_optimization))
{
gt_pch_n_S ((*x).x_str_align_functions);
gt_pch_n_S ((*x).x_str_align_jumps);
gt_pch_n_S ((*x).x_str_align_labels);
gt_pch_n_S ((*x).x_str_align_loops);
gt_pch_n_S ((*x).x_flag_patchable_function_entry);
}
}
/* PCH walker for edge_def: register the object, then delegate field
   marking to the user-provided gt_pch_nx overload (edge_def is a
   GTY((user)) type whose marking routine is hand-written).  */
void
gt_pch_nx_edge_def (void *x_p)
{
edge_def * const x = (edge_def *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_8edge_def))
{
gt_pch_nx (x);
}
}
/* PCH note-pointers walker for the basic-block chain: note forward
   along next_bb, rewind to the head along prev_bb, then mark each
   block's fields.  The IL union arm (gimple vs. RTL) is chosen by the
   block's BB_RTL flag.  */
void
gt_pch_nx_basic_block_def (void *x_p)
{
struct basic_block_def * x = (struct basic_block_def *)x_p;
struct basic_block_def * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_15basic_block_def))
xlimit = ((*xlimit).next_bb);
if (x != xlimit)
for (;;)
{
struct basic_block_def * const xprev = ((*x).prev_bb);
if (xprev == NULL) break;
x = xprev;
(void) gt_pch_note_object (xprev, xprev, gt_pch_p_15basic_block_def);
}
while (x != xlimit)
{
gt_pch_n_15vec_edge_va_gc_ ((*x).preds);
gt_pch_n_15vec_edge_va_gc_ ((*x).succs);
gt_pch_n_4loop ((*x).loop_father);
gt_pch_n_15basic_block_def ((*x).prev_bb);
gt_pch_n_15basic_block_def ((*x).next_bb);
switch ((int) (((((*x)).flags & BB_RTL) != 0)))
{
case 0:
/* GIMPLE form: statement sequence plus PHI nodes.  */
gt_pch_n_6gimple ((*x).il.gimple.seq);
gt_pch_n_6gimple ((*x).il.gimple.phi_nodes);
break;
case 1:
/* RTL form: insn chain head plus RTL-specific block info.  */
gt_pch_n_7rtx_def ((*x).il.x.head_);
gt_pch_n_11rtl_bb_info ((*x).il.x.rtl);
break;
default:
break;
}
x = ((*x).next_bb);
}
}
/* PCH note-pointers walker for a singly-registered bitmap_element
   chain: note forward along 'next', then mark next/prev links of each
   element.  No prev-rewind phase is generated for this type.  */
void
gt_pch_nx_bitmap_element (void *x_p)
{
struct bitmap_element * x = (struct bitmap_element *)x_p;
struct bitmap_element * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_14bitmap_element))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_pch_n_14bitmap_element ((*x).next);
gt_pch_n_14bitmap_element ((*x).prev);
x = ((*x).next);
}
}
void
gt_pch_nx_generic_wide_int_wide_int_storage_ (void *x_p)
{
generic_wide_int * const x = (generic_wide_int *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_34generic_wide_int_wide_int_storage_))
{
gt_pch_nx (x);
}
}
/* Field-marking overload for wide_int_storage: the storage is plain
   data with no GC-visible pointers, so nothing to do.  */
void
gt_pch_nx (struct wide_int_storage& x_r ATTRIBUTE_UNUSED)
{
struct wide_int_storage * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for mem_attrs: the only GC-visible field is 'expr'.  */
void
gt_pch_nx_mem_attrs (void *x_p)
{
struct mem_attrs * const x = (struct mem_attrs *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9mem_attrs))
{
gt_pch_n_9tree_node ((*x).expr);
}
}
/* PCH walker for reg_attrs: the only GC-visible field is 'decl'.  */
void
gt_pch_nx_reg_attrs (void *x_p)
{
struct reg_attrs * const x = (struct reg_attrs *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9reg_attrs))
{
gt_pch_n_9tree_node ((*x).decl);
}
}
/* Reference overload for by-value reg_attrs in containers.  */
void
gt_pch_nx (struct reg_attrs& x_r ATTRIBUTE_UNUSED)
{
struct reg_attrs * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).decl);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct reg_attrs *& x)
{
if (x)
gt_pch_nx_reg_attrs ((void *) x);
}
/* PCH walker for object_block: note its section and the two rtx
   vectors of contained objects and section anchors.  */
void
gt_pch_nx_object_block (void *x_p)
{
struct object_block * const x = (struct object_block *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12object_block))
{
gt_pch_n_7section ((*x).sect);
gt_pch_n_14vec_rtx_va_gc_ ((*x).objects);
gt_pch_n_14vec_rtx_va_gc_ ((*x).anchors);
}
}
/* Reference overload for by-value object_block in containers.  */
void
gt_pch_nx (struct object_block& x_r ATTRIBUTE_UNUSED)
{
struct object_block * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_7section ((*x).sect);
gt_pch_n_14vec_rtx_va_gc_ ((*x).objects);
gt_pch_n_14vec_rtx_va_gc_ ((*x).anchors);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct object_block *& x)
{
if (x)
gt_pch_nx_object_block ((void *) x);
}
void
gt_pch_nx_vec_rtx_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14vec_rtx_va_gc_))
{
gt_pch_nx (x);
}
}
/* Pointer overload for rtx: delegate to the rtx_def walker.  */
void
gt_pch_nx (struct rtx_def *& x)
{
if (x)
gt_pch_nx_rtx_def ((void *) x);
}
/* PCH walker for real_value: no GC-visible pointers, register only.  */
void
gt_pch_nx_real_value (void *x_p)
{
struct real_value * const x = (struct real_value *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_10real_value))
{
}
}
/* PCH walker for fixed_value: register the object and recurse into its
   machine-mode member via the gt_pch_nx overload for modes.  */
void
gt_pch_nx_fixed_value (void *x_p)
{
struct fixed_value * const x = (struct fixed_value *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11fixed_value))
{
gt_pch_nx (&((*x).mode));
}
}
/* PCH walker for 'struct function': register the object and note every
   GC-visible field (EH state, CFG, gimple body, loops, decls, and the
   per-function machine/language blobs).  */
void
gt_pch_nx_function (void *x_p)
{
struct function * const x = (struct function *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_8function))
{
gt_pch_n_9eh_status ((*x).eh);
gt_pch_n_18control_flow_graph ((*x).cfg);
gt_pch_n_6gimple ((*x).gimple_body);
gt_pch_n_9gimple_df ((*x).gimple_df);
gt_pch_n_5loops ((*x).x_current_loops);
gt_pch_n_S ((*x).pass_startwith);
gt_pch_n_11stack_usage ((*x).su);
gt_pch_n_9tree_node ((*x).decl);
gt_pch_n_9tree_node ((*x).static_chain_decl);
gt_pch_n_9tree_node ((*x).nonlocal_goto_save_area);
gt_pch_n_15vec_tree_va_gc_ ((*x).local_decls);
gt_pch_n_16machine_function ((*x).machine);
gt_pch_n_17language_function ((*x).language);
gt_pch_n_14hash_set_tree_ ((*x).used_types_hash);
gt_pch_n_11dw_fde_node ((*x).fde);
}
}
/* PCH walker for target_rtl: note every rtx in the fixed-size
   per-target tables (global rtl, initial regno rtx, top-of-stack,
   static reg base values) plus the per-mode mem_attrs cache.  */
void
gt_pch_nx_target_rtl (void *x_p)
{
struct target_rtl * const x = (struct target_rtl *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_10target_rtl))
{
{
size_t i0;
size_t l0 = (size_t)(GR_MAX);
for (i0 = 0; i0 != l0; i0++) {
gt_pch_n_7rtx_def ((*x).x_global_rtl[i0]);
}
}
gt_pch_n_7rtx_def ((*x).x_pic_offset_table_rtx);
gt_pch_n_7rtx_def ((*x).x_return_address_pointer_rtx);
{
size_t i1;
size_t l1 = (size_t)(FIRST_PSEUDO_REGISTER);
for (i1 = 0; i1 != l1; i1++) {
gt_pch_n_7rtx_def ((*x).x_initial_regno_reg_rtx[i1]);
}
}
{
size_t i2;
size_t l2 = (size_t)(MAX_MACHINE_MODE);
for (i2 = 0; i2 != l2; i2++) {
gt_pch_n_7rtx_def ((*x).x_top_of_stack[i2]);
}
}
{
size_t i3;
size_t l3 = (size_t)(FIRST_PSEUDO_REGISTER);
for (i3 = 0; i3 != l3; i3++) {
gt_pch_n_7rtx_def ((*x).x_static_reg_base_value[i3]);
}
}
{
size_t i4;
size_t l4 = (size_t)((int) MAX_MACHINE_MODE);
for (i4 = 0; i4 != l4; i4++) {
gt_pch_n_9mem_attrs ((*x).x_mode_mem_attrs[i4]);
}
}
}
}
/* PCH walker for cgraph_rtl_info: no GC-visible pointers, register
   the object only.  */
void
gt_pch_nx_cgraph_rtl_info (void *x_p)
{
struct cgraph_rtl_info * const x = (struct cgraph_rtl_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15cgraph_rtl_info))
{
}
}
void
gt_pch_nx_hash_map_tree_tree_decl_tree_cache_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_42hash_map_tree_tree_decl_tree_cache_traits_))
{
gt_pch_nx (x);
}
}
/* Traits classes carry no data; nothing to mark.  */
void
gt_pch_nx (struct decl_tree_cache_traits& x_r ATTRIBUTE_UNUSED)
{
struct decl_tree_cache_traits * ATTRIBUTE_UNUSED x = &x_r;
}
/* Pointer overload for tree: delegate to the (language-specific)
   lang_tree_node walker.  */
void
gt_pch_nx (union tree_node *& x)
{
if (x)
gt_pch_nx_lang_tree_node ((void *) x);
}
void
gt_pch_nx_hash_map_tree_tree_type_tree_cache_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_42hash_map_tree_tree_type_tree_cache_traits_))
{
gt_pch_nx (x);
}
}
/* Traits classes carry no data; nothing to mark.  */
void
gt_pch_nx (struct type_tree_cache_traits& x_r ATTRIBUTE_UNUSED)
{
struct type_tree_cache_traits * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_hash_map_tree_tree_decl_tree_traits_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_36hash_map_tree_tree_decl_tree_traits_))
{
gt_pch_nx (x);
}
}
/* Traits classes carry no data; nothing to mark.  */
void
gt_pch_nx (struct decl_tree_traits& x_r ATTRIBUTE_UNUSED)
{
struct decl_tree_traits * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for ptr_info_def: the only GC-visible field is the
   points-to variable bitmap.  */
void
gt_pch_nx_ptr_info_def (void *x_p)
{
struct ptr_info_def * const x = (struct ptr_info_def *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12ptr_info_def))
{
gt_pch_n_11bitmap_head ((*x).pt.vars);
}
}
/* PCH walker for range_info_def: no GC-visible pointers, register
   the object only.  */
void
gt_pch_nx_range_info_def (void *x_p)
{
struct range_info_def * const x = (struct range_info_def *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14range_info_def))
{
}
}
void
gt_pch_nx_vec_constructor_elt_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26vec_constructor_elt_va_gc_))
{
gt_pch_nx (x);
}
}
/* Field-marking overload for a by-value constructor_elt: note its
   index and value trees.  */
void
gt_pch_nx (struct constructor_elt& x_r ATTRIBUTE_UNUSED)
{
struct constructor_elt * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).index);
gt_pch_n_9tree_node ((*x).value);
}
void
gt_pch_nx_vec_tree_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15vec_tree_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note-pointers walker for the doubly-linked statement-list chain:
   note forward along 'next', rewind to the head along 'prev', then mark
   prev/next/stmt of each node.  */
void
gt_pch_nx_tree_statement_list_node (void *x_p)
{
struct tree_statement_list_node * x = (struct tree_statement_list_node *)x_p;
struct tree_statement_list_node * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_24tree_statement_list_node))
xlimit = ((*xlimit).next);
if (x != xlimit)
for (;;)
{
struct tree_statement_list_node * const xprev = ((*x).prev);
if (xprev == NULL) break;
x = xprev;
(void) gt_pch_note_object (xprev, xprev, gt_pch_p_24tree_statement_list_node);
}
while (x != xlimit)
{
gt_pch_n_24tree_statement_list_node ((*x).prev);
gt_pch_n_24tree_statement_list_node ((*x).next);
gt_pch_n_9tree_node ((*x).stmt);
x = ((*x).next);
}
}
/* PCH walker for target_globals: of its per-target state only the rtl
   and libfuncs sub-structures are GTY-marked here.  */
void
gt_pch_nx_target_globals (void *x_p)
{
struct target_globals * const x = (struct target_globals *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14target_globals))
{
gt_pch_n_10target_rtl ((*x).rtl);
gt_pch_n_15target_libfuncs ((*x).libfuncs);
}
}
/* PCH walker for tree_map (tree -> tree hash entries): note the key
   (base.from) and the mapped value.  */
void
gt_pch_nx_tree_map (void *x_p)
{
struct tree_map * const x = (struct tree_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_8tree_map))
{
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_9tree_node ((*x).to);
}
}
/* Reference overload for by-value tree_map in containers.  */
void
gt_pch_nx (struct tree_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_map * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_9tree_node ((*x).to);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct tree_map *& x)
{
if (x)
gt_pch_nx_tree_map ((void *) x);
}
/* PCH walker for tree_decl_map (decl -> tree hash entries): note the
   key (base.from) and the mapped value.  */
void
gt_pch_nx_tree_decl_map (void *x_p)
{
struct tree_decl_map * const x = (struct tree_decl_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_13tree_decl_map))
{
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_9tree_node ((*x).to);
}
}
/* Reference overload for by-value tree_decl_map in containers.  */
void
gt_pch_nx (struct tree_decl_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_decl_map * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_9tree_node ((*x).to);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct tree_decl_map *& x)
{
if (x)
gt_pch_nx_tree_decl_map ((void *) x);
}
/* PCH walker for tree_int_map (tree -> int hash entries): only the
   tree key is GC-visible.  */
void
gt_pch_nx_tree_int_map (void *x_p)
{
struct tree_int_map * const x = (struct tree_int_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12tree_int_map))
{
gt_pch_n_9tree_node ((*x).base.from);
}
}
/* Reference overload for by-value tree_int_map in containers.  */
void
gt_pch_nx (struct tree_int_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_int_map * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).base.from);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct tree_int_map *& x)
{
if (x)
gt_pch_nx_tree_int_map ((void *) x);
}
/* PCH walker for tree_vec_map (tree -> vec<tree> hash entries): note
   the tree key and the mapped GC vector.  */
void
gt_pch_nx_tree_vec_map (void *x_p)
{
struct tree_vec_map * const x = (struct tree_vec_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12tree_vec_map))
{
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_15vec_tree_va_gc_ ((*x).to);
}
}
/* Reference overload for by-value tree_vec_map in containers.  */
void
gt_pch_nx (struct tree_vec_map& x_r ATTRIBUTE_UNUSED)
{
struct tree_vec_map * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).base.from);
gt_pch_n_15vec_tree_va_gc_ ((*x).to);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct tree_vec_map *& x)
{
if (x)
gt_pch_nx_tree_vec_map ((void *) x);
}
void
gt_pch_nx_vec_alias_pair_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_21vec_alias_pair_va_gc_))
{
gt_pch_nx (x);
}
}
/* Field-marking overload for a by-value alias_pair: note the alias
   decl and its target.  */
void
gt_pch_nx (struct alias_pair& x_r ATTRIBUTE_UNUSED)
{
struct alias_pair * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).decl);
gt_pch_n_9tree_node ((*x).target);
}
/* PCH walker for libfunc_entry: the only GC-visible field is the
   libfunc rtx.  */
void
gt_pch_nx_libfunc_entry (void *x_p)
{
struct libfunc_entry * const x = (struct libfunc_entry *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_13libfunc_entry))
{
gt_pch_n_7rtx_def ((*x).libfunc);
}
}
/* Reference overload for by-value libfunc_entry in containers.  */
void
gt_pch_nx (struct libfunc_entry& x_r ATTRIBUTE_UNUSED)
{
struct libfunc_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_7rtx_def ((*x).libfunc);
}
/* Pointer overload: delegate to the registering walker if non-null.  */
void
gt_pch_nx (struct libfunc_entry *& x)
{
if (x)
gt_pch_nx_libfunc_entry ((void *) x);
}
void
gt_pch_nx_hash_table_libfunc_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26hash_table_libfunc_hasher_))
{
gt_pch_nx (x);
}
}
/* By-reference variant for libfunc_hasher: the hasher itself holds no
   GC-owned data, so the body is empty.  */
void
gt_pch_nx (struct libfunc_hasher& x_r ATTRIBUTE_UNUSED)
{
struct libfunc_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for target_libfuncs: notes every entry of the fixed-size
   libfunc rtx table plus the optabs libfunc hash table.  */
void
gt_pch_nx_target_libfuncs (void *x_p)
{
struct target_libfuncs * const x = (struct target_libfuncs *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15target_libfuncs))
{
{
size_t i0;
size_t l0 = (size_t)(LTI_MAX);
for (i0 = 0; i0 != l0; i0++) {
gt_pch_n_7rtx_def ((*x).x_libfunc_table[i0]);
}
}
gt_pch_n_26hash_table_libfunc_hasher_ ((*x).x_libfunc_hash);
}
}
/* PCH walker for sequence_stack: notes the first/last insns of the
   saved sequence and the enclosing stack entry.  */
void
gt_pch_nx_sequence_stack (void *x_p)
{
struct sequence_stack * const x = (struct sequence_stack *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14sequence_stack))
{
gt_pch_n_7rtx_def ((*x).first);
gt_pch_n_7rtx_def ((*x).last);
gt_pch_n_14sequence_stack ((*x).next);
}
}
void
gt_pch_nx_vec_rtx_insn__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_20vec_rtx_insn__va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct rtx_insn *& x)
{
if (x)
gt_pch_nx_rtx_def ((void *) x);
}
void
gt_pch_nx_vec_uchar_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_16vec_uchar_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx_vec_call_site_record_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27vec_call_site_record_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct call_site_record_d *& x)
{
if (x)
gt_pch_nx_call_site_record_d ((void *) x);
}
/* PCH walker for gimple_df (per-function SSA data): notes the SSA name
   vectors, the virtual operand, the escaped-vars bitmap, default-def
   table, operand memory and TM restart table.  */
void
gt_pch_nx_gimple_df (void *x_p)
{
struct gimple_df * const x = (struct gimple_df *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9gimple_df))
{
gt_pch_n_15vec_tree_va_gc_ ((*x).ssa_names);
gt_pch_n_9tree_node ((*x).vop);
gt_pch_n_11bitmap_head ((*x).escaped.vars);
gt_pch_n_15vec_tree_va_gc_ ((*x).free_ssanames);
gt_pch_n_15vec_tree_va_gc_ ((*x).free_ssanames_queue);
gt_pch_n_27hash_table_ssa_name_hasher_ ((*x).default_defs);
gt_pch_n_20ssa_operand_memory_d ((*x).ssa_operands.operand_memory);
gt_pch_n_29hash_table_tm_restart_hasher_ ((*x).tm_restart);
}
}
/* PCH walker for dw_fde_node (DWARF FDE): notes the decl, the various
   label strings and the CFI instruction vector.  */
void
gt_pch_nx_dw_fde_node (void *x_p)
{
struct dw_fde_node * const x = (struct dw_fde_node *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11dw_fde_node))
{
gt_pch_n_9tree_node ((*x).decl);
gt_pch_n_S ((*x).dw_fde_begin);
gt_pch_n_S ((*x).dw_fde_current_label);
gt_pch_n_S ((*x).dw_fde_end);
gt_pch_n_S ((*x).dw_fde_vms_end_prologue);
gt_pch_n_S ((*x).dw_fde_vms_begin_epilogue);
gt_pch_n_S ((*x).dw_fde_second_begin);
gt_pch_n_S ((*x).dw_fde_second_end);
gt_pch_n_21vec_dw_cfi_ref_va_gc_ ((*x).dw_fde_cfi);
}
}
/* PCH walker for frame_space: only the 'next' link is GC-owned.  */
void
gt_pch_nx_frame_space (void *x_p)
{
struct frame_space * const x = (struct frame_space *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11frame_space))
{
gt_pch_n_11frame_space ((*x).next);
}
}
void
gt_pch_nx_vec_callinfo_callee_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26vec_callinfo_callee_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct callinfo_callee& x_r ATTRIBUTE_UNUSED)
{
struct callinfo_callee * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).decl);
}
void
gt_pch_nx_vec_callinfo_dalloc_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26vec_callinfo_dalloc_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct callinfo_dalloc& x_r ATTRIBUTE_UNUSED)
{
struct callinfo_dalloc * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_S ((*x).name);
}
/* PCH walker for stack_usage: notes the callee and dynamic-allocation
   vectors recorded for -fcallgraph-info.  */
void
gt_pch_nx_stack_usage (void *x_p)
{
struct stack_usage * const x = (struct stack_usage *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11stack_usage))
{
gt_pch_n_26vec_callinfo_callee_va_gc_ ((*x).callees);
gt_pch_n_26vec_callinfo_dalloc_va_gc_ ((*x).dallocs);
}
}
/* PCH walker for eh_status: notes the EH region tree, region/landing
   pad arrays, throw-stmt table, and the ehspec union member selected
   by the target's unwinder flavour (ARM EABI vs. generic).  */
void
gt_pch_nx_eh_status (void *x_p)
{
struct eh_status * const x = (struct eh_status *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9eh_status))
{
gt_pch_n_11eh_region_d ((*x).region_tree);
gt_pch_n_20vec_eh_region_va_gc_ ((*x).region_array);
gt_pch_n_25vec_eh_landing_pad_va_gc_ ((*x).lp_array);
gt_pch_n_21hash_map_gimple__int_ ((*x).throw_stmt_table);
gt_pch_n_15vec_tree_va_gc_ ((*x).ttype_data);
switch ((int) (targetm.arm_eabi_unwinder))
{
case 1:
gt_pch_n_15vec_tree_va_gc_ ((*x).ehspec_data.arm_eabi);
break;
case 0:
gt_pch_n_16vec_uchar_va_gc_ ((*x).ehspec_data.other);
break;
default:
break;
}
}
}
/* PCH walker for control_flow_graph: notes entry/exit blocks and the
   basic-block and label-to-block maps.  */
void
gt_pch_nx_control_flow_graph (void *x_p)
{
struct control_flow_graph * const x = (struct control_flow_graph *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_18control_flow_graph))
{
gt_pch_n_15basic_block_def ((*x).x_entry_block_ptr);
gt_pch_n_15basic_block_def ((*x).x_exit_block_ptr);
gt_pch_n_22vec_basic_block_va_gc_ ((*x).x_basic_block_info);
gt_pch_n_22vec_basic_block_va_gc_ ((*x).x_label_to_block_map);
}
}
/* PCH walker for struct loops: notes the loop array, exit hash table
   and loop-tree root.  */
void
gt_pch_nx_loops (void *x_p)
{
struct loops * const x = (struct loops *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_5loops))
{
gt_pch_n_17vec_loop_p_va_gc_ ((*x).larray);
gt_pch_n_28hash_table_loop_exit_hasher_ ((*x).exits);
gt_pch_n_4loop ((*x).tree_root);
}
}
void
gt_pch_nx_hash_set_tree_ (void *x_p)
{
hash_set * const x = (hash_set *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14hash_set_tree_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx_types_used_by_vars_entry (void *x_p)
{
struct types_used_by_vars_entry * const x = (struct types_used_by_vars_entry *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_24types_used_by_vars_entry))
{
gt_pch_n_9tree_node ((*x).type);
gt_pch_n_9tree_node ((*x).var_decl);
}
}
void
gt_pch_nx (struct types_used_by_vars_entry& x_r ATTRIBUTE_UNUSED)
{
struct types_used_by_vars_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).type);
gt_pch_n_9tree_node ((*x).var_decl);
}
void
gt_pch_nx (struct types_used_by_vars_entry *& x)
{
if (x)
gt_pch_nx_types_used_by_vars_entry ((void *) x);
}
void
gt_pch_nx_hash_table_used_type_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_28hash_table_used_type_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct used_type_hasher& x_r ATTRIBUTE_UNUSED)
{
struct used_type_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for the nb_iter_bound chain.  Chain walkers run in two
   phases: first follow 'next' links, registering each node until one is
   already registered (xlimit); then re-walk up to that point noting
   each node's GC-owned fields.  This avoids deep recursion on long
   chains.  */
void
gt_pch_nx_nb_iter_bound (void *x_p)
{
struct nb_iter_bound * x = (struct nb_iter_bound *)x_p;
struct nb_iter_bound * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_13nb_iter_bound))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_pch_n_6gimple ((*x).stmt);
gt_pch_n_13nb_iter_bound ((*x).next);
x = ((*x).next);
}
}
/* PCH walker for loop_exit: notes the exit edge and the doubly-linked
   list / hash-chain pointers.  */
void
gt_pch_nx_loop_exit (void *x_p)
{
struct loop_exit * const x = (struct loop_exit *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9loop_exit))
{
gt_pch_n_8edge_def ((*x).e);
gt_pch_n_9loop_exit ((*x).prev);
gt_pch_n_9loop_exit ((*x).next);
gt_pch_n_9loop_exit ((*x).next_e);
}
}
/* By-reference variant for an embedded loop_exit.  */
void
gt_pch_nx (struct loop_exit& x_r ATTRIBUTE_UNUSED)
{
struct loop_exit * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_8edge_def ((*x).e);
gt_pch_n_9loop_exit ((*x).prev);
gt_pch_n_9loop_exit ((*x).next);
gt_pch_n_9loop_exit ((*x).next_e);
}
/* PCH pointer overload for loop_exit.  */
void
gt_pch_nx (struct loop_exit *& x)
{
if (x)
gt_pch_nx_loop_exit ((void *) x);
}
/* PCH chain walker for struct loop, following sibling 'next' links
   (same two-phase scheme as above); notes header/latch blocks,
   superloop vector, children, niter info, bounds, control IVs and
   exits.  */
void
gt_pch_nx_loop (void *x_p)
{
struct loop * x = (struct loop *)x_p;
struct loop * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_4loop))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_pch_n_15basic_block_def ((*x).header);
gt_pch_n_15basic_block_def ((*x).latch);
gt_pch_n_17vec_loop_p_va_gc_ ((*x).superloops);
gt_pch_n_4loop ((*x).inner);
gt_pch_n_4loop ((*x).next);
gt_pch_n_9tree_node ((*x).nb_iterations);
gt_pch_n_9tree_node ((*x).simduid);
gt_pch_n_13nb_iter_bound ((*x).bounds);
gt_pch_n_10control_iv ((*x).control_ivs);
gt_pch_n_9loop_exit ((*x).exits);
gt_pch_n_10niter_desc ((*x).simple_loop_desc);
gt_pch_n_15basic_block_def ((*x).former_header);
x = ((*x).next);
}
}
/* PCH chain walker for control_iv: notes base/step trees along the
   'next' chain.  */
void
gt_pch_nx_control_iv (void *x_p)
{
struct control_iv * x = (struct control_iv *)x_p;
struct control_iv * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_10control_iv))
xlimit = ((*xlimit).next);
while (x != xlimit)
{
gt_pch_n_9tree_node ((*x).base);
gt_pch_n_9tree_node ((*x).step);
gt_pch_n_10control_iv ((*x).next);
x = ((*x).next);
}
}
void
gt_pch_nx_vec_loop_p_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_17vec_loop_p_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct loop *& x)
{
if (x)
gt_pch_nx_loop ((void *) x);
}
void
gt_pch_nx_niter_desc (void *x_p)
{
struct niter_desc * const x = (struct niter_desc *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_10niter_desc))
{
gt_pch_n_8edge_def ((*x).out_edge);
gt_pch_n_8edge_def ((*x).in_edge);
gt_pch_n_7rtx_def ((*x).assumptions);
gt_pch_n_7rtx_def ((*x).noloop_assumptions);
gt_pch_n_7rtx_def ((*x).infinite);
gt_pch_n_7rtx_def ((*x).niter_expr);
}
}
void
gt_pch_nx_hash_table_loop_exit_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_28hash_table_loop_exit_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct loop_exit_hasher& x_r ATTRIBUTE_UNUSED)
{
struct loop_exit_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_vec_basic_block_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_22vec_basic_block_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct basic_block_def *& x)
{
if (x)
gt_pch_nx_basic_block_def ((void *) x);
}
void
gt_pch_nx_rtl_bb_info (void *x_p)
{
struct rtl_bb_info * const x = (struct rtl_bb_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11rtl_bb_info))
{
gt_pch_n_7rtx_def ((*x).end_);
gt_pch_n_7rtx_def ((*x).header_);
gt_pch_n_7rtx_def ((*x).footer_);
}
}
void
gt_pch_nx_vec_edge_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15vec_edge_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (edge_def *& x)
{
if (x)
gt_pch_nx_edge_def ((void *) x);
}
/* PCH walker for section_hash_entry: only the section name string is
   GC-owned.  */
void
gt_pch_nx_section_hash_entry (void *x_p)
{
struct section_hash_entry * const x = (struct section_hash_entry *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_18section_hash_entry))
{
gt_pch_n_S ((*x).name);
}
}
/* By-reference variant for an embedded section_hash_entry.  */
void
gt_pch_nx (struct section_hash_entry& x_r ATTRIBUTE_UNUSED)
{
struct section_hash_entry * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_S ((*x).name);
}
/* PCH pointer overload for section_hash_entry.  */
void
gt_pch_nx (struct section_hash_entry *& x)
{
if (x)
gt_pch_nx_section_hash_entry ((void *) x);
}
/* PCH walker for lto_file_decl_data: notes the decl states, per-function
   state table, chain link, and the mode table string.  */
void
gt_pch_nx_lto_file_decl_data (void *x_p)
{
struct lto_file_decl_data * const x = (struct lto_file_decl_data *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_18lto_file_decl_data))
{
gt_pch_n_17lto_in_decl_state ((*x).current_decl_state);
gt_pch_n_17lto_in_decl_state ((*x).global_decl_state);
gt_pch_n_29hash_table_decl_state_hasher_ ((*x).function_decl_states);
gt_pch_n_18lto_file_decl_data ((*x).next);
gt_pch_n_S ((*x).mode_table);
}
}
/* PCH walker for ipa_replace_map: only the replacement tree is
   GC-owned.  */
void
gt_pch_nx_ipa_replace_map (void *x_p)
{
struct ipa_replace_map * const x = (struct ipa_replace_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15ipa_replace_map))
{
gt_pch_n_9tree_node ((*x).new_tree);
}
}
/* PCH walker for cgraph_simd_clone: notes the clone chain links and,
   for each of the 'nargs' argument records, its five tree fields.  */
void
gt_pch_nx_cgraph_simd_clone (void *x_p)
{
struct cgraph_simd_clone * const x = (struct cgraph_simd_clone *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_17cgraph_simd_clone))
{
{
size_t l0 = (size_t)(((*x)).nargs);
gt_pch_n_11symtab_node ((*x).prev_clone);
gt_pch_n_11symtab_node ((*x).next_clone);
gt_pch_n_11symtab_node ((*x).origin);
{
size_t i0;
for (i0 = 0; i0 != l0; i0++) {
gt_pch_n_9tree_node ((*x).args[i0].orig_arg);
gt_pch_n_9tree_node ((*x).args[i0].orig_type);
gt_pch_n_9tree_node ((*x).args[i0].vector_arg);
gt_pch_n_9tree_node ((*x).args[i0].vector_type);
gt_pch_n_9tree_node ((*x).args[i0].simd_array);
}
}
}
}
}
/* PCH walker for cgraph_function_version_info: notes the owning node,
   the prev/next version links and the dispatcher resolver decl.  */
void
gt_pch_nx_cgraph_function_version_info (void *x_p)
{
struct cgraph_function_version_info * const x = (struct cgraph_function_version_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_28cgraph_function_version_info))
{
gt_pch_n_11symtab_node ((*x).this_node);
gt_pch_n_28cgraph_function_version_info ((*x).prev);
gt_pch_n_28cgraph_function_version_info ((*x).next);
gt_pch_n_9tree_node ((*x).dispatcher_resolver);
}
}
/* By-reference variant for an embedded cgraph_function_version_info.  */
void
gt_pch_nx (struct cgraph_function_version_info& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_function_version_info * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_11symtab_node ((*x).this_node);
gt_pch_n_28cgraph_function_version_info ((*x).prev);
gt_pch_n_28cgraph_function_version_info ((*x).next);
gt_pch_n_9tree_node ((*x).dispatcher_resolver);
}
/* PCH pointer overload for cgraph_function_version_info.  */
void
gt_pch_nx (struct cgraph_function_version_info *& x)
{
if (x)
gt_pch_nx_cgraph_function_version_info ((void *) x);
}
void
gt_pch_nx_hash_table_cgraph_edge_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_30hash_table_cgraph_edge_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct cgraph_edge_hasher& x_r ATTRIBUTE_UNUSED)
{
struct cgraph_edge_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for cgraph_indirect_call_info: notes the polymorphic-call
   context types and the OBJ_TYPE_REF type.  */
void
gt_pch_nx_cgraph_indirect_call_info (void *x_p)
{
struct cgraph_indirect_call_info * const x = (struct cgraph_indirect_call_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_25cgraph_indirect_call_info))
{
gt_pch_n_9tree_node ((*x).context.outer_type);
gt_pch_n_9tree_node ((*x).context.speculative_outer_type);
gt_pch_n_9tree_node ((*x).otr_type);
}
}
/* PCH walker for asm_node (toplevel asm statement): notes the chain
   link and the asm string tree.  */
void
gt_pch_nx_asm_node (void *x_p)
{
struct asm_node * const x = (struct asm_node *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_8asm_node))
{
gt_pch_n_8asm_node ((*x).next);
gt_pch_n_9tree_node ((*x).asm_str);
}
}
/* PCH walker for thunk_info: only the alias tree is GC-owned.  */
void
gt_pch_nx_thunk_info (void *x_p)
{
struct thunk_info * const x = (struct thunk_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_10thunk_info))
{
gt_pch_n_9tree_node ((*x).alias);
}
}
void
gt_pch_nx_function_summary_thunk_info__ (void *x_p)
{
function_summary * const x = (function_summary *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_29function_summary_thunk_info__))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct thunk_info *& x)
{
if (x)
gt_pch_nx_thunk_info ((void *) x);
}
void
gt_pch_nx_clone_info (void *x_p)
{
struct clone_info * const x = (struct clone_info *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_10clone_info))
{
gt_pch_n_27vec_ipa_replace_map__va_gc_ ((*x).tree_map);
gt_pch_n_21ipa_param_adjustments ((*x).param_adjustments);
}
}
void
gt_pch_nx_function_summary_clone_info__ (void *x_p)
{
function_summary * const x = (function_summary *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_29function_summary_clone_info__))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct clone_info *& x)
{
if (x)
gt_pch_nx_clone_info ((void *) x);
}
/* PCH walker for symbol_table: notes the symtab node chain, toplevel
   asm nodes, section/assembler-name hashes, init-priority map, and the
   thunk/clone summaries.  */
void
gt_pch_nx_symbol_table (void *x_p)
{
struct symbol_table * const x = (struct symbol_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12symbol_table))
{
gt_pch_n_11symtab_node ((*x).nodes);
gt_pch_n_8asm_node ((*x).asmnodes);
gt_pch_n_8asm_node ((*x).asm_last_node);
gt_pch_n_31hash_table_section_name_hasher_ ((*x).section_hash);
gt_pch_n_26hash_table_asmname_hasher_ ((*x).assembler_name_hash);
gt_pch_n_42hash_map_symtab_node__symbol_priority_map_ ((*x).init_priority_hash);
gt_pch_n_29function_summary_thunk_info__ ((*x).m_thunks);
gt_pch_n_29function_summary_clone_info__ ((*x).m_clones);
}
}
void
gt_pch_nx_hash_table_section_name_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_31hash_table_section_name_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct section_name_hasher& x_r ATTRIBUTE_UNUSED)
{
struct section_name_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_hash_table_asmname_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26hash_table_asmname_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct asmname_hasher& x_r ATTRIBUTE_UNUSED)
{
struct asmname_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_hash_map_symtab_node__symbol_priority_map_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_42hash_map_symtab_node__symbol_priority_map_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct symbol_priority_map& x_r ATTRIBUTE_UNUSED)
{
struct symbol_priority_map * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx (struct symtab_node *& x)
{
if (x)
gt_pch_nx_symtab_node ((void *) x);
}
/* PCH walker for constant_descriptor_tree: notes the constant's rtl
   and its tree value.  */
void
gt_pch_nx_constant_descriptor_tree (void *x_p)
{
struct constant_descriptor_tree * const x = (struct constant_descriptor_tree *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_24constant_descriptor_tree))
{
gt_pch_n_7rtx_def ((*x).rtl);
gt_pch_n_9tree_node ((*x).value);
}
}
/* By-reference variant for an embedded constant_descriptor_tree.  */
void
gt_pch_nx (struct constant_descriptor_tree& x_r ATTRIBUTE_UNUSED)
{
struct constant_descriptor_tree * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_7rtx_def ((*x).rtl);
gt_pch_n_9tree_node ((*x).value);
}
/* PCH pointer overload for constant_descriptor_tree.  */
void
gt_pch_nx (struct constant_descriptor_tree *& x)
{
if (x)
gt_pch_nx_constant_descriptor_tree ((void *) x);
}
void
gt_pch_nx_vec_ipa_replace_map__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27vec_ipa_replace_map__va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct ipa_replace_map *& x)
{
if (x)
gt_pch_nx_ipa_replace_map ((void *) x);
}
/* PCH walker for ipa_param_adjustments: only the adjusted-parameter
   vector is GC-owned.  */
void
gt_pch_nx_ipa_param_adjustments (void *x_p)
{
struct ipa_param_adjustments * const x = (struct ipa_param_adjustments *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_21ipa_param_adjustments))
{
gt_pch_n_29vec_ipa_adjusted_param_va_gc_ ((*x).m_adj_params);
}
}
/* PCH walker for lto_in_decl_state: notes each of the fixed set of
   decl-stream vectors and the owning function decl.  */
void
gt_pch_nx_lto_in_decl_state (void *x_p)
{
struct lto_in_decl_state * const x = (struct lto_in_decl_state *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_17lto_in_decl_state))
{
{
size_t i0;
size_t l0 = (size_t)(LTO_N_DECL_STREAMS);
for (i0 = 0; i0 != l0; i0++) {
gt_pch_n_15vec_tree_va_gc_ ((*x).streams[i0]);
}
}
gt_pch_n_9tree_node ((*x).fn_decl);
}
}
/* By-reference variant for an embedded lto_in_decl_state.  */
void
gt_pch_nx (struct lto_in_decl_state& x_r ATTRIBUTE_UNUSED)
{
struct lto_in_decl_state * ATTRIBUTE_UNUSED x = &x_r;
{
size_t i1;
size_t l1 = (size_t)(LTO_N_DECL_STREAMS);
for (i1 = 0; i1 != l1; i1++) {
gt_pch_n_15vec_tree_va_gc_ ((*x).streams[i1]);
}
}
gt_pch_n_9tree_node ((*x).fn_decl);
}
/* PCH pointer overload for lto_in_decl_state.  */
void
gt_pch_nx (struct lto_in_decl_state *& x)
{
if (x)
gt_pch_nx_lto_in_decl_state ((void *) x);
}
/* PCH walker for ipa_node_params: only the descriptor vector is
   GC-owned.  */
void
gt_pch_nx_ipa_node_params (void *x_p)
{
struct ipa_node_params * const x = (struct ipa_node_params *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15ipa_node_params))
{
gt_pch_n_31vec_ipa_param_descriptor_va_gc_ ((*x).descriptors);
}
}
/* By-reference variant for an embedded ipa_node_params.  */
void
gt_pch_nx (struct ipa_node_params& x_r ATTRIBUTE_UNUSED)
{
struct ipa_node_params * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_31vec_ipa_param_descriptor_va_gc_ ((*x).descriptors);
}
/* PCH pointer overload for ipa_node_params.  */
void
gt_pch_nx (struct ipa_node_params *& x)
{
if (x)
gt_pch_nx_ipa_node_params ((void *) x);
}
/* PCH walker for ipa_edge_args: notes the jump-function and
   polymorphic-call-context vectors.  */
void
gt_pch_nx_ipa_edge_args (void *x_p)
{
struct ipa_edge_args * const x = (struct ipa_edge_args *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_13ipa_edge_args))
{
gt_pch_n_24vec_ipa_jump_func_va_gc_ ((*x).jump_functions);
gt_pch_n_39vec_ipa_polymorphic_call_context_va_gc_ ((*x).polymorphic_call_contexts);
}
}
/* By-reference variant for an embedded ipa_edge_args.  */
void
gt_pch_nx (struct ipa_edge_args& x_r ATTRIBUTE_UNUSED)
{
struct ipa_edge_args * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_24vec_ipa_jump_func_va_gc_ ((*x).jump_functions);
gt_pch_n_39vec_ipa_polymorphic_call_context_va_gc_ ((*x).polymorphic_call_contexts);
}
/* PCH pointer overload for ipa_edge_args.  */
void
gt_pch_nx (struct ipa_edge_args *& x)
{
if (x)
gt_pch_nx_ipa_edge_args ((void *) x);
}
/* PCH walker for ipa_agg_replacement_value: notes the chain link and
   the replacement value tree.  */
void
gt_pch_nx_ipa_agg_replacement_value (void *x_p)
{
struct ipa_agg_replacement_value * const x = (struct ipa_agg_replacement_value *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_25ipa_agg_replacement_value))
{
gt_pch_n_25ipa_agg_replacement_value ((*x).next);
gt_pch_n_9tree_node ((*x).value);
}
}
/* PCH walker for ipa_fn_summary: notes the condition vector and the
   loop iteration/stride predicate vectors.  */
void
gt_pch_nx_ipa_fn_summary (void *x_p)
{
struct ipa_fn_summary * const x = (struct ipa_fn_summary *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14ipa_fn_summary))
{
gt_pch_n_20vec_condition_va_gc_ ((*x).conds);
gt_pch_n_37vec_ipa_freqcounting_predicate_va_gc_ ((*x).loop_iterations);
gt_pch_n_37vec_ipa_freqcounting_predicate_va_gc_ ((*x).loop_strides);
}
}
void
gt_pch_nx_vec_ipa_adjusted_param_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_29vec_ipa_adjusted_param_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct ipa_adjusted_param& x_r ATTRIBUTE_UNUSED)
{
struct ipa_adjusted_param * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_9tree_node ((*x).type);
gt_pch_n_9tree_node ((*x).alias_ptr_type);
}
void
gt_pch_nx_modref_tree_alias_set_type_ (void *x_p)
{
modref_tree * const x = (modref_tree *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27modref_tree_alias_set_type_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx_modref_summary (void *x_p)
{
struct modref_summary * const x = (struct modref_summary *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_14modref_summary))
{
gt_pch_n_27modref_tree_alias_set_type_ ((*x).loads);
gt_pch_n_27modref_tree_alias_set_type_ ((*x).stores);
}
}
void
gt_pch_nx_hash_map_location_hash_nowarn_spec_t_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_37hash_map_location_hash_nowarn_spec_t_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct nowarn_spec_t& x_r ATTRIBUTE_UNUSED)
{
struct nowarn_spec_t * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for dw_cfi_node (DWARF CFI instruction).  Each of the two
   operands is a union; dw_cfi_oprnd1_desc/dw_cfi_oprnd2_desc select
   which member is live for this opcode, and only pointer-bearing
   members (addr string, location expression, CFA location) are
   noted.  */
void
gt_pch_nx_dw_cfi_node (void *x_p)
{
struct dw_cfi_node * const x = (struct dw_cfi_node *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_11dw_cfi_node))
{
switch ((int) (dw_cfi_oprnd1_desc (((*x)).dw_cfi_opc)))
{
case dw_cfi_oprnd_reg_num:
break;
case dw_cfi_oprnd_offset:
break;
case dw_cfi_oprnd_addr:
gt_pch_n_S ((*x).dw_cfi_oprnd1.dw_cfi_addr);
break;
case dw_cfi_oprnd_loc:
gt_pch_n_17dw_loc_descr_node ((*x).dw_cfi_oprnd1.dw_cfi_loc);
break;
case dw_cfi_oprnd_cfa_loc:
gt_pch_n_15dw_cfa_location ((*x).dw_cfi_oprnd1.dw_cfi_cfa_loc);
break;
default:
break;
}
switch ((int) (dw_cfi_oprnd2_desc (((*x)).dw_cfi_opc)))
{
case dw_cfi_oprnd_reg_num:
break;
case dw_cfi_oprnd_offset:
break;
case dw_cfi_oprnd_addr:
gt_pch_n_S ((*x).dw_cfi_oprnd2.dw_cfi_addr);
break;
case dw_cfi_oprnd_loc:
gt_pch_n_17dw_loc_descr_node ((*x).dw_cfi_oprnd2.dw_cfi_loc);
break;
case dw_cfi_oprnd_cfa_loc:
gt_pch_n_15dw_cfa_location ((*x).dw_cfi_oprnd2.dw_cfi_cfa_loc);
break;
default:
break;
}
}
}
/* PCH chain walker for dw_loc_descr_node (DWARF location expression
   ops), following 'dw_loc_next'.  Phase 1 registers nodes along the
   chain until an already-registered one is found; phase 2 notes each
   node's fields.  For each of the two dw_val operands the val_class
   discriminator selects the live union member, and only pointer-
   bearing members are noted; val_vec payloads are registered directly
   via gt_pch_note_object.  */
void
gt_pch_nx_dw_loc_descr_node (void *x_p)
{
struct dw_loc_descr_node * x = (struct dw_loc_descr_node *)x_p;
struct dw_loc_descr_node * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_17dw_loc_descr_node))
xlimit = ((*xlimit).dw_loc_next);
while (x != xlimit)
{
gt_pch_n_17dw_loc_descr_node ((*x).dw_loc_next);
gt_pch_n_16addr_table_entry ((*x).dw_loc_oprnd1.val_entry);
/* Operand 1: note the member selected by val_class.  */
switch ((int) (((*x).dw_loc_oprnd1).val_class))
{
case dw_val_class_addr:
gt_pch_n_7rtx_def ((*x).dw_loc_oprnd1.v.val_addr);
break;
case dw_val_class_offset:
break;
case dw_val_class_loc_list:
gt_pch_n_18dw_loc_list_struct ((*x).dw_loc_oprnd1.v.val_loc_list);
break;
case dw_val_class_view_list:
gt_pch_n_10die_struct ((*x).dw_loc_oprnd1.v.val_view_list);
break;
case dw_val_class_loc:
gt_pch_n_17dw_loc_descr_node ((*x).dw_loc_oprnd1.v.val_loc);
break;
default:
break;
case dw_val_class_unsigned_const:
break;
case dw_val_class_const_double:
break;
case dw_val_class_wide_int:
gt_pch_n_34generic_wide_int_wide_int_storage_ ((*x).dw_loc_oprnd1.v.val_wide);
break;
case dw_val_class_vec:
if ((*x).dw_loc_oprnd1.v.val_vec.array != NULL) {
gt_pch_note_object ((*x).dw_loc_oprnd1.v.val_vec.array, x, gt_pch_p_17dw_loc_descr_node);
}
break;
case dw_val_class_die_ref:
gt_pch_n_10die_struct ((*x).dw_loc_oprnd1.v.val_die_ref.die);
break;
case dw_val_class_fde_ref:
break;
case dw_val_class_str:
gt_pch_n_20indirect_string_node ((*x).dw_loc_oprnd1.v.val_str);
break;
case dw_val_class_lbl_id:
gt_pch_n_S ((*x).dw_loc_oprnd1.v.val_lbl_id);
break;
case dw_val_class_flag:
break;
case dw_val_class_file:
gt_pch_n_15dwarf_file_data ((*x).dw_loc_oprnd1.v.val_file);
break;
case dw_val_class_file_implicit:
gt_pch_n_15dwarf_file_data ((*x).dw_loc_oprnd1.v.val_file_implicit);
break;
case dw_val_class_data8:
break;
case dw_val_class_decl_ref:
gt_pch_n_9tree_node ((*x).dw_loc_oprnd1.v.val_decl_ref);
break;
case dw_val_class_vms_delta:
gt_pch_n_S ((*x).dw_loc_oprnd1.v.val_vms_delta.lbl1);
gt_pch_n_S ((*x).dw_loc_oprnd1.v.val_vms_delta.lbl2);
break;
case dw_val_class_discr_value:
switch ((int) (((*x).dw_loc_oprnd1.v.val_discr_value).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
break;
case dw_val_class_discr_list:
gt_pch_n_18dw_discr_list_node ((*x).dw_loc_oprnd1.v.val_discr_list);
break;
case dw_val_class_symview:
gt_pch_n_S ((*x).dw_loc_oprnd1.v.val_symbolic_view);
break;
}
gt_pch_n_16addr_table_entry ((*x).dw_loc_oprnd2.val_entry);
/* Operand 2: identical discrimination for the second operand.  */
switch ((int) (((*x).dw_loc_oprnd2).val_class))
{
case dw_val_class_addr:
gt_pch_n_7rtx_def ((*x).dw_loc_oprnd2.v.val_addr);
break;
case dw_val_class_offset:
break;
case dw_val_class_loc_list:
gt_pch_n_18dw_loc_list_struct ((*x).dw_loc_oprnd2.v.val_loc_list);
break;
case dw_val_class_view_list:
gt_pch_n_10die_struct ((*x).dw_loc_oprnd2.v.val_view_list);
break;
case dw_val_class_loc:
gt_pch_n_17dw_loc_descr_node ((*x).dw_loc_oprnd2.v.val_loc);
break;
default:
break;
case dw_val_class_unsigned_const:
break;
case dw_val_class_const_double:
break;
case dw_val_class_wide_int:
gt_pch_n_34generic_wide_int_wide_int_storage_ ((*x).dw_loc_oprnd2.v.val_wide);
break;
case dw_val_class_vec:
if ((*x).dw_loc_oprnd2.v.val_vec.array != NULL) {
gt_pch_note_object ((*x).dw_loc_oprnd2.v.val_vec.array, x, gt_pch_p_17dw_loc_descr_node);
}
break;
case dw_val_class_die_ref:
gt_pch_n_10die_struct ((*x).dw_loc_oprnd2.v.val_die_ref.die);
break;
case dw_val_class_fde_ref:
break;
case dw_val_class_str:
gt_pch_n_20indirect_string_node ((*x).dw_loc_oprnd2.v.val_str);
break;
case dw_val_class_lbl_id:
gt_pch_n_S ((*x).dw_loc_oprnd2.v.val_lbl_id);
break;
case dw_val_class_flag:
break;
case dw_val_class_file:
gt_pch_n_15dwarf_file_data ((*x).dw_loc_oprnd2.v.val_file);
break;
case dw_val_class_file_implicit:
gt_pch_n_15dwarf_file_data ((*x).dw_loc_oprnd2.v.val_file_implicit);
break;
case dw_val_class_data8:
break;
case dw_val_class_decl_ref:
gt_pch_n_9tree_node ((*x).dw_loc_oprnd2.v.val_decl_ref);
break;
case dw_val_class_vms_delta:
gt_pch_n_S ((*x).dw_loc_oprnd2.v.val_vms_delta.lbl1);
gt_pch_n_S ((*x).dw_loc_oprnd2.v.val_vms_delta.lbl2);
break;
case dw_val_class_discr_value:
switch ((int) (((*x).dw_loc_oprnd2.v.val_discr_value).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
break;
case dw_val_class_discr_list:
gt_pch_n_18dw_discr_list_node ((*x).dw_loc_oprnd2.v.val_discr_list);
break;
case dw_val_class_symview:
gt_pch_n_S ((*x).dw_loc_oprnd2.v.val_symbolic_view);
break;
}
x = ((*x).dw_loc_next);
}
}
void
gt_pch_nx_dw_discr_list_node (void *x_p)
{
struct dw_discr_list_node * const x = (struct dw_discr_list_node *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_18dw_discr_list_node))
{
gt_pch_n_18dw_discr_list_node ((*x).dw_discr_next);
switch ((int) (((*x).dw_discr_lower_bound).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (((*x).dw_discr_upper_bound).pos))
{
case 0:
break;
case 1:
break;
default:
break;
}
}
}
void
gt_pch_nx_vec_dw_cfi_ref_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_21vec_dw_cfi_ref_va_gc_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct dw_cfi_node *& x)
{
if (x)
gt_pch_nx_dw_cfi_node ((void *) x);
}
/* PCH walker for dwarf_file_data: notes the key and filename
   strings.  */
void
gt_pch_nx_dwarf_file_data (void *x_p)
{
struct dwarf_file_data * const x = (struct dwarf_file_data *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_15dwarf_file_data))
{
gt_pch_n_S ((*x).key);
gt_pch_n_S ((*x).filename);
}
}
/* By-reference variant for an embedded dwarf_file_data.  */
void
gt_pch_nx (struct dwarf_file_data& x_r ATTRIBUTE_UNUSED)
{
struct dwarf_file_data * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_S ((*x).key);
gt_pch_n_S ((*x).filename);
}
/* PCH pointer overload for dwarf_file_data.  */
void
gt_pch_nx (struct dwarf_file_data *& x)
{
if (x)
gt_pch_nx_dwarf_file_data ((void *) x);
}
/* PCH chain walker for ctf_string (CTF string table entries), following
   'cts_next'; notes each entry's string.  */
void
gt_pch_nx_ctf_string (void *x_p)
{
struct ctf_string * x = (struct ctf_string *)x_p;
struct ctf_string * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_10ctf_string))
xlimit = ((*xlimit).cts_next);
while (x != xlimit)
{
gt_pch_n_S ((*x).cts_str);
gt_pch_n_10ctf_string ((*x).cts_next);
x = ((*x).cts_next);
}
}
/* PCH chain walker for ctf_dmdef (CTF struct/union members), following
   'dmd_next'; notes each member's name.  */
void
gt_pch_nx_ctf_dmdef (void *x_p)
{
struct ctf_dmdef * x = (struct ctf_dmdef *)x_p;
struct ctf_dmdef * xlimit = x;
while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_9ctf_dmdef))
xlimit = ((*xlimit).dmd_next);
while (x != xlimit)
{
gt_pch_n_S ((*x).dmd_name);
gt_pch_n_9ctf_dmdef ((*x).dmd_next);
x = ((*x).dmd_next);
}
}
/* PCH walker for ctf_func_arg: notes the argument name and the next
   argument in the list.  */
void
gt_pch_nx_ctf_func_arg (void *x_p)
{
struct ctf_func_arg * const x = (struct ctf_func_arg *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_12ctf_func_arg))
{
gt_pch_n_S ((*x).farg_name);
gt_pch_n_12ctf_func_arg ((*x).farg_next);
}
}
/* PCH walker for ctf_dtdef (CTF type definition): notes the DIE key and
   name, then the dtd_u union member selected by
   ctf_dtu_d_union_selector (members list or function arguments; the
   other variants carry no pointers).  The switch on (0) is a no-op
   emitted by the generator for a pointer-free sub-union.  */
void
gt_pch_nx_ctf_dtdef (void *x_p)
{
struct ctf_dtdef * const x = (struct ctf_dtdef *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9ctf_dtdef))
{
gt_pch_n_10die_struct ((*x).dtd_key);
gt_pch_n_S ((*x).dtd_name);
switch ((int) (0))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (ctf_dtu_d_union_selector (&((*x)))))
{
case CTF_DTU_D_MEMBERS:
gt_pch_n_9ctf_dmdef ((*x).dtd_u.dtu_members);
break;
case CTF_DTU_D_ARRAY:
break;
case CTF_DTU_D_ENCODING:
break;
case CTF_DTU_D_ARGUMENTS:
gt_pch_n_12ctf_func_arg ((*x).dtd_u.dtu_argv);
break;
case CTF_DTU_D_SLICE:
break;
default:
break;
}
}
}
/* By-reference variant for an embedded ctf_dtdef (same union
   discrimination as above).  */
void
gt_pch_nx (struct ctf_dtdef& x_r ATTRIBUTE_UNUSED)
{
struct ctf_dtdef * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_10die_struct ((*x).dtd_key);
gt_pch_n_S ((*x).dtd_name);
switch ((int) (0))
{
case 0:
break;
case 1:
break;
default:
break;
}
switch ((int) (ctf_dtu_d_union_selector (&((*x)))))
{
case CTF_DTU_D_MEMBERS:
gt_pch_n_9ctf_dmdef ((*x).dtd_u.dtu_members);
break;
case CTF_DTU_D_ARRAY:
break;
case CTF_DTU_D_ENCODING:
break;
case CTF_DTU_D_ARGUMENTS:
gt_pch_n_12ctf_func_arg ((*x).dtd_u.dtu_argv);
break;
case CTF_DTU_D_SLICE:
break;
default:
break;
}
}
/* PCH pointer overload for ctf_dtdef.  */
void
gt_pch_nx (struct ctf_dtdef *& x)
{
if (x)
gt_pch_nx_ctf_dtdef ((void *) x);
}
/* PCH walker for ctf_dvdef (CTF variable definition): notes the DIE
   key and name.  */
void
gt_pch_nx_ctf_dvdef (void *x_p)
{
struct ctf_dvdef * const x = (struct ctf_dvdef *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_9ctf_dvdef))
{
gt_pch_n_10die_struct ((*x).dvd_key);
gt_pch_n_S ((*x).dvd_name);
}
}
/* By-reference variant for an embedded ctf_dvdef.  */
void
gt_pch_nx (struct ctf_dvdef& x_r ATTRIBUTE_UNUSED)
{
struct ctf_dvdef * ATTRIBUTE_UNUSED x = &x_r;
gt_pch_n_10die_struct ((*x).dvd_key);
gt_pch_n_S ((*x).dvd_name);
}
/* PCH pointer overload for ctf_dvdef.  */
void
gt_pch_nx (struct ctf_dvdef *& x)
{
if (x)
gt_pch_nx_ctf_dvdef ((void *) x);
}
void
gt_pch_nx_hash_table_ctfc_dtd_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27hash_table_ctfc_dtd_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct ctfc_dtd_hasher& x_r ATTRIBUTE_UNUSED)
{
struct ctfc_dtd_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_hash_table_ctfc_dvd_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27hash_table_ctfc_dvd_hasher_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx (struct ctfc_dvd_hasher& x_r ATTRIBUTE_UNUSED)
{
struct ctfc_dvd_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH walker for ctf_container: notes the type/variable hash tables,
   both string tables, and the four heap-allocated lists (each list
   buffer is registered via gt_pch_note_object after its elements).
   NOTE(review): the l0..l3 lengths come out as (size_t)(0) here, so the
   element loops never execute — presumably the GTY length expressions
   were lost; confirm against the generator's input in ctfc.h.  */
void
gt_pch_nx_ctf_container (void *x_p)
{
struct ctf_container * const x = (struct ctf_container *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_13ctf_container))
{
{
size_t l0 = (size_t)(0);
size_t l1 = (size_t)(0);
size_t l2 = (size_t)(0);
size_t l3 = (size_t)(0);
gt_pch_n_27hash_table_ctfc_dtd_hasher_ ((*x).ctfc_types);
gt_pch_n_27hash_table_ctfc_dvd_hasher_ ((*x).ctfc_vars);
gt_pch_n_27hash_table_ctfc_dvd_hasher_ ((*x).ctfc_ignore_vars);
gt_pch_n_10ctf_string ((*x).ctfc_strtable.ctstab_head);
gt_pch_n_10ctf_string ((*x).ctfc_strtable.ctstab_tail);
gt_pch_n_S ((*x).ctfc_strtable.ctstab_estr);
gt_pch_n_10ctf_string ((*x).ctfc_aux_strtable.ctstab_head);
gt_pch_n_10ctf_string ((*x).ctfc_aux_strtable.ctstab_tail);
gt_pch_n_S ((*x).ctfc_aux_strtable.ctstab_estr);
if ((*x).ctfc_vars_list != NULL) {
size_t i0;
for (i0 = 0; i0 != (size_t)(l0); i0++) {
gt_pch_n_9ctf_dvdef ((*x).ctfc_vars_list[i0]);
}
gt_pch_note_object ((*x).ctfc_vars_list, x, gt_pch_p_13ctf_container);
}
if ((*x).ctfc_types_list != NULL) {
size_t i1;
for (i1 = 0; i1 != (size_t)(l1); i1++) {
gt_pch_n_9ctf_dtdef ((*x).ctfc_types_list[i1]);
}
gt_pch_note_object ((*x).ctfc_types_list, x, gt_pch_p_13ctf_container);
}
if ((*x).ctfc_gfuncs_list != NULL) {
size_t i2;
for (i2 = 0; i2 != (size_t)(l2); i2++) {
gt_pch_n_9ctf_dtdef ((*x).ctfc_gfuncs_list[i2]);
}
gt_pch_note_object ((*x).ctfc_gfuncs_list, x, gt_pch_p_13ctf_container);
}
if ((*x).ctfc_gobjts_list != NULL) {
size_t i3;
for (i3 = 0; i3 != (size_t)(l3); i3++) {
gt_pch_n_9ctf_dvdef ((*x).ctfc_gobjts_list[i3]);
}
gt_pch_note_object ((*x).ctfc_gobjts_list, x, gt_pch_p_13ctf_container);
}
}
}
}
void
gt_pch_nx_vec_temp_slot_p_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_22vec_temp_slot_p_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a pointer to temp_slot; null pointers are
   ignored.  */
void
gt_pch_nx (struct temp_slot *& x)
{
  if (!x)
    return;
  gt_pch_nx_temp_slot ((void *) x);
}
/* PCH note routine for an EH region: marks the region tree links, the
   union member selected by the region type, and the landing pad /
   runtime register fields.  */
void
gt_pch_nx_eh_region_d (void *x_p)
{
  struct eh_region_d * const x = (struct eh_region_d *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_11eh_region_d))
    return;
  gt_pch_n_11eh_region_d (x->outer);
  gt_pch_n_11eh_region_d (x->inner);
  gt_pch_n_11eh_region_d (x->next_peer);
  switch ((int) (x->type))
    {
    case ERT_TRY:
      gt_pch_n_10eh_catch_d (x->u.eh_try.first_catch);
      gt_pch_n_10eh_catch_d (x->u.eh_try.last_catch);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      gt_pch_n_9tree_node (x->u.allowed.type_list);
      gt_pch_n_9tree_node (x->u.allowed.label);
      break;
    case ERT_MUST_NOT_THROW:
      gt_pch_n_9tree_node (x->u.must_not_throw.failure_decl);
      break;
    default:
      break;
    }
  gt_pch_n_16eh_landing_pad_d (x->landing_pads);
  gt_pch_n_7rtx_def (x->exc_ptr_reg);
  gt_pch_n_7rtx_def (x->filter_reg);
}
/* PCH note routine for an EH landing pad and the GC pointers it
   holds.  */
void
gt_pch_nx_eh_landing_pad_d (void *x_p)
{
  struct eh_landing_pad_d * const x = (struct eh_landing_pad_d *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_16eh_landing_pad_d))
    return;
  gt_pch_n_16eh_landing_pad_d (x->next_lp);
  gt_pch_n_11eh_region_d (x->region);
  gt_pch_n_9tree_node (x->post_landing_pad);
  gt_pch_n_7rtx_def (x->landing_pad);
}
/* PCH note routine for an EH catch handler and the GC pointers it
   holds.  */
void
gt_pch_nx_eh_catch_d (void *x_p)
{
  struct eh_catch_d * const x = (struct eh_catch_d *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_10eh_catch_d))
    return;
  gt_pch_n_10eh_catch_d (x->next_catch);
  gt_pch_n_10eh_catch_d (x->prev_catch);
  gt_pch_n_9tree_node (x->type_list);
  gt_pch_n_9tree_node (x->filter_list);
  gt_pch_n_9tree_node (x->label);
}
void
gt_pch_nx_vec_eh_region_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_20vec_eh_region_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a pointer to eh_region_d; null pointers are
   ignored.  */
void
gt_pch_nx (struct eh_region_d *& x)
{
  if (!x)
    return;
  gt_pch_nx_eh_region_d ((void *) x);
}
void
gt_pch_nx_vec_eh_landing_pad_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_25vec_eh_landing_pad_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a pointer to eh_landing_pad_d; null pointers
   are ignored.  */
void
gt_pch_nx (struct eh_landing_pad_d *& x)
{
  if (!x)
    return;
  gt_pch_nx_eh_landing_pad_d ((void *) x);
}
void
gt_pch_nx_hash_map_gimple__int_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_21hash_map_gimple__int_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a pointer to gimple; null pointers are
   ignored.  */
void
gt_pch_nx (struct gimple *& x)
{
  if (!x)
    return;
  gt_pch_nx_gimple ((void *) x);
}
/* PCH note routine for a tm_restart_node and the GC pointers it
   holds.  */
void
gt_pch_nx_tm_restart_node (void *x_p)
{
  struct tm_restart_node * const x = (struct tm_restart_node *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_15tm_restart_node))
    return;
  gt_pch_n_6gimple (x->stmt);
  gt_pch_n_9tree_node (x->label_or_list);
}
/* PCH note routine for a tm_restart_node passed by reference.  */
void
gt_pch_nx (struct tm_restart_node& x_r ATTRIBUTE_UNUSED)
{
  struct tm_restart_node * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_n_6gimple (x->stmt);
  gt_pch_n_9tree_node (x->label_or_list);
}
/* PCH note routine for a pointer to tm_restart_node; null pointers
   are ignored.  */
void
gt_pch_nx (struct tm_restart_node *& x)
{
  if (!x)
    return;
  gt_pch_nx_tm_restart_node ((void *) x);
}
void
gt_pch_nx_hash_map_tree_tree_ (void *x_p)
{
hash_map * const x = (hash_map *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_19hash_map_tree_tree_))
{
gt_pch_nx (x);
}
}
void
gt_pch_nx_hash_table_ssa_name_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_27hash_table_ssa_name_hasher_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ssa_name_hasher passed by reference.  The
   hasher carries no GC-visible data, so there is nothing to mark.  */
void
gt_pch_nx (struct ssa_name_hasher& x_r ATTRIBUTE_UNUSED)
{
  struct ssa_name_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
void
gt_pch_nx_hash_table_tm_restart_hasher_ (void *x_p)
{
hash_table * const x = (hash_table *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_29hash_table_tm_restart_hasher_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a tm_restart_hasher passed by reference.  The
   hasher carries no GC-visible data, so there is nothing to mark.  */
void
gt_pch_nx (struct tm_restart_hasher& x_r ATTRIBUTE_UNUSED)
{
  struct tm_restart_hasher * ATTRIBUTE_UNUSED x = &x_r;
}
/* PCH note routine for a chain of ssa_operand_memory_d nodes.  The
   first loop notes each node in turn and stops at the first node that
   was already noted (gt_pch_note_object returns zero for duplicates);
   the second loop re-walks exactly that freshly-noted prefix to record
   the NEXT pointer field of each node.  Iterative on purpose: the
   chains can be long and recursion could overflow the stack.  */
void
gt_pch_nx_ssa_operand_memory_d (void *x_p)
{
  struct ssa_operand_memory_d * x = (struct ssa_operand_memory_d *)x_p;
  struct ssa_operand_memory_d * xlimit = x;
  while (gt_pch_note_object (xlimit, xlimit, gt_pch_p_20ssa_operand_memory_d))
    xlimit = ((*xlimit).next);
  while (x != xlimit)
    {
      gt_pch_n_20ssa_operand_memory_d ((*x).next);
      x = ((*x).next);
    }
}
/* PCH note routine for an int_range<1>: register the object, then let
   the template overload walk its contents.  */
void
gt_pch_nx_int_range_1_ (void *x_p)
{
  int_range<1> * const x = (int_range<1> *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_12int_range_1_))
    return;
  gt_pch_nx (x);
}
void
gt_pch_nx_vec_ipa_agg_jf_item_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_26vec_ipa_agg_jf_item_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ipa_agg_jf_item passed by reference: mark
   the type and the union member selected by the jump-function kind.  */
void
gt_pch_nx (struct ipa_agg_jf_item& x_r ATTRIBUTE_UNUSED)
{
  struct ipa_agg_jf_item * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_n_9tree_node (x->type);
  switch ((int) (((*x)).jftype))
    {
    case IPA_JF_CONST:
      gt_pch_n_9tree_node (x->value.constant);
      break;
    case IPA_JF_PASS_THROUGH:
      gt_pch_n_9tree_node (x->value.pass_through.operand);
      break;
    case IPA_JF_LOAD_AGG:
      gt_pch_n_9tree_node (x->value.load_agg.pass_through.operand);
      gt_pch_n_9tree_node (x->value.load_agg.type);
      break;
    default:
      break;
    }
}
/* PCH note routine for an ipa_bits object.  The struct contains no GC
   pointers, so only the object itself needs to be registered; the
   return value of gt_pch_note_object is deliberately unused.  */
void
gt_pch_nx_ipa_bits (void *x_p)
{
  struct ipa_bits * const x = (struct ipa_bits *)x_p;
  gt_pch_note_object (x, x, gt_pch_p_8ipa_bits);
}
void
gt_pch_nx_vec_ipa_param_descriptor_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_31vec_ipa_param_descriptor_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ipa_param_descriptor passed by reference.  */
void
gt_pch_nx (struct ipa_param_descriptor& x_r ATTRIBUTE_UNUSED)
{
  struct ipa_param_descriptor * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_n_9tree_node (x->decl_or_type);
}
void
gt_pch_nx_vec_ipa_bits__va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_20vec_ipa_bits__va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for a pointer to ipa_bits; null pointers are
   ignored.  */
void
gt_pch_nx (struct ipa_bits *& x)
{
  if (!x)
    return;
  gt_pch_nx_ipa_bits ((void *) x);
}
void
gt_pch_nx_vec_ipa_vr_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_17vec_ipa_vr_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ipa_vr passed by reference: mark both range
   bounds.  */
void
gt_pch_nx (struct ipa_vr& x_r ATTRIBUTE_UNUSED)
{
  struct ipa_vr * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_nx (&x->min);
  gt_pch_nx (&x->max);
}
/* PCH note routine for an ipcp_transformation and the GC pointers it
   holds.  */
void
gt_pch_nx_ipcp_transformation (void *x_p)
{
  struct ipcp_transformation * const x = (struct ipcp_transformation *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_19ipcp_transformation))
    return;
  gt_pch_n_25ipa_agg_replacement_value (x->agg_values);
  gt_pch_n_20vec_ipa_bits__va_gc_ (x->bits);
  gt_pch_n_17vec_ipa_vr_va_gc_ (x->m_vr);
}
void
gt_pch_nx_vec_ipa_jump_func_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_24vec_ipa_jump_func_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ipa_jump_func passed by reference: mark the
   aggregate items, known bits, value range, and the union member
   selected by the jump-function type.  */
void
gt_pch_nx (struct ipa_jump_func& x_r ATTRIBUTE_UNUSED)
{
  struct ipa_jump_func * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_n_26vec_ipa_agg_jf_item_va_gc_ (x->agg.items);
  gt_pch_n_8ipa_bits (x->bits);
  gt_pch_n_12int_range_1_ (x->m_vr);
  switch ((int) (((*x)).type))
    {
    case IPA_JF_CONST:
      gt_pch_n_9tree_node (x->value.constant.value);
      break;
    case IPA_JF_PASS_THROUGH:
      gt_pch_n_9tree_node (x->value.pass_through.operand);
      break;
    default:
      /* IPA_JF_ANCESTOR and the remaining kinds carry no GC
         pointers.  */
      break;
    }
}
void
gt_pch_nx_vec_ipa_polymorphic_call_context_va_gc_ (void *x_p)
{
vec * const x = (vec *)x_p;
if (gt_pch_note_object (x, x, gt_pch_p_39vec_ipa_polymorphic_call_context_va_gc_))
{
gt_pch_nx (x);
}
}
/* PCH note routine for an ipa_polymorphic_call_context passed by
   reference.  */
void
gt_pch_nx (struct ipa_polymorphic_call_context& x_r ATTRIBUTE_UNUSED)
{
  struct ipa_polymorphic_call_context * ATTRIBUTE_UNUSED x = &x_r;
  gt_pch_n_9tree_node (x->outer_type);
  gt_pch_n_9tree_node (x->speculative_outer_type);
}
/* PCH note routine for an ipa_node_params_t summary: register it,
   then let the overloaded gt_pch_nx walk the contents.  */
void
gt_pch_nx_ipa_node_params_t (void *x_p)
{
  ipa_node_params_t * const x = (ipa_node_params_t *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_17ipa_node_params_t))
    return;
  gt_pch_nx (x);
}
/* PCH note routine for an ipa_edge_args_sum_t summary: register it,
   then let the overloaded gt_pch_nx walk the contents.  */
void
gt_pch_nx_ipa_edge_args_sum_t (void *x_p)
{
  ipa_edge_args_sum_t * const x = (ipa_edge_args_sum_t *)x_p;
  if (!gt_pch_note_object (x, x, gt_pch_p_19ipa_edge_args_sum_t))
    return;
  gt_pch_nx (x);
}
void
gt_pch_nx_function_summary_ipcp_transformation__ (void *x_p)
{
function_summary