Lines matching refs:args — a cross-reference listing of uses of `args'; each entry below gives a source line number followed by the matching line. The fragments appear to come from GCC's gcc/calls.c (the expand_call / emit_library_call machinery).

204    macro RETURN_POPS_ARGS to determine whether this function pops its own args.
207 RETURN_POPS_ARGS to determine whether this function pops its own args.
223 just after all the args have had their registers assigned.
225 arg-register beyond those used for args in this call,
233 the args to this call were processed.
276 /* If this subroutine pops its own args, record that in the call insn
309 /* If this subroutine pops its own args, record that in the call insn
409 /* Restore this now, so that we do defer pops for this call's args
427 /* If returning from the subroutine does not automatically pop the args,
431 If returning from the subroutine does pop the args, indicate that the
446 /* When we accumulate outgoing args, we must avoid any stack manipulations.
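The RETURN_POPS_ARGS mentions at 204/207 and the "pops its own args" comments at 276/309 cover callee-pop bookkeeping. A hedged sketch of that logic, reconstructed around the fragments (fndecl, funtype, args_size, and call_insn are assumed from the enclosing function; this is not the verbatim source):

  /* Ask the target how many bytes of pushed arguments the callee
     itself pops on return (e.g. a stdcall-style convention);
     zero means the caller is responsible for popping.  */
  int n_popped;
  n_popped = RETURN_POPS_ARGS (fndecl, funtype, args_size.constant);

  if (n_popped > 0)
    /* Record in the call insn that the callee changes the stack
       pointer, so no caller-side pop is emitted for these bytes.  */
    CALL_INSN_FUNCTION_USAGE (call_insn)
      = gen_rtx_EXPR_LIST (VOIDmode,
                           gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                           CALL_INSN_FUNCTION_USAGE (call_insn));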
648 precompute_register_parameters (int num_actuals, struct arg_data *args,
656 if (args[i].reg != 0 && ! args[i].pass_on_stack)
660 if (args[i].value == 0)
663 args[i].value = expand_normal (args[i].tree_value);
664 preserve_temp_slots (args[i].value);
670 if (CONSTANT_P (args[i].value)
671 && !LEGITIMATE_CONSTANT_P (args[i].value))
672 args[i].value = force_reg (args[i].mode, args[i].value);
677 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
678 args[i].value
679 = convert_modes (args[i].mode,
680 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
681 args[i].value, args[i].unsignedp);
686 if (GET_CODE (args[i].reg) == PARALLEL)
688 tree type = TREE_TYPE (args[i].tree_value);
689 args[i].parallel_value
690 = emit_group_load_into_temps (args[i].reg, args[i].value,
702 else if ((! (REG_P (args[i].value)
703 || (GET_CODE (args[i].value) == SUBREG
704 && REG_P (SUBREG_REG (args[i].value)))))
705 && args[i].mode != BLKmode
706 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
709 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
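Fragments 648–709 are all from precompute_register_parameters. Stitched together they form the loop below (a hedged reconstruction; the PARALLEL case at 686–690 and some guard conditions are elided):

  static void
  precompute_register_parameters (int num_actuals, struct arg_data *args,
                                  int *reg_parm_seen)
  {
    int i;

    for (i = 0; i < num_actuals; i++)
      if (args[i].reg != 0 && ! args[i].pass_on_stack)
        {
          *reg_parm_seen = 1;   /* assumed bookkeeping */

          if (args[i].value == 0)
            {
              args[i].value = expand_normal (args[i].tree_value);
              preserve_temp_slots (args[i].value);
            }

          /* Constants the target cannot accept directly go into pseudos.  */
          if (CONSTANT_P (args[i].value)
              && ! LEGITIMATE_CONSTANT_P (args[i].value))
            args[i].value = force_reg (args[i].mode, args[i].value);

          /* Convert to the mode the calling convention expects.  */
          if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
            args[i].value
              = convert_modes (args[i].mode,
                               TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                               args[i].value, args[i].unsignedp);

          /* Expensive-to-recompute values are copied to a pseudo now,
             so they are not re-evaluated amid the argument pushes.  */
          if (! REG_P (args[i].value)
              && args[i].mode != BLKmode
              && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1))
            args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
        }
  }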
818 args[argnum].aligned_regs. The caller is responsible for deallocating
822 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
827 if (args[i].reg != 0 && ! args[i].pass_on_stack
828 && args[i].mode == BLKmode
829 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
832 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
835 if (args[i].partial)
837 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
838 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
842 args[i].n_aligned_regs
846 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
854 && (BLOCK_REG_PADDING (args[i].mode,
855 TREE_TYPE (args[i].tree_value), 1)
863 for (j = 0; j < args[i].n_aligned_regs; j++)
866 rtx word = operand_subword_force (args[i].value, j, BLKmode);
869 args[i].aligned_regs[j] = reg;
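Fragments 818–869 are from store_unaligned_arguments_into_pseudos, which copies under-aligned BLKmode register arguments into word-sized pseudos. A hedged reconstruction of the core (i and j come from the enclosing loops; the big-endian padding shift at 854–855 is elided, and the final move is simplified — the real code extracts bits rather than doing a plain move):

  int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));

  if (args[i].partial)
    {
      gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
      args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
    }
  else
    /* Round the byte size up to whole words.  */
    args[i].n_aligned_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

  for (j = 0; j < args[i].n_aligned_regs; j++)
    {
      rtx reg = gen_reg_rtx (word_mode);
      rtx word = operand_subword_force (args[i].value, j, BLKmode);

      args[i].aligned_regs[j] = reg;
      emit_move_insn (reg, word);   /* simplified; see lead-in */
    }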
922 struct arg_data *args,
935 /* Count arg position in order args appear. */
944 /* In this loop, we consider args in the order they are written.
951 /* In this case, must reverse order of args
959 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
966 args[i].tree_value = TREE_VALUE (p);
970 args[i].tree_value = integer_zero_node, type = integer_type_node;
980 args[i].reg is nonzero if all or part is passed in registers.
982 args[i].partial is nonzero if part but not all is passed in registers,
985 args[i].pass_on_stack is nonzero if the argument must at least be
987 if args[i].reg is nonzero.
1008 && (base = get_base_address (args[i].tree_value))
1016 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1017 type = TREE_TYPE (args[i].tree_value);
1052 store_expr (args[i].tree_value, copy, 0);
1059 args[i].tree_value
1061 type = TREE_TYPE (args[i].tree_value);
1072 args[i].unsignedp = unsignedp;
1073 args[i].mode = mode;
1075 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1081 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1084 args[i].tail_call_reg = args[i].reg;
1087 if (args[i].reg)
1088 args[i].partial
1092 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1097 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1098 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1099 args[i].pass_on_stack = 1;
1107 || (args[i].pass_on_stack && args[i].reg != 0))
1116 if (args[i].reg == 0 || args[i].partial != 0
1118 || args[i].pass_on_stack)
1123 args[i].reg != 0,
1125 args[i].pass_on_stack ? 0 : args[i].partial,
1126 fndecl, args_size, &args[i].locate);
1131 args[i].locate.where_pad =
1136 /* Update ARGS_SIZE, the total stack space for args so far. */
1138 args_size->constant += args[i].locate.size.constant;
1139 if (args[i].locate.size.var)
1140 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
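All of 922–1140 is initialize_argument_information filling in one struct arg_data per actual argument. A condensed view of the members visible in these fragments (hedged; the real struct in calls.c has additional fields):

  struct arg_data
  {
    tree tree_value;        /* the argument expression itself */
    enum machine_mode mode; /* mode in which the arg is passed */
    int unsignedp;          /* signedness, for promotions */
    rtx reg;                /* hard reg(s) used, or 0 if all on stack */
    rtx tail_call_reg;      /* reg as seen for a sibling call */
    rtx parallel_value;     /* pieces loaded for a PARALLEL reg */
    int partial;            /* bytes in regs when split reg/stack */
    int pass_on_stack;      /* must also occupy stack space */
    rtx value;              /* RTL for the computed value */
    rtx initial_value;      /* value as first computed (promotions) */
    rtx stack, stack_slot;  /* MEMs for the arg's stack location */
    int n_aligned_regs;     /* pseudos for unaligned BLKmode args... */
    rtx *aligned_regs;      /* ...and the pseudos themselves */
    rtx save_area;          /* saved contents of clobbered stack area */
    struct locate_and_pad_arg_data locate; /* offset, size, padding */
  };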
1163 /* For accumulate outgoing args mode we don't need to align, since the frame
1236 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1258 && TREE_CODE (args[i].tree_value) != CALL_EXPR)
1262 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1264 args[i].initial_value = args[i].value
1265 = expand_normal (args[i].tree_value);
1267 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1268 if (mode != args[i].mode)
1270 args[i].value
1271 = convert_modes (args[i].mode, mode,
1272 args[i].value, args[i].unsignedp);
1274 /* CSE will replace this only if it contains args[i].value
1277 if (REG_P (args[i].value)
1278 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1280 args[i].initial_value
1281 = gen_lowpart_SUBREG (mode, args[i].value);
1282 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1283 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1284 args[i].unsignedp);
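Fragments 1236–1284 (precompute_arguments) show the promoted-argument trick: after converting the value to the ABI mode, initial_value is rewritten as a promoted SUBREG so CSE can still match the original-mode value. Reassembled from the fragments (hedged; surrounding control flow elided):

  enum machine_mode mode;

  args[i].initial_value = args[i].value
    = expand_normal (args[i].tree_value);

  mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
  if (mode != args[i].mode)
    {
      args[i].value
        = convert_modes (args[i].mode, mode,
                         args[i].value, args[i].unsignedp);

      /* CSE will replace this only if INITIAL_VALUE is expressed as
         a SUBREG of the promoted value, marked as promoted.  */
      if (REG_P (args[i].value)
          && GET_MODE_CLASS (args[i].mode) == MODE_INT)
        {
          args[i].initial_value
            = gen_lowpart_SUBREG (mode, args[i].value);
          SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                        args[i].unsignedp);
        }
    }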
1296 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1325 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1327 else if (partial_seen && args[i].reg == 0)
1330 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1331 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1332 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1333 || TREE_CODE (args[i].tree_value) == COND_EXPR
1334 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1336 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
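finalize_must_preallocate (1296–1336) decides whether the whole outgoing-argument block must be allocated up front. A hedged reconstruction of the two heuristics visible above (the name copy_to_evaluate_size and the final threshold are assumptions based on the usual calls.c logic):

  int i, partial_seen = 0, copy_to_evaluate_size = 0;

  for (i = 0; i < num_actuals && ! must_preallocate; i++)
    {
      /* An arg split between regs and stack, followed later by a
         stack-only arg, forces preallocation.  */
      if (args[i].partial > 0 && ! args[i].pass_on_stack)
        partial_seen = 1;
      else if (partial_seen && args[i].reg == 0)
        must_preallocate = 1;

      /* BLKmode args whose evaluation would otherwise need a
         temporary copy are cheaper to evaluate straight into a
         preallocated block.  */
      if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
          && (TREE_CODE (args[i].tree_value) == CALL_EXPR
              || TREE_CODE (args[i].tree_value) == TARGET_EXPR
              || TREE_CODE (args[i].tree_value) == COND_EXPR
              || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
        copy_to_evaluate_size
          += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
    }

  if (copy_to_evaluate_size * 2 >= args_size->constant
      && args_size->constant > 0)
    must_preallocate = 1;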
1355 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1367 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1368 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1375 if (! args[i].pass_on_stack
1376 && args[i].reg != 0
1377 && args[i].partial == 0)
1387 if (args[i].partial != 0)
1391 units_on_stack = args[i].locate.size.constant;
1394 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1395 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1399 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1400 set_mem_attributes (args[i].stack,
1401 TREE_TYPE (args[i].tree_value), 1);
1404 boundary = args[i].locate.boundary;
1405 if (args[i].locate.where_pad != downward)
1412 set_mem_align (args[i].stack, align);
1421 if (args[i].partial != 0)
1425 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1426 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1430 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1431 set_mem_attributes (args[i].stack_slot,
1432 TREE_TYPE (args[i].tree_value), 1);
1434 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1440 set_mem_alias_set (args[i].stack, 0);
1441 set_mem_alias_set (args[i].stack_slot, 0);
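compute_argument_addresses (1355–1441) turns each locate record into a MEM at argblock plus the computed offset. A simplified, hedged sketch of the common (non-partial) case from fragments 1367–1412; the partial-arg path at 1387–1395 builds a narrower MEM covering only the on-stack bytes:

  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
  rtx addr = (GET_CODE (offset) == CONST_INT
              ? plus_constant (argblock, INTVAL (offset))
              : gen_rtx_PLUS (Pmode, argblock, offset));

  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
  set_mem_attributes (args[i].stack, TREE_TYPE (args[i].tree_value), 1);

  /* Simplified: the full slot alignment only applies when the value
     is not padded downward within the slot (cf. 1404–1405).  */
  set_mem_align (args[i].stack, args[i].locate.boundary);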
1534 load_register_parameters (struct arg_data *args, int num_actuals,
1543 ? args[i].tail_call_reg : args[i].reg);
1546 int partial = args[i].partial;
1562 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1564 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1568 size = GET_MODE_SIZE (args[i].mode);
1574 emit_group_move (reg, args[i].parallel_value);
1582 emit_move_insn (reg, args[i].value);
1588 && (args[i].locate.where_pad
1611 else if (args[i].n_aligned_regs != 0)
1612 for (j = 0; j < args[i].n_aligned_regs; j++)
1614 args[i].aligned_regs[j]);
1616 else if (partial == 0 || args[i].pass_on_stack)
1618 rtx mem = validize_mem (args[i].value);
1622 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1629 && args[i].locate.where_pad == downward
1635 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1650 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1657 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
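load_register_parameters (1534–1657) finally moves each precomputed value into its hard register(s) just before the call. A hedged sketch of the dispatch visible in the fragments (nregs, partial, and the simple-case condition are assumed or simplified; the sibcall overlap check at 1657 is elided):

  rtx reg = ((flags & ECF_SIBCALL)
             ? args[i].tail_call_reg : args[i].reg);
  if (reg != 0)
    {
      if (GET_CODE (reg) == PARALLEL)
        /* Pieces were split into temps earlier (686–690).  */
        emit_group_move (reg, args[i].parallel_value);
      else if (args[i].n_aligned_regs != 0)
        /* Word pseudos prepared by
           store_unaligned_arguments_into_pseudos.  */
        for (j = 0; j < args[i].n_aligned_regs; j++)
          emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                          args[i].aligned_regs[j]);
      else if (nregs == 1)   /* condition simplified */
        emit_move_insn (reg, args[i].value);
      else if (partial == 0 || args[i].pass_on_stack)
        /* Multi-word value: move NREGS consecutive words.  */
        move_block_to_reg (REGNO (reg), validize_mem (args[i].value),
                           nregs, args[i].mode);
    }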
1880 /* Number of named args. Args after this are anonymous ones
1887 struct arg_data *args;
1900 /* Nonzero if we must avoid push-insns in the args for this call.
2101 /* Compute number of named args.
2102 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2110 /* If we know nothing, treat all args as named. */
2121 /* Now possibly adjust the number of named args.
2122 Normally, don't include the last named arg if anonymous args follow.
2125 (If no anonymous args follow, the result of list_length is actually
2130 this machine will be able to place unnamed args that were passed
2131 in registers into the stack. So treat all args as named. This
2136 we do not have any reliable way to pass unnamed args in
2147 /* Treat all args as named. */
2151 args = alloca (num_actuals * sizeof (struct arg_data));
2152 memset (args, 0, num_actuals * sizeof (struct arg_data));
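Fragments 2101–2152 (inside expand_call) count named arguments and allocate the arg_data array. A hedged reconstruction of the adjustment described in the comments at 2121–2147 (structure-value handling elided; args_so_far is assumed from the enclosing function):

  if (type_arg_types != 0)
    n_named_args = list_length (type_arg_types);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;   /* leave n_named_args alone */
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg when anonymous args follow.  */
    --n_named_args;
  else
    /* No reliable way to pass unnamed args in registers:
       treat all args as named.  */
    n_named_args = num_actuals;

  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));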
2156 initialize_argument_information (num_actuals, args, &args_size,
2176 num_actuals, args,
2305 /* When calling a const function, we must pop the stack args right away,
2312 precompute_arguments (flags, num_actuals, args);
2352 make space for all args right now. */
2372 block even if the size is zero because we may be storing args
2539 if (args[i].pass_on_stack)
2546 compute_argument_addresses (args, argblock, num_actuals);
2548 /* If we push args individually in reverse order, perform stack alignment
2591 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2613 if (args[i].reg == 0 || args[i].pass_on_stack)
2617 if (store_one_arg (&args[i], argblock, flags,
2622 &args[i], 1)))
2626 && args[i].stack
2627 && args[i].value == args[i].stack)
2630 args[i].value),
2639 store_unaligned_arguments_into_pseudos (args, num_actuals);
2645 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2649 if (store_one_arg (&args[i], argblock, flags,
2654 &args[i], 1)))
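The two loops at 2613–2654 push arguments to the stack with store_one_arg: first args passed wholly in memory, then (after the unaligned-pseudo setup at 2639) args split between registers and stack. A hedged sketch of the first loop; names such as adjusted_args_size, pass, and sibcall_failure are assumed from the surrounding function:

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg == 0 || args[i].pass_on_stack)
      {
        rtx before_arg = get_last_insn ();

        if (store_one_arg (&args[i], argblock, flags,
                           adjusted_args_size.var != 0,
                           reg_parm_stack_space)
            /* On the sibcall pass, a store that clobbers an incoming
               arg's stack slot defeats the tail call.  */
            || (pass == 0
                && check_sibcall_argument_overlap (before_arg,
                                                   &args[i], 1)))
          sibcall_failure = 1;
      }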
2658 /* If we pushed args in forward order, perform stack alignment
2691 load_register_parameters (args, num_actuals, &call_fusage, flags,
2778 args[0].initial_value);
2786 args[i].initial_value, note);
2923 if (args[i].save_area)
2994 /* If size of args is variable or this was a constructor call for a stack
3018 if (args[i].save_area)
3020 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3024 XEXP (args[i].stack_slot, 0)));
3027 emit_move_insn (stack_area, args[i].save_area);
3029 emit_block_move (stack_area, args[i].save_area,
3030 GEN_INT (args[i].locate.size.constant),
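Fragments 3018–3030 restore stack regions that were saved before the call clobbered them (the save_area set up under ACCUMULATE_OUTGOING_ARGS, cf. 2923). Reassembled (hedged; the memory_address wrapper at 3024 is inferred from the fragment):

  if (args[i].save_area)
    {
      enum machine_mode save_mode = GET_MODE (args[i].save_area);
      rtx stack_area
        = gen_rtx_MEM (save_mode,
                       memory_address (save_mode,
                                       XEXP (args[i].stack_slot, 0)));

      if (save_mode != BLKmode)
        emit_move_insn (stack_area, args[i].save_area);
      else
        emit_block_move (stack_area, args[i].save_area,
                         GEN_INT (args[i].locate.size.constant),
                         BLOCK_OP_CALL_PARM);
    }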
3047 if (args[i].aligned_regs)
3048 free (args[i].aligned_regs);
3066 args[i].value = 0;
3067 args[i].aligned_regs = 0;
3068 args[i].stack = 0;
3390 library functions shouldn't have many args. */
3618 /* If we push args individually in reverse order, perform stack alignment
3646 /* Push the args that need to be pushed. */
3754 /* If we pushed args in forward order, perform stack alignment
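The comments at 3618 and 3754 (in the library-call argument-push loop) mirror the alignment logic of expand_call at 2548/2658: padding up to the preferred stack boundary is allocated before the first push when args are pushed in reverse order, and after the last push otherwise. A hedged sketch of the reverse-order case (original_args_size is an assumed name for the pre-rounding size):

  #ifdef PUSH_ARGS_REVERSED
    /* Pushing last-arg-first: allocate the alignment padding now so
       the first (lowest-addressed) arg lands on the boundary.  */
    if (argblock == 0)
      anti_adjust_stack (GEN_INT (args_size.constant
                                  - original_args_size.constant));
  #endif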