1/* Array translation routines 2 Copyright (C) 2002-2016 Free Software Foundation, Inc. 3 Contributed by Paul Brook <paul@nowt.org> 4 and Steven Bosscher <s.bosscher@student.tudelft.nl> 5 6This file is part of GCC. 7 8GCC is free software; you can redistribute it and/or modify it under 9the terms of the GNU General Public License as published by the Free 10Software Foundation; either version 3, or (at your option) any later 11version. 12 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY 14WARRANTY; without even the implied warranty of MERCHANTABILITY or 15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16for more details. 17 18You should have received a copy of the GNU General Public License 19along with GCC; see the file COPYING3. If not see 20<http://www.gnu.org/licenses/>. */ 21 22/* trans-array.c-- Various array related code, including scalarization, 23 allocation, initialization and other support routines. */ 24 25/* How the scalarizer works. 26 In gfortran, array expressions use the same core routines as scalar 27 expressions. 28 First, a Scalarization State (SS) chain is built. This is done by walking 29 the expression tree, and building a linear list of the terms in the 30 expression. As the tree is walked, scalar subexpressions are translated. 31 32 The scalarization parameters are stored in a gfc_loopinfo structure. 33 First the start and stride of each term is calculated by 34 gfc_conv_ss_startstride. During this process the expressions for the array 35 descriptors and data pointers are also translated. 36 37 If the expression is an assignment, we must then resolve any dependencies. 38 In Fortran all the rhs values of an assignment must be evaluated before 39 any assignments take place. This can require a temporary array to store the 40 values. We also require a temporary when we are passing array expressions 41 or vector subscripts as procedure parameters. 
42 43 Array sections are passed without copying to a temporary. These use the 44 scalarizer to determine the shape of the section. The flag 45 loop->array_parameter tells the scalarizer that the actual values and loop 46 variables will not be required. 47 48 The function gfc_conv_loop_setup generates the scalarization setup code. 49 It determines the range of the scalarizing loop variables. If a temporary 50 is required, this is created and initialized. Code for scalar expressions 51 taken outside the loop is also generated at this time. Next the offset and 52 scaling required to translate from loop variables to array indices for each 53 term is calculated. 54 55 A call to gfc_start_scalarized_body marks the start of the scalarized 56 expression. This creates a scope and declares the loop variables. Before 57 calling this gfc_make_ss_chain_used must be used to indicate which terms 58 will be used inside this loop. 59 60 The scalar gfc_conv_* functions are then used to build the main body of the 61 scalarization loop. Scalarization loop variables and precalculated scalar 62 values are automatically substituted. Note that gfc_advance_se_ss_chain 63 must be used, rather than changing the se->ss directly. 64 65 For assignment expressions requiring a temporary two sub loops are 66 generated. The first stores the result of the expression in the temporary, 67 the second copies it to the result. A call to 68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and 69 the start of the copying loop. The temporary may be less than full rank. 70 71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do 72 loops. The loops are added to the pre chain of the loopinfo. The post 73 chain may still contain cleanup code. 74 75 After the loop code has been added into its parent scope gfc_cleanup_loop 76 is called to free all the SS allocated by the scalarizer. 
*/ 77 78#include "config.h" 79#include "system.h" 80#include "coretypes.h" 81#include "gfortran.h" 82#include "hash-set.h" 83#include "machmode.h" 84#include "vec.h" 85#include "double-int.h" 86#include "input.h" 87#include "alias.h" 88#include "symtab.h" 89#include "options.h" 90#include "wide-int.h" 91#include "inchash.h" 92#include "tree.h" 93#include "fold-const.h" 94#include "gimple-expr.h" 95#include "diagnostic-core.h" /* For internal_error/fatal_error. */ 96#include "flags.h" 97#include "constructor.h" 98#include "trans.h" 99#include "trans-stmt.h" 100#include "trans-types.h" 101#include "trans-array.h" 102#include "trans-const.h" 103#include "dependency.h" 104#include "wide-int.h" 105 106static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base); 107 108/* The contents of this structure aren't actually used, just the address. */ 109static gfc_ss gfc_ss_terminator_var; 110gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var; 111 112 113static tree 114gfc_array_dataptr_type (tree desc) 115{ 116 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc))); 117} 118 119 120/* Build expressions to access the members of an array descriptor. 121 It's surprisingly easy to mess up here, so never access 122 an array descriptor by "brute force", always use these 123 functions. This also avoids problems if we change the format 124 of an array descriptor. 125 126 To understand these magic numbers, look at the comments 127 before gfc_build_array_type() in trans-types.c. 128 129 The code within these defines should be the only code which knows the format 130 of an array descriptor. 131 132 Any code just needing to read obtain the bounds of an array should use 133 gfc_conv_array_* rather than the following functions as these will return 134 know constant values, and work with arrays which do not have descriptors. 135 136 Don't forget to #undef these! 
*/ 137 138#define DATA_FIELD 0 139#define OFFSET_FIELD 1 140#define DTYPE_FIELD 2 141#define DIMENSION_FIELD 3 142#define CAF_TOKEN_FIELD 4 143 144#define STRIDE_SUBFIELD 0 145#define LBOUND_SUBFIELD 1 146#define UBOUND_SUBFIELD 2 147 148/* This provides READ-ONLY access to the data field. The field itself 149 doesn't have the proper type. */ 150 151tree 152gfc_conv_descriptor_data_get (tree desc) 153{ 154 tree field, type, t; 155 156 type = TREE_TYPE (desc); 157 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type)); 158 159 field = TYPE_FIELDS (type); 160 gcc_assert (DATA_FIELD == 0); 161 162 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc, 163 field, NULL_TREE); 164 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t); 165 166 return t; 167} 168 169/* This provides WRITE access to the data field. 170 171 TUPLES_P is true if we are generating tuples. 172 173 This function gets called through the following macros: 174 gfc_conv_descriptor_data_set 175 gfc_conv_descriptor_data_set. */ 176 177void 178gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value) 179{ 180 tree field, type, t; 181 182 type = TREE_TYPE (desc); 183 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type)); 184 185 field = TYPE_FIELDS (type); 186 gcc_assert (DATA_FIELD == 0); 187 188 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc, 189 field, NULL_TREE); 190 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value)); 191} 192 193 194/* This provides address access to the data field. This should only be 195 used by array allocation, passing this on to the runtime. 
*/ 196 197tree 198gfc_conv_descriptor_data_addr (tree desc) 199{ 200 tree field, type, t; 201 202 type = TREE_TYPE (desc); 203 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type)); 204 205 field = TYPE_FIELDS (type); 206 gcc_assert (DATA_FIELD == 0); 207 208 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc, 209 field, NULL_TREE); 210 return gfc_build_addr_expr (NULL_TREE, t); 211} 212 213static tree 214gfc_conv_descriptor_offset (tree desc) 215{ 216 tree type; 217 tree field; 218 219 type = TREE_TYPE (desc); 220 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type)); 221 222 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD); 223 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type); 224 225 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), 226 desc, field, NULL_TREE); 227} 228 229tree 230gfc_conv_descriptor_offset_get (tree desc) 231{ 232 return gfc_conv_descriptor_offset (desc); 233} 234 235void 236gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc, 237 tree value) 238{ 239 tree t = gfc_conv_descriptor_offset (desc); 240 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value)); 241} 242 243 244tree 245gfc_conv_descriptor_dtype (tree desc) 246{ 247 tree field; 248 tree type; 249 250 type = TREE_TYPE (desc); 251 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type)); 252 253 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD); 254 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type); 255 256 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), 257 desc, field, NULL_TREE); 258} 259 260 261tree 262gfc_conv_descriptor_rank (tree desc) 263{ 264 tree tmp; 265 tree dtype; 266 267 dtype = gfc_conv_descriptor_dtype (desc); 268 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK); 269 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype), 270 dtype, tmp); 271 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp); 272} 
/* Return the COMPONENT_REF for the "dimension" member of descriptor DESC:
   an array of per-dimension records, each holding stride, lbound and
   ubound subfields.  */

tree
gfc_get_descriptor_dimension (tree desc)
{
  tree type, field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
  gcc_assert (field != NULL_TREE
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
			  desc, field, NULL_TREE);
}


/* Return the sub-record describing dimension DIM (an index expression)
   of descriptor DESC.  */

static tree
gfc_conv_descriptor_dimension (tree desc, tree dim)
{
  tree tmp;

  tmp = gfc_get_descriptor_dimension (desc);

  return gfc_build_array_ref (tmp, dim, NULL);
}


/* Return the coarray token field of descriptor DESC.  Only valid when
   compiling with -fcoarray=lib (asserted below).  */

tree
gfc_conv_descriptor_token (tree desc)
{
  tree type;
  tree field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
  field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);

  /* Should be a restricted pointer - except in the finalization wrapper.  */
  gcc_assert (field != NULL_TREE
	      && (TREE_TYPE (field) == prvoid_type_node
		  || TREE_TYPE (field) == pvoid_type_node));

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
			  desc, field, NULL_TREE);
}


/* Return an lvalue for the stride subfield of dimension DIM of DESC.  */

static tree
gfc_conv_descriptor_stride (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, STRIDE_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
			 tmp, field, NULL_TREE);
  return tmp;
}

/* Read the stride of dimension DIM of DESC.  For array kinds whose leading
   dimension is known to be contiguous, the stride of dimension 0 is one,
   so return a constant without touching the descriptor.  */

tree
gfc_conv_descriptor_stride_get (tree desc, tree dim)
{
  tree type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  if (integer_zerop (dim)
      && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
	  ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
	  ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
	  ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
    return gfc_index_one_node;

  return gfc_conv_descriptor_stride (desc, dim);
}

/* Append to BLOCK an assignment setting the stride of dimension DIM of
   DESC to VALUE.  */

void
gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
				tree dim, tree value)
{
  tree t = gfc_conv_descriptor_stride (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

/* Return an lvalue for the lower bound subfield of dimension DIM of
   DESC.  */

static tree
gfc_conv_descriptor_lbound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, LBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
			 tmp, field, NULL_TREE);
  return tmp;
}

/* Read the lower bound of dimension DIM of DESC.  */

tree
gfc_conv_descriptor_lbound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_lbound (desc, dim);
}

/* Append to BLOCK an assignment setting the lower bound of dimension DIM
   of DESC to VALUE.  */

void
gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
				tree dim, tree value)
{
  tree t = gfc_conv_descriptor_lbound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

/* Return an lvalue for the upper bound subfield of dimension DIM of
   DESC.  */

static tree
gfc_conv_descriptor_ubound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, UBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
			 tmp, field, NULL_TREE);
  return tmp;
}

/* Read the upper bound of dimension DIM of DESC.  */

tree
gfc_conv_descriptor_ubound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_ubound (desc, dim);
}

/* Append to BLOCK an assignment setting the upper bound of dimension DIM
   of DESC to VALUE.  */

void
gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
				tree dim, tree value)
{
  tree t = gfc_conv_descriptor_ubound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}

/* Build a null array descriptor constructor.  */

tree
gfc_build_null_descriptor (tree type)
{
  tree field;
  tree tmp;

  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  gcc_assert (DATA_FIELD == 0);
  field = TYPE_FIELDS (type);

  /* Set a NULL data pointer.  */
  tmp = build_constructor_single (type, field, null_pointer_node);
  TREE_CONSTANT (tmp) = 1;
  /* All other fields are ignored.  */

  return tmp;
}


/* Modify a descriptor such that the lbound of a given dimension is the value
   specified.  This also updates ubound and offset accordingly.  */

void
gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
				  int dim, tree new_lbound)
{
  tree offs, ubound, lbound, stride;
  tree diff, offs_diff;

  new_lbound = fold_convert (gfc_array_index_type, new_lbound);

  offs = gfc_conv_descriptor_offset_get (desc);
  lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
  stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);

  /* Get difference (new - old) by which to shift stuff.  */
  diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  new_lbound, lbound);

  /* Shift ubound and offset accordingly.  This has to be done before
     updating the lbound, as they depend on the lbound expression!  */
  ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			    ubound, diff);
  gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
  offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			       diff, stride);
  offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  offs, offs_diff);
  gfc_conv_descriptor_offset_set (block, desc, offs);

  /* Finally set lbound to value we want.  */
  gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
}


/* Cleanup those #defines.  */

#undef DATA_FIELD
#undef OFFSET_FIELD
#undef DTYPE_FIELD
#undef DIMENSION_FIELD
#undef CAF_TOKEN_FIELD
#undef STRIDE_SUBFIELD
#undef LBOUND_SUBFIELD
#undef UBOUND_SUBFIELD


/* Mark a SS chain as used.  Flags specifies in which loops the SS is used.
     flags & 1 = Main loop body.
     flags & 2 = temp copy loop.  */

void
gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
{
  for (; ss != gfc_ss_terminator; ss = ss->next)
    ss->info->useflags = flags;
}


/* Free a gfc_ss chain.
*/

void
gfc_free_ss_chain (gfc_ss * ss)
{
  gfc_ss *next;

  while (ss != gfc_ss_terminator)
    {
      gcc_assert (ss != NULL);
      next = ss->next;
      gfc_free_ss (ss);
      ss = next;
    }
}


/* Drop one reference to SS_INFO and free it, together with any subscript
   SS chains it owns, once the refcount reaches zero.  */

static void
free_ss_info (gfc_ss_info *ss_info)
{
  int n;

  ss_info->refcount--;
  if (ss_info->refcount > 0)
    return;

  gcc_assert (ss_info->refcount == 0);

  switch (ss_info->type)
    {
    case GFC_SS_SECTION:
      /* Section SS own the SS chains of their subscript expressions.  */
      for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
	if (ss_info->data.array.subscript[n])
	  gfc_free_ss_chain (ss_info->data.array.subscript[n]);
      break;

    default:
      break;
    }

  free (ss_info);
}


/* Free a SS.  */

void
gfc_free_ss (gfc_ss * ss)
{
  free_ss_info (ss->info);
  free (ss);
}


/* Creates and initializes an array type gfc_ss struct.  */

gfc_ss *
gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
{
  gfc_ss *ss;
  gfc_ss_info *ss_info;
  int i;

  ss_info = gfc_get_ss_info ();
  ss_info->refcount++;
  ss_info->type = type;
  ss_info->expr = expr;

  ss = gfc_get_ss ();
  ss->info = ss_info;
  ss->next = next;
  ss->dimen = dimen;
  /* Identity dimension mapping by default.  */
  for (i = 0; i < ss->dimen; i++)
    ss->dim[i] = i;

  return ss;
}


/* Creates and initializes a temporary type gfc_ss struct.  */

gfc_ss *
gfc_get_temp_ss (tree type, tree string_length, int dimen)
{
  gfc_ss *ss;
  gfc_ss_info *ss_info;
  int i;

  ss_info = gfc_get_ss_info ();
  ss_info->refcount++;
  ss_info->type = GFC_SS_TEMP;
  ss_info->string_length = string_length;
  ss_info->data.temp.type = type;

  ss = gfc_get_ss ();
  ss->info = ss_info;
  /* A temporary SS always terminates its own chain.  */
  ss->next = gfc_ss_terminator;
  ss->dimen = dimen;
  for (i = 0; i < ss->dimen; i++)
    ss->dim[i] = i;

  return ss;
}


/* Creates and initializes a scalar type gfc_ss struct.  */

gfc_ss *
gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
{
  gfc_ss *ss;
  gfc_ss_info *ss_info;

  ss_info = gfc_get_ss_info ();
  ss_info->refcount++;
  ss_info->type = GFC_SS_SCALAR;
  ss_info->expr = expr;

  ss = gfc_get_ss ();
  ss->info = ss_info;
  ss->next = next;

  return ss;
}


/* Free all the SS associated with a loop.  */

void
gfc_cleanup_loop (gfc_loopinfo * loop)
{
  gfc_loopinfo *loop_next, **ploop;
  gfc_ss *ss;
  gfc_ss *next;

  ss = loop->ss;
  while (ss != gfc_ss_terminator)
    {
      gcc_assert (ss != NULL);
      next = ss->loop_chain;
      gfc_free_ss (ss);
      ss = next;
    }

  /* Remove reference to self in the parent loop.  */
  if (loop->parent)
    for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
      if (*ploop == loop)
	{
	  *ploop = loop->next;
	  break;
	}

  /* Free non-freed nested loops.  */
  for (loop = loop->nested; loop; loop = loop_next)
    {
      loop_next = loop->next;
      gfc_cleanup_loop (loop);
      free (loop);
    }
}


/* Set the loop pointer of every SS in the chain starting at SS (and,
   recursively, of their subscript chains) to LOOP.  */

static void
set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
{
  int n;

  for (; ss != gfc_ss_terminator; ss = ss->next)
    {
      ss->loop = loop;

      /* Scalar-like SS have no subscript chains to visit.  */
      if (ss->info->type == GFC_SS_SCALAR
	  || ss->info->type == GFC_SS_REFERENCE
	  || ss->info->type == GFC_SS_TEMP)
	continue;

      for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
	if (ss->info->data.array.subscript[n] != NULL)
	  set_ss_loop (ss->info->data.array.subscript[n], loop);
    }
}


/* Associate a SS chain with a loop.  */

void
gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
{
  gfc_ss *ss;
  gfc_loopinfo *nested_loop;

  if (head == gfc_ss_terminator)
    return;

  set_ss_loop (head, loop);

  ss = head;
  for (; ss && ss != gfc_ss_terminator; ss = ss->next)
    {
      if (ss->nested_ss)
	{
	  nested_loop = ss->nested_ss->loop;

	  /* More than one ss can belong to the same loop.  Hence, we add the
	     loop to the chain only if it is different from the previously
	     added one, to avoid duplicate nested loops.  */
	  if (nested_loop != loop->nested)
	    {
	      gcc_assert (nested_loop->parent == NULL);
	      nested_loop->parent = loop;

	      gcc_assert (nested_loop->next == NULL);
	      nested_loop->next = loop->nested;
	      loop->nested = nested_loop;
	    }
	  else
	    gcc_assert (nested_loop->parent == loop);
	}

      /* Splice this SS into the loop's chain; the last element links to
	 whatever was previously at the head of the loop chain.  */
      if (ss->next == gfc_ss_terminator)
	ss->loop_chain = loop->ss;
      else
	ss->loop_chain = ss->next;
    }
  gcc_assert (ss == gfc_ss_terminator);
  loop->ss = head;
}


/* Generate an initializer for a static pointer or allocatable array.  */

void
gfc_trans_static_array_pointer (gfc_symbol * sym)
{
  tree type;

  gcc_assert (TREE_STATIC (sym->backend_decl));
  /* Just zero the data member.  */
  type = TREE_TYPE (sym->backend_decl);
  DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
}


/* If the bounds of SE's loop have not yet been set, see if they can be
   determined from array spec AS, which is the array spec of a called
   function.  MAPPING maps the callee's dummy arguments to the values
   that the caller is passing.  Add any initialization and finalization
   code to SE.
*/

void
gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
				     gfc_se * se, gfc_array_spec * as)
{
  int n, dim, total_dim;
  gfc_se tmpse;
  gfc_ss *ss;
  tree lower;
  tree upper;
  tree tmp;

  total_dim = 0;

  /* Only explicit-shape specs carry usable bound expressions.  */
  if (!as || as->type != AS_EXPLICIT)
    return;

  for (ss = se->ss; ss; ss = ss->parent)
    {
      total_dim += ss->loop->dimen;
      for (n = 0; n < ss->loop->dimen; n++)
	{
	  /* The bound is known, nothing to do.  */
	  if (ss->loop->to[n] != NULL_TREE)
	    continue;

	  dim = ss->dim[n];
	  gcc_assert (dim < as->rank);
	  gcc_assert (ss->loop->dimen <= as->rank);

	  /* Evaluate the lower bound.  */
	  gfc_init_se (&tmpse, NULL);
	  gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
	  gfc_add_block_to_block (&se->pre, &tmpse.pre);
	  gfc_add_block_to_block (&se->post, &tmpse.post);
	  lower = fold_convert (gfc_array_index_type, tmpse.expr);

	  /* ...and the upper bound.  */
	  gfc_init_se (&tmpse, NULL);
	  gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
	  gfc_add_block_to_block (&se->pre, &tmpse.pre);
	  gfc_add_block_to_block (&se->post, &tmpse.post);
	  upper = fold_convert (gfc_array_index_type, tmpse.expr);

	  /* Set the upper bound of the loop to UPPER - LOWER.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, upper, lower);
	  tmp = gfc_evaluate_now (tmp, &se->pre);
	  ss->loop->to[n] = tmp;
	}
    }

  gcc_assert (total_dim == as->rank);
}


/* Generate code to allocate an array temporary, or create a variable to
   hold the data.  If size is NULL, zero the descriptor so that the
   callee will allocate the array.  If DEALLOC is true, also generate code to
   free the array afterwards.

   If INITIAL is not NULL, it is packed using internal_pack and the result used
   as data instead of allocating a fresh, uninitialized area of memory.

   Initialization code is added to PRE and finalization code to POST.
   DYNAMIC is true if the caller may want to extend the array later
   using realloc.  This prevents us from putting the array on the stack.  */

static void
gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
				  gfc_array_info * info, tree size, tree nelem,
				  tree initial, bool dynamic, bool dealloc)
{
  tree tmp;
  tree desc;
  bool onstack;

  desc = info->descriptor;
  info->offset = gfc_index_zero_node;
  if (size == NULL_TREE || integer_zerop (size))
    {
      /* A callee allocated array.  */
      gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
      onstack = FALSE;
    }
  else
    {
      /* Allocate the temporary.  */
      onstack = !dynamic && initial == NULL_TREE
		&& (flag_stack_arrays
		    || gfc_can_put_var_on_stack (size));

      if (onstack)
	{
	  /* Make a temporary variable to hold the data.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
				 nelem, gfc_index_one_node);
	  tmp = gfc_evaluate_now (tmp, pre);
	  tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
				  tmp);
	  tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
				  tmp);
	  tmp = gfc_create_var (tmp, "A");
	  /* If we're here only because of -fstack-arrays we have to
	     emit a DECL_EXPR to make the gimplifier emit alloca calls.  */
	  if (!gfc_can_put_var_on_stack (size))
	    gfc_add_expr_to_block (pre,
				   fold_build1_loc (input_location,
						    DECL_EXPR, TREE_TYPE (tmp),
						    tmp));
	  tmp = gfc_build_addr_expr (NULL_TREE, tmp);
	  gfc_conv_descriptor_data_set (pre, desc, tmp);
	}
      else
	{
	  /* Allocate memory to hold the data or call internal_pack.  */
	  if (initial == NULL_TREE)
	    {
	      tmp = gfc_call_malloc (pre, NULL, size);
	      tmp = gfc_evaluate_now (tmp, pre);
	    }
	  else
	    {
	      tree packed;
	      tree source_data;
	      tree was_packed;
	      stmtblock_t do_copying;

	      tmp = TREE_TYPE (initial); /* Pointer to descriptor.  */
	      gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
	      tmp = TREE_TYPE (tmp); /* The descriptor itself.  */
	      tmp = gfc_get_element_type (tmp);
	      gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
	      packed = gfc_create_var (build_pointer_type (tmp), "data");

	      tmp = build_call_expr_loc (input_location,
					 gfor_fndecl_in_pack, 1, initial);
	      tmp = fold_convert (TREE_TYPE (packed), tmp);
	      gfc_add_modify (pre, packed, tmp);

	      tmp = build_fold_indirect_ref_loc (input_location,
						 initial);
	      source_data = gfc_conv_descriptor_data_get (tmp);

	      /* internal_pack may return source->data without any allocation
		 or copying if it is already packed.  If that's the case, we
		 need to allocate and copy manually.  */

	      gfc_start_block (&do_copying);
	      tmp = gfc_call_malloc (&do_copying, NULL, size);
	      tmp = fold_convert (TREE_TYPE (packed), tmp);
	      gfc_add_modify (&do_copying, packed, tmp);
	      tmp = gfc_build_memcpy_call (packed, source_data, size);
	      gfc_add_expr_to_block (&do_copying, tmp);

	      was_packed = fold_build2_loc (input_location, EQ_EXPR,
					    boolean_type_node, packed,
					    source_data);
	      tmp = gfc_finish_block (&do_copying);
	      tmp = build3_v (COND_EXPR, was_packed, tmp,
			      build_empty_stmt (input_location));
	      gfc_add_expr_to_block (pre, tmp);

	      tmp = fold_convert (pvoid_type_node, packed);
	    }

	  gfc_conv_descriptor_data_set (pre, desc, tmp);
	}
    }
  info->data = gfc_conv_descriptor_data_get (desc);

  /* The offset is zero because we create temporaries with a zero
     lower bound.  */
  gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);

  if (dealloc && !onstack)
    {
      /* Free the temporary.  */
      tmp = gfc_conv_descriptor_data_get (desc);
      tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
      gfc_add_expr_to_block (post, tmp);
    }
}


/* Get the scalarizer array dimension corresponding to actual array dimension
   given by ARRAY_DIM.

   For example, if SS represents the array ref a(1,:,:,1), it is a
   bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
   and 1 for ARRAY_DIM=2.
   If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
   scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
   ARRAY_DIM=3.
   If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
   array.  If called on the inner ss, the result would be respectively 0,1,2 for
   ARRAY_DIM=0,1,2.  If called on the outer ss, the result would be 0,1
   for ARRAY_DIM=1,2.  */

static int
get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
{
  int array_ref_dim;
  int n;

  array_ref_dim = 0;

  /* Count the scalarized dimensions (over SS and its parents) that map to
     an array dimension below ARRAY_DIM.  */
  for (; ss; ss = ss->parent)
    for (n = 0; n < ss->dimen; n++)
      if (ss->dim[n] < array_dim)
	array_ref_dim++;

  return array_ref_dim;
}


/* Walk down the nested-SS links and return the innermost SS.  */

static gfc_ss *
innermost_ss (gfc_ss *ss)
{
  while (ss->nested_ss != NULL)
    ss = ss->nested_ss;

  return ss;
}



/* Get the array reference dimension corresponding to the given loop dimension.
   It is different from the true array dimension given by the dim array in
   the case of a partial array reference (i.e. a(:,:,1,:) for example)
   It is different from the loop dimension in the case of a transposed array.
 */

static int
get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
{
  return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
					   ss->dim[loop_dim]);
}


/* Generate code to create and initialize the descriptor for a temporary
   array.  This is used for both temporaries needed by the scalarizer, and
   functions returning arrays.  Adjusts the loop variables to be
   zero-based, and calculates the loop bounds for callee allocated arrays.
   Allocate the array unless it's callee allocated (we have a callee
   allocated array if 'callee_alloc' is true, or if loop->to[n] is
   NULL_TREE for any n).  Also fills in the descriptor, data and offset
   fields of info if known.  Returns the size of the array, or NULL for a
   callee allocated array.

   'eltype' == NULL signals that the temporary should be a class object.
   The 'initial' expression is used to obtain the size of the dynamic
   type; otherwise the allocation and initialization proceeds as for any
   other expression

   PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
   gfc_trans_allocate_array_storage.  */

tree
gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
			     tree eltype, tree initial, bool dynamic,
			     bool dealloc, bool callee_alloc, locus * where)
{
  gfc_loopinfo *loop;
  gfc_ss *s;
  gfc_array_info *info;
  tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
  tree type;
  tree desc;
  tree tmp;
  tree size;
  tree nelem;
  tree cond;
  tree or_expr;
  tree class_expr = NULL_TREE;
  int n, dim, tmp_dim;
  int total_dim = 0;

  /* This signals a class array for which we need the size of the
     dynamic type.  Generate an eltype and then the class expression.  */
  if (eltype == NULL_TREE && initial)
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
      class_expr = build_fold_indirect_ref_loc (input_location, initial);
      eltype = TREE_TYPE (class_expr);
      eltype = gfc_get_element_type (eltype);
      /* Obtain the structure (class) expression.  */
      class_expr = TREE_OPERAND (class_expr, 0);
      gcc_assert (class_expr);
    }

  memset (from, 0, sizeof (from));
  memset (to, 0, sizeof (to));

  info = &ss->info->data.array;

  gcc_assert (ss->dimen > 0);
  gcc_assert (ss->loop->dimen == ss->dimen);

  if (warn_array_temporaries && where)
    gfc_warning (OPT_Warray_temporaries,
		 "Creating array temporary at %L", where);

  /* Set the lower bound to zero.  Walk the whole chain of nested loops
     so every dimension of a multi-loop scalarization is rebased.  */
  for (s = ss; s; s = s->parent)
    {
      loop = s->loop;

      total_dim += loop->dimen;
      for (n = 0; n < loop->dimen; n++)
	{
	  dim = s->dim[n];

	  /* Callee allocated arrays may not have a known bound yet.  */
	  if (loop->to[n])
	    loop->to[n] = gfc_evaluate_now (
			fold_build2_loc (input_location, MINUS_EXPR,
					 gfc_array_index_type,
					 loop->to[n], loop->from[n]),
			pre);
	  loop->from[n] = gfc_index_zero_node;

	  /* We have just changed the loop bounds, we must clear the
	     corresponding specloop, so that delta calculation is not skipped
	     later in gfc_set_delta.  */
	  loop->specloop[n] = NULL;

	  /* We are constructing the temporary's descriptor based on the loop
	     dimensions.  As the dimensions may be accessed in arbitrary order
	     (think of transpose) the size taken from the n'th loop may not map
	     to the n'th dimension of the array.  We need to reconstruct loop
	     infos in the right order before using it to set the descriptor
	     bounds.  */
	  tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
	  from[tmp_dim] = loop->from[n];
	  to[tmp_dim] = loop->to[n];

	  info->delta[dim] = gfc_index_zero_node;
	  info->start[dim] = gfc_index_zero_node;
	  info->end[dim] = gfc_index_zero_node;
	  info->stride[dim] = gfc_index_one_node;
	}
    }

  /* Initialize the descriptor.  */
  type =
    gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
			       GFC_ARRAY_UNKNOWN, true);
  desc = gfc_create_var (type, "atmp");
  GFC_DECL_PACKED_ARRAY (desc) = 1;

  info->descriptor = desc;
  size = gfc_index_one_node;

  /* Fill in the array dtype.  */
  tmp = gfc_conv_descriptor_dtype (desc);
  gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));

  /*
     Fill in the bounds and stride.  This is a packed array, so:

     size = 1;
     for (n = 0; n < rank; n++)
       {
	 stride[n] = size
	 delta = ubound[n] + 1 - lbound[n];
	 size = size * delta;
       }
     size = size * sizeof(element);
  */

  or_expr = NULL_TREE;

  /* If there is at least one null loop->to[n], it is a callee allocated
     array.  */
  for (n = 0; n < total_dim; n++)
    if (to[n] == NULL_TREE)
      {
	size = NULL_TREE;
	break;
      }

  if (size == NULL_TREE)
    for (s = ss; s; s = s->parent)
      for (n = 0; n < s->loop->dimen; n++)
	{
	  dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);

	  /* For a callee allocated array express the loop bounds in terms
	     of the descriptor fields.  */
	  tmp = fold_build2_loc (input_location,
		MINUS_EXPR, gfc_array_index_type,
		gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
		gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
	  s->loop->to[n] = tmp;
	}
  else
    {
      for (n = 0; n < total_dim; n++)
	{
	  /* Store the stride and bound components in the descriptor.  */
	  gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);

	  gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
					  gfc_index_zero_node);

	  gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);

	  /* Extent of this dimension: ubound + 1 (lbound is zero).  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 to[n], gfc_index_one_node);

	  /* Check whether the size for this dimension is negative.  */
	  cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
				  tmp, gfc_index_zero_node);
	  cond = gfc_evaluate_now (cond, pre);

	  if (n == 0)
	    or_expr = cond;
	  else
	    or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
				       boolean_type_node, or_expr, cond);

	  size = fold_build2_loc (input_location, MULT_EXPR,
				  gfc_array_index_type, size, tmp);
	  size = gfc_evaluate_now (size, pre);
	}
    }

  /* Get the size of the array.  */
  if (size && !callee_alloc)
    {
      tree elemsize;
      /* If or_expr is true, then the extent in at least one
	 dimension is zero and the size is set to zero.  */
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
			      or_expr, gfc_index_zero_node, size);

      nelem = size;
      if (class_expr == NULL_TREE)
	elemsize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      else
	/* For class temporaries the element size comes from the vtab of
	   the dynamic type.  */
	elemsize = gfc_class_vtab_size_get (class_expr);

      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, elemsize);
    }
  else
    {
      nelem = size;
      size = NULL_TREE;
    }

  gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
				    dynamic, dealloc);

  /* Record the rank of the temporary on the outermost loop of the
     scalarization chain.  */
  while (ss->parent)
    ss = ss->parent;

  if (ss->dimen > ss->loop->temp_dim)
    ss->loop->temp_dim = ss->dimen;

  return size;
}


/* Return the number of iterations in a loop that starts at START,
   ends at END, and has step STEP.  */

static tree
gfc_get_iteration_count (tree start, tree end, tree step)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (step);
  tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
  tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
  tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
			 build_int_cst (type, 1));
  /* An empty loop would yield a negative count; clamp it to zero.  */
  tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
			 build_int_cst (type, 0));
  return fold_convert (gfc_array_index_type, tmp);
}


/* Extend the data in array DESC by EXTRA elements.  */

static void
gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
{
  tree arg0, arg1;
  tree tmp;
  tree size;
  tree ubound;

  if (integer_zerop (extra))
    return;

  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);

  /* Add EXTRA to the upper bound.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 ubound, extra);
  gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);

  /* Get the value of the current data pointer.  */
  arg0 = gfc_conv_descriptor_data_get (desc);

  /* Calculate the new array size.  NOTE(review): UBOUND re-reads the
     descriptor field, which the modify above has already updated in the
     generated code, so this presumably computes new_ubound + 1 elements
     — confirm against gfc_conv_descriptor_ubound_set semantics.  */
  size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 ubound, gfc_index_one_node);
  arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
			  fold_convert (size_type_node, tmp),
			  fold_convert (size_type_node, size));

  /* Call the realloc() function.  */
  tmp = gfc_call_realloc (pblock, arg0, arg1);
  gfc_conv_descriptor_data_set (pblock, desc, tmp);
}


/* Return true if the bounds of iterator I can only be determined
   at run time.  */

static inline bool
gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
{
  return (i->start->expr_type != EXPR_CONSTANT
	  || i->end->expr_type != EXPR_CONSTANT
	  || i->step->expr_type != EXPR_CONSTANT);
}


/* Split the size of constructor element EXPR into the sum of two terms,
   one of which can be determined at compile time and one of which must
   be calculated at run time.  Set *SIZE to the former and return true
   if the latter might be nonzero.  */

static bool
gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
{
  if (expr->expr_type == EXPR_ARRAY)
    return gfc_get_array_constructor_size (size, expr->value.constructor);
  else if (expr->rank > 0)
    {
      /* Calculate everything at run time.  */
      mpz_set_ui (*size, 0);
      return true;
    }
  else
    {
      /* A single element.  */
      mpz_set_ui (*size, 1);
      return false;
    }
}


/* Like gfc_get_array_constructor_element_size, but applied to the whole
   of array constructor C.  */

static bool
gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
{
  gfc_constructor *c;
  gfc_iterator *i;
  mpz_t val;
  mpz_t len;
  bool dynamic;

  mpz_set_ui (*size, 0);
  mpz_init (len);
  mpz_init (val);

  dynamic = false;
  for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
    {
      i = c->iterator;
      if (i && gfc_iterator_has_dynamic_bounds (i))
	dynamic = true;
      else
	{
	  dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
	  if (i)
	    {
	      /* Multiply the static part of the element size by the
		 number of iterations.  */
	      mpz_sub (val, i->end->value.integer, i->start->value.integer);
	      mpz_fdiv_q (val, val, i->step->value.integer);
	      mpz_add_ui (val, val, 1);
	      if (mpz_sgn (val) > 0)
		mpz_mul (len, len, val);
	      else
		/* Zero-trip implied-do contributes nothing.  */
		mpz_set_ui (len, 0);
	    }
	  mpz_add (*size, *size, len);
	}
    }
  mpz_clear (len);
  mpz_clear (val);
  return dynamic;
}


/* Make sure offset is a variable.  */

static void
gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
			 tree * offsetvar)
{
  /* We should have already created the offset variable.  We cannot
     create it here because we may be in an inner scope.  */
  gcc_assert (*offsetvar != NULL_TREE);
  gfc_add_modify (pblock, *offsetvar, *poffset);
  *poffset = *offsetvar;
  TREE_USED (*offsetvar) = 1;
}


/* Variables needed for bounds-checking.
 */
/* True while the string length of the first constructor element has not
   yet been recorded in FIRST_LEN_VAL; consumed by
   gfc_trans_array_ctor_element.  */
static bool first_len;
/* Holds the string length of the first element, for comparison against
   the lengths of subsequent elements.  */
static tree first_len_val;
/* True when an explicit typespec supplied the character length for the
   constructor; the per-element length check is then skipped.  */
static bool typespec_chararray_ctor;

/* Store the value of scalar expression EXPR at position OFFSET in the
   data of the temporary array described by DESC.  The expression is
   converted via SE; character elements are copied as strings (or stored
   as pointers when the temporary holds pointers), and with bounds
   checking enabled all element lengths are verified to be equal.  The
   generated code is appended to PBLOCK.  */

static void
gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
			      tree offset, gfc_se * se, gfc_expr * expr)
{
  tree tmp;

  gfc_conv_expr (se, expr);

  /* Store the value.  */
  tmp = build_fold_indirect_ref_loc (input_location,
				     gfc_conv_descriptor_data_get (desc));
  tmp = gfc_build_array_ref (tmp, offset, NULL);

  if (expr->ts.type == BT_CHARACTER)
    {
      int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
      tree esize;

      /* Element size in characters: bytes per element divided by the
	 byte width of this character kind.  */
      esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
      esize = fold_convert (gfc_charlen_type_node, esize);
      esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
			   gfc_charlen_type_node, esize,
			   build_int_cst (gfc_charlen_type_node,
					  gfc_character_kinds[i].bit_size / 8));

      gfc_conv_string_parameter (se);
      if (POINTER_TYPE_P (TREE_TYPE (tmp)))
	{
	  /* The temporary is an array of pointers.  */
	  se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
	  gfc_add_modify (&se->pre, tmp, se->expr);
	}
      else
	{
	  /* The temporary is an array of string values.  */
	  tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
	  /* We know the temporary and the value will be the same length,
	     so can use memcpy.  */
	  gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
				 se->string_length, se->expr, expr->ts.kind);
	}
      if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
	{
	  if (first_len)
	    {
	      /* Record the length of the first element as the reference
		 value.  */
	      gfc_add_modify (&se->pre, first_len_val,
			      se->string_length);
	      first_len = false;
	    }
	  else
	    {
	      /* Verify that all constructor elements are of the same
		 length.  */
	      tree cond = fold_build2_loc (input_location, NE_EXPR,
					   boolean_type_node, first_len_val,
					   se->string_length);
	      gfc_trans_runtime_check
		(true, false, cond, &se->pre, &expr->where,
		 "Different CHARACTER lengths (%ld/%ld) in array constructor",
		 fold_convert (long_integer_type_node, first_len_val),
		 fold_convert (long_integer_type_node, se->string_length));
	    }
	}
    }
  else
    {
      /* TODO: Should the frontend already have done this conversion?  */
      se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
      gfc_add_modify (&se->pre, tmp, se->expr);
    }

  gfc_add_block_to_block (pblock, &se->pre);
  gfc_add_block_to_block (pblock, &se->post);
}


/* Add the contents of an array to the constructor.  DYNAMIC is as for
   gfc_trans_array_constructor_value.  */

static void
gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
				      tree type ATTRIBUTE_UNUSED,
				      tree desc, gfc_expr * expr,
				      tree * poffset, tree * offsetvar,
				      bool dynamic)
{
  gfc_se se;
  gfc_ss *ss;
  gfc_loopinfo loop;
  stmtblock_t body;
  tree tmp;
  tree size;
  int n;

  /* We need this to be a variable so we can increment it.  */
  gfc_put_offset_into_var (pblock, poffset, offsetvar);

  gfc_init_se (&se, NULL);

  /* Walk the array expression.  */
  ss = gfc_walk_expr (expr);
  gcc_assert (ss != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);
  gfc_add_ss_to_loop (&loop, ss);

  /* Initialize the loop.  */
  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &expr->where);

  /* Make sure the constructed array has room for the new data.  */
  if (dynamic)
    {
      /* Set SIZE to the total number of elements in the subarray.  */
      size = gfc_index_one_node;
      for (n = 0; n < loop.dimen; n++)
	{
	  tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
					 gfc_index_one_node);
	  size = fold_build2_loc (input_location, MULT_EXPR,
				  gfc_array_index_type, size, tmp);
	}

      /* Grow the constructed array by SIZE elements.  */
      gfc_grow_array (&loop.pre, desc, size);
    }

  /* Make the loop body.  */
  gfc_mark_ss_chain_used (ss, 1);
  gfc_start_scalarized_body (&loop, &body);
  gfc_copy_loopinfo_to_se (&se, &loop);
  se.ss = ss;

  gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
  gcc_assert (se.ss == gfc_ss_terminator);

  /* Increment the offset.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 *poffset, gfc_index_one_node);
  gfc_add_modify (&body, *poffset, tmp);

  /* Finish the loop.  */
  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&loop.pre, &loop.post);
  tmp = gfc_finish_block (&loop.pre);
  gfc_add_expr_to_block (pblock, tmp);

  gfc_cleanup_loop (&loop);
}


/* Assign the values to the elements of an array constructor.  DYNAMIC
   is true if descriptor DESC only contains enough data for the static
   size calculated by gfc_get_array_constructor_size.  When true, memory
   for the dynamic parts must be allocated using realloc.
 */

static void
gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
				   tree desc, gfc_constructor_base base,
				   tree * poffset, tree * offsetvar,
				   bool dynamic)
{
  tree tmp;
  tree start = NULL_TREE;
  tree end = NULL_TREE;
  tree step = NULL_TREE;
  stmtblock_t body;
  gfc_se se;
  mpz_t size;
  gfc_constructor *c;

  tree shadow_loopvar = NULL_TREE;
  gfc_saved_var saved_loopvar;

  mpz_init (size);
  for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
    {
      /* If this is an iterator or an array, the offset must be a variable.  */
      if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
	gfc_put_offset_into_var (pblock, poffset, offsetvar);

      /* Shadowing the iterator avoids changing its value and saves us from
	 keeping track of it. Further, it makes sure that there's always a
	 backend-decl for the symbol, even if there wasn't one before,
	 e.g. in the case of an iterator that appears in a specification
	 expression in an interface mapping.  */
      if (c->iterator)
	{
	  gfc_symbol *sym;
	  tree type;

	  /* Evaluate loop bounds before substituting the loop variable
	     in case they depend on it.  Such a case is invalid, but it is
	     not more expensive to do the right thing here.
	     See PR 44354.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->start);
	  gfc_add_block_to_block (pblock, &se.pre);
	  start = gfc_evaluate_now (se.expr, pblock);

	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->end);
	  gfc_add_block_to_block (pblock, &se.pre);
	  end = gfc_evaluate_now (se.expr, pblock);

	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->step);
	  gfc_add_block_to_block (pblock, &se.pre);
	  step = gfc_evaluate_now (se.expr, pblock);

	  sym = c->iterator->var->symtree->n.sym;
	  type = gfc_typenode_for_spec (&sym->ts);

	  shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
	  gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
	}

      gfc_start_block (&body);

      if (c->expr->expr_type == EXPR_ARRAY)
	{
	  /* Array constructors can be nested.  */
	  gfc_trans_array_constructor_value (&body, type, desc,
					     c->expr->value.constructor,
					     poffset, offsetvar, dynamic);
	}
      else if (c->expr->rank > 0)
	{
	  gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
						poffset, offsetvar, dynamic);
	}
      else
	{
	  /* This code really upsets the gimplifier so don't bother for now.  */
	  gfc_constructor *p;
	  HOST_WIDE_INT n;
	  /* Shadows the function-level mpz_t SIZE within this branch.  */
	  HOST_WIDE_INT size;

	  /* Count the run of consecutive scalar constants starting at C.  */
	  p = c;
	  n = 0;
	  while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
	    {
	      p = gfc_constructor_next (p);
	      n++;
	    }
	  if (n < 4)
	    {
	      /* Scalar values.  */
	      gfc_init_se (&se, NULL);
	      gfc_trans_array_ctor_element (&body, desc, *poffset,
					    &se, c->expr);

	      *poffset = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type,
					  *poffset, gfc_index_one_node);
	    }
	  else
	    {
	      /* Collect multiple scalar constants into a constructor.  */
	      vec<constructor_elt, va_gc> *v = NULL;
	      tree init;
	      tree bound;
	      tree tmptype;
	      HOST_WIDE_INT idx = 0;

	      p = c;
	      /* Count the number of consecutive scalar constants.  */
	      while (p && !(p->iterator
			    || p->expr->expr_type != EXPR_CONSTANT))
		{
		  gfc_init_se (&se, NULL);
		  gfc_conv_constant (&se, p->expr);

		  /* NOTE(review): this tests C (the previously handled
		     element) rather than P; presumably all elements of a
		     constructor share one type so the two are
		     equivalent — confirm.  */
		  if (c->expr->ts.type != BT_CHARACTER)
		    se.expr = fold_convert (type, se.expr);
		  /* For constant character array constructors we build
		     an array of pointers.  */
		  else if (POINTER_TYPE_P (type))
		    se.expr = gfc_build_addr_expr
			(gfc_get_pchar_type (p->expr->ts.kind),
			 se.expr);

		  CONSTRUCTOR_APPEND_ELT (v,
					  build_int_cst (gfc_array_index_type,
							 idx++),
					  se.expr);
		  c = p;
		  p = gfc_constructor_next (p);
		}

	      bound = size_int (n - 1);
	      /* Create an array type to hold them.  */
	      tmptype = build_range_type (gfc_array_index_type,
					  gfc_index_zero_node, bound);
	      tmptype = build_array_type (type, tmptype);

	      init = build_constructor (tmptype, v);
	      TREE_CONSTANT (init) = 1;
	      TREE_STATIC (init) = 1;
	      /* Create a static variable to hold the data.  */
	      tmp = gfc_create_var (tmptype, "data");
	      TREE_STATIC (tmp) = 1;
	      TREE_CONSTANT (tmp) = 1;
	      TREE_READONLY (tmp) = 1;
	      DECL_INITIAL (tmp) = init;
	      init = tmp;

	      /* Use BUILTIN_MEMCPY to assign the values.  */
	      tmp = gfc_conv_descriptor_data_get (desc);
	      tmp = build_fold_indirect_ref_loc (input_location,
						 tmp);
	      tmp = gfc_build_array_ref (tmp, *poffset, NULL);
	      tmp = gfc_build_addr_expr (NULL_TREE, tmp);
	      init = gfc_build_addr_expr (NULL_TREE, init);

	      size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
	      bound = build_int_cst (size_type_node, n * size);
	      tmp = build_call_expr_loc (input_location,
					 builtin_decl_explicit (BUILT_IN_MEMCPY),
					 3, tmp, init, bound);
	      gfc_add_expr_to_block (&body, tmp);

	      *poffset = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type, *poffset,
					  build_int_cst (gfc_array_index_type, n));
	    }
	  if (!INTEGER_CST_P (*poffset))
	    {
	      gfc_add_modify (&body, *offsetvar, *poffset);
	      *poffset = *offsetvar;
	    }
	}

      /* The frontend should already have done any expansions
	 at compile-time.  */
      if (!c->iterator)
	{
	  /* Pass the code as is.  */
	  tmp = gfc_finish_block (&body);
	  gfc_add_expr_to_block (pblock, tmp);
	}
      else
	{
	  /* Build the implied do-loop.  */
	  stmtblock_t implied_do_block;
	  tree cond;
	  tree exit_label;
	  tree loopbody;
	  tree tmp2;

	  loopbody = gfc_finish_block (&body);

	  /* Create a new block that holds the implied-do loop. A temporary
	     loop-variable is used.  */
	  gfc_start_block(&implied_do_block);

	  /* Initialize the loop.  */
	  gfc_add_modify (&implied_do_block, shadow_loopvar, start);

	  /* If this array expands dynamically, and the number of iterations
	     is not constant, we won't have allocated space for the static
	     part of C->EXPR's size.  Do that now.  */
	  if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
	    {
	      /* Get the number of iterations.  */
	      tmp = gfc_get_iteration_count (shadow_loopvar, end, step);

	      /* Get the static part of C->EXPR's size.  */
	      gfc_get_array_constructor_element_size (&size, c->expr);
	      tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);

	      /* Grow the array by TMP * TMP2 elements.  */
	      tmp = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type, tmp, tmp2);
	      gfc_grow_array (&implied_do_block, desc, tmp);
	    }

	  /* Generate the loop body.  */
	  exit_label = gfc_build_label_decl (NULL_TREE);
	  gfc_start_block (&body);

	  /* Generate the exit condition.  Depending on the sign of
	     the step variable we have to generate the correct
	     comparison.  */
	  tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
				 step, build_int_cst (TREE_TYPE (step), 0));
	  cond = fold_build3_loc (input_location, COND_EXPR,
		      boolean_type_node, tmp,
		      fold_build2_loc (input_location, GT_EXPR,
				       boolean_type_node, shadow_loopvar, end),
		      fold_build2_loc (input_location, LT_EXPR,
				       boolean_type_node, shadow_loopvar, end));
	  tmp = build1_v (GOTO_EXPR, exit_label);
	  TREE_USED (exit_label) = 1;
	  tmp = build3_v (COND_EXPR, cond, tmp,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&body, tmp);

	  /* The main loop body.  */
	  gfc_add_expr_to_block (&body, loopbody);

	  /* Increase loop variable by step.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 TREE_TYPE (shadow_loopvar), shadow_loopvar,
				 step);
	  gfc_add_modify (&body, shadow_loopvar, tmp);

	  /* Finish the loop.  */
	  tmp = gfc_finish_block (&body);
	  tmp = build1_v (LOOP_EXPR, tmp);
	  gfc_add_expr_to_block (&implied_do_block, tmp);

	  /* Add the exit label.  */
	  tmp = build1_v (LABEL_EXPR, exit_label);
	  gfc_add_expr_to_block (&implied_do_block, tmp);

	  /* Finish the implied-do loop.  */
	  tmp = gfc_finish_block(&implied_do_block);
	  gfc_add_expr_to_block(pblock, tmp);

	  /* Restore the real iterator variable.  */
	  gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
	}
    }
  mpz_clear (size);
}


/* A catch-all to obtain the string length for anything that is not
   a substring of non-constant length, a constant, array or variable.  */

static void
get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
{
  gfc_se se;

  /* Don't bother if we already know the length is a constant.  */
  if (*len && INTEGER_CST_P (*len))
    return;

  if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
      && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
    {
      /* This is easy.  */
      gfc_conv_const_charlen (e->ts.u.cl);
      *len = e->ts.u.cl->backend_decl;
    }
  else
    {
      /* Otherwise, be brutal even if inefficient.  */
      gfc_init_se (&se, NULL);

      /* No function call, in case of side effects.  */
      se.no_function_call = 1;
      if (e->rank == 0)
	gfc_conv_expr (&se, e);
      else
	gfc_conv_expr_descriptor (&se, e);

      /* Fix the value.  */
      *len = gfc_evaluate_now (se.string_length, &se.pre);

      gfc_add_block_to_block (block, &se.pre);
      gfc_add_block_to_block (block, &se.post);

      e->ts.u.cl->backend_decl = *len;
    }
}


/* Figure out the string length of a variable reference expression.
   Used by get_array_ctor_strlen.  */

static void
get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
{
  gfc_ref *ref;
  gfc_typespec *ts;
  mpz_t char_len;

  /* Don't bother if we already know the length is a constant.  */
  if (*len && INTEGER_CST_P (*len))
    return;

  ts = &expr->symtree->n.sym->ts;
  for (ref = expr->ref; ref; ref = ref->next)
    {
      switch (ref->type)
	{
	case REF_ARRAY:
	  /* Array references don't change the string length.  */
	  break;

	case REF_COMPONENT:
	  /* Use the length of the component.  */
	  ts = &ref->u.c.component->ts;
	  break;

	case REF_SUBSTRING:
	  if (ref->u.ss.start->expr_type != EXPR_CONSTANT
	      || ref->u.ss.end->expr_type != EXPR_CONSTANT)
	    {
	      /* Note that this might evaluate expr.  */
	      get_array_ctor_all_strlen (block, expr, len);
	      return;
	    }
	  /* Constant substring: length = end - start + 1.  */
	  mpz_init_set_ui (char_len, 1);
	  mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
	  mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
	  *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
	  *len = convert (gfc_charlen_type_node, *len);
	  mpz_clear (char_len);
	  return;

	default:
	  gcc_unreachable ();
	}
    }

  *len = ts->u.cl->backend_decl;
}


/* Figure out the string length of a character array constructor.
   If len is NULL, don't calculate the length; this happens for recursive calls
   when a sub-array-constructor is an element but not at the first position,
   so when we're not interested in the length.
   Returns TRUE if all elements are character constants.  */

bool
get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
{
  gfc_constructor *c;
  bool is_const;

  is_const = TRUE;

  if (gfc_constructor_first (base) == NULL)
    {
      if (len)
	*len = build_int_cstu (gfc_charlen_type_node, 0);
      return is_const;
    }

  /* Loop over all constructor elements to find out is_const, but in len we
     want to store the length of the first, not the last, element.  We can
     of course exit the loop as soon as is_const is found to be false.  */
  for (c = gfc_constructor_first (base);
       c && is_const; c = gfc_constructor_next (c))
    {
      switch (c->expr->expr_type)
	{
	case EXPR_CONSTANT:
	  if (len && !(*len && INTEGER_CST_P (*len)))
	    *len = build_int_cstu (gfc_charlen_type_node,
				   c->expr->value.character.length);
	  break;

	case EXPR_ARRAY:
	  if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
	    is_const = false;
	  break;

	case EXPR_VARIABLE:
	  is_const = false;
	  if (len)
	    get_array_ctor_var_strlen (block, c->expr, len);
	  break;

	default:
	  is_const = false;
	  if (len)
	    get_array_ctor_all_strlen (block, c->expr, len);
	  break;
	}

      /* After the first iteration, we don't want the length modified.  */
      len = NULL;
    }

  return is_const;
}

/* Check whether the array constructor C consists entirely of constant
   elements, and if so returns the number of those elements, otherwise
   return zero.  Note, an empty or NULL array constructor returns zero.  */

unsigned HOST_WIDE_INT
gfc_constant_array_constructor_p (gfc_constructor_base base)
{
  unsigned HOST_WIDE_INT nelem = 0;

  gfc_constructor *c = gfc_constructor_first (base);
  while (c)
    {
      if (c->iterator
	  || c->expr->rank > 0
	  || c->expr->expr_type != EXPR_CONSTANT)
	return 0;
      c = gfc_constructor_next (c);
      nelem++;
    }
  return nelem;
}


/* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
   and the tree type of its elements, TYPE, return a static constant
   variable that is compile-time initialized.
 */

tree
gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
{
  tree tmptype, init, tmp;
  HOST_WIDE_INT nelem;
  gfc_constructor *c;
  gfc_array_spec as;
  gfc_se se;
  int i;
  vec<constructor_elt, va_gc> *v = NULL;

  /* First traverse the constructor list, converting the constants
     to tree to build an initializer.  */
  nelem = 0;
  c = gfc_constructor_first (expr->value.constructor);
  while (c)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_constant (&se, c->expr);
      if (c->expr->ts.type != BT_CHARACTER)
	se.expr = fold_convert (type, se.expr);
      else if (POINTER_TYPE_P (type))
	/* Character elements are stored as pointers to the string data.  */
	se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
				       se.expr);
      CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
			      se.expr);
      c = gfc_constructor_next (c);
      nelem++;
    }

  /* Next determine the tree type for the array.  We use the gfortran
     front-end's gfc_get_nodesc_array_type in order to create a suitable
     GFC_ARRAY_TYPE_P that may be used by the scalarizer.  */

  memset (&as, 0, sizeof (gfc_array_spec));

  as.rank = expr->rank;
  as.type = AS_EXPLICIT;
  if (!expr->shape)
    {
      /* No shape information: treat as a rank-1 array of NELEM elements.  */
      as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
      as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
				      NULL, nelem - 1);
    }
  else
    for (i = 0; i < expr->rank; i++)
      {
	int tmp = (int) mpz_get_si (expr->shape[i]);
	as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
	as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
					NULL, tmp - 1);
      }

  tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);

  /* as is not needed anymore.  */
  for (i = 0; i < as.rank + as.corank; i++)
    {
      gfc_free_expr (as.lower[i]);
      gfc_free_expr (as.upper[i]);
    }

  init = build_constructor (tmptype, v);

  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;

  /* Create the static variable carrying the initializer.  */
  tmp = build_decl (input_location, VAR_DECL, create_tmp_var_name ("A"),
		    tmptype);
  DECL_ARTIFICIAL (tmp) = 1;
  DECL_IGNORED_P (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_CONSTANT (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  pushdecl (tmp);

  return tmp;
}


/* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
   This mostly initializes the scalarizer state info structure with the
   appropriate values to directly use the array created by the function
   gfc_build_constant_array_constructor.  */

static void
trans_constant_array_constructor (gfc_ss * ss, tree type)
{
  gfc_array_info *info;
  tree tmp;
  int i;

  tmp = gfc_build_constant_array_constructor (ss->info->expr, type);

  info = &ss->info->data.array;

  info->descriptor = tmp;
  info->data = gfc_build_addr_expr (NULL_TREE, tmp);
  info->offset = gfc_index_zero_node;

  /* The static array is packed and zero-based, so all scalarizer
     adjustments are trivial.  */
  for (i = 0; i < ss->dimen; i++)
    {
      info->delta[i] = gfc_index_zero_node;
      info->start[i] = gfc_index_zero_node;
      info->end[i] = gfc_index_zero_node;
      info->stride[i] = gfc_index_one_node;
    }
}


/* Return the total rank of the loop nest rooted at LOOP, i.e. the sum
   of the dimensions of LOOP and all of its parents.  */

static int
get_rank (gfc_loopinfo *loop)
{
  int rank;

  rank = 0;
  for (; loop; loop = loop->parent)
    rank += loop->dimen;

  return rank;
}


/* Helper routine of gfc_trans_array_constructor to determine if the
   bounds of the loop specified by LOOP are constant and simple enough
   to use with trans_constant_array_constructor.  Returns the
   iteration count of the loop if suitable, and NULL_TREE otherwise.
*/ 2121 2122static tree 2123constant_array_constructor_loop_size (gfc_loopinfo * l) 2124{ 2125 gfc_loopinfo *loop; 2126 tree size = gfc_index_one_node; 2127 tree tmp; 2128 int i, total_dim; 2129 2130 total_dim = get_rank (l); 2131 2132 for (loop = l; loop; loop = loop->parent) 2133 { 2134 for (i = 0; i < loop->dimen; i++) 2135 { 2136 /* If the bounds aren't constant, return NULL_TREE. */ 2137 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i])) 2138 return NULL_TREE; 2139 if (!integer_zerop (loop->from[i])) 2140 { 2141 /* Only allow nonzero "from" in one-dimensional arrays. */ 2142 if (total_dim != 1) 2143 return NULL_TREE; 2144 tmp = fold_build2_loc (input_location, MINUS_EXPR, 2145 gfc_array_index_type, 2146 loop->to[i], loop->from[i]); 2147 } 2148 else 2149 tmp = loop->to[i]; 2150 tmp = fold_build2_loc (input_location, PLUS_EXPR, 2151 gfc_array_index_type, tmp, gfc_index_one_node); 2152 size = fold_build2_loc (input_location, MULT_EXPR, 2153 gfc_array_index_type, size, tmp); 2154 } 2155 } 2156 2157 return size; 2158} 2159 2160 2161static tree * 2162get_loop_upper_bound_for_array (gfc_ss *array, int array_dim) 2163{ 2164 gfc_ss *ss; 2165 int n; 2166 2167 gcc_assert (array->nested_ss == NULL); 2168 2169 for (ss = array; ss; ss = ss->parent) 2170 for (n = 0; n < ss->loop->dimen; n++) 2171 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n)) 2172 return &(ss->loop->to[n]); 2173 2174 gcc_unreachable (); 2175} 2176 2177 2178static gfc_loopinfo * 2179outermost_loop (gfc_loopinfo * loop) 2180{ 2181 while (loop->parent != NULL) 2182 loop = loop->parent; 2183 2184 return loop; 2185} 2186 2187 2188/* Array constructors are handled by constructing a temporary, then using that 2189 within the scalarization loop. This is not optimal, but seems by far the 2190 simplest method. 
 */

static void
trans_array_constructor (gfc_ss * ss, locus * where)
{
  gfc_constructor_base c;
  tree offset;
  tree offsetvar;
  tree desc;
  tree type;
  tree tmp;
  tree *loop_ubound0;
  bool dynamic;
  bool old_first_len, old_typespec_chararray_ctor;
  tree old_first_len_val;
  gfc_loopinfo *loop, *outer_loop;
  gfc_ss_info *ss_info;
  gfc_expr *expr;
  gfc_ss *s;

  /* Save the old values for nested checking.  */
  old_first_len = first_len;
  old_first_len_val = first_len_val;
  old_typespec_chararray_ctor = typespec_chararray_ctor;

  loop = ss->loop;
  outer_loop = outermost_loop (loop);
  ss_info = ss->info;
  expr = ss_info->expr;

  /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
     typespec was given for the array constructor.  */
  typespec_chararray_ctor = (expr->ts.u.cl
			     && expr->ts.u.cl->length_from_typespec);

  if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
    {
      first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
      first_len = true;
    }

  gcc_assert (ss->dimen == ss->loop->dimen);

  c = expr->value.constructor;
  if (expr->ts.type == BT_CHARACTER)
    {
      bool const_string;

      /* get_array_ctor_strlen walks the elements of the constructor, if a
	 typespec was given, we already know the string length and want the one
	 specified there.  */
      if (typespec_chararray_ctor && expr->ts.u.cl->length
	  && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
	{
	  gfc_se length_se;

	  const_string = false;
	  gfc_init_se (&length_se, NULL);
	  gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
			      gfc_charlen_type_node);
	  ss_info->string_length = length_se.expr;
	  gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &length_se.post);
	}
      else
	const_string = get_array_ctor_strlen (&outer_loop->pre, c,
					      &ss_info->string_length);

      /* Complex character array constructors should have been taken care of
	 and not end up here.  */
      gcc_assert (ss_info->string_length);

      expr->ts.u.cl->backend_decl = ss_info->string_length;

      type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
      if (const_string)
	type = build_pointer_type (type);
    }
  else
    type = gfc_typenode_for_spec (&expr->ts);

  /* See if the constructor determines the loop bounds.  */
  dynamic = false;

  loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);

  if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
    {
      /* We have a multidimensional parameter.  Set the loop bounds of each
	 (possibly nested) loop dimension from the known constructor shape,
	 zero based.  */
      for (s = ss; s; s = s->parent)
	{
	  int n;
	  for (n = 0; n < s->loop->dimen; n++)
	    {
	      s->loop->from[n] = gfc_index_zero_node;
	      s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
						     gfc_index_integer_kind);
	      s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
						gfc_array_index_type,
						s->loop->to[n],
						gfc_index_one_node);
	    }
	}
    }

  if (*loop_ubound0 == NULL_TREE)
    {
      mpz_t size;

      /* We should have a 1-dimensional, zero-based loop.  */
      gcc_assert (loop->parent == NULL && loop->nested == NULL);
      gcc_assert (loop->dimen == 1);
      gcc_assert (integer_zerop (loop->from[0]));

      /* Split the constructor size into a static part and a dynamic part.
	 Allocate the static size up-front and record whether the dynamic
	 size might be nonzero.  */
      mpz_init (size);
      dynamic = gfc_get_array_constructor_size (&size, c);
      mpz_sub_ui (size, size, 1);
      loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
      mpz_clear (size);
    }

  /* Special case constant array constructors: use a static table instead
     of a runtime-filled temporary when every element is constant and the
     loop iterates over exactly the constructor's elements.  */
  if (!dynamic)
    {
      unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
      if (nelem > 0)
	{
	  tree size = constant_array_constructor_loop_size (loop);
	  if (size && compare_tree_int (size, nelem) == 0)
	    {
	      trans_constant_array_constructor (ss, type);
	      goto finish;
	    }
	}
    }

  gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
			       NULL_TREE, dynamic, true, false, where);

  desc = ss_info->data.array.descriptor;
  offset = gfc_index_zero_node;
  offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
  TREE_NO_WARNING (offsetvar) = 1;
  TREE_USED (offsetvar) = 0;
  gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
				     &offset, &offsetvar, dynamic);

  /* If the array grows dynamically, the upper bound of the loop variable
     is determined by the array's final upper bound.  */
  if (dynamic)
    {
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
			     gfc_array_index_type,
			     offsetvar, gfc_index_one_node);
      tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
      gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
      if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
	gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
      else
	*loop_ubound0 = tmp;
    }

  if (TREE_USED (offsetvar))
    pushdecl (offsetvar);
  else
    gcc_assert (INTEGER_CST_P (offset));

#if 0
  /* Disable bound checking for now because it's probably broken.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      gcc_unreachable ();
    }
#endif

finish:
  /* Restore old values of globals.  */
  first_len = old_first_len;
  first_len_val = old_first_len_val;
  typespec_chararray_ctor = old_typespec_chararray_ctor;
}


/* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
   called after evaluating all of INFO's vector dimensions.  Go through
   each such vector dimension and see if we can now fill in any missing
   loop bounds.  */

static void
set_vector_loop_bounds (gfc_ss * ss)
{
  gfc_loopinfo *loop, *outer_loop;
  gfc_array_info *info;
  gfc_se se;
  tree tmp;
  tree desc;
  tree zero;
  int n;
  int dim;

  outer_loop = outermost_loop (ss->loop);

  info = &ss->info->data.array;

  for (; ss; ss = ss->parent)
    {
      loop = ss->loop;

      for (n = 0; n < loop->dimen; n++)
	{
	  dim = ss->dim[n];
	  if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
	      || loop->to[n] != NULL)
	    continue;

	  /* Loop variable N indexes vector dimension DIM, and we don't
	     yet know the upper bound of loop variable N.  Set it to the
	     difference between the vector's upper and lower bounds.  */
	  gcc_assert (loop->from[n] == gfc_index_zero_node);
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_VECTOR);

	  gfc_init_se (&se, NULL);
	  desc = info->subscript[dim]->info->data.array.descriptor;
	  zero = gfc_rank_cst[0];
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type,
				 gfc_conv_descriptor_ubound_get (desc, zero),
				 gfc_conv_descriptor_lbound_get (desc, zero));
	  tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
	  loop->to[n] = tmp;
	}
    }
}


/* Add the pre and post chains for all the scalar expressions in a SS chain
   to loop.  This is called after the loop parameters have been calculated,
   but before the actual scalarizing loops.  */

static void
gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
		      locus * where)
{
  gfc_loopinfo *nested_loop, *outer_loop;
  gfc_se se;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  gfc_expr *expr;
  int n;

  /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
     arguments could get evaluated multiple times.  */
  if (ss->is_alloc_lhs)
    return;

  outer_loop = outermost_loop (loop);

  /* TODO: This can generate bad code if there are ordering dependencies,
     e.g., a callee allocated function and an unknown size constructor.  */
  gcc_assert (ss != NULL);

  for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gcc_assert (ss);

      /* Cross loop arrays are handled from within the most nested loop.  */
      if (ss->nested_ss != NULL)
	continue;

      ss_info = ss->info;
      expr = ss_info->expr;
      info = &ss_info->data.array;

      switch (ss_info->type)
	{
	case GFC_SS_SCALAR:
	  /* Scalar expression.  Evaluate this now.  This includes elemental
	     dimension indices, but not array section bounds.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);

	  if (expr->ts.type != BT_CHARACTER
	      && !gfc_is_alloc_class_scalar_function (expr))
	    {
	      /* Move the evaluation of scalar expressions outside the
		 scalarization loop, except for WHERE assignments.  */
	      if (subscript)
		se.expr = convert(gfc_array_index_type, se.expr);
	      if (!ss_info->where)
		se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
	      gfc_add_block_to_block (&outer_loop->pre, &se.post);
	    }
	  else
	    gfc_add_block_to_block (&outer_loop->post, &se.post);

	  ss_info->data.scalar.value = se.expr;
	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_REFERENCE:
	  /* Scalar argument to elemental procedure.  */
	  gfc_init_se (&se, NULL);
	  if (ss_info->can_be_null_ref)
	    {
	      /* If the actual argument can be absent (in other words, it can
		 be a NULL reference), don't try to evaluate it; pass instead
		 the reference directly.  */
	      gfc_conv_expr_reference (&se, expr);
	    }
	  else
	    {
	      /* Otherwise, evaluate the argument outside the loop and pass
		 a reference to the value.  */
	      gfc_conv_expr (&se, expr);
	    }

	  /* Ensure that a pointer to the string is stored.  */
	  if (expr->ts.type == BT_CHARACTER)
	    gfc_conv_string_parameter (&se);

	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  if (gfc_is_class_scalar_expr (expr))
	    /* This is necessary because the dynamic type will always be
	       larger than the declared type.  In consequence, assigning
	       the value to a temporary could segfault.
	       OOP-TODO: see if this is generally correct or if the value
	       has to be written to an allocated temporary, whose address
	       is passed via ss_info.  */
	    ss_info->data.scalar.value = se.expr;
	  else
	    ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
							   &outer_loop->pre);

	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_SECTION:
	  /* Add the expressions for scalar and vector subscripts.  */
	  for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
	    if (info->subscript[n])
	      gfc_add_loop_ss_code (loop, info->subscript[n], true, where);

	  set_vector_loop_bounds (ss);
	  break;

	case GFC_SS_VECTOR:
	  /* Get the vector's descriptor and store it in SS.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_descriptor (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  info->descriptor = se.expr;
	  break;

	case GFC_SS_INTRINSIC:
	  gfc_add_intrinsic_ss_code (loop, ss);
	  break;

	case GFC_SS_FUNCTION:
	  /* Array function return value.  We call the function and save its
	     result in a temporary for use inside the loop.  */
	  gfc_init_se (&se, NULL);
	  se.loop = loop;
	  se.ss = ss;
	  gfc_conv_expr (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_CONSTRUCTOR:
	  if (expr->ts.type == BT_CHARACTER
	      && ss_info->string_length == NULL
	      && expr->ts.u.cl
	      && expr->ts.u.cl->length)
	    {
	      gfc_init_se (&se, NULL);
	      gfc_conv_expr_type (&se, expr->ts.u.cl->length,
				  gfc_charlen_type_node);
	      ss_info->string_length = se.expr;
	      gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	      gfc_add_block_to_block (&outer_loop->post, &se.post);
	    }
	  trans_array_constructor (ss, where);
	  break;

	case GFC_SS_TEMP:
	case GFC_SS_COMPONENT:
	  /* Do nothing.  These are handled elsewhere.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Recurse into any nested loops unless we are processing subscripts,
     which belong to the current loop only.  */
  if (!subscript)
    for (nested_loop = loop->nested; nested_loop;
	 nested_loop = nested_loop->next)
      gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
}


/* Translate expressions for the descriptor and data pointer of a SS.  */
/*GCC ARRAYS*/

static void
gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
{
  gfc_se se;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  tree tmp;

  ss_info = ss->info;
  info = &ss_info->data.array;

  /* Get the descriptor for the array to be scalarized.  */
  gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
  gfc_init_se (&se, NULL);
  se.descriptor_only = 1;
  gfc_conv_expr_lhs (&se, ss_info->expr);
  gfc_add_block_to_block (block, &se.pre);
  info->descriptor = se.expr;
  ss_info->string_length = se.string_length;

  if (base)
    {
      /* Also the data pointer.  */
      tmp = gfc_conv_array_data (se.expr);
      /* If this is a variable or address of a variable we use it directly.
	 Otherwise we must evaluate it now to avoid breaking dependency
	 analysis by pulling the expressions for elemental array indices
	 inside the loop.  */
      if (!(DECL_P (tmp)
	    || (TREE_CODE (tmp) == ADDR_EXPR
		&& DECL_P (TREE_OPERAND (tmp, 0)))))
	tmp = gfc_evaluate_now (tmp, block);
      info->data = tmp;

      tmp = gfc_conv_array_offset (se.expr);
      info->offset = gfc_evaluate_now (tmp, block);

      /* Make absolutely sure that the saved_offset is indeed saved
	 so that the variable is still accessible after the loops
	 are translated.  */
      info->saved_offset = info->offset;
    }
}


/* Initialize a gfc_loopinfo structure.
 */

void
gfc_init_loopinfo (gfc_loopinfo * loop)
{
  int n;

  memset (loop, 0, sizeof (gfc_loopinfo));
  gfc_init_block (&loop->pre);
  gfc_init_block (&loop->post);

  /* Initially scalarize in order and default to no loop reversal.  */
  for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
    {
      loop->order[n] = n;
      loop->reverse[n] = GFC_INHIBIT_REVERSE;
    }

  loop->ss = gfc_ss_terminator;
}


/* Copies the loop variable info to a gfc_se structure.  Does not copy the SS
   chain.  */

void
gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
{
  se->loop = loop;
}


/* Return an expression for the data pointer of an array.  */

tree
gfc_conv_array_data (tree descriptor)
{
  tree type;

  type = TREE_TYPE (descriptor);
  if (GFC_ARRAY_TYPE_P (type))
    {
      if (TREE_CODE (type) == POINTER_TYPE)
	return descriptor;
      else
	{
	  /* Descriptorless arrays.  */
	  return gfc_build_addr_expr (NULL_TREE, descriptor);
	}
    }
  else
    return gfc_conv_descriptor_data_get (descriptor);
}


/* Return an expression for the base offset of an array.  */

tree
gfc_conv_array_offset (tree descriptor)
{
  tree type;

  type = TREE_TYPE (descriptor);
  if (GFC_ARRAY_TYPE_P (type))
    return GFC_TYPE_ARRAY_OFFSET (type);
  else
    return gfc_conv_descriptor_offset_get (descriptor);
}


/* Get an expression for the array stride.  */

tree
gfc_conv_array_stride (tree descriptor, int dim)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (descriptor);

  /* For descriptorless arrays use the array size.  */
  tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
  if (tmp != NULL_TREE)
    return tmp;

  tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
  return tmp;
}


/* Like gfc_conv_array_stride, but for the lower bound.  */

tree
gfc_conv_array_lbound (tree descriptor, int dim)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (descriptor);

  tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
  if (tmp != NULL_TREE)
    return tmp;

  tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
  return tmp;
}


/* Like gfc_conv_array_stride, but for the upper bound.  */

tree
gfc_conv_array_ubound (tree descriptor, int dim)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (descriptor);

  tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
  if (tmp != NULL_TREE)
    return tmp;

  /* This should only ever happen when passing an assumed shape array
     as an actual parameter.  The value will never be used.  */
  if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
    return gfc_index_zero_node;

  tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
  return tmp;
}


/* Generate code to perform an array index bound check.  Returns INDEX
   (evaluated into a temporary); the check itself is appended to se->pre
   as a runtime call.  No-op unless -fcheck=bounds is enabled.  */

static tree
trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
			 locus * where, bool check_upper)
{
  tree fault;
  tree tmp_lo, tmp_up;
  tree descriptor;
  char *msg;
  const char * name = NULL;

  if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
    return index;

  descriptor = ss->info->data.array.descriptor;

  index = gfc_evaluate_now (index, &se->pre);

  /* We find a name for the error message.  */
  name = ss->info->expr->symtree->n.sym->name;
  gcc_assert (name != NULL);

  /* Prefer the name of the backing declaration when there is one.  */
  if (TREE_CODE (descriptor) == VAR_DECL)
    name = IDENTIFIER_POINTER (DECL_NAME (descriptor));

  /* If upper bound is present, include both bounds in the error message.  */
  if (check_upper)
    {
      tmp_lo = gfc_conv_array_lbound (descriptor, n);
      tmp_up = gfc_conv_array_ubound (descriptor, n);

      if (name)
	msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			 "outside of expected range (%%ld:%%ld)", n+1, name);
      else
	msg = xasprintf ("Index '%%ld' of dimension %d "
			 "outside of expected range (%%ld:%%ld)", n+1);

      fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
			       index, tmp_lo);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo),
			       fold_convert (long_integer_type_node, tmp_up));
      fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
			       index, tmp_up);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo),
			       fold_convert (long_integer_type_node, tmp_up));
      free (msg);
    }
  else
    {
      tmp_lo = gfc_conv_array_lbound (descriptor, n);

      if (name)
	msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			 "below lower bound of %%ld", n+1, name);
      else
	msg = xasprintf ("Index '%%ld' of dimension %d "
			 "below lower bound of %%ld", n+1);

      fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
			       index, tmp_lo);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo));
      free (msg);
    }

  return index;
}

/* Return the offset for an index.  Performs bound checking for elemental
   dimensions.  Single element references are processed separately.
   DIM is the array dimension, I is the loop dimension.  */

static tree
conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
			 gfc_array_ref * ar, tree stride)
{
  gfc_array_info *info;
  tree index;
  tree desc;
  tree data;

  info = &ss->info->data.array;

  /* Get the index into the array for this dimension.  */
  if (ar)
    {
      gcc_assert (ar->type != AR_ELEMENT);
      switch (ar->dimen_type[dim])
	{
	case DIMEN_THIS_IMAGE:
	  gcc_unreachable ();
	  break;
	case DIMEN_ELEMENT:
	  /* Elemental dimension.  */
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_SCALAR);
	  /* We've already translated this value outside the loop.  */
	  index = info->subscript[dim]->info->data.scalar.value;

	  /* No upper-bound check for the last dimension of an assumed-size
	     array.  */
	  index = trans_array_bound_check (se, ss, index, dim, &ar->where,
					   ar->as->type != AS_ASSUMED_SIZE
					   || dim < ar->dimen - 1);
	  break;

	case DIMEN_VECTOR:
	  gcc_assert (info && se->loop);
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_VECTOR);
	  desc = info->subscript[dim]->info->data.array.descriptor;

	  /* Get a zero-based index into the vector.  */
	  index = fold_build2_loc (input_location, MINUS_EXPR,
				   gfc_array_index_type,
				   se->loop->loopvar[i], se->loop->from[i]);

	  /* Multiply the index by the stride.  */
	  index = fold_build2_loc (input_location, MULT_EXPR,
				   gfc_array_index_type,
				   index, gfc_conv_array_stride (desc, 0));

	  /* Read the vector to get an index into info->descriptor.  */
	  data = build_fold_indirect_ref_loc (input_location,
					      gfc_conv_array_data (desc));
	  index = gfc_build_array_ref (data, index, NULL);
	  index = gfc_evaluate_now (index, &se->pre);
	  index = fold_convert (gfc_array_index_type, index);

	  /* Do any bounds checking on the final info->descriptor index.  */
	  index = trans_array_bound_check (se, ss, index, dim, &ar->where,
					   ar->as->type != AS_ASSUMED_SIZE
					   || dim < ar->dimen - 1);
	  break;

	case DIMEN_RANGE:
	  /* Scalarized dimension.  */
	  gcc_assert (info && se->loop);

	  /* Multiply the loop variable by the stride and delta.  */
	  index = se->loop->loopvar[i];
	  if (!integer_onep (info->stride[dim]))
	    index = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type, index,
				     info->stride[dim]);
	  if (!integer_zerop (info->delta[dim]))
	    index = fold_build2_loc (input_location, PLUS_EXPR,
				     gfc_array_index_type, index,
				     info->delta[dim]);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* Temporary array or derived type component.  */
      gcc_assert (se->loop);
      index = se->loop->loopvar[se->loop->order[i]];

      /* Pointer functions can have stride[0] different from unity.
	 Use the stride returned by the function call and stored in
	 the descriptor for the temporary.  */
      if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
	  && se->ss->info->expr
	  && se->ss->info->expr->symtree
	  && se->ss->info->expr->symtree->n.sym->result
	  && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
	stride = gfc_conv_descriptor_stride_get (info->descriptor,
						 gfc_rank_cst[dim]);

      if (info->delta[dim] && !integer_zerop (info->delta[dim]))
	index = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, index, info->delta[dim]);
    }

  /* Multiply by the stride.  */
  if (!integer_onep (stride))
    index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     index, stride);

  return index;
}


/* Build a scalarized array reference using the vptr 'size'.  */

static bool
build_class_array_ref (gfc_se *se, tree base, tree index)
{
  tree type;
  tree size;
  tree offset;
  tree decl;
  tree tmp;
  gfc_expr *expr = se->ss->info->expr;
  gfc_ref *ref;
  gfc_ref *class_ref;
  gfc_typespec *ts;

  /* Only class expressions (or functions returning allocated class
     arrays) need dynamic-size element access; anything else is handled
     by the ordinary array reference path.  */
  if (expr == NULL
      || (expr->ts.type != BT_CLASS
	  && !gfc_is_alloc_class_array_function (expr)))
    return false;

  if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
    ts = &expr->symtree->n.sym->ts;
  else
    ts = NULL;
  class_ref = NULL;

  /* Look for a class component reference of the form 'x%cls%_data(...)'
     in the reference chain.  */
  for (ref = expr->ref; ref; ref = ref->next)
    {
      if (ref->type == REF_COMPONENT
	  && ref->u.c.component->ts.type == BT_CLASS
	  && ref->next && ref->next->type == REF_COMPONENT
	  && strcmp (ref->next->u.c.component->name, "_data") == 0
	  && ref->next->next
	  && ref->next->next->type == REF_ARRAY
	  && ref->next->next->u.ar.type != AR_ELEMENT)
	{
	  ts = &ref->u.c.component->ts;
	  class_ref = ref;
	  break;
	}
    }

  if (ts == NULL)
    return false;

  if (class_ref == NULL && expr->symtree->n.sym->attr.function
      && expr->symtree->n.sym == expr->symtree->n.sym->result)
    {
      gcc_assert (expr->symtree->n.sym->backend_decl == current_function_decl);
      decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
    }
  else if (gfc_is_alloc_class_array_function (expr))
    {
      /* Walk up the operand chain from BASE looking for a declaration
	 whose (canonical) type is the class container.  */
      size = NULL_TREE;
      decl = NULL_TREE;
      for (tmp = base; tmp; tmp = TREE_OPERAND (tmp, 0))
	{
	  tree type;
	  type = TREE_TYPE (tmp);
	  while (type)
	    {
	      if (GFC_CLASS_TYPE_P (type))
		decl = tmp;
	      if (type != TYPE_CANONICAL (type))
		type = TYPE_CANONICAL (type);
	      else
		type = NULL_TREE;
	    }
	  if (TREE_CODE (tmp) == VAR_DECL)
	    break;
	}

      if (decl == NULL_TREE)
	return false;
    }
  else if (class_ref == NULL)
    decl = expr->symtree->n.sym->backend_decl;
  else
    {
      /* Remove everything after the last class reference, convert the
	 expression and then recover its tailend once more.  */
      gfc_se tmpse;
      ref = class_ref->next;
      class_ref->next = NULL;
      gfc_init_se (&tmpse, NULL);
      gfc_conv_expr (&tmpse, expr);
      decl = tmpse.expr;
      class_ref->next = ref;
    }

  if (POINTER_TYPE_P (TREE_TYPE (decl)))
    decl = build_fold_indirect_ref_loc (input_location, decl);

  if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
    return false;

  size = gfc_class_vtab_size_get (decl);

  /* Build the address of the element.  */
  type = TREE_TYPE (TREE_TYPE (base));
  size = fold_convert (TREE_TYPE (index), size);
  offset = fold_build2_loc (input_location, MULT_EXPR,
			    gfc_array_index_type,
			    index, size);
  tmp = gfc_build_addr_expr (pvoid_type_node, base);
  tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
  tmp = fold_convert (build_pointer_type (type), tmp);

  /* Return the element in the se expression.  */
  se->expr = build_fold_indirect_ref_loc (input_location, tmp);
  return true;
}


/* Build a scalarized reference to an array.  */

static void
gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
{
  gfc_array_info *info;
  tree decl = NULL_TREE;
  tree index;
  tree tmp;
  gfc_ss *ss;
  gfc_expr *expr;
  int n;

  ss = se->ss;
  expr = ss->info->expr;
  info = &ss->info->data.array;
  if (ar)
    n = se->loop->order[0];
  else
    n = 0;

  index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
  /* Add the offset for this dimension to the stored offset for all other
     dimensions.  */
  if (info->offset && !integer_zerop (info->offset))
    index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     index, info->offset);

  if (expr && (is_subref_array (expr)
	       || (expr->ts.deferred && (expr->expr_type == EXPR_VARIABLE
					 || expr->expr_type == EXPR_FUNCTION))))
    decl = expr->symtree->n.sym->backend_decl;

  tmp = build_fold_indirect_ref_loc (input_location, info->data);

  /* Use the vptr 'size' field to access the element of a class array.  */
  if (build_class_array_ref (se, tmp, index))
    return;

  se->expr = gfc_build_array_ref (tmp, index, decl);
}


/* Translate access of temporary array.  */

void
gfc_conv_tmp_array_ref (gfc_se * se)
{
  se->string_length = se->ss->info->string_length;
  gfc_conv_scalarized_array_ref (se, NULL);
  gfc_advance_se_ss_chain (se);
}

/* Add T to the offset pair *OFFSET, *CST_OFFSET.  Constant summands are
   accumulated separately in *CST_OFFSET so they can be folded into a
   single constant.  */

static void
add_to_offset (tree *cst_offset, tree *offset, tree t)
{
  if (TREE_CODE (t) == INTEGER_CST)
    *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
  else
    {
      if (!integer_zerop (*offset))
	*offset = fold_build2_loc (input_location, PLUS_EXPR,
				   gfc_array_index_type, *offset, t);
      else
	*offset = t;
    }
}


/* Dereference the element at OFFSET from the array described by DESC.
   Class array references are special-cased to use the dynamic type
   size.  */

static tree
build_array_ref (tree desc, tree offset, tree decl)
{
  tree tmp;
  tree type;

  /* Class container types do not always have the GFC_CLASS_TYPE_P
     but the canonical type does.  */
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
      && TREE_CODE (desc) == COMPONENT_REF)
    {
      type = TREE_TYPE (TREE_OPERAND (desc, 0));
      if (TYPE_CANONICAL (type)
	  && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
	type = TYPE_CANONICAL (type);
    }
  else
    type = NULL;

  /* Class array references need special treatment because the assigned
     type size needs to be used to point to the element.  */
  if (type && GFC_CLASS_TYPE_P (type))
    {
      type = gfc_get_element_type (TREE_TYPE (desc));
      tmp = TREE_OPERAND (desc, 0);
      tmp = gfc_get_class_array_ref (offset, tmp);
      tmp = fold_convert (build_pointer_type (type), tmp);
      tmp = build_fold_indirect_ref_loc (input_location, tmp);
      return tmp;
    }

  tmp = gfc_conv_array_data (desc);
  tmp = build_fold_indirect_ref_loc (input_location, tmp);
  tmp = gfc_build_array_ref (tmp, offset, decl);
  return tmp;
}


/* Build an array reference.  se->expr already holds the array descriptor.
   This should be either a variable, indirect variable reference or component
   reference.  For arrays which do not have a descriptor, se->expr will be
   the data pointer.
   a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/

void
gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
		    locus * where)
{
  int n;
  tree offset, cst_offset;
  tree tmp;
  tree stride;
  gfc_se indexse;
  gfc_se tmpse;
  gfc_symbol * sym = expr->symtree->n.sym;
  char *var_name = NULL;

  /* A scalar coarray reference: no dimensions to index, just strip the
     descriptor / coarray wrapping and return the data.  */
  if (ar->dimen == 0)
    {
      gcc_assert (ar->codimen);

      if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
	se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
      else
	{
	  if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
	      && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
	    se->expr = build_fold_indirect_ref_loc (input_location, se->expr);

	  /* Use the actual tree type and not the wrapped coarray.  */
	  if (!se->want_pointer)
	    se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
				     se->expr);
	}

      return;
    }

  /* Handle scalarized references separately.  */
  if (ar->type != AR_ELEMENT)
    {
      gfc_conv_scalarized_array_ref (se, ar);
      gfc_advance_se_ss_chain (se);
      return;
    }

  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      size_t len;
      gfc_ref *ref;

      /* Build the variable name used in bound-check messages: the base
	 symbol name followed by a "%component" chain up to (but not
	 including) this array reference.  First pass computes the length.  */
      len = strlen (sym->name) + 1;
      for (ref = expr->ref; ref; ref = ref->next)
	{
	  if (ref->type == REF_ARRAY && &ref->u.ar == ar)
	    break;
	  if (ref->type == REF_COMPONENT)
	    len += 1 + strlen (ref->u.c.component->name);
	}

      var_name = XALLOCAVEC (char, len);
      strcpy (var_name, sym->name);

      /* Second pass fills in the name.  "%%" because the string is later
	 used as a printf-style format by xasprintf.  */
      for (ref = expr->ref; ref; ref = ref->next)
	{
	  if (ref->type == REF_ARRAY && &ref->u.ar == ar)
	    break;
	  if (ref->type == REF_COMPONENT)
	    {
	      strcat (var_name, "%%");
	      strcat (var_name, ref->u.c.component->name);
	    }
	}
    }

  cst_offset = offset = gfc_index_zero_node;
  add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));

  /* Calculate the offsets from all the dimensions.  Make sure to associate
     the final offset so that we form a chain of loop invariant summands.  */
  for (n = ar->dimen - 1; n >= 0; n--)
    {
      /* Calculate the index for this dimension.  */
      gfc_init_se (&indexse, se);
      gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
      gfc_add_block_to_block (&se->pre, &indexse.pre);

      if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
	{
	  /* Check array bounds.  */
	  tree cond;
	  char *msg;

	  /* Evaluate the indexse.expr only once.  */
	  indexse.expr = save_expr (indexse.expr);

	  /* Lower bound.  */
	  tmp = gfc_conv_array_lbound (se->expr, n);
	  if (sym->attr.temporary)
	    {
	      /* For compiler temporaries the declared bounds are the
		 authoritative ones; re-evaluate them here.  */
	      gfc_init_se (&tmpse, se);
	      gfc_conv_expr_type (&tmpse, ar->as->lower[n],
				  gfc_array_index_type);
	      gfc_add_block_to_block (&se->pre, &tmpse.pre);
	      tmp = tmpse.expr;
	    }

	  cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
				  indexse.expr, tmp);
	  msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			   "below lower bound of %%ld", n+1, var_name);
	  gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
				   fold_convert (long_integer_type_node,
						 indexse.expr),
				   fold_convert (long_integer_type_node, tmp));
	  free (msg);

	  /* Upper bound, but not for the last dimension of assumed-size
	     arrays.  */
	  if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
	    {
	      tmp = gfc_conv_array_ubound (se->expr, n);
	      if (sym->attr.temporary)
		{
		  gfc_init_se (&tmpse, se);
		  gfc_conv_expr_type (&tmpse, ar->as->upper[n],
				      gfc_array_index_type);
		  gfc_add_block_to_block (&se->pre, &tmpse.pre);
		  tmp = tmpse.expr;
		}

	      cond = fold_build2_loc (input_location, GT_EXPR,
				      boolean_type_node, indexse.expr, tmp);
	      msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			       "above upper bound of %%ld", n+1, var_name);
	      gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
				       fold_convert (long_integer_type_node,
						     indexse.expr),
				       fold_convert (long_integer_type_node,
						     tmp));
	      free (msg);
	    }
	}

      /* Multiply the index by the stride.  */
      stride = gfc_conv_array_stride (se->expr, n);
      tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     indexse.expr, stride);

      /* And add it to the total.  */
      add_to_offset (&cst_offset, &offset, tmp);
    }

  /* Fold the accumulated compile-time-constant part into the offset last,
     so the variable summands stay grouped (loop invariant for the backend).  */
  if (!integer_zerop (cst_offset))
    offset = fold_build2_loc (input_location, PLUS_EXPR,
			      gfc_array_index_type, offset, cst_offset);

  se->expr = build_array_ref (se->expr, offset, sym->backend_decl);
}


/* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
   LOOP_DIM dimension (if any) to array's offset.
   PBLOCK receives the evaluation code; the updated offset is stored back
   into the SS's array info and forced into a temporary.  */

static void
add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
		  gfc_array_ref *ar, int array_dim, int loop_dim)
{
  gfc_se se;
  gfc_array_info *info;
  tree stride, index;

  info = &ss->info->data.array;

  gfc_init_se (&se, NULL);
  se.loop = loop;
  se.expr = info->descriptor;
  stride = gfc_conv_array_stride (info->descriptor, array_dim);
  index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
  gfc_add_block_to_block (pblock, &se.pre);

  info->offset = fold_build2_loc (input_location, PLUS_EXPR,
				  gfc_array_index_type,
				  info->offset, index);
  info->offset = gfc_evaluate_now (info->offset, pblock);
}


/* Generate the code to be executed immediately before entering a
   scalarization loop.  DIM is the loop dimension about to be entered,
   FLAG is matched against each SS's useflags to select the terms that
   participate, and the setup code is appended to PBLOCK.  */

static void
gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
			 stmtblock_t * pblock)
{
  tree stride;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  gfc_ss_type ss_type;
  gfc_ss *ss, *pss;
  gfc_loopinfo *ploop;
  gfc_array_ref *ar;
  int i;

  /* This code will be executed before entering the scalarization loop
     for this dimension.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      ss_info = ss->info;

      if ((ss_info->useflags & flag) == 0)
	continue;

      ss_type = ss_info->type;
      if (ss_type != GFC_SS_SECTION
	  && ss_type != GFC_SS_FUNCTION
	  && ss_type != GFC_SS_CONSTRUCTOR
	  && ss_type != GFC_SS_COMPONENT)
	continue;

      info = &ss_info->data.array;

      gcc_assert (dim < ss->dimen);
      gcc_assert (ss->dimen == loop->dimen);

      if (info->ref)
	ar = &info->ref->u.ar;
      else
	ar = NULL;

      if (dim == loop->dimen - 1 && loop->parent != NULL)
	{
	  /* If we are in the outermost dimension of this loop, the previous
	     dimension shall be in the parent loop.  */
	  gcc_assert (ss->parent != NULL);

	  pss = ss->parent;
	  ploop = loop->parent;

	  /* ss and ss->parent are about the same array.  */
	  gcc_assert (ss_info == pss->info);
	}
      else
	{
	  ploop = loop;
	  pss = ss;
	}

      /* I is the previously-entered (next outer) dimension; for the
	 outermost dimension it wraps to 0 in the parent loop.  */
      if (dim == loop->dimen - 1)
	i = 0;
      else
	i = dim + 1;

      /* For the time being, there is no loop reordering.  */
      gcc_assert (i == ploop->order[i]);
      i = ploop->order[i];

      if (dim == loop->dimen - 1 && loop->parent == NULL)
	{
	  stride = gfc_conv_array_stride (info->descriptor,
					  innermost_ss (ss)->dim[i]);

	  /* Calculate the stride of the innermost loop.  Hopefully this will
	     allow the backend optimizers to do their stuff more effectively.
	   */
	  info->stride0 = gfc_evaluate_now (stride, pblock);

	  /* For the outermost loop calculate the offset due to any
	     elemental dimensions.  It will have been initialized with the
	     base offset of the array.  */
	  if (info->ref)
	    {
	      for (i = 0; i < ar->dimen; i++)
		{
		  if (ar->dimen_type[i] != DIMEN_ELEMENT)
		    continue;

		  add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
		}
	    }
	}
      else
	/* Add the offset for the previous loop dimension.  */
	add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);

      /* Remember this offset for the second loop.  */
      if (dim == loop->temp_dim - 1 && loop->parent == NULL)
	info->saved_offset = info->offset;
    }
}


/* Start a scalarized expression.  Creates a scope and declares loop
   variables.  The loop body block is started in PBODY.  */

void
gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
{
  int dim;
  int n;
  int flags;

  gcc_assert (!loop->array_parameter);

  /* Walk dimensions from outermost to innermost, opening one block and
     one loop variable per dimension.  */
  for (dim = loop->dimen - 1; dim >= 0; dim--)
    {
      n = loop->order[dim];

      gfc_start_block (&loop->code[n]);

      /* Create the loop variable.  */
      loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");

      /* Flag 1 selects the main-loop terms; dimensions covered by the
	 temporary also need the copy-loop terms (flag 2), hence 3.  */
      if (dim < loop->temp_dim)
	flags = 3;
      else
	flags = 1;
      /* Calculate values that will be constant within this loop.  */
      gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
    }
  gfc_start_block (pbody);
}


/* Generates the actual loop code for a scalarization loop.  N is the
   (order-mapped) dimension to close; PBODY holds the already-generated
   loop body.  */

void
gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
			       stmtblock_t * pbody)
{
  stmtblock_t block;
  tree cond;
  tree tmp;
  tree loopbody;
  tree exit_label;
  tree stmt;
  tree init;
  tree incr;

  if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS
		      | OMPWS_SCALARIZER_BODY))
      == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
      && n == loop->dimen - 1)
    {
      /* We create an OMP_FOR construct for the outermost scalarized loop.  */
      init = make_tree_vec (1);
      cond = make_tree_vec (1);
      incr = make_tree_vec (1);

      /* Cycle statement is implemented with a goto.  Exit statement must not
	 be present for this loop.  */
      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* Label for cycle statements (if needed).  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (pbody, tmp);

      stmt = make_node (OMP_FOR);

      TREE_TYPE (stmt) = void_type_node;
      OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);

      OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
						 OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
	= OMP_CLAUSE_SCHEDULE_STATIC;
      if (ompws_flags & OMPWS_NOWAIT)
	OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
	  = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);

      /* Initialize the loopvar.  */
      TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
					 loop->from[n]);
      OMP_FOR_INIT (stmt) = init;
      /* The exit condition.  */
      TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
					   boolean_type_node,
					   loop->loopvar[n], loop->to[n]);
      SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
      OMP_FOR_COND (stmt) = cond;
      /* Increment the loopvar.  */
      tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			loop->loopvar[n], gfc_index_one_node);
      TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
	  void_type_node, loop->loopvar[n], tmp);
      OMP_FOR_INCR (stmt) = incr;

      ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
      gfc_add_expr_to_block (&loop->code[n], stmt);
    }
  else
    {
      /* Plain LOOP_EXPR with explicit exit test; may run backwards when the
	 dependency analysis requested a reversed loop and no temporary is
	 involved.  */
      bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
	&& (loop->temp_ss == NULL);

      loopbody = gfc_finish_block (pbody);

      if (reverse_loop)
	{
	  tmp = loop->from[n];
	  loop->from[n] = loop->to[n];
	  loop->to[n] = tmp;
	}

      /* Initialize the loopvar.  */
      if (loop->loopvar[n] != loop->from[n])
	gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);

      exit_label = gfc_build_label_decl (NULL_TREE);

      /* Generate the loop body.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
			  boolean_type_node, loop->loopvar[n], loop->to[n]);
      tmp = build1_v (GOTO_EXPR, exit_label);
      TREE_USED (exit_label) = 1;
      tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main body.  */
      gfc_add_expr_to_block (&block, loopbody);

      /* Increment the loopvar.  */
      tmp = fold_build2_loc (input_location,
			     reverse_loop ? MINUS_EXPR : PLUS_EXPR,
			     gfc_array_index_type, loop->loopvar[n],
			     gfc_index_one_node);

      gfc_add_modify (&block, loop->loopvar[n], tmp);

      /* Build the loop.  */
      tmp = gfc_finish_block (&block);
      tmp = build1_v (LOOP_EXPR, tmp);
      gfc_add_expr_to_block (&loop->code[n], tmp);

      /* Add the exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&loop->code[n], tmp);
    }

}


/* Finishes and generates the loops for a scalarized expression.  The
   generated loops end up in LOOP->pre; BODY is the innermost body.  */

void
gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
{
  int dim;
  int n;
  gfc_ss *ss;
  stmtblock_t *pblock;
  tree tmp;

  pblock = body;
  /* Generate the loops, innermost first.  */
  for (dim = 0; dim < loop->dimen; dim++)
    {
      n = loop->order[dim];
      gfc_trans_scalarized_loop_end (loop, n, pblock);
      loop->loopvar[n] = NULL_TREE;
      pblock = &loop->code[n];
    }

  tmp = gfc_finish_block (pblock);
  gfc_add_expr_to_block (&loop->pre, tmp);

  /* Clear all the used flags.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    if (ss->parent == NULL)
      ss->info->useflags = 0;
}


/* Finish the main body of a scalarized expression, and start the secondary
   copying body.  Only the loops covered by the temporary are closed; the
   outermost of them is re-entered for the copy.  */

void
gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
{
  int dim;
  int n;
  stmtblock_t *pblock;
  gfc_ss *ss;

  pblock = body;
  /* We finish as many loops as are used by the temporary.  */
  for (dim = 0; dim < loop->temp_dim - 1; dim++)
    {
      n = loop->order[dim];
      gfc_trans_scalarized_loop_end (loop, n, pblock);
      loop->loopvar[n] = NULL_TREE;
      pblock = &loop->code[n];
    }

  /* We don't want to finish the outermost loop entirely.  */
  n = loop->order[loop->temp_dim - 1];
  gfc_trans_scalarized_loop_end (loop, n, pblock);

  /* Restore the initial offsets.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gfc_ss_type ss_type;
      gfc_ss_info *ss_info;

      ss_info = ss->info;

      /* Flag 2 marks terms used in the secondary (copy) loop.  */
      if ((ss_info->useflags & 2) == 0)
	continue;

      ss_type = ss_info->type;
      if (ss_type != GFC_SS_SECTION
	  && ss_type != GFC_SS_FUNCTION
	  && ss_type != GFC_SS_CONSTRUCTOR
	  && ss_type != GFC_SS_COMPONENT)
	continue;

      ss_info->data.array.offset = ss_info->data.array.saved_offset;
    }

  /* Restart all the inner loops we just finished.  */
  for (dim = loop->temp_dim - 2; dim >= 0; dim--)
    {
      n = loop->order[dim];

      gfc_start_block (&loop->code[n]);

      loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");

      gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
    }

  /* Start a block for the secondary copying code.  */
  gfc_start_block (body);
}


/* Precalculate (either lower or upper) bound of an array section.
     BLOCK: Block in which the (pre)calculation code will go.
     BOUNDS[DIM]: Where the bound value will be stored once evaluated.
     VALUES[DIM]: Specified bound (NULL <=> unspecified).
     DESC: Array descriptor from which the bound will be picked if unspecified
       (either lower or upper bound according to LBOUND).  */

static void
evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
		tree desc, int dim, bool lbound)
{
  gfc_se se;
  gfc_expr * input_val = values[dim];
  tree *output = &bounds[dim];


  if (input_val)
    {
      /* Specified section bound.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
      gfc_add_block_to_block (block, &se.pre);
      *output = se.expr;
    }
  else
    {
      /* No specific bound specified so use the bound of the array.  */
      *output = lbound ? gfc_conv_array_lbound (desc, dim) :
			 gfc_conv_array_ubound (desc, dim);
    }
  /* Force evaluation into a temporary so the bound is computed once.  */
  *output = gfc_evaluate_now (*output, block);
}


/* Calculate the lower bound of an array section.  Fills in the start, end
   and stride of dimension DIM of SS's array info.  */

static void
gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
{
  gfc_expr *stride = NULL;
  tree desc;
  gfc_se se;
  gfc_array_info *info;
  gfc_array_ref *ar;

  gcc_assert (ss->info->type == GFC_SS_SECTION);

  info = &ss->info->data.array;
  ar = &info->ref->u.ar;

  if (ar->dimen_type[dim] == DIMEN_VECTOR)
    {
      /* We use a zero-based index to access the vector.  */
      info->start[dim] = gfc_index_zero_node;
      info->end[dim] = NULL;
      info->stride[dim] = gfc_index_one_node;
      return;
    }

  gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
	      || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
  desc = info->descriptor;
  stride = ar->stride[dim];

  /* Calculate the start of the range.  For vector subscripts this will
     be the range of the vector.  */
  evaluate_bound (block, info->start, ar->start, desc, dim, true);

  /* Similarly calculate the end.  Although this is not used in the
     scalarizer, it is needed when checking bounds and where the end
     is an expression with side-effects.  */
  evaluate_bound (block, info->end, ar->end, desc, dim, false);

  /* Calculate the stride.  */
  if (stride == NULL)
    info->stride[dim] = gfc_index_one_node;
  else
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr_type (&se, stride, gfc_array_index_type);
      gfc_add_block_to_block (block, &se.pre);
      info->stride[dim] = gfc_evaluate_now (se.expr, block);
    }
}


/* Calculates the range start and stride for a SS chain.  Also gets the
   descriptor and data pointer.  The range of vector subscripts is the size
   of the vector.  Array bounds are also checked.
*/

void
gfc_conv_ss_startstride (gfc_loopinfo * loop)
{
  int n;
  tree tmp;
  gfc_ss *ss;
  tree desc;

  gfc_loopinfo * const outer_loop = outermost_loop (loop);

  loop->dimen = 0;
  /* Determine the rank of the loop.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      switch (ss->info->type)
	{
	case GFC_SS_SECTION:
	case GFC_SS_CONSTRUCTOR:
	case GFC_SS_FUNCTION:
	case GFC_SS_COMPONENT:
	  loop->dimen = ss->dimen;
	  goto done;

	/* As usual, lbound and ubound are exceptions!.  */
	case GFC_SS_INTRINSIC:
	  switch (ss->info->expr->value.function.isym->id)
	    {
	    case GFC_ISYM_LBOUND:
	    case GFC_ISYM_UBOUND:
	    case GFC_ISYM_LCOBOUND:
	    case GFC_ISYM_UCOBOUND:
	    case GFC_ISYM_THIS_IMAGE:
	      loop->dimen = ss->dimen;
	      goto done;

	    default:
	      break;
	    }

	default:
	  break;
	}
    }

  /* We should have determined the rank of the expression by now.  If
     not, that's bad news.  */
  gcc_unreachable ();

done:
  /* Loop over all the SS in the chain.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gfc_ss_info *ss_info;
      gfc_array_info *info;
      gfc_expr *expr;

      ss_info = ss->info;
      expr = ss_info->expr;
      info = &ss_info->data.array;

      if (expr && expr->shape && !info->shape)
	info->shape = expr->shape;

      switch (ss_info->type)
	{
	case GFC_SS_SECTION:
	  /* Get the descriptor for the array.  If it is a cross loops array,
	     we got the descriptor already in the outermost loop.  */
	  if (ss->parent == NULL)
	    gfc_conv_ss_descriptor (&outer_loop->pre, ss,
				    !loop->array_parameter);

	  for (n = 0; n < ss->dimen; n++)
	    gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
	  break;

	case GFC_SS_INTRINSIC:
	  switch (expr->value.function.isym->id)
	    {
	    /* Fall through to supply start and stride.  */
	    case GFC_ISYM_LBOUND:
	    case GFC_ISYM_UBOUND:
	      {
		gfc_expr *arg;

		/* This is the variant without DIM=...  */
		gcc_assert (expr->value.function.actual->next->expr == NULL);

		arg = expr->value.function.actual->expr;
		if (arg->rank == -1)
		  {
		    gfc_se se;
		    tree rank, tmp;

		    /* The rank (hence the return value's shape) is unknown,
		       we have to retrieve it.  */
		    gfc_init_se (&se, NULL);
		    se.descriptor_only = 1;
		    gfc_conv_expr (&se, arg);
		    /* This is a bare variable, so there is no preliminary
		       or cleanup code.  */
		    gcc_assert (se.pre.head == NULL_TREE
				&& se.post.head == NULL_TREE);
		    rank = gfc_conv_descriptor_rank (se.expr);
		    tmp = fold_build2_loc (input_location, MINUS_EXPR,
					   gfc_array_index_type,
					   fold_convert (gfc_array_index_type,
							 rank),
					   gfc_index_one_node);
		    info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
		    info->start[0] = gfc_index_zero_node;
		    info->stride[0] = gfc_index_one_node;
		    continue;
		  }
		  /* Otherwise fall through GFC_SS_FUNCTION.  */
	      }
	    case GFC_ISYM_LCOBOUND:
	    case GFC_ISYM_UCOBOUND:
	    case GFC_ISYM_THIS_IMAGE:
	      /* Fall through to the GFC_SS_FUNCTION handling below.  */
	      break;

	    default:
	      continue;
	    }

	case GFC_SS_CONSTRUCTOR:
	case GFC_SS_FUNCTION:
	  /* These have no underlying descriptor; the loop runs over a
	     zero-based range of the term's own making.  */
	  for (n = 0; n < ss->dimen; n++)
	    {
	      int dim = ss->dim[n];

	      info->start[dim] = gfc_index_zero_node;
	      info->end[dim] = gfc_index_zero_node;
	      info->stride[dim] = gfc_index_one_node;
	    }
	  break;

	default:
	  break;
	}
    }

  /* The rest is just runtime bound checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      stmtblock_t block;
      tree lbound, ubound;
      tree end;
      tree size[GFC_MAX_DIMENSIONS];
      tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
      gfc_array_info *info;
      char *msg;
      int dim;

      gfc_start_block (&block);

      for (n = 0; n < loop->dimen; n++)
	size[n] = NULL_TREE;

      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
	{
	  stmtblock_t inner;
	  gfc_ss_info *ss_info;
	  gfc_expr *expr;
	  locus *expr_loc;
	  const char *expr_name;

	  ss_info = ss->info;
	  if (ss_info->type != GFC_SS_SECTION)
	    continue;

	  /* Catch allocatable lhs in f2003.  */
	  if (flag_realloc_lhs && ss->is_alloc_lhs)
	    continue;

	  expr = ss_info->expr;
	  expr_loc = &expr->where;
	  expr_name = expr->symtree->name;

	  gfc_start_block (&inner);

	  /* TODO: range checking for mapped dimensions.  */
	  info = &ss_info->data.array;

	  /* This code only checks ranges.  Elemental and vector
	     dimensions are checked later.  */
	  for (n = 0; n < loop->dimen; n++)
	    {
	      bool check_upper;

	      dim = ss->dim[n];
	      if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
		continue;

	      /* The last dimension of an assumed-size array has no upper
		 bound to check against.  */
	      if (dim == info->ref->u.ar.dimen - 1
		  && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
		check_upper = false;
	      else
		check_upper = true;

	      /* Zero stride is not allowed.  */
	      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
				     info->stride[dim], gfc_index_zero_node);
	      msg = xasprintf ("Zero stride is not allowed, for dimension %d "
			       "of array '%s'", dim + 1, expr_name);
	      gfc_trans_runtime_check (true, false, tmp, &inner,
				       expr_loc, msg);
	      free (msg);

	      desc = info->descriptor;

	      /* This is the run-time equivalent of resolve.c's
		 check_dimension().  The logical is more readable there
		 than it is here, with all the trees.  */
	      lbound = gfc_conv_array_lbound (desc, dim);
	      end = info->end[dim];
	      if (check_upper)
		ubound = gfc_conv_array_ubound (desc, dim);
	      else
		ubound = NULL;

	      /* non_zerosized is true when the selected range is not
		 empty.  */
	      stride_pos = fold_build2_loc (input_location, GT_EXPR,
					boolean_type_node, info->stride[dim],
					gfc_index_zero_node);
	      tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
				     info->start[dim], end);
	      stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					    boolean_type_node, stride_pos, tmp);

	      stride_neg = fold_build2_loc (input_location, LT_EXPR,
				     boolean_type_node,
				     info->stride[dim], gfc_index_zero_node);
	      tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
				     info->start[dim], end);
	      stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					    boolean_type_node,
					    stride_neg, tmp);
	      non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
					       boolean_type_node,
					       stride_pos, stride_neg);

	      /* Check the start of the range against the lower and upper
		 bounds of the array, if the range is not empty.
	         If upper bound is present, include both bounds in the
		 error message.  */
	      if (check_upper)
		{
		  tmp = fold_build2_loc (input_location, LT_EXPR,
					 boolean_type_node,
					 info->start[dim], lbound);
		  tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					 boolean_type_node,
					 non_zerosized, tmp);
		  tmp2 = fold_build2_loc (input_location, GT_EXPR,
					  boolean_type_node,
					  info->start[dim], ubound);
		  tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					  boolean_type_node,
					  non_zerosized, tmp2);
		  msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
				   "outside of expected range (%%ld:%%ld)",
				   dim + 1, expr_name);
		  gfc_trans_runtime_check (true, false, tmp, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, info->start[dim]),
		     fold_convert (long_integer_type_node, lbound),
		     fold_convert (long_integer_type_node, ubound));
		  gfc_trans_runtime_check (true, false, tmp2, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, info->start[dim]),
		     fold_convert (long_integer_type_node, lbound),
		     fold_convert (long_integer_type_node, ubound));
		  free (msg);
		}
	      else
		{
		  tmp = fold_build2_loc (input_location, LT_EXPR,
					 boolean_type_node,
					 info->start[dim], lbound);
		  tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					 boolean_type_node, non_zerosized, tmp);
		  msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
				   "below lower bound of %%ld",
				   dim + 1, expr_name);
		  gfc_trans_runtime_check (true, false, tmp, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, info->start[dim]),
		     fold_convert (long_integer_type_node, lbound));
		  free (msg);
		}

	      /* Compute the last element of the range, which is not
		 necessarily "end" (think 0:5:3, which doesn't contain 5)
		 and check it against both lower and upper bounds.  */

	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type, end,
				     info->start[dim]);
	      tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
				     gfc_array_index_type, tmp,
				     info->stride[dim]);
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type, end, tmp);
	      tmp2 = fold_build2_loc (input_location, LT_EXPR,
				      boolean_type_node, tmp, lbound);
	      tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
				      boolean_type_node, non_zerosized, tmp2);
	      if (check_upper)
		{
		  tmp3 = fold_build2_loc (input_location, GT_EXPR,
					  boolean_type_node, tmp, ubound);
		  tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
					  boolean_type_node, non_zerosized, tmp3);
		  msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
				   "outside of expected range (%%ld:%%ld)",
				   dim + 1, expr_name);
		  gfc_trans_runtime_check (true, false, tmp2, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, tmp),
		     fold_convert (long_integer_type_node, ubound),
		     fold_convert (long_integer_type_node, lbound));
		  gfc_trans_runtime_check (true, false, tmp3, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, tmp),
		     fold_convert (long_integer_type_node, ubound),
		     fold_convert (long_integer_type_node, lbound));
		  free (msg);
		}
	      else
		{
		  msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
				   "below lower bound of %%ld",
				   dim + 1, expr_name);
		  gfc_trans_runtime_check (true, false, tmp2, &inner,
					   expr_loc, msg,
		     fold_convert (long_integer_type_node, tmp),
		     fold_convert (long_integer_type_node, lbound));
		  free (msg);
		}

	      /* Check the section sizes match.  */
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type, end,
				     info->start[dim]);
	      tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
				     gfc_array_index_type, tmp,
				     info->stride[dim]);
	      tmp = fold_build2_loc (input_location, PLUS_EXPR,
				     gfc_array_index_type,
				     gfc_index_one_node, tmp);
	      tmp = fold_build2_loc (input_location, MAX_EXPR,
				     gfc_array_index_type, tmp,
				     build_int_cst (gfc_array_index_type, 0));
	      /* We remember the size of the first section, and check all the
		 others against this.  */
	      if (size[n])
		{
		  tmp3 = fold_build2_loc (input_location, NE_EXPR,
					  boolean_type_node, tmp, size[n]);
		  msg = xasprintf ("Array bound mismatch for dimension %d "
				   "of array '%s' (%%ld/%%ld)",
				   dim + 1, expr_name);

		  gfc_trans_runtime_check (true, false, tmp3, &inner,
					   expr_loc, msg,
			fold_convert (long_integer_type_node, tmp),
			fold_convert (long_integer_type_node, size[n]));

		  free (msg);
		}
	      else
		size[n] = gfc_evaluate_now (tmp, &inner);
	    }

	  tmp = gfc_finish_block (&inner);

	  /* For optional arguments, only check bounds if the argument is
	     present.  */
	  if (expr->symtree->n.sym->attr.optional
	      || expr->symtree->n.sym->attr.not_always_present)
	    tmp = build3_v (COND_EXPR,
			    gfc_conv_expr_present (expr->symtree->n.sym),
			    tmp, build_empty_stmt (input_location));

	  gfc_add_expr_to_block (&block, tmp);

	}

      tmp = gfc_finish_block (&block);
      gfc_add_expr_to_block (&outer_loop->pre, tmp);
    }

  /* Recurse into any nested loops (e.g. from nested FORALL/array refs).  */
  for (loop = loop->nested; loop; loop = loop->next)
    gfc_conv_ss_startstride (loop);
}

/* Return true if both symbols could refer to the same data object.  Does
   not take account of aliasing due to equivalence statements.  */
*/ 4228 4229static int 4230symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer, 4231 bool lsym_target, bool rsym_pointer, bool rsym_target) 4232{ 4233 /* Aliasing isn't possible if the symbols have different base types. */ 4234 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0) 4235 return 0; 4236 4237 /* Pointers can point to other pointers and target objects. */ 4238 4239 if ((lsym_pointer && (rsym_pointer || rsym_target)) 4240 || (rsym_pointer && (lsym_pointer || lsym_target))) 4241 return 1; 4242 4243 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7 4244 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already 4245 checked above. */ 4246 if (lsym_target && rsym_target 4247 && ((lsym->attr.dummy && !lsym->attr.contiguous 4248 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE)) 4249 || (rsym->attr.dummy && !rsym->attr.contiguous 4250 && (!rsym->attr.dimension 4251 || rsym->as->type == AS_ASSUMED_SHAPE)))) 4252 return 1; 4253 4254 return 0; 4255} 4256 4257 4258/* Return true if the two SS could be aliased, i.e. both point to the same data 4259 object. */ 4260/* TODO: resolve aliases based on frontend expressions. 
*/ 4261 4262static int 4263gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss) 4264{ 4265 gfc_ref *lref; 4266 gfc_ref *rref; 4267 gfc_expr *lexpr, *rexpr; 4268 gfc_symbol *lsym; 4269 gfc_symbol *rsym; 4270 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target; 4271 4272 lexpr = lss->info->expr; 4273 rexpr = rss->info->expr; 4274 4275 lsym = lexpr->symtree->n.sym; 4276 rsym = rexpr->symtree->n.sym; 4277 4278 lsym_pointer = lsym->attr.pointer; 4279 lsym_target = lsym->attr.target; 4280 rsym_pointer = rsym->attr.pointer; 4281 rsym_target = rsym->attr.target; 4282 4283 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target, 4284 rsym_pointer, rsym_target)) 4285 return 1; 4286 4287 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS 4288 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS) 4289 return 0; 4290 4291 /* For derived types we must check all the component types. We can ignore 4292 array references as these will have the same base type as the previous 4293 component ref. 
*/ 4294 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next) 4295 { 4296 if (lref->type != REF_COMPONENT) 4297 continue; 4298 4299 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer; 4300 lsym_target = lsym_target || lref->u.c.sym->attr.target; 4301 4302 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target, 4303 rsym_pointer, rsym_target)) 4304 return 1; 4305 4306 if ((lsym_pointer && (rsym_pointer || rsym_target)) 4307 || (rsym_pointer && (lsym_pointer || lsym_target))) 4308 { 4309 if (gfc_compare_types (&lref->u.c.component->ts, 4310 &rsym->ts)) 4311 return 1; 4312 } 4313 4314 for (rref = rexpr->ref; rref != rss->info->data.array.ref; 4315 rref = rref->next) 4316 { 4317 if (rref->type != REF_COMPONENT) 4318 continue; 4319 4320 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer; 4321 rsym_target = lsym_target || rref->u.c.sym->attr.target; 4322 4323 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym, 4324 lsym_pointer, lsym_target, 4325 rsym_pointer, rsym_target)) 4326 return 1; 4327 4328 if ((lsym_pointer && (rsym_pointer || rsym_target)) 4329 || (rsym_pointer && (lsym_pointer || lsym_target))) 4330 { 4331 if (gfc_compare_types (&lref->u.c.component->ts, 4332 &rref->u.c.sym->ts)) 4333 return 1; 4334 if (gfc_compare_types (&lref->u.c.sym->ts, 4335 &rref->u.c.component->ts)) 4336 return 1; 4337 if (gfc_compare_types (&lref->u.c.component->ts, 4338 &rref->u.c.component->ts)) 4339 return 1; 4340 } 4341 } 4342 } 4343 4344 lsym_pointer = lsym->attr.pointer; 4345 lsym_target = lsym->attr.target; 4346 lsym_pointer = lsym->attr.pointer; 4347 lsym_target = lsym->attr.target; 4348 4349 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next) 4350 { 4351 if (rref->type != REF_COMPONENT) 4352 break; 4353 4354 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer; 4355 rsym_target = lsym_target || rref->u.c.sym->attr.target; 4356 4357 if (symbols_could_alias (rref->u.c.sym, lsym, 4358 
lsym_pointer, lsym_target, 4359 rsym_pointer, rsym_target)) 4360 return 1; 4361 4362 if ((lsym_pointer && (rsym_pointer || rsym_target)) 4363 || (rsym_pointer && (lsym_pointer || lsym_target))) 4364 { 4365 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts)) 4366 return 1; 4367 } 4368 } 4369 4370 return 0; 4371} 4372 4373 4374/* Resolve array data dependencies. Creates a temporary if required. */ 4375/* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to 4376 dependency.c. */ 4377 4378void 4379gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest, 4380 gfc_ss * rss) 4381{ 4382 gfc_ss *ss; 4383 gfc_ref *lref; 4384 gfc_ref *rref; 4385 gfc_expr *dest_expr; 4386 gfc_expr *ss_expr; 4387 int nDepend = 0; 4388 int i, j; 4389 4390 loop->temp_ss = NULL; 4391 dest_expr = dest->info->expr; 4392 4393 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next) 4394 { 4395 ss_expr = ss->info->expr; 4396 4397 if (ss->info->array_outer_dependency) 4398 { 4399 nDepend = 1; 4400 break; 4401 } 4402 4403 if (ss->info->type != GFC_SS_SECTION) 4404 { 4405 if (flag_realloc_lhs 4406 && dest_expr != ss_expr 4407 && gfc_is_reallocatable_lhs (dest_expr) 4408 && ss_expr->rank) 4409 nDepend = gfc_check_dependency (dest_expr, ss_expr, true); 4410 4411 /* Check for cases like c(:)(1:2) = c(2)(2:3) */ 4412 if (!nDepend && dest_expr->rank > 0 4413 && dest_expr->ts.type == BT_CHARACTER 4414 && ss_expr->expr_type == EXPR_VARIABLE) 4415 4416 nDepend = gfc_check_dependency (dest_expr, ss_expr, false); 4417 4418 continue; 4419 } 4420 4421 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym) 4422 { 4423 if (gfc_could_be_alias (dest, ss) 4424 || gfc_are_equivalenced_arrays (dest_expr, ss_expr)) 4425 { 4426 nDepend = 1; 4427 break; 4428 } 4429 } 4430 else 4431 { 4432 lref = dest_expr->ref; 4433 rref = ss_expr->ref; 4434 4435 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]); 4436 4437 if (nDepend == 1) 4438 break; 4439 4440 for (i = 0; i < dest->dimen; i++) 
4441 for (j = 0; j < ss->dimen; j++) 4442 if (i != j 4443 && dest->dim[i] == ss->dim[j]) 4444 { 4445 /* If we don't access array elements in the same order, 4446 there is a dependency. */ 4447 nDepend = 1; 4448 goto temporary; 4449 } 4450#if 0 4451 /* TODO : loop shifting. */ 4452 if (nDepend == 1) 4453 { 4454 /* Mark the dimensions for LOOP SHIFTING */ 4455 for (n = 0; n < loop->dimen; n++) 4456 { 4457 int dim = dest->data.info.dim[n]; 4458 4459 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR) 4460 depends[n] = 2; 4461 else if (! gfc_is_same_range (&lref->u.ar, 4462 &rref->u.ar, dim, 0)) 4463 depends[n] = 1; 4464 } 4465 4466 /* Put all the dimensions with dependencies in the 4467 innermost loops. */ 4468 dim = 0; 4469 for (n = 0; n < loop->dimen; n++) 4470 { 4471 gcc_assert (loop->order[n] == n); 4472 if (depends[n]) 4473 loop->order[dim++] = n; 4474 } 4475 for (n = 0; n < loop->dimen; n++) 4476 { 4477 if (! depends[n]) 4478 loop->order[dim++] = n; 4479 } 4480 4481 gcc_assert (dim == loop->dimen); 4482 break; 4483 } 4484#endif 4485 } 4486 } 4487 4488temporary: 4489 4490 if (nDepend == 1) 4491 { 4492 tree base_type = gfc_typenode_for_spec (&dest_expr->ts); 4493 if (GFC_ARRAY_TYPE_P (base_type) 4494 || GFC_DESCRIPTOR_TYPE_P (base_type)) 4495 base_type = gfc_get_element_type (base_type); 4496 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length, 4497 loop->dimen); 4498 gfc_add_ss_to_loop (loop, loop->temp_ss); 4499 } 4500 else 4501 loop->temp_ss = NULL; 4502} 4503 4504 4505/* Browse through each array's information from the scalarizer and set the loop 4506 bounds according to the "best" one (per dimension), i.e. the one which 4507 provides the most information (constant bounds, shape, etc.). 
*/

static void
set_loop_bounds (gfc_loopinfo *loop)
{
  int n, dim, spec_dim;
  gfc_array_info *info;
  gfc_array_info *specinfo;
  gfc_ss *ss;
  tree tmp;
  gfc_ss **loopspec;
  bool dynamic[GFC_MAX_DIMENSIONS];
  mpz_t *cshape;
  mpz_t i;
  bool nonoptional_arr;

  gfc_loopinfo * const outer_loop = outermost_loop (loop);

  loopspec = loop->specloop;

  mpz_init (i);
  for (n = 0; n < loop->dimen; n++)
    {
      loopspec[n] = NULL;
      dynamic[n] = false;

      /* If there are both optional and nonoptional array arguments, scalarize
	 over the nonoptional; otherwise, it does not matter as then all
	 (optional) arrays have to be present per F2008, 12.5.2.12p3(6).  */

      nonoptional_arr = false;

      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
	if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
	    && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
	  {
	    nonoptional_arr = true;
	    break;
	  }

      /* We use one SS term, and use that to determine the bounds of the
	 loop for this dimension.  We try to pick the simplest term.  */
      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
	{
	  gfc_ss_type ss_type;

	  /* Scalar-like terms carry no loop bounds; skip them, and skip
	     possibly-absent arrays when a nonoptional one exists.  */
	  ss_type = ss->info->type;
	  if (ss_type == GFC_SS_SCALAR
	      || ss_type == GFC_SS_TEMP
	      || ss_type == GFC_SS_REFERENCE
	      || (ss->info->can_be_null_ref && nonoptional_arr))
	    continue;

	  info = &ss->info->data.array;
	  dim = ss->dim[n];

	  if (loopspec[n] != NULL)
	    {
	      specinfo = &loopspec[n]->info->data.array;
	      spec_dim = loopspec[n]->dim[n];
	    }
	  else
	    {
	      /* Silence uninitialized warnings.  */
	      specinfo = NULL;
	      spec_dim = 0;
	    }

	  if (info->shape)
	    {
	      gcc_assert (info->shape[dim]);
	      /* The frontend has worked out the size for us.  */
	      if (!loopspec[n]
		  || !specinfo->shape
		  || !integer_zerop (specinfo->start[spec_dim]))
		/* Prefer zero-based descriptors if possible.  */
		loopspec[n] = ss;
	      continue;
	    }

	  if (ss_type == GFC_SS_CONSTRUCTOR)
	    {
	      gfc_constructor_base base;
	      /* An unknown size constructor will always be rank one.
		 Higher rank constructors will either have known shape,
		 or still be wrapped in a call to reshape.  */
	      gcc_assert (loop->dimen == 1);

	      /* Always prefer to use the constructor bounds if the size
		 can be determined at compile time.  Prefer not to otherwise,
		 since the general case involves realloc, and it's better to
		 avoid that overhead if possible.  */
	      base = ss->info->expr->value.constructor;
	      dynamic[n] = gfc_get_array_constructor_size (&i, base);
	      if (!dynamic[n] || !loopspec[n])
		loopspec[n] = ss;
	      continue;
	    }

	  /* Avoid using an allocatable lhs in an assignment, since
	     there might be a reallocation coming.  */
	  if (loopspec[n] && ss->is_alloc_lhs)
	    continue;

	  if (!loopspec[n])
	    loopspec[n] = ss;
	  /* Criteria for choosing a loop specifier (most important first):
	     doesn't need realloc
	     stride of one
	     known stride
	     known lower bound
	     known upper bound
	   */
	  else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
	    loopspec[n] = ss;
	  else if (integer_onep (info->stride[dim])
		   && !integer_onep (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  else if (INTEGER_CST_P (info->stride[dim])
		   && !INTEGER_CST_P (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  else if (INTEGER_CST_P (info->start[dim])
		   && !INTEGER_CST_P (specinfo->start[spec_dim])
		   && integer_onep (info->stride[dim])
		      == integer_onep (specinfo->stride[spec_dim])
		   && INTEGER_CST_P (info->stride[dim])
		      == INTEGER_CST_P (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  /* We don't work out the upper bound.
	     else if (INTEGER_CST_P (info->finish[n])
	     && ! INTEGER_CST_P (specinfo->finish[n]))
	     loopspec[n] = ss; */
	}

      /* We should have found the scalarization loop specifier.  If not,
	 that's bad news.  */
      gcc_assert (loopspec[n]);

      info = &loopspec[n]->info->data.array;
      dim = loopspec[n]->dim[n];

      /* Set the extents of this range.  */
      cshape = info->shape;
      if (cshape && INTEGER_CST_P (info->start[dim])
	  && INTEGER_CST_P (info->stride[dim]))
	{
	  loop->from[n] = info->start[dim];
	  mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
	  mpz_sub_ui (i, i, 1);
	  /* To = from + (size - 1) * stride.  */
	  tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
	  if (!integer_onep (info->stride[dim]))
	    tmp = fold_build2_loc (input_location, MULT_EXPR,
				   gfc_array_index_type, tmp,
				   info->stride[dim]);
	  loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
					 gfc_array_index_type,
					 loop->from[n], tmp);
	}
      else
	{
	  loop->from[n] = info->start[dim];
	  switch (loopspec[n]->info->type)
	    {
	    case GFC_SS_CONSTRUCTOR:
	      /* The upper bound is calculated when we expand the
		 constructor.  */
	      gcc_assert (loop->to[n] == NULL_TREE);
	      break;

	    case GFC_SS_SECTION:
	      /* Use the end expression if it exists and is not constant,
		 so that it is only evaluated once.  */
	      loop->to[n] = info->end[dim];
	      break;

	    case GFC_SS_FUNCTION:
	      /* The loop bound will be set when we generate the call.  */
	      gcc_assert (loop->to[n] == NULL_TREE);
	      break;

	    case GFC_SS_INTRINSIC:
	      {
		gfc_expr *expr = loopspec[n]->info->expr;

		/* The {l,u}bound of an assumed rank.  */
		gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
			     || expr->value.function.isym->id == GFC_ISYM_UBOUND)
			     && expr->value.function.actual->next->expr == NULL
			     && expr->value.function.actual->expr->rank == -1);

		loop->to[n] = info->end[dim];
		break;
	      }

	    default:
	      gcc_unreachable ();
	    }
	}

      /* Transform everything so we have a simple incrementing variable.  */
      if (integer_onep (info->stride[dim]))
	info->delta[dim] = gfc_index_zero_node;
      else
	{
	  /* Set the delta for this section.  */
	  info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
	  /* Number of iterations is (end - start + step) / step.
	     with start = 0, this simplifies to
	     last = end / step;
	     for (i = 0; i<=last; i++){...};
	     The MAX_EXPR against -1 below clamps an empty range so that the
	     rewritten loop (from 0 to -1) executes zero times.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, loop->to[n],
				 loop->from[n]);
	  tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
				 gfc_array_index_type, tmp, info->stride[dim]);
	  tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
				 tmp, build_int_cst (gfc_array_index_type, -1));
	  loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
	  /* Make the loop variable start at 0.  */
	  loop->from[n] = gfc_index_zero_node;
	}
    }
  mpz_clear (i);

  /* Recurse into any nested loops.  */
  for (loop = loop->nested; loop; loop = loop->next)
    set_loop_bounds (loop);
}


/* Initialize the scalarization loop.  Creates the loop variables.  Determines
   the range of the loop variables.  Creates a temporary if required.
   Also generates code for scalar expressions which have been
   moved outside the loop.  */

void
gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
{
  gfc_ss *tmp_ss;
  tree tmp;

  set_loop_bounds (loop);

  /* Add all the scalar code that can be taken out of the loops.
     This may include calculating the loop bounds, so do it before
     allocating the temporary.  */
  gfc_add_loop_ss_code (loop, loop->ss, false, where);

  tmp_ss = loop->temp_ss;
  /* If we want a temporary then create it.  */
  if (tmp_ss != NULL)
    {
      gfc_ss_info *tmp_ss_info;

      tmp_ss_info = tmp_ss->info;
      gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
      gcc_assert (loop->parent == NULL);

      /* Make absolutely sure that this is a complete type.  */
      if (tmp_ss_info->string_length)
	tmp_ss_info->data.temp.type
		= gfc_get_character_type_len_for_eltype
			(TREE_TYPE (tmp_ss_info->data.temp.type),
			 tmp_ss_info->string_length);

      /* The temporary SS is reused as a section SS from here on; clear the
	 array info before switching its type.  */
      tmp = tmp_ss_info->data.temp.type;
      memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
      tmp_ss_info->type = GFC_SS_SECTION;

      gcc_assert (tmp_ss->dimen != 0);

      gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
				   NULL_TREE, false, true, false, where);
    }

  /* For array parameters we don't have loop variables, so don't calculate the
     translations.  */
  if (!loop->array_parameter)
    gfc_set_delta (loop);
}


/* Calculates how to transform from loop variables to array indices for each
   array: once loop bounds are chosen, sets the difference (DELTA field) between
   loop bounds and array reference bounds, for each array info.  */

void
gfc_set_delta (gfc_loopinfo *loop)
{
  gfc_ss *ss, **loopspec;
  gfc_array_info *info;
  tree tmp;
  int n, dim;

  gfc_loopinfo * const outer_loop = outermost_loop (loop);

  loopspec = loop->specloop;

  /* Calculate the translation from loop variables to array indices.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gfc_ss_type ss_type;

      /* Only terms with array data need a delta.  */
      ss_type = ss->info->type;
      if (ss_type != GFC_SS_SECTION
	  && ss_type != GFC_SS_COMPONENT
	  && ss_type != GFC_SS_CONSTRUCTOR)
	continue;

      info = &ss->info->data.array;

      for (n = 0; n < ss->dimen; n++)
	{
	  /* If we are specifying the range the delta is already set.  */
	  if (loopspec[n] != ss)
	    {
	      dim = ss->dim[n];

	      /* Calculate the offset relative to the loop variable.
		 First multiply by the stride.  */
	      tmp = loop->from[n];
	      if (!integer_onep (info->stride[dim]))
		tmp = fold_build2_loc (input_location, MULT_EXPR,
				       gfc_array_index_type,
				       tmp, info->stride[dim]);

	      /* Then subtract this from our starting value.  */
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type,
				     info->start[dim], tmp);

	      info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
	    }
	}
    }

  /* Recurse into any nested loops.  */
  for (loop = loop->nested; loop; loop = loop->next)
    gfc_set_delta (loop);
}


/* Calculate the size of a given array dimension from the bounds.  This
   is simply (ubound - lbound + 1) if this expression is positive
   or 0 if it is negative (pick either one if it is zero).  Optionally
   (if or_expr is present) OR the (expression != 0) condition to it.  */

tree
gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
{
  tree res;
  tree cond;

  /* Calculate (ubound - lbound + 1).  */
  res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			 ubound, lbound);
  res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
			 gfc_index_one_node);

  /* Check whether the size for this dimension is negative.  */
  cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
			  gfc_index_zero_node);
  res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
			 gfc_index_zero_node, res);

  /* Build OR expression.  */
  if (or_expr)
    *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
				boolean_type_node, *or_expr, cond);

  return res;
}


/* For an array descriptor, get the total number of elements.  This is just
   the product of the extents along from_dim to to_dim.
*/ 4882 4883static tree 4884gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim) 4885{ 4886 tree res; 4887 int dim; 4888 4889 res = gfc_index_one_node; 4890 4891 for (dim = from_dim; dim < to_dim; ++dim) 4892 { 4893 tree lbound; 4894 tree ubound; 4895 tree extent; 4896 4897 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]); 4898 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]); 4899 4900 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL); 4901 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type, 4902 res, extent); 4903 } 4904 4905 return res; 4906} 4907 4908 4909/* Full size of an array. */ 4910 4911tree 4912gfc_conv_descriptor_size (tree desc, int rank) 4913{ 4914 return gfc_conv_descriptor_size_1 (desc, 0, rank); 4915} 4916 4917 4918/* Size of a coarray for all dimensions but the last. */ 4919 4920tree 4921gfc_conv_descriptor_cosize (tree desc, int rank, int corank) 4922{ 4923 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1); 4924} 4925 4926 4927/* Fills in an array descriptor, and returns the size of the array. 4928 The size will be a simple_val, ie a variable or a constant. Also 4929 calculates the offset of the base. The pointer argument overflow, 4930 which should be of integer type, will increase in value if overflow 4931 occurs during the size calculation. Returns the size of the array. 4932 { 4933 stride = 1; 4934 offset = 0; 4935 for (n = 0; n < rank; n++) 4936 { 4937 a.lbound[n] = specified_lower_bound; 4938 offset = offset + a.lbond[n] * stride; 4939 size = 1 - lbound; 4940 a.ubound[n] = specified_upper_bound; 4941 a.stride[n] = stride; 4942 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound 4943 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0); 4944 stride = stride * size; 4945 } 4946 for (n = rank; n < rank+corank; n++) 4947 (Set lcobound/ucobound as above.) 
     element_size = sizeof (array element);
     if (!rank)
       return element_size
     stride = (size_t) stride;
     overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
     stride = stride * element_size;
     return (stride);
   }  */
/*GCC ARRAYS*/

static tree
gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
		     gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
		     stmtblock_t * descriptor_block, tree * overflow,
		     tree expr3_elem_size, tree *nelems, gfc_expr *expr3,
		     gfc_expr *expr)
{
  tree type;
  tree tmp;
  tree size;
  tree offset;
  tree stride;
  tree element_size;
  tree or_expr;
  tree thencase;
  tree elsecase;
  tree cond;
  tree var;
  stmtblock_t thenblock;
  stmtblock_t elseblock;
  gfc_expr *ubound;
  gfc_se se;
  int n;

  type = TREE_TYPE (descriptor);

  stride = gfc_index_one_node;
  offset = gfc_index_zero_node;

  /* Set the dtype.  For deferred-length character use the rank/type form,
     since the element length is only known at run time.  */
  if (expr->ts.type == BT_CHARACTER && expr->ts.deferred
      && TREE_CODE (expr->ts.u.cl->backend_decl) == VAR_DECL)
    {
      type = gfc_typenode_for_spec (&expr->ts);
      tmp = gfc_conv_descriptor_dtype (descriptor);
      gfc_add_modify (descriptor_block, tmp,
		      gfc_get_dtype_rank_type (rank, type));
    }
  else
    {
      tmp = gfc_conv_descriptor_dtype (descriptor);
      gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (type));
    }

  /* OR_EXPR accumulates "some extent is zero or negative".  */
  or_expr = boolean_false_node;

  for (n = 0; n < rank; n++)
    {
      tree conv_lbound;
      tree conv_ubound;

      /* We have 3 possibilities for determining the size of the array:
	 lower == NULL    => lbound = 1, ubound = upper[n]
	 upper[n] = NULL  => lbound = 1, ubound = lower[n]
	 upper[n] != NULL => lbound = lower[n], ubound = upper[n]  */
      ubound = upper[n];

      /* Set lower bound.  */
      gfc_init_se (&se, NULL);
      if (lower == NULL)
	se.expr = gfc_index_one_node;
      else
	{
	  gcc_assert (lower[n]);
	  if (ubound)
	    {
	      gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
	      gfc_add_block_to_block (pblock, &se.pre);
	    }
	  else
	    {
	      se.expr = gfc_index_one_node;
	      ubound = lower[n];
	    }
	}
      gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
				      gfc_rank_cst[n], se.expr);
      conv_lbound = se.expr;

      /* Work out the offset for this component.  */
      tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     se.expr, stride);
      offset = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, offset, tmp);

      /* Set upper bound.  */
      gfc_init_se (&se, NULL);
      gcc_assert (ubound);
      gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
      gfc_add_block_to_block (pblock, &se.pre);
      /* Evaluate function-valued bounds once, here, not in the descriptor
	 block.  */
      if (ubound->expr_type == EXPR_FUNCTION)
	se.expr = gfc_evaluate_now (se.expr, pblock);

      gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
				      gfc_rank_cst[n], se.expr);
      conv_ubound = se.expr;

      /* Store the stride.  */
      gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
				      gfc_rank_cst[n], stride);

      /* Calculate size and check whether extent is negative.  */
      size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
      size = gfc_evaluate_now (size, pblock);

      /* Check whether multiplying the stride by the number of
	 elements in this dimension would overflow. We must also check
	 whether the current dimension has zero size in order to avoid
	 division by zero.
      */
      tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
			     gfc_array_index_type,
			     fold_convert (gfc_array_index_type,
					   TYPE_MAX_VALUE (gfc_array_index_type)),
			     size);
      cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
					    boolean_type_node, tmp, stride),
			   PRED_FORTRAN_OVERFLOW);
      tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
			     integer_one_node, integer_zero_node);
      cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
					    boolean_type_node, size,
					    gfc_index_zero_node),
			   PRED_FORTRAN_SIZE_ZERO);
      tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
			     integer_zero_node, tmp);
      tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
			     *overflow, tmp);
      *overflow = gfc_evaluate_now (tmp, pblock);

      /* Multiply the stride by the number of elements in this dimension.  */
      stride = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, stride, size);
      stride = gfc_evaluate_now (stride, pblock);
    }

  /* Codimensions: only the bounds are stored; they contribute nothing to
     the allocation size.  */
  for (n = rank; n < rank + corank; n++)
    {
      ubound = upper[n];

      /* Set lower bound.  */
      gfc_init_se (&se, NULL);
      if (lower == NULL || lower[n] == NULL)
	{
	  gcc_assert (n == rank + corank - 1);
	  se.expr = gfc_index_one_node;
	}
      else
	{
	  if (ubound || n == rank + corank - 1)
	    {
	      gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
	      gfc_add_block_to_block (pblock, &se.pre);
	    }
	  else
	    {
	      se.expr = gfc_index_one_node;
	      ubound = lower[n];
	    }
	}
      gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
				      gfc_rank_cst[n], se.expr);

      /* The last codimension has no upper cobound.  */
      if (n < rank + corank - 1)
	{
	  gfc_init_se (&se, NULL);
	  gcc_assert (ubound);
	  gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
	  gfc_add_block_to_block (pblock, &se.pre);
	  gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
					  gfc_rank_cst[n], se.expr);
	}
    }

  /* The stride is the number of elements in the array, so multiply by the
     size of an element to get the total size.  Obviously, if there is a
     SOURCE expression (expr3) we must use its element size.  */
  if (expr3_elem_size != NULL_TREE)
    tmp = expr3_elem_size;
  else if (expr3 != NULL)
    {
      if (expr3->ts.type == BT_CLASS)
	{
	  /* For a polymorphic SOURCE, read the element size out of the
	     vtable (_vptr->_size).  */
	  gfc_se se_sz;
	  gfc_expr *sz = gfc_copy_expr (expr3);
	  gfc_add_vptr_component (sz);
	  gfc_add_size_component (sz);
	  gfc_init_se (&se_sz, NULL);
	  gfc_conv_expr (&se_sz, sz);
	  gfc_free_expr (sz);
	  tmp = se_sz.expr;
	}
      else
	{
	  tmp = gfc_typenode_for_spec (&expr3->ts);
	  tmp = TYPE_SIZE_UNIT (tmp);
	}
    }
  else
    tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));

  /* Convert to size_t.  */
  element_size = fold_convert (size_type_node, tmp);

  /* Scalar coarray: only the element size is needed.  */
  if (rank == 0)
    return element_size;

  *nelems = gfc_evaluate_now (stride, pblock);
  stride = fold_convert (size_type_node, stride);

  /* First check for overflow. Since an array of type character can
     have zero element_size, we must check for that before
     dividing.  */
  tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
			 size_type_node,
			 TYPE_MAX_VALUE (size_type_node), element_size);
  cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
					boolean_type_node, tmp, stride),
		       PRED_FORTRAN_OVERFLOW);
  tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
			 integer_one_node, integer_zero_node);
  cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
					boolean_type_node, element_size,
					build_int_cst (size_type_node, 0)),
		       PRED_FORTRAN_SIZE_ZERO);
  tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
			 integer_zero_node, tmp);
  tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
			 *overflow, tmp);
  *overflow = gfc_evaluate_now (tmp, pblock);

  size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
			  stride, element_size);

  if (poffset != NULL)
    {
      offset = gfc_evaluate_now (offset, pblock);
      *poffset = offset;
    }

  /* If OR_EXPR folded to a constant, the size is statically known; a zero
     or negative extent makes the whole array empty.  */
  if (integer_zerop (or_expr))
    return size;
  if (integer_onep (or_expr))
    return build_int_cst (size_type_node, 0);

  /* Otherwise select between 0 and SIZE at run time.  */
  var = gfc_create_var (TREE_TYPE (size), "size");
  gfc_start_block (&thenblock);
  gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
  thencase = gfc_finish_block (&thenblock);

  gfc_start_block (&elseblock);
  gfc_add_modify (&elseblock, var, size);
  elsecase = gfc_finish_block (&elseblock);

  tmp = gfc_evaluate_now (or_expr, pblock);
  tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
  gfc_add_expr_to_block (pblock, tmp);

  return var;
}


/* Initializes the descriptor and generates a call to _gfor_allocate.  Does
   the work for an ALLOCATE statement.
*/
/*GCC ARRAYS*/

bool
gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
		    tree errlen, tree label_finish, tree expr3_elem_size,
		    tree *nelems, gfc_expr *expr3)
{
  tree tmp;
  tree pointer;
  tree offset = NULL_TREE;
  tree token = NULL_TREE;
  tree size;
  tree msg;
  tree error = NULL_TREE;
  tree overflow; /* Boolean storing whether size calculation overflows.  */
  tree var_overflow = NULL_TREE;
  tree cond;
  tree set_descriptor;
  stmtblock_t set_descriptor_block;
  stmtblock_t elseblock;
  gfc_expr **lower;
  gfc_expr **upper;
  gfc_ref *ref, *prev_ref = NULL;
  bool allocatable, coarray, dimension;

  ref = expr->ref;

  /* Find the last reference in the chain.  */
  while (ref && ref->next != NULL)
    {
      gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
		  || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
      prev_ref = ref;
      ref = ref->next;
    }

  /* Not an array allocation; let the caller handle it.  */
  if (ref == NULL || ref->type != REF_ARRAY)
    return false;

  /* Attributes come from the base symbol, or from the component owning the
     final array reference.  */
  if (!prev_ref)
    {
      allocatable = expr->symtree->n.sym->attr.allocatable;
      coarray = expr->symtree->n.sym->attr.codimension;
      dimension = expr->symtree->n.sym->attr.dimension;
    }
  else
    {
      allocatable = prev_ref->u.c.component->attr.allocatable;
      coarray = prev_ref->u.c.component->attr.codimension;
      dimension = prev_ref->u.c.component->attr.dimension;
    }

  if (!dimension)
    gcc_assert (coarray);

  /* Figure out the size of the array.  */
  switch (ref->u.ar.type)
    {
    case AR_ELEMENT:
      if (!coarray)
	{
	  lower = NULL;
	  upper = ref->u.ar.start;
	  break;
	}
      /* Fall through.  */

    case AR_SECTION:
      lower = ref->u.ar.start;
      upper = ref->u.ar.end;
      break;

    case AR_FULL:
      gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);

      lower = ref->u.ar.as->lower;
      upper = ref->u.ar.as->upper;
      break;

    default:
      gcc_unreachable ();
      break;
    }

  overflow = integer_zero_node;

  /* Descriptor stores are collected separately so they can be made
     conditional on a successful allocation below.  */
  gfc_init_block (&set_descriptor_block);
  size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
			      ref->u.ar.as->corank, &offset, lower, upper,
			      &se->pre, &set_descriptor_block, &overflow,
			      expr3_elem_size, nelems, expr3, expr);

  if (dimension)
    {
      var_overflow = gfc_create_var (integer_type_node, "overflow");
      gfc_add_modify (&se->pre, var_overflow, overflow);

      if (status == NULL_TREE)
	{
	  /* Generate the block of code handling overflow.  */
	  msg = gfc_build_addr_expr (pchar_type_node,
		    gfc_build_localized_cstring_const
			("Integer overflow when calculating the amount of "
			 "memory to allocate"));
	  error = build_call_expr_loc (input_location,
				       gfor_fndecl_runtime_error, 1, msg);
	}
      else
	{
	  /* With STAT=, overflow sets the status instead of aborting.  */
	  tree status_type = TREE_TYPE (status);
	  stmtblock_t set_status_block;

	  gfc_start_block (&set_status_block);
	  gfc_add_modify (&set_status_block, status,
			  build_int_cst (status_type, LIBERROR_ALLOCATION));
	  error = gfc_finish_block (&set_status_block);
	}
    }

  gfc_start_block (&elseblock);

  /* Allocate memory to store the data.  */
  if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
    se->expr = build_fold_indirect_ref_loc (input_location, se->expr);

  pointer = gfc_conv_descriptor_data_get (se->expr);
  STRIP_NOPS (pointer);

  /* The coarray library needs the address of the descriptor's token.  */
  if (coarray && flag_coarray == GFC_FCOARRAY_LIB)
    token = gfc_build_addr_expr (NULL_TREE,
				 gfc_conv_descriptor_token (se->expr));

  /* The allocatable variant takes the old pointer as first argument.  */
  if (allocatable)
    gfc_allocate_allocatable (&elseblock, pointer, size, token,
			      status, errmsg, errlen, label_finish, expr);
  else
    gfc_allocate_using_malloc (&elseblock, pointer, size, status);

  if (dimension)
    {
      /* Only allocate when the size calculation did not overflow.  */
      cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
			   boolean_type_node, var_overflow, integer_zero_node),
			   PRED_FORTRAN_OVERFLOW);
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
			     error, gfc_finish_block (&elseblock));
    }
  else
    tmp = gfc_finish_block (&elseblock);

  gfc_add_expr_to_block (&se->pre, tmp);

  /* Update the array descriptors.  */
  if (dimension)
    gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);

  /* Guard the descriptor stores with STAT == 0 when a status variable is
     present, so a failed allocation leaves the descriptor untouched.  */
  set_descriptor = gfc_finish_block (&set_descriptor_block);
  if (status != NULL_TREE)
    {
      cond = fold_build2_loc (input_location, EQ_EXPR,
			  boolean_type_node, status,
			  build_int_cst (TREE_TYPE (status), 0));
      gfc_add_expr_to_block (&se->pre,
		 fold_build3_loc (input_location, COND_EXPR, void_type_node,
				  gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
				  set_descriptor,
				  build_empty_stmt (input_location)));
    }
  else
      gfc_add_expr_to_block (&se->pre, set_descriptor);

  /* Nullify allocatable components of a freshly allocated derived-type
     object.  */
  if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
      && !coarray)
    {
      tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
				    ref->u.ar.as->rank);
      gfc_add_expr_to_block (&se->pre, tmp);
    }

  return true;
}


/* Deallocate an array variable.  Also used when an allocated variable goes
   out of scope.
*/ 5407/*GCC ARRAYS*/ 5408 5409tree 5410gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen, 5411 tree label_finish, gfc_expr* expr) 5412{ 5413 tree var; 5414 tree tmp; 5415 stmtblock_t block; 5416 bool coarray = gfc_is_coarray (expr); 5417 5418 gfc_start_block (&block); 5419 5420 /* Get a pointer to the data. */ 5421 var = gfc_conv_descriptor_data_get (descriptor); 5422 STRIP_NOPS (var); 5423 5424 /* Parameter is the address of the data component. */ 5425 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg, 5426 errlen, label_finish, false, expr, coarray); 5427 gfc_add_expr_to_block (&block, tmp); 5428 5429 /* Zero the data pointer; only for coarrays an error can occur and then 5430 the allocation status may not be changed. */ 5431 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node, 5432 var, build_int_cst (TREE_TYPE (var), 0)); 5433 if (pstat != NULL_TREE && coarray && flag_coarray == GFC_FCOARRAY_LIB) 5434 { 5435 tree cond; 5436 tree stat = build_fold_indirect_ref_loc (input_location, pstat); 5437 5438 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, 5439 stat, build_int_cst (TREE_TYPE (stat), 0)); 5440 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, 5441 cond, tmp, build_empty_stmt (input_location)); 5442 } 5443 5444 gfc_add_expr_to_block (&block, tmp); 5445 5446 return gfc_finish_block (&block); 5447} 5448 5449 5450/* Create an array constructor from an initialization expression. 5451 We assume the frontend already did any expansions and conversions. 
 */

tree
gfc_conv_array_initializer (tree type, gfc_expr * expr)
{
  gfc_constructor *c;
  tree tmp;
  offset_int wtmp;
  gfc_se se;
  tree index, range;
  vec<constructor_elt, va_gc> *v = NULL;

  /* For named constants (PARAMETER), translate the initialization value
     instead of the symbol itself.  */
  if (expr->expr_type == EXPR_VARIABLE
      && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
      && expr->symtree->n.sym->value)
    expr = expr->symtree->n.sym->value;

  switch (expr->expr_type)
    {
    case EXPR_CONSTANT:
    case EXPR_STRUCTURE:
      /* A single scalar or derived type value.  Create an array with all
	 elements equal to that value.  */
      gfc_init_se (&se, NULL);

      if (expr->expr_type == EXPR_CONSTANT)
	gfc_conv_constant (&se, expr);
      else
	gfc_conv_structure (&se, expr, 1);

      /* Number of elements = upper bound of the array domain + 1.  */
      wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
      /* This will probably eat buckets of memory for large arrays.  */
      while (wtmp != 0)
	{
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
	  wtmp -= 1;
	}
      break;

    case EXPR_ARRAY:
      /* Create a vector of all the elements.  */
      for (c = gfc_constructor_first (expr->value.constructor);
	   c; c = gfc_constructor_next (c))
	{
	  if (c->iterator)
	    {
	      /* Problems occur when we get something like
		 integer :: a(lots) = (/(i, i=1, lots)/)  */
	      gfc_fatal_error ("The number of elements in the array "
			       "constructor at %L requires an increase of "
			       "the allowed %d upper limit. See "
			       "%<-fmax-array-constructor%> option",
			       &expr->where, flag_max_array_constructor);
	      return NULL_TREE;
	    }
	  /* A nonzero offset becomes an explicit index for the element.  */
	  if (mpz_cmp_si (c->offset, 0) != 0)
	    index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
	  else
	    index = NULL_TREE;

	  if (mpz_cmp_si (c->repeat, 1) > 0)
	    {
	      /* Repeated element: build a RANGE_EXPR
		 [offset+1 .. offset+repeat-1] (the element at OFFSET itself
		 is appended separately below).  */
	      tree tmp1, tmp2;
	      mpz_t maxval;

	      mpz_init (maxval);
	      mpz_add (maxval, c->offset, c->repeat);
	      mpz_sub_ui (maxval, maxval, 1);
	      tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
	      if (mpz_cmp_si (c->offset, 0) != 0)
		{
		  mpz_add_ui (maxval, c->offset, 1);
		  tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
		}
	      else
		tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);

	      range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
	      mpz_clear (maxval);
	    }
	  else
	    range = NULL;

	  gfc_init_se (&se, NULL);
	  switch (c->expr->expr_type)
	    {
	    case EXPR_CONSTANT:
	      gfc_conv_constant (&se, c->expr);
	      break;

	    case EXPR_STRUCTURE:
	      gfc_conv_structure (&se, c->expr, 1);
	      break;

	    default:
	      /* Catch those occasional beasts that do not simplify
		 for one reason or another, assuming that if they are
		 standard defying the frontend will catch them.  */
	      gfc_conv_expr (&se, c->expr);
	      break;
	    }

	  if (range == NULL_TREE)
	    CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
	  else
	    {
	      if (index != NULL_TREE)
		CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
	      CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
	    }
	}
      break;

    case EXPR_NULL:
      return gfc_build_null_descriptor (type);

    default:
      gcc_unreachable ();
    }

  /* Create a constructor from the list of elements.  */
  tmp = build_constructor (type, v);
  TREE_CONSTANT (tmp) = 1;
  return tmp;
}


/* Generate code to evaluate non-constant coarray cobounds.  */

void
gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
			  const gfc_symbol *sym)
{
  int dim;
  tree ubound;
  tree lbound;
  gfc_se se;
  gfc_array_spec *as;

  as = sym->as;

  /* Codimensions are stored after the array dimensions proper, i.e. at
     indices rank .. rank+corank-1 of the backend array type.  */
  for (dim = as->rank; dim < as->rank + as->corank; dim++)
    {
      /* Evaluate non-constant array bound expressions.  */
      lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
      if (as->lower[dim] && !INTEGER_CST_P (lbound))
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
	  gfc_add_block_to_block (pblock, &se.pre);
	  gfc_add_modify (pblock, lbound, se.expr);
	}
      ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
      if (as->upper[dim] && !INTEGER_CST_P (ubound))
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
	  gfc_add_block_to_block (pblock, &se.pre);
	  gfc_add_modify (pblock, ubound, se.expr);
	}
    }
}


/* Generate code to evaluate non-constant array bounds.  Sets *poffset and
   returns the size (in elements) of the array.  */

static tree
gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
			stmtblock_t * pblock)
{
  gfc_array_spec *as;
  tree size;
  tree stride;
  tree offset;
  tree ubound;
  tree lbound;
  tree tmp;
  gfc_se se;

  int dim;

  as = sym->as;

  size = gfc_index_one_node;
  offset = gfc_index_zero_node;
  for (dim = 0; dim < as->rank; dim++)
    {
      /* Evaluate non-constant array bound expressions.  */
      lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
      if (as->lower[dim] && !INTEGER_CST_P (lbound))
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
	  gfc_add_block_to_block (pblock, &se.pre);
	  gfc_add_modify (pblock, lbound, se.expr);
	}
      ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
      if (as->upper[dim] && !INTEGER_CST_P (ubound))
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
	  gfc_add_block_to_block (pblock, &se.pre);
	  gfc_add_modify (pblock, ubound, se.expr);
	}
      /* The offset of this dimension.  offset = offset - lbound * stride.  */
      tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     lbound, size);
      offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
				offset, tmp);

      /* The size of this dimension, and the stride of the next.  The last
	 dimension's "stride" slot is the total size of the array.  */
      if (dim + 1 < as->rank)
	stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
      else
	stride = GFC_TYPE_ARRAY_SIZE (type);

      if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
	{
	  /* Calculate stride = size * (ubound + 1 - lbound).  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type,
				 gfc_index_one_node, lbound);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, ubound, tmp);
	  tmp = fold_build2_loc (input_location, MULT_EXPR,
				 gfc_array_index_type, size, tmp);
	  if (stride)
	    gfc_add_modify (pblock, stride, tmp);
	  else
	    stride = gfc_evaluate_now (tmp, pblock);

	  /* Make sure that negative size arrays are translated
	     to being zero size.  */
	  tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
				 stride, gfc_index_zero_node);
	  tmp = fold_build3_loc (input_location, COND_EXPR,
				 gfc_array_index_type, tmp,
				 stride, gfc_index_zero_node);
	  gfc_add_modify (pblock, stride, tmp);
	}

      size = stride;
    }

  gfc_trans_array_cobounds (type, pblock, sym);
  gfc_trans_vla_type_sizes (sym, pblock);

  *poffset = offset;
  return size;
}


/* Generate code to initialize/allocate an array variable.  */

void
gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
				 gfc_wrapped_block * block)
{
  stmtblock_t init;
  tree type;
  tree tmp = NULL_TREE;
  tree size;
  tree offset;
  tree space;
  tree inittree;
  bool onstack;

  gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));

  /* Do nothing for USEd variables.  */
  if (sym->attr.use_assoc)
    return;

  type = TREE_TYPE (decl);
  gcc_assert (GFC_ARRAY_TYPE_P (type));
  /* A POINTER_TYPE decl means the storage is allocated separately
     (heap or alloca), not on the stack within the frame.  */
  onstack = TREE_CODE (type) != POINTER_TYPE;

  gfc_init_block (&init);

  /* Evaluate character string length.  */
  if (sym->ts.type == BT_CHARACTER
      && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &init);

      gfc_trans_vla_type_sizes (sym, &init);

      /* Emit a DECL_EXPR for this variable, which will cause the
	 gimplifier to allocate storage, and all that good stuff.  */
      tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
      gfc_add_expr_to_block (&init, tmp);
    }

  if (onstack)
    {
      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      return;
    }

  type = TREE_TYPE (type);

  gcc_assert (!sym->attr.use_assoc);
  gcc_assert (!TREE_STATIC (decl));
  gcc_assert (!sym->module);

  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    gfc_conv_string_length (sym->ts.u.cl, NULL, &init);

  size = gfc_trans_array_bounds (type, sym, &offset, &init);

  /* Don't actually allocate space for Cray Pointees.  */
  if (sym->attr.cray_pointee)
    {
      if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
	gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);

      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      return;
    }

  if (flag_stack_arrays)
    {
      /* -fstack-arrays: create a VLA-like VAR_DECL; its DECL_EXPR is
	 emitted later (below) in the scope where SPACE lives.  */
      gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
      space = build_decl (sym->declared_at.lb->location,
			  VAR_DECL, create_tmp_var_name ("A"),
			  TREE_TYPE (TREE_TYPE (decl)));
      gfc_trans_vla_type_sizes (sym, &init);
    }
  else
    {
      /* The size is the number of elements in the array, so multiply by the
	 size of an element to get the total size.  */
      tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, fold_convert (gfc_array_index_type, tmp));

      /* Allocate memory to hold the data.  */
      tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
      gfc_add_modify (&init, decl, tmp);

      /* Free the temporary.  TMP becomes the cleanup expression passed to
	 gfc_add_init_cleanup at the end of this function.  */
      tmp = gfc_call_free (convert (pvoid_type_node, decl));
      space = NULL_TREE;
    }

  /* Set offset of the array.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);

  /* Automatic arrays should not have initializers.  */
  gcc_assert (!sym->value);

  inittree = gfc_finish_block (&init);

  if (space)
    {
      tree addr;
      pushdecl (space);

      /* Don't create new scope, emit the DECL_EXPR in exactly the scope
	 where also space is located.  */
      gfc_init_block (&init);
      tmp = fold_build1_loc (input_location, DECL_EXPR,
			     TREE_TYPE (space), space);
      gfc_add_expr_to_block (&init, tmp);
      addr = fold_build1_loc (sym->declared_at.lb->location,
			      ADDR_EXPR, TREE_TYPE (decl), space);
      gfc_add_modify (&init, decl, addr);
      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      /* Stack storage needs no explicit cleanup.  */
      tmp = NULL_TREE;
    }
  gfc_add_init_cleanup (block, inittree, tmp);
}


/* Generate entry and exit code for g77 calling convention arrays.  */

void
gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
{
  tree parm;
  tree type;
  locus loc;
  tree offset;
  tree tmp;
  tree stmt;
  stmtblock_t init;

  gfc_save_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);

  /* Descriptor type.  */
  parm = sym->backend_decl;
  type = TREE_TYPE (parm);
  gcc_assert (GFC_ARRAY_TYPE_P (type));

  gfc_start_block (&init);

  if (sym->ts.type == BT_CHARACTER
      && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
    gfc_conv_string_length (sym->ts.u.cl, NULL, &init);

  /* Evaluate the bounds of the array.  */
  gfc_trans_array_bounds (type, sym, &offset, &init);

  /* Set the offset.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);

  /* Set the pointer itself if we aren't using the parameter directly.  */
  if (TREE_CODE (parm) != PARM_DECL)
    {
      tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
      gfc_add_modify (&init, parm, tmp);
    }
  stmt = gfc_finish_block (&init);

  gfc_restore_backend_locus (&loc);

  /* Add the initialization code to the start of the function.  */

  if (sym->attr.optional || sym->attr.not_always_present)
    {
      /* Guard the setup on presence of the (optional) argument.  */
      tmp = gfc_conv_expr_present (sym);
      stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
    }

  gfc_add_init_cleanup (block, stmt, NULL_TREE);
}


/* Modify the descriptor of an array parameter so that it has the
   correct lower bound.  Also move the upper bound accordingly.
   If the array is not packed, it will be copied into a temporary.
   For each dimension we set the new lower and upper bounds.  Then we copy the
   stride and calculate the offset for this dimension.  We also work out
   what the stride of a packed array would be, and see if the two match.
   If the array need repacking, we set the stride to the values we just
   calculated, recalculate the offset and copy the array data.
   Code is also added to copy the data back at the end of the function.
   */

void
gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
			    gfc_wrapped_block * block)
{
  tree size;
  tree type;
  tree offset;
  locus loc;
  stmtblock_t init;
  tree stmtInit, stmtCleanup;
  tree lbound;
  tree ubound;
  tree dubound;
  tree dlbound;
  tree dumdesc;
  tree tmp;
  tree stride, stride2;
  tree stmt_packed;
  tree stmt_unpacked;
  tree partial;
  gfc_se se;
  int n;
  int checkparm;
  int no_repack;
  bool optional_arg;

  /* Do nothing for pointer and allocatable arrays.  */
  if (sym->attr.pointer || sym->attr.allocatable)
    return;

  if (sym->attr.dummy && gfc_is_nodesc_array (sym))
    {
      gfc_trans_g77_array (sym, block);
      return;
    }

  gfc_save_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);

  /* Descriptor type.  */
  type = TREE_TYPE (tmpdesc);
  gcc_assert (GFC_ARRAY_TYPE_P (type));
  dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
  gfc_start_block (&init);

  if (sym->ts.type == BT_CHARACTER
      && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
    gfc_conv_string_length (sym->ts.u.cl, NULL, &init);

  /* Extent checks only make sense for explicit-shape dummies with
     bounds checking enabled.  */
  checkparm = (sym->as->type == AS_EXPLICIT
	       && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));

  no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
		|| GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));

  if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
    {
      /* For non-constant shape arrays we only check if the first dimension
	 is contiguous.  Repacking higher dimensions wouldn't gain us
	 anything as we still don't know the array stride.  */
      partial = gfc_create_var (boolean_type_node, "partial");
      TREE_USED (partial) = 1;
      tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
			     gfc_index_one_node);
      gfc_add_modify (&init, partial, tmp);
    }
  else
    partial = NULL_TREE;

  /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
     here, however I think it does the right thing.  */
  if (no_repack)
    {
      /* Set the first stride.  A zero incoming stride is normalized to
	 one (e.g. for temporaries created by the library).  */
      stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
      stride = gfc_evaluate_now (stride, &init);

      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
			     stride, gfc_index_zero_node);
      tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
			     tmp, gfc_index_one_node, stride);
      stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
      gfc_add_modify (&init, stride, tmp);

      /* Allow the user to disable array repacking.  */
      stmt_unpacked = NULL_TREE;
    }
  else
    {
      gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
      /* A library call to repack the array if necessary.  */
      tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
      stmt_unpacked = build_call_expr_loc (input_location,
					   gfor_fndecl_in_pack, 1, tmp);

      stride = gfc_index_one_node;

      if (warn_array_temporaries)
	gfc_warning (OPT_Warray_temporaries,
		     "Creating array temporary at %L", &loc);
    }

  /* This is for the case where the array data is used directly without
     calling the repack function.  */
  if (no_repack || partial != NULL_TREE)
    stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
  else
    stmt_packed = NULL_TREE;

  /* Assign the data pointer.  */
  if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
    {
      /* Don't repack unknown shape arrays when the first stride is 1.  */
      tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
			     partial, stmt_packed, stmt_unpacked);
    }
  else
    tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
  gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));

  offset = gfc_index_zero_node;
  size = gfc_index_one_node;

  /* Evaluate the bounds of the array.  */
  for (n = 0; n < sym->as->rank; n++)
    {
      if (checkparm || !sym->as->upper[n])
	{
	  /* Get the bounds of the actual parameter.  */
	  dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
	  dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
	}
      else
	{
	  dubound = NULL_TREE;
	  dlbound = NULL_TREE;
	}

      lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
      if (!INTEGER_CST_P (lbound))
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_type (&se, sym->as->lower[n],
			      gfc_array_index_type);
	  gfc_add_block_to_block (&init, &se.pre);
	  gfc_add_modify (&init, lbound, se.expr);
	}

      ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
      /* Set the desired upper bound.  */
      if (sym->as->upper[n])
	{
	  /* We know what we want the upper bound to be.  */
	  if (!INTEGER_CST_P (ubound))
	    {
	      gfc_init_se (&se, NULL);
	      gfc_conv_expr_type (&se, sym->as->upper[n],
				  gfc_array_index_type);
	      gfc_add_block_to_block (&init, &se.pre);
	      gfc_add_modify (&init, ubound, se.expr);
	    }

	  /* Check the sizes match.  */
	  if (checkparm)
	    {
	      /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)).  */
	      char * msg;
	      tree temp;

	      temp = fold_build2_loc (input_location, MINUS_EXPR,
				      gfc_array_index_type, ubound, lbound);
	      temp = fold_build2_loc (input_location, PLUS_EXPR,
				      gfc_array_index_type,
				      gfc_index_one_node, temp);
	      stride2 = fold_build2_loc (input_location, MINUS_EXPR,
					 gfc_array_index_type, dubound,
					 dlbound);
	      stride2 = fold_build2_loc (input_location, PLUS_EXPR,
					 gfc_array_index_type,
					 gfc_index_one_node, stride2);
	      tmp = fold_build2_loc (input_location, NE_EXPR,
				     gfc_array_index_type, temp, stride2);
	      msg = xasprintf ("Dimension %d of array '%s' has extent "
			       "%%ld instead of %%ld", n+1, sym->name);

	      gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
			fold_convert (long_integer_type_node, temp),
			fold_convert (long_integer_type_node, stride2));

	      free (msg);
	    }
	}
      else
	{
	  /* For assumed shape arrays move the upper bound by the same amount
	     as the lower bound.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, dubound, dlbound);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, tmp, lbound);
	  gfc_add_modify (&init, ubound, tmp);
	}
      /* The offset of this dimension.  offset = offset - lbound * stride.  */
      tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     lbound, stride);
      offset = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, offset, tmp);

      /* The size of this dimension, and the stride of the next.  */
      if (n + 1 < sym->as->rank)
	{
	  stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);

	  if (no_repack || partial != NULL_TREE)
	    stmt_unpacked =
	      gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);

	  /* Figure out the stride if not a known constant.  */
	  if (!INTEGER_CST_P (stride))
	    {
	      if (no_repack)
		stmt_packed = NULL_TREE;
	      else
		{
		  /* Calculate stride = size * (ubound + 1 - lbound).  */
		  tmp = fold_build2_loc (input_location, MINUS_EXPR,
					 gfc_array_index_type,
					 gfc_index_one_node, lbound);
		  tmp = fold_build2_loc (input_location, PLUS_EXPR,
					 gfc_array_index_type, ubound, tmp);
		  size = fold_build2_loc (input_location, MULT_EXPR,
					  gfc_array_index_type, size, tmp);
		  stmt_packed = size;
		}

	      /* Assign the stride.  */
	      if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
		tmp = fold_build3_loc (input_location, COND_EXPR,
				       gfc_array_index_type, partial,
				       stmt_unpacked, stmt_packed);
	      else
		tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
	      gfc_add_modify (&init, stride, tmp);
	    }
	}
      else
	{
	  /* Last dimension: compute the total size instead of a stride.  */
	  stride = GFC_TYPE_ARRAY_SIZE (type);

	  if (stride && !INTEGER_CST_P (stride))
	    {
	      /* Calculate size = stride * (ubound + 1 - lbound).  */
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type,
				     gfc_index_one_node, lbound);
	      tmp = fold_build2_loc (input_location, PLUS_EXPR,
				     gfc_array_index_type,
				     ubound, tmp);
	      tmp = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type,
				     GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
	      gfc_add_modify (&init, stride, tmp);
	    }
	}
    }

  gfc_trans_array_cobounds (type, &init, sym);

  /* Set the offset.  */
  if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
    gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);

  gfc_trans_vla_type_sizes (sym, &init);

  stmtInit = gfc_finish_block (&init);

  /* Only do the entry/initialization code if the arg is present.  */
  dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  optional_arg = (sym->attr.optional
		  || (sym->ns->proc_name->attr.entry_master
		      && sym->attr.dummy));
  if (optional_arg)
    {
      tmp = gfc_conv_expr_present (sym);
      stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
			   build_empty_stmt (input_location));
    }

  /* Cleanup code.  */
  if (no_repack)
    stmtCleanup = NULL_TREE;
  else
    {
      stmtblock_t cleanup;
      gfc_start_block (&cleanup);

      if (sym->attr.intent != INTENT_IN)
	{
	  /* Copy the data back.  */
	  tmp = build_call_expr_loc (input_location,
				 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
	  gfc_add_expr_to_block (&cleanup, tmp);
	}

      /* Free the temporary.  */
      tmp = gfc_call_free (tmpdesc);
      gfc_add_expr_to_block (&cleanup, tmp);

      stmtCleanup = gfc_finish_block (&cleanup);

      /* Only do the cleanup if the array was repacked.  */
      tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
      tmp = gfc_conv_descriptor_data_get (tmp);
      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			     tmp, tmpdesc);
      stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
			      build_empty_stmt (input_location));

      if (optional_arg)
	{
	  tmp = gfc_conv_expr_present (sym);
	  stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
				  build_empty_stmt (input_location));
	}
    }

  /* We don't need to free any memory allocated by internal_pack as it will
     be freed at the end of the function by pop_context.  */
  gfc_add_init_cleanup (block, stmtInit, stmtCleanup);

  gfc_restore_backend_locus (&loc);
}


/* Calculate the overall offset, including subreferences.
 */
static void
gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
			bool subref, gfc_expr *expr)
{
  tree tmp;
  tree field;
  tree stride;
  tree index;
  gfc_ref *ref;
  gfc_se start;
  int n;

  /* If offset is NULL and this is not a subreferenced array, there is
     nothing to do.  */
  if (offset == NULL_TREE)
    {
      if (subref)
	offset = gfc_index_zero_node;
      else
	return;
    }

  tmp = build_array_ref (desc, offset, NULL);

  /* Offset the data pointer for pointer assignments from arrays with
     subreferences; e.g. my_integer => my_type(:)%integer_component.  */
  if (subref)
    {
      /* Go past the array reference.  */
      for (ref = expr->ref; ref; ref = ref->next)
	if (ref->type == REF_ARRAY &&
	      ref->u.ar.type != AR_ELEMENT)
	  {
	    ref = ref->next;
	    break;
	  }

      /* Calculate the offset for each subsequent subreference.  */
      for (; ref; ref = ref->next)
	{
	  switch (ref->type)
	    {
	    case REF_COMPONENT:
	      /* Derived-type component: build a COMPONENT_REF.  */
	      field = ref->u.c.component->backend_decl;
	      gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
	      tmp = fold_build3_loc (input_location, COMPONENT_REF,
				     TREE_TYPE (field),
				     tmp, field, NULL_TREE);
	      break;

	    case REF_SUBSTRING:
	      /* Substring: index the character array at the start
		 position.  */
	      gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
	      gfc_init_se (&start, NULL);
	      gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
	      gfc_add_block_to_block (block, &start.pre);
	      tmp = gfc_build_array_ref (tmp, start.expr, NULL);
	      break;

	    case REF_ARRAY:
	      gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
			    && ref->u.ar.type == AR_ELEMENT);

	      /* TODO - Add bounds checking.  */
	      stride = gfc_index_one_node;
	      index = gfc_index_zero_node;
	      for (n = 0; n < ref->u.ar.dimen; n++)
		{
		  tree itmp;
		  tree jtmp;

		  /* Update the index.
		     index += (start[n] - lower[n]) * stride.  */
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
		  itmp = gfc_evaluate_now (start.expr, block);
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
		  jtmp = gfc_evaluate_now (start.expr, block);
		  itmp = fold_build2_loc (input_location, MINUS_EXPR,
					  gfc_array_index_type, itmp, jtmp);
		  itmp = fold_build2_loc (input_location, MULT_EXPR,
					  gfc_array_index_type, itmp, stride);
		  index = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type, itmp, index);
		  index = gfc_evaluate_now (index, block);

		  /* Update the stride.
		     stride *= (upper[n] + 1 - lower[n]).  */
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
		  itmp = fold_build2_loc (input_location, MINUS_EXPR,
					  gfc_array_index_type, start.expr,
					  jtmp);
		  itmp = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type,
					  gfc_index_one_node, itmp);
		  stride = fold_build2_loc (input_location, MULT_EXPR,
					    gfc_array_index_type, stride, itmp);
		  stride = gfc_evaluate_now (stride, block);
		}

	      /* Apply the index to obtain the array element.  */
	      tmp = gfc_build_array_ref (tmp, index, NULL);
	      break;

	    default:
	      gcc_unreachable ();
	      break;
	    }
	}
    }

  /* Set the target data pointer.  */
  offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
  gfc_conv_descriptor_data_set (block, parm, offset);
}


/* gfc_conv_expr_descriptor needs the string length of an expression
   so that the size of the temporary can be obtained.  This is done
   by adding up the string lengths of all the elements in the
   expression.  Function with non-constant expressions have their
   string lengths mapped onto the actual arguments using the
   interface mapping machinery in trans-expr.c.
 */
static void
get_array_charlen (gfc_expr *expr, gfc_se *se)
{
  gfc_interface_mapping mapping;
  gfc_formal_arglist *formal;
  gfc_actual_arglist *arg;
  gfc_se tse;

  /* Constant length: just make sure a backend decl exists.  */
  if (expr->ts.u.cl->length
	&& gfc_is_constant_expr (expr->ts.u.cl->length))
    {
      if (!expr->ts.u.cl->backend_decl)
	gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      return;
    }

  switch (expr->expr_type)
    {
    case EXPR_OP:
      get_array_charlen (expr->value.op.op1, se);

      /* For parentheses the expression ts.u.cl is identical.  */
      if (expr->value.op.op == INTRINSIC_PARENTHESES)
	return;

      expr->ts.u.cl->backend_decl =
		 gfc_create_var (gfc_charlen_type_node, "sln");

      if (expr->value.op.op2)
	{
	  get_array_charlen (expr->value.op.op2, se);

	  /* Concatenation is the only binary character intrinsic.  */
	  gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);

	  /* Add the string lengths and assign them to the expression
	     string length backend declaration.  */
	  gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
			  fold_build2_loc (input_location, PLUS_EXPR,
				gfc_charlen_type_node,
				expr->value.op.op1->ts.u.cl->backend_decl,
				expr->value.op.op2->ts.u.cl->backend_decl));
	}
      else
	gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
			expr->value.op.op1->ts.u.cl->backend_decl);
      break;

    case EXPR_FUNCTION:
      if (expr->value.function.esym == NULL
	    || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
	{
	  gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
	  break;
	}

      /* Map expressions involving the dummy arguments onto the actual
	 argument expressions.  */
      gfc_init_interface_mapping (&mapping);
      formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
      arg = expr->value.function.actual;

      /* Set se = NULL in the calls to the interface mapping, to suppress any
	 backend stuff.  */
      for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
	{
	  if (!arg->expr)
	    continue;
	  if (formal->sym)
	    gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
	}

      gfc_init_se (&tse, NULL);

      /* Build the expression for the character length and convert it.  */
      gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);

      gfc_add_block_to_block (&se->pre, &tse.pre);
      gfc_add_block_to_block (&se->post, &tse.post);
      tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
      /* Clamp negative lengths to zero, as required by the standard.  */
      tse.expr = fold_build2_loc (input_location, MAX_EXPR,
				  gfc_charlen_type_node, tse.expr,
				  build_int_cst (gfc_charlen_type_node, 0));
      expr->ts.u.cl->backend_decl = tse.expr;
      gfc_free_interface_mapping (&mapping);
      break;

    default:
      gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      break;
    }
}


/* Helper function to check dimensions.  Returns true if any scalarizer
   dimension is out of natural order (e.g. for TRANSPOSE).  */
static bool
transposed_dims (gfc_ss *ss)
{
  int n;

  for (n = 0; n < ss->dimen; n++)
    if (ss->dim[n] != n)
      return true;
  return false;
}


/* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
   AR_FULL, suitable for the scalarizer.  */

static gfc_ss *
walk_coarray (gfc_expr *e)
{
  gfc_ss *ss;

  gcc_assert (gfc_get_corank (e) > 0);

  ss = gfc_walk_expr (e);

  /* Fix scalar coarray.  */
  if (ss == gfc_ss_terminator)
    {
      gfc_ref *ref;

      /* Find the reference carrying the codimensions.  */
      ref = e->ref;
      while (ref)
	{
	  if (ref->type == REF_ARRAY
	      && ref->u.ar.codimen > 0)
	    break;

	  ref = ref->next;
	}

      gcc_assert (ref != NULL);
      if (ref->u.ar.type == AR_ELEMENT)
	ref->u.ar.type = AR_SECTION;
      ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
    }

  return ss;
}


/* Convert an array for passing as an actual argument.
   Expressions and
   vector subscripts are evaluated and stored in a temporary, which is then
   passed.  For whole arrays the descriptor is passed.  For array sections
   a modified copy of the descriptor is passed, but using the original data.

   This function is also used for array pointer assignments, and there
   are three cases:

     - se->want_pointer && !se->direct_byref
	 EXPR is an actual argument.  On exit, se->expr contains a
	 pointer to the array descriptor.

     - !se->want_pointer && !se->direct_byref
	 EXPR is an actual argument to an intrinsic function or the
	 left-hand side of a pointer assignment.  On exit, se->expr
	 contains the descriptor for EXPR.

     - !se->want_pointer && se->direct_byref
	 EXPR is the right-hand side of a pointer assignment and
	 se->expr is the descriptor for the previously-evaluated
	 left-hand side.  The function creates an assignment from
	 EXPR to se->expr.


   The se->force_tmp flag disables the non-copying descriptor optimization
   that is used for transpose.  It may be used in cases where there is an
   alias between the transpose argument and another argument in the same
   function call.  */

void
gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
{
  gfc_ss *ss;
  gfc_ss_type ss_type;
  gfc_ss_info *ss_info;
  gfc_loopinfo loop;
  gfc_array_info *info;
  int need_tmp;
  int n;
  tree tmp;
  tree desc;
  stmtblock_t block;
  tree start;
  tree offset;
  int full;
  bool subref_array_target = false;
  gfc_expr *arg, *ss_expr;

  if (se->want_coarray)
    ss = walk_coarray (expr);
  else
    ss = gfc_walk_expr (expr);

  gcc_assert (ss != NULL);
  gcc_assert (ss != gfc_ss_terminator);

  ss_info = ss->info;
  ss_type = ss_info->type;
  ss_expr = ss_info->expr;

  /* Special case: TRANSPOSE which needs no temporary.  */
  while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
	 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
    {
      /* This is a call to transpose which has already been handled by the
	 scalarizer, so that we just need to get its argument's descriptor.  */
      gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
      expr = expr->value.function.actual->expr;
    }

  /* Special case things we know we can pass easily.  */
  switch (expr->expr_type)
    {
    case EXPR_VARIABLE:
      /* If we have a linear array section, we can pass it directly.
	 Otherwise we need to copy it into a temporary.  */

      gcc_assert (ss_type == GFC_SS_SECTION);
      gcc_assert (ss_expr == expr);
      info = &ss_info->data.array;

      /* Get the descriptor for the array.  */
      gfc_conv_ss_descriptor (&se->pre, ss, 0);
      desc = info->descriptor;

      /* A subreference target (e.g. a component ref) on the RHS of a
	 pointer assignment never needs a temporary.  */
      subref_array_target = se->direct_byref && is_subref_array (expr);
      need_tmp = gfc_ref_needs_temporary_p (expr->ref)
		 && !subref_array_target;

      if (se->force_tmp)
	need_tmp = 1;

      /* Decide whether EXPR references the full array (FULL = 1) so the
	 original descriptor can be reused unchanged.  */
      if (need_tmp)
	full = 0;
      else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
	{
	  /* Create a new descriptor if the array doesn't have one.  */
	  full = 0;
	}
      else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
	full = 1;
      else if (se->direct_byref)
	full = 0;
      else
	full = gfc_full_array_ref_p (info->ref, NULL);

      if (full && !transposed_dims (ss))
	{
	  if (se->direct_byref && !se->byref_noassign)
	    {
	      /* Copy the descriptor for pointer assignments.  */
	      gfc_add_modify (&se->pre, se->expr, desc);

	      /* Add any offsets from subreferences.  */
	      gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
				      subref_array_target, expr);
	    }
	  else if (se->want_pointer)
	    {
	      /* We pass full arrays directly.  This means that pointers and
		 allocatable arrays should also work.  */
	      se->expr = gfc_build_addr_expr (NULL_TREE, desc);
	    }
	  else
	    {
	      se->expr = desc;
	    }

	  if (expr->ts.type == BT_CHARACTER)
	    se->string_length = gfc_get_expr_charlen (expr);

	  gfc_free_ss_chain (ss);
	  return;
	}
      break;

    case EXPR_FUNCTION:
      /* A transformational function return value will be a temporary
	 array descriptor.  We still need to go through the scalarizer
	 to create the descriptor.  Elemental functions are handled as
	 arbitrary expressions, i.e. copy to a temporary.  */

      if (se->direct_byref)
	{
	  gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);

	  /* For pointer assignments pass the descriptor directly.  */
	  if (se->ss == NULL)
	    se->ss = ss;
	  else
	    gcc_assert (se->ss == ss);
	  se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
	  gfc_conv_expr (se, expr);
	  gfc_free_ss_chain (ss);
	  return;
	}

      if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
	{
	  if (ss_expr != expr)
	    /* Elemental function.  */
	    gcc_assert ((expr->value.function.esym != NULL
			 && expr->value.function.esym->attr.elemental)
			|| (expr->value.function.isym != NULL
			    && expr->value.function.isym->elemental)
			|| gfc_inline_intrinsic_function_p (expr));
	  else
	    gcc_assert (ss_type == GFC_SS_INTRINSIC);

	  need_tmp = 1;
	  if (expr->ts.type == BT_CHARACTER
	      && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
	    get_array_charlen (expr, se);

	  info = NULL;
	}
      else
	{
	  /* Transformational function.  */
	  info = &ss_info->data.array;
	  need_tmp = 0;
	}
      break;

    case EXPR_ARRAY:
      /* Constant array constructors don't need a temporary.  */
      if (ss_type == GFC_SS_CONSTRUCTOR
	  && expr->ts.type != BT_CHARACTER
	  && gfc_constant_array_constructor_p (expr->value.constructor))
	{
	  need_tmp = 0;
	  info = &ss_info->data.array;
	}
      else
	{
	  need_tmp = 1;
	  info = NULL;
	}
      break;

    default:
      /* Something complicated.  Copy it into a temporary.  */
      need_tmp = 1;
      info = NULL;
      break;
    }

  /* If we are creating a temporary, we don't need to bother about aliases
     anymore.  */
  if (need_tmp)
    se->force_tmp = 0;

  gfc_init_loopinfo (&loop);

  /* Associate the SS with the loop.  */
  gfc_add_ss_to_loop (&loop, ss);

  /* Tell the scalarizer not to bother creating loop variables, etc.  */
  if (!need_tmp)
    loop.array_parameter = 1;
  else
    /* The right-hand side of a pointer assignment mustn't use a temporary.  */
    gcc_assert (!se->direct_byref);

  /* Setup the scalarizing loops and bounds.  */
  gfc_conv_ss_startstride (&loop);

  if (need_tmp)
    {
      if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
	get_array_charlen (expr, se);

      /* Tell the scalarizer to make a temporary.  */
      loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
				      ((expr->ts.type == BT_CHARACTER)
				       ? expr->ts.u.cl->backend_decl
				       : NULL),
				      loop.dimen);

      se->string_length = loop.temp_ss->info->string_length;
      gcc_assert (loop.temp_ss->dimen == loop.dimen);
      gfc_add_ss_to_loop (&loop, loop.temp_ss);
    }

  gfc_conv_loop_setup (&loop, & expr->where);

  if (need_tmp)
    {
      /* Copy into a temporary and pass that.  We don't need to copy the data
         back because expressions and vector subscripts must be INTENT_IN.  */
      /* TODO: Optimize passing function return values.  */
      gfc_se lse;
      gfc_se rse;

      /* Start the copying loops.  */
      gfc_mark_ss_chain_used (loop.temp_ss, 1);
      gfc_mark_ss_chain_used (ss, 1);
      gfc_start_scalarized_body (&loop, &block);

      /* Copy each data element.  */
      gfc_init_se (&lse, NULL);
      gfc_copy_loopinfo_to_se (&lse, &loop);
      gfc_init_se (&rse, NULL);
      gfc_copy_loopinfo_to_se (&rse, &loop);

      lse.ss = loop.temp_ss;
      rse.ss = ss;

      gfc_conv_scalarized_array_ref (&lse, NULL);
      if (expr->ts.type == BT_CHARACTER)
	{
	  gfc_conv_expr (&rse, expr);
	  if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
	    rse.expr = build_fold_indirect_ref_loc (input_location,
						rse.expr);
	}
      else
	gfc_conv_expr_val (&rse, expr);

      gfc_add_block_to_block (&block, &rse.pre);
      gfc_add_block_to_block (&block, &lse.pre);

      lse.string_length = rse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
				     expr->expr_type == EXPR_VARIABLE
				     || expr->expr_type == EXPR_ARRAY, true);
      gfc_add_expr_to_block (&block, tmp);

      /* Finish the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &block);

      desc = loop.temp_ss->info->data.array.descriptor;
    }
  else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
    {
      desc = info->descriptor;
      se->string_length = ss_info->string_length;
    }
  else
    {
      /* We pass sections without copying to a temporary.  Make a new
	 descriptor and point it at the section we want.  The loop variable
	 limits will be the limits of the section.
	 A function may decide to repack the array to speed up access, but
	 we're not bothered about that here.  */
      int dim, ndim, codim;
      tree parm;
      tree parmtype;
      tree stride;
      tree from;
      tree to;
      tree base;

      ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;

      if (se->want_coarray)
	{
	  gfc_array_ref *ar = &info->ref->u.ar;

	  codim = gfc_get_corank (expr);
	  for (n = 0; n < codim - 1; n++)
	    {
	      /* Make sure we are not lost somehow.  */
	      gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);

	      /* Make sure the call to gfc_conv_section_startstride won't
		 generate unnecessary code to calculate stride.  */
	      gcc_assert (ar->stride[n + ndim] == NULL);

	      gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
	      loop.from[n + loop.dimen] = info->start[n + ndim];
	      loop.to[n + loop.dimen] = info->end[n + ndim];
	    }

	  gcc_assert (n == codim - 1);
	  evaluate_bound (&loop.pre, info->start, ar->start,
			  info->descriptor, n + ndim, true);
	  loop.from[n + loop.dimen] = info->start[n + ndim];
	}
      else
	codim = 0;

      /* Set the string_length for a character array.  */
      if (expr->ts.type == BT_CHARACTER)
	se->string_length =  gfc_get_expr_charlen (expr);

      desc = info->descriptor;
      if (se->direct_byref && !se->byref_noassign)
	{
	  /* For pointer assignments we fill in the destination.  */
	  parm = se->expr;
	  parmtype = TREE_TYPE (parm);
	}
      else
	{
	  /* Otherwise make a new one.  */
	  parmtype = gfc_get_element_type (TREE_TYPE (desc));
	  parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
						loop.from, loop.to, 0,
						GFC_ARRAY_UNKNOWN, false);
	  parm = gfc_create_var (parmtype, "parm");
	}

      offset = gfc_index_zero_node;

      /* The following can be somewhat confusing.  We have two
         descriptors, a new one and the original array.
         {parm, parmtype, dim} refer to the new one.
         {desc, type, n, loop} refer to the original, which may be
         a descriptorless array.
         The bounds of the scalarization are the bounds of the section.
         We don't have to worry about numeric overflows when calculating
         the offsets because all elements are within the array data.  */

      /* Set the dtype.  */
      tmp = gfc_conv_descriptor_dtype (parm);
      gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));

      /* Set offset for assignments to pointer only to zero if it is not
         the full array.  */
      if ((se->direct_byref || se->use_offset)
	  && ((info->ref && info->ref->u.ar.type != AR_FULL)
	      || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
	base = gfc_index_zero_node;
      else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
	base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
      else
	base = NULL_TREE;

      for (n = 0; n < ndim; n++)
	{
	  stride = gfc_conv_array_stride (desc, n);

	  /* Work out the offset.  */
	  if (info->ref
	      && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
	    {
	      gcc_assert (info->subscript[n]
			  && info->subscript[n]->info->type == GFC_SS_SCALAR);
	      start = info->subscript[n]->info->data.scalar.value;
	    }
	  else
	    {
	      /* Evaluate and remember the start of the section.  */
	      start = info->start[n];
	      stride = gfc_evaluate_now (stride, &loop.pre);
	    }

	  /* offset += (start - lbound) * stride for this dimension.  */
	  tmp = gfc_conv_array_lbound (desc, n);
	  tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
				 start, tmp);
	  tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
				 tmp, stride);
	  offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
				    offset, tmp);

	  if (info->ref
	      && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
	    {
	      /* For elemental dimensions, we only need the offset.  */
	      continue;
	    }

	  /* Vector subscripts need copying and are handled elsewhere.  */
	  if (info->ref)
	    gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);

	  /* look for the corresponding scalarizer dimension: dim.  */
	  for (dim = 0; dim < ndim; dim++)
	    if (ss->dim[dim] == n)
	      break;

	  /* loop exited early: the DIM being looked for has been found.  */
	  gcc_assert (dim < ndim);

	  /* Set the new lower bound.  */
	  from = loop.from[dim];
	  to = loop.to[dim];

	  /* If we have an array section or are assigning make sure that
	     the lower bound is 1.  References to the full
	     array should otherwise keep the original bounds.  */
	  if ((!info->ref
	       || info->ref->u.ar.type != AR_FULL)
	      && !integer_onep (from))
	    {
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     gfc_array_index_type, gfc_index_one_node,
				     from);
	      to = fold_build2_loc (input_location, PLUS_EXPR,
				    gfc_array_index_type, to, tmp);
	      from = gfc_index_one_node;
	    }
	  gfc_conv_descriptor_lbound_set (&loop.pre, parm,
					  gfc_rank_cst[dim], from);

	  /* Set the new upper bound.  */
	  gfc_conv_descriptor_ubound_set (&loop.pre, parm,
					  gfc_rank_cst[dim], to);

	  /* Multiply the stride by the section stride to get the
	     total stride.  */
	  stride = fold_build2_loc (input_location, MULT_EXPR,
				    gfc_array_index_type,
				    stride, info->stride[n]);

	  if ((se->direct_byref || se->use_offset)
	      && ((info->ref && info->ref->u.ar.type != AR_FULL)
		  || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
	    {
	      base = fold_build2_loc (input_location, MINUS_EXPR,
				      TREE_TYPE (base), base, stride);
	    }
	  else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
	    {
	      tmp = gfc_conv_array_lbound (desc, n);
	      tmp = fold_build2_loc (input_location, MINUS_EXPR,
				     TREE_TYPE (base), tmp, loop.from[dim]);
	      tmp = fold_build2_loc (input_location, MULT_EXPR,
				     TREE_TYPE (base), tmp,
				     gfc_conv_array_stride (desc, n));
	      base = fold_build2_loc (input_location, PLUS_EXPR,
				     TREE_TYPE (base), tmp, base);
	    }

	  /* Store the new stride.  */
	  gfc_conv_descriptor_stride_set (&loop.pre, parm,
					  gfc_rank_cst[dim], stride);
	}

      /* Set bounds for the codimensions; the last codimension gets no
	 upper bound.  */
      for (n = loop.dimen; n < loop.dimen + codim; n++)
	{
	  from = loop.from[n];
	  to = loop.to[n];
	  gfc_conv_descriptor_lbound_set (&loop.pre, parm,
					  gfc_rank_cst[n], from);
	  if (n < loop.dimen + codim - 1)
	    gfc_conv_descriptor_ubound_set (&loop.pre, parm,
					    gfc_rank_cst[n], to);
	}

      if (se->data_not_needed)
	gfc_conv_descriptor_data_set (&loop.pre, parm,
				      gfc_index_zero_node);
      else
	/* Point the data pointer at the 1st element in the section.  */
	gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
				subref_array_target, expr);

      if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
	   && !se->data_not_needed)
	  || (se->use_offset && base != NULL_TREE))
	{
	  /* Set the offset.  */
	  gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
	}
      else
	{
	  /* Only the callee knows what the correct offset is, so just set
	     it to zero here.  */
	  gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
	}
      desc = parm;
    }

  if (!se->direct_byref || se->byref_noassign)
    {
      /* Get a pointer to the new descriptor.  */
      if (se->want_pointer)
	se->expr = gfc_build_addr_expr (NULL_TREE, desc);
      else
	se->expr = desc;
    }

  gfc_add_block_to_block (&se->pre, &loop.pre);
  gfc_add_block_to_block (&se->post, &loop.post);

  /* Cleanup the scalarizer.  */
  gfc_cleanup_loop (&loop);
}

/* Helper function for gfc_conv_array_parameter if array size needs to be
   computed.
*/

/* Compute the size in bytes of the array described by DESC/EXPR and store
   the resulting tree in *SIZE.  Helper for gfc_conv_array_parameter.  */

static void
array_parameter_size (tree desc, gfc_expr *expr, tree *size)
{
  tree desc_type = TREE_TYPE (desc);
  tree count;

  if (GFC_ARRAY_TYPE_P (desc_type))
    /* Descriptorless arrays carry their element count in the type.  */
    count = GFC_TYPE_ARRAY_SIZE (desc_type);
  else if (expr->rank > 1)
    /* Multi-dimensional: let the library compute it from the descriptor.  */
    count = build_call_expr_loc (input_location,
				 gfor_fndecl_size0, 1,
				 gfc_build_addr_expr (NULL, desc));
  else
    {
      /* Rank one: max (ubound - lbound + 1, 0).  */
      tree ub = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
      tree lb = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
      tree extent;

      extent = fold_build2_loc (input_location, MINUS_EXPR,
				gfc_array_index_type, ub, lb);
      extent = fold_build2_loc (input_location, PLUS_EXPR,
				gfc_array_index_type, extent,
				gfc_index_one_node);
      count = fold_build2_loc (input_location, MAX_EXPR,
			       gfc_array_index_type, extent,
			       gfc_index_zero_node);
    }

  /* Scale the element count by the element size.  */
  *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			   count,
			   fold_convert (gfc_array_index_type,
					 TYPE_SIZE_UNIT (gfc_get_element_type
							   (desc_type))));
}

/* Convert an array for passing as an actual parameter.  */
/* TODO: Optimize passing g77 arrays.
*/

void
gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
			  const gfc_symbol *fsym, const char *proc_name,
			  tree *size)
{
  tree ptr;
  tree desc;
  tree tmp = NULL_TREE;
  tree stmt;
  tree parent = DECL_CONTEXT (current_function_decl);
  bool full_array_var;
  bool this_array_result;
  bool contiguous;
  bool no_pack;
  bool array_constructor;
  bool good_allocatable;
  bool ultimate_ptr_comp;
  bool ultimate_alloc_comp;
  gfc_symbol *sym;
  stmtblock_t block;
  gfc_ref *ref;

  ultimate_ptr_comp = false;
  ultimate_alloc_comp = false;

  /* Walk the reference chain, leaving REF on the last reference and
     recording whether the penultimate component is a pointer or
     allocatable.  */
  for (ref = expr->ref; ref; ref = ref->next)
    {
      if (ref->next == NULL)
        break;

      if (ref->type == REF_COMPONENT)
	{
	  ultimate_ptr_comp = ref->u.c.component->attr.pointer;
	  ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
	}
    }

  full_array_var = false;
  contiguous = false;

  if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
    full_array_var = gfc_full_array_ref_p (ref, &contiguous);

  sym = full_array_var ? expr->symtree->n.sym : NULL;

  /* The symbol should have an array specification.  */
  gcc_assert (!sym || sym->as || ref->u.ar.as);

  if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
    {
      get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
      expr->ts.u.cl->backend_decl = tmp;
      se->string_length = tmp;
    }

  /* Is this the result of the enclosing procedure?  */
  this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
  if (this_array_result
	&& (sym->backend_decl != current_function_decl)
	&& (sym->backend_decl != parent))
    this_array_result = false;

  /* Passing address of the array if it is not pointer or assumed-shape.  */
  if (full_array_var && g77 && !this_array_result
      && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
    {
      tmp = gfc_get_symbol_decl (sym);

      if (sym->ts.type == BT_CHARACTER)
	se->string_length = sym->ts.u.cl->backend_decl;

      if (!sym->attr.pointer
	    && sym->as
	    && sym->as->type != AS_ASSUMED_SHAPE
	    && sym->as->type != AS_DEFERRED
	    && sym->as->type != AS_ASSUMED_RANK
	    && !sym->attr.allocatable)
        {
	  /* Some variables are declared directly, others are declared as
	     pointers and allocated on the heap.  */
          if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
            se->expr = tmp;
          else
	    se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
	  if (size)
	    array_parameter_size (tmp, expr, size);
	  return;
        }

      if (sym->attr.allocatable)
        {
	  if (sym->attr.dummy || sym->attr.result)
	    {
	      gfc_conv_expr_descriptor (se, expr);
	      tmp = se->expr;
	    }
	  if (size)
	    array_parameter_size (tmp, expr, size);
	  se->expr = gfc_conv_array_data (tmp);
          return;
        }
    }

  /* A convenient reduction in scope.  */
  contiguous = g77 && !this_array_result && contiguous;

  /* There is no need to pack and unpack the array, if it is contiguous
     and not a deferred- or assumed-shape array, or if it is simply
     contiguous.  */
  no_pack = ((sym && sym->as
		  && !sym->attr.pointer
		  && sym->as->type != AS_DEFERRED
		  && sym->as->type != AS_ASSUMED_RANK
		  && sym->as->type != AS_ASSUMED_SHAPE)
		      ||
	     (ref && ref->u.ar.as
		  && ref->u.ar.as->type != AS_DEFERRED
		  && ref->u.ar.as->type != AS_ASSUMED_RANK
		  && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
		      ||
	     gfc_is_simply_contiguous (expr, false));

  no_pack = contiguous && no_pack;

  /* Array constructors are always contiguous and do not need packing.  */
  array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;

  /* Same is true of contiguous sections from allocatable variables.  */
  good_allocatable = contiguous
		       && expr->symtree
		       && expr->symtree->n.sym->attr.allocatable;

  /* Or ultimate allocatable components.  */
  ultimate_alloc_comp = contiguous && ultimate_alloc_comp;

  if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
    {
      gfc_conv_expr_descriptor (se, expr);
      /* Deallocate the allocatable components of structures that are
	 not variable.  */
      if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
	   && expr->ts.u.derived->attr.alloc_comp
	   && expr->expr_type != EXPR_VARIABLE)
	{
	  tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se->expr, expr->rank);

	  /* The components shall be deallocated before their containing entity.  */
	  gfc_prepend_expr_to_block (&se->post, tmp);
	}
      if (expr->ts.type == BT_CHARACTER)
	se->string_length = expr->ts.u.cl->backend_decl;
      if (size)
	array_parameter_size (se->expr, expr, size);
      se->expr = gfc_conv_array_data (se->expr);
      return;
    }

  if (this_array_result)
    {
      /* Result of the enclosing function.  */
      gfc_conv_expr_descriptor (se, expr);
      if (size)
	array_parameter_size (se->expr, expr, size);
      se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);

      if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
	      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
	se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
								 se->expr));

      return;
    }
  else
    {
      /* Every other type of array.  */
      se->want_pointer = 1;
      gfc_conv_expr_descriptor (se, expr);
      if (size)
	array_parameter_size (build_fold_indirect_ref_loc (input_location,
						       se->expr),
				  expr, size);
    }

  /* Deallocate the allocatable components of structures that are
     not variable, for descriptorless arguments.
     Arguments with a descriptor are handled in gfc_conv_procedure_call.  */
  if (g77 && (expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
	  && expr->ts.u.derived->attr.alloc_comp
	  && expr->expr_type != EXPR_VARIABLE)
    {
      tmp = build_fold_indirect_ref_loc (input_location, se->expr);
      tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);

      /* The components shall be deallocated before their containing entity.  */
      gfc_prepend_expr_to_block (&se->post, tmp);
    }

  if (g77 || (fsym && fsym->attr.contiguous
	      && !gfc_is_simply_contiguous (expr, false)))
    {
      tree origptr = NULL_TREE;

      desc = se->expr;

      /* For contiguous arrays, save the original value of the descriptor.  */
      if (!g77)
	{
	  origptr = gfc_create_var (pvoid_type_node, "origptr");
	  tmp = build_fold_indirect_ref_loc (input_location, desc);
	  tmp = gfc_conv_array_data (tmp);
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
				 TREE_TYPE (origptr), origptr,
				 fold_convert (TREE_TYPE (origptr), tmp));
	  gfc_add_expr_to_block (&se->pre, tmp);
	}

      /* Repack the array.  */
      if (warn_array_temporaries)
	{
	  if (fsym)
	    gfc_warning (OPT_Warray_temporaries,
			 "Creating array temporary at %L for argument %qs",
			 &expr->where, fsym->name);
	  else
	    gfc_warning (OPT_Warray_temporaries,
			 "Creating array temporary at %L", &expr->where);
	}

      ptr = build_call_expr_loc (input_location,
			     gfor_fndecl_in_pack, 1, desc);

      /* An absent OPTIONAL argument must not be packed; pass NULL
	 instead.  */
      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	{
	  tmp = gfc_conv_expr_present (sym);
	  ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
			    tmp, fold_convert (TREE_TYPE (se->expr), ptr),
			    fold_convert (TREE_TYPE (se->expr), null_pointer_node));
	}

      ptr = gfc_evaluate_now (ptr, &se->pre);

      /* Use the packed data for the actual argument, except for contiguous arrays,
	 where the descriptor's data component is set.  */
      if (g77)
	se->expr = ptr;
      else
	{
	  tmp = build_fold_indirect_ref_loc (input_location, desc);

	  gfc_ss * ss = gfc_walk_expr (expr);
	  if (!transposed_dims (ss))
	    gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
	  else
	    {
	      tree old_field, new_field;

	      /* The original descriptor has transposed dims so we can't reuse
		 it directly; we have to create a new one.  */
	      tree old_desc = tmp;
	      tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");

	      old_field = gfc_conv_descriptor_dtype (old_desc);
	      new_field = gfc_conv_descriptor_dtype (new_desc);
	      gfc_add_modify (&se->pre, new_field, old_field);

	      old_field = gfc_conv_descriptor_offset (old_desc);
	      new_field = gfc_conv_descriptor_offset (new_desc);
	      gfc_add_modify (&se->pre, new_field, old_field);

	      /* Copy the dimension triplets, permuted back to argument
		 order.  */
	      for (int i = 0; i < expr->rank; i++)
		{
		  old_field = gfc_conv_descriptor_dimension (old_desc,
			gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
		  new_field = gfc_conv_descriptor_dimension (new_desc,
			gfc_rank_cst[i]);
		  gfc_add_modify (&se->pre, new_field, old_field);
		}

	      if (flag_coarray == GFC_FCOARRAY_LIB
		  && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
		  && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
		     == GFC_ARRAY_ALLOCATABLE)
		{
		  old_field = gfc_conv_descriptor_token (old_desc);
		  new_field = gfc_conv_descriptor_token (new_desc);
		  gfc_add_modify (&se->pre, new_field, old_field);
		}

	      gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
	      se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
	    }
	  gfc_free_ss (ss);
	}

      /* With -fcheck=array-temps, emit a runtime diagnostic when a
	 temporary was actually created.  */
      if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
	{
	  char * msg;

	  if (fsym && proc_name)
	    msg = xasprintf ("An array temporary was created for argument "
			     "'%s' of procedure '%s'", fsym->name, proc_name);
	  else
	    msg = xasprintf ("An array temporary was created");

	  tmp = build_fold_indirect_ref_loc (input_location,
					 desc);
	  tmp = gfc_conv_array_data (tmp);
	  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				 fold_convert (TREE_TYPE (tmp), ptr), tmp);

	  if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	    tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
				   boolean_type_node,
				   gfc_conv_expr_present (sym), tmp);

	  gfc_trans_runtime_check (false, true, tmp, &se->pre,
				   &expr->where, msg);
	  free (msg);
	}

      gfc_start_block (&block);

      /* Copy the data back.  */
      if (fsym == NULL || fsym->attr.intent != INTENT_IN)
	{
	  tmp = build_call_expr_loc (input_location,
				 gfor_fndecl_in_unpack, 2, desc, ptr);
	  gfc_add_expr_to_block (&block, tmp);
	}

      /* Free the temporary.  */
      tmp = gfc_call_free (convert (pvoid_type_node, ptr));
      gfc_add_expr_to_block (&block, tmp);

      stmt = gfc_finish_block (&block);

      gfc_init_block (&block);
      /* Only if it was repacked.  This code needs to be executed before the
	 loop cleanup code.  */
      tmp = build_fold_indirect_ref_loc (input_location,
				     desc);
      tmp = gfc_conv_array_data (tmp);
      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			     fold_convert (TREE_TYPE (tmp), ptr), tmp);

      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
			       boolean_type_node,
			       gfc_conv_expr_present (sym), tmp);

      tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
      gfc_add_block_to_block (&block, &se->post);

      gfc_init_block (&se->post);

      /* Reset the descriptor pointer.  */
      if (!g77)
        {
          tmp = build_fold_indirect_ref_loc (input_location, desc);
          gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
        }

      gfc_add_block_to_block (&se->post, &block);
    }
}


/* Generate code to deallocate an array, if it is allocated.
*/

tree
gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
{
  tree tmp;
  tree var;
  stmtblock_t block;

  gfc_start_block (&block);

  var = gfc_conv_descriptor_data_get (descriptor);
  STRIP_NOPS (var);

  /* Call array_deallocate with an int * present in the second argument.
     Although it is ignored here, its presence ensures that arrays that
     are already deallocated are ignored.  */
  tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
				    NULL_TREE, NULL_TREE, NULL_TREE, true,
				    expr, coarray);
  gfc_add_expr_to_block (&block, tmp);

  /* Zero the data pointer.  */
  tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
			 var, build_int_cst (TREE_TYPE (var), 0));
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}


/* This helper function calculates the size in words of a full array.  */

tree
gfc_full_array_size (stmtblock_t *block, tree decl, int rank)
{
  tree idx;
  tree nelems;
  tree tmp;

  /* Extent of the last dimension: ubound - lbound + 1.  */
  idx = gfc_rank_cst[rank - 1];
  nelems = gfc_conv_descriptor_ubound_get (decl, idx);
  tmp = gfc_conv_descriptor_lbound_get (decl, idx);
  tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			 nelems, tmp);
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 tmp, gfc_index_one_node);
  tmp = gfc_evaluate_now (tmp, block);

  /* Multiply by the stride of the last dimension to get the total
     element count of the whole array.  */
  nelems = gfc_conv_descriptor_stride_get (decl, idx);
  tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			 nelems, tmp);
  return gfc_evaluate_now (tmp, block);
}


/* Allocate dest to the same size as src, and copy src -> dest.
   If no_malloc is set, only the copy is done.
*/ 7497 7498static tree 7499duplicate_allocatable (tree dest, tree src, tree type, int rank, 7500 bool no_malloc, bool no_memcpy, tree str_sz, 7501 tree add_when_allocated) 7502{ 7503 tree tmp; 7504 tree size; 7505 tree nelems; 7506 tree null_cond; 7507 tree null_data; 7508 stmtblock_t block; 7509 7510 /* If the source is null, set the destination to null. Then, 7511 allocate memory to the destination. */ 7512 gfc_init_block (&block); 7513 7514 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest))) 7515 { 7516 tmp = null_pointer_node; 7517 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp); 7518 gfc_add_expr_to_block (&block, tmp); 7519 null_data = gfc_finish_block (&block); 7520 7521 gfc_init_block (&block); 7522 if (str_sz != NULL_TREE) 7523 size = str_sz; 7524 else 7525 size = TYPE_SIZE_UNIT (TREE_TYPE (type)); 7526 7527 if (!no_malloc) 7528 { 7529 tmp = gfc_call_malloc (&block, type, size); 7530 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node, 7531 dest, fold_convert (type, tmp)); 7532 gfc_add_expr_to_block (&block, tmp); 7533 } 7534 7535 if (!no_memcpy) 7536 { 7537 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY); 7538 tmp = build_call_expr_loc (input_location, tmp, 3, dest, src, 7539 fold_convert (size_type_node, size)); 7540 gfc_add_expr_to_block (&block, tmp); 7541 } 7542 } 7543 else 7544 { 7545 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node); 7546 null_data = gfc_finish_block (&block); 7547 7548 gfc_init_block (&block); 7549 if (rank) 7550 nelems = gfc_full_array_size (&block, src, rank); 7551 else 7552 nelems = gfc_index_one_node; 7553 7554 if (str_sz != NULL_TREE) 7555 tmp = fold_convert (gfc_array_index_type, str_sz); 7556 else 7557 tmp = fold_convert (gfc_array_index_type, 7558 TYPE_SIZE_UNIT (gfc_get_element_type (type))); 7559 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type, 7560 nelems, tmp); 7561 if (!no_malloc) 7562 { 7563 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src)); 7564 
tmp = gfc_call_malloc (&block, tmp, size); 7565 gfc_conv_descriptor_data_set (&block, dest, tmp); 7566 } 7567 7568 /* We know the temporary and the value will be the same length, 7569 so can use memcpy. */ 7570 if (!no_memcpy) 7571 { 7572 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY); 7573 tmp = build_call_expr_loc (input_location, tmp, 3, 7574 gfc_conv_descriptor_data_get (dest), 7575 gfc_conv_descriptor_data_get (src), 7576 fold_convert (size_type_node, size)); 7577 gfc_add_expr_to_block (&block, tmp); 7578 } 7579 } 7580 7581 gfc_add_expr_to_block (&block, add_when_allocated); 7582 tmp = gfc_finish_block (&block); 7583 7584 /* Null the destination if the source is null; otherwise do 7585 the allocate and copy. */ 7586 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src))) 7587 null_cond = src; 7588 else 7589 null_cond = gfc_conv_descriptor_data_get (src); 7590 7591 null_cond = convert (pvoid_type_node, null_cond); 7592 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, 7593 null_cond, null_pointer_node); 7594 return build3_v (COND_EXPR, null_cond, tmp, null_data); 7595} 7596 7597 7598/* Allocate dest to the same size as src, and copy data src -> dest. */ 7599 7600tree 7601gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank, 7602 tree add_when_allocated) 7603{ 7604 return duplicate_allocatable (dest, src, type, rank, false, false, 7605 NULL_TREE, add_when_allocated); 7606} 7607 7608 7609/* Copy data src -> dest. */ 7610 7611tree 7612gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank) 7613{ 7614 return duplicate_allocatable (dest, src, type, rank, true, false, 7615 NULL_TREE, NULL_TREE); 7616} 7617 7618/* Allocate dest to the same size as src, but don't copy anything. 
 */

tree
gfc_duplicate_allocatable_nocopy (tree dest, tree src, tree type, int rank)
{
  return duplicate_allocatable (dest, src, type, rank, false, true,
				NULL_TREE, NULL_TREE);
}


/* Recursively traverse an object of derived type, generating code to
   deallocate, nullify or copy allocatable components.  This is the work horse
   function for the functions named in this enum.  */

enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
      NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
      COPY_ALLOC_COMP_CAF};

/* DER_TYPE is the derived type of the object whose backend declaration is
   DECL; DEST is the destination declaration (used by the copying purposes
   only), RANK the rank of the object, and PURPOSE one of the enumerators
   above.  Returns the generated code as a single tree expression.  */

static tree
structure_alloc_comps (gfc_symbol * der_type, tree decl,
		       tree dest, int rank, int purpose)
{
  gfc_component *c;
  gfc_loopinfo loop;
  stmtblock_t fnblock;
  stmtblock_t loopbody;
  stmtblock_t tmpblock;
  tree decl_type;
  tree tmp;
  tree comp;
  tree dcmp;
  tree nelems;
  tree index;
  tree var;
  tree cdecl;
  tree ctype;
  tree vref, dref;
  tree null_cond = NULL_TREE;
  tree add_when_allocated;
  bool called_dealloc_with_status;

  gfc_init_block (&fnblock);

  decl_type = TREE_TYPE (decl);

  if ((POINTER_TYPE_P (decl_type))
      || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
    {
      decl = build_fold_indirect_ref_loc (input_location, decl);
      /* Deref dest in sync with decl, but only when it is not NULL.  */
      if (dest)
	dest = build_fold_indirect_ref_loc (input_location, dest);
    }

  /* Just in case it gets dereferenced.  */
  decl_type = TREE_TYPE (decl);

  /* If this is an array of derived types with allocatable components
     build a loop and recursively call this function.  */
  if (TREE_CODE (decl_type) == ARRAY_TYPE
      || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
    {
      tmp = gfc_conv_array_data (decl);
      var = build_fold_indirect_ref_loc (input_location, tmp);

      /* Get the number of elements - 1 and set the counter.  */
      if (GFC_DESCRIPTOR_TYPE_P (decl_type))
	{
	  /* Use the descriptor for an allocatable array.  Since this
	     is a full array reference, we only need the descriptor
	     information from dimension = rank.  */
	  tmp = gfc_full_array_size (&fnblock, decl, rank);
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, tmp,
				 gfc_index_one_node);

	  /* Guard the loop by a check that the array data is allocated.  */
	  null_cond = gfc_conv_descriptor_data_get (decl);
	  null_cond = fold_build2_loc (input_location, NE_EXPR,
				       boolean_type_node, null_cond,
				       build_int_cst (TREE_TYPE (null_cond), 0));
	}
      else
	{
	  /* Otherwise use the TYPE_DOMAIN information.  */
	  tmp = array_type_nelts (decl_type);
	  tmp = fold_convert (gfc_array_index_type, tmp);
	}

      /* Remember that this is, in fact, the no. of elements - 1.  */
      nelems = gfc_evaluate_now (tmp, &fnblock);
      index = gfc_create_var (gfc_array_index_type, "S");

      /* Build the body of the loop.  */
      gfc_init_block (&loopbody);

      vref = gfc_build_array_ref (var, index, NULL);

      if (purpose == COPY_ALLOC_COMP || purpose == COPY_ONLY_ALLOC_COMP)
	{
	  tmp = build_fold_indirect_ref_loc (input_location,
					     gfc_conv_array_data (dest));
	  dref = gfc_build_array_ref (tmp, index, NULL);
	  tmp = structure_alloc_comps (der_type, vref, dref, rank,
				       COPY_ALLOC_COMP);
	}
      else
	tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);

      gfc_add_expr_to_block (&loopbody, tmp);

      /* Build the loop and return.  */
      gfc_init_loopinfo (&loop);
      loop.dimen = 1;
      loop.from[0] = gfc_index_zero_node;
      loop.loopvar[0] = index;
      loop.to[0] = nelems;
      gfc_trans_scalarizing_loops (&loop, &loopbody);
      gfc_add_block_to_block (&fnblock, &loop.pre);

      tmp = gfc_finish_block (&fnblock);
      /* When copying allocatable components, the above implements the
	 deep copy.  Nevertheless, a deep copy is only allowed when the
	 current component is allocated, for which code will be generated in
	 gfc_duplicate_allocatable (), where the deep copy code is just added
	 into the if's body, by adding tmp (the deep copy code) as last
	 argument to gfc_duplicate_allocatable ().  */
      if (purpose == COPY_ALLOC_COMP
	  && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
	tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank,
					 tmp);
      else if (null_cond != NULL_TREE)
	tmp = build3_v (COND_EXPR, null_cond, tmp,
			build_empty_stmt (input_location));

      return tmp;
    }

  /* Otherwise, act on the components or recursively call self to
     act on a chain of components.  */
  for (c = der_type->components; c; c = c->next)
    {
      bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
				  || c->ts.type == BT_CLASS)
				    && c->ts.u.derived->attr.alloc_comp;
      cdecl = c->backend_decl;
      ctype = TREE_TYPE (cdecl);

      switch (purpose)
	{
	case DEALLOCATE_ALLOC_COMP:
	case DEALLOCATE_ALLOC_COMP_NO_CAF:

	  /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
	     (i.e. this function) so generate all the calls and suppress the
	     recursion from here, if necessary.  */
	  called_dealloc_with_status = false;
	  gfc_init_block (&tmpblock);

	  if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
	      || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
	    {
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);

	      /* The finalizer frees allocatable components.  */
	      called_dealloc_with_status
		= gfc_add_comp_finalizer_call (&tmpblock, comp, c,
					       purpose == DEALLOCATE_ALLOC_COMP);
	    }
	  else
	    comp = NULL_TREE;

	  if (c->attr.allocatable && !c->attr.proc_pointer
	      && (c->attr.dimension
		  || (c->attr.codimension
		      && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
	    {
	      /* Allocatable arrays (and, unless suppressed, coarrays).  */
	      if (comp == NULL_TREE)
		comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
					decl, cdecl, NULL_TREE);
	      tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
	      gfc_add_expr_to_block (&tmpblock, tmp);
	    }
	  else if (c->attr.allocatable && !c->attr.codimension)
	    {
	      /* Allocatable scalar components.  */
	      if (comp == NULL_TREE)
		comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
					decl, cdecl, NULL_TREE);

	      tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
						       c->ts);
	      gfc_add_expr_to_block (&tmpblock, tmp);
	      called_dealloc_with_status = true;

	      /* Zero the component pointer after the deallocation.  */
	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
				     void_type_node, comp,
				     build_int_cst (TREE_TYPE (comp), 0));
	      gfc_add_expr_to_block (&tmpblock, tmp);
	    }
	  else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
		   && (!CLASS_DATA (c)->attr.codimension
		       || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
	    {
	      /* Allocatable CLASS components.  */

	      /* Add reference to '_data' component.  */
	      tmp = CLASS_DATA (c)->backend_decl;
	      comp = fold_build3_loc (input_location, COMPONENT_REF,
				      TREE_TYPE (tmp), comp, tmp, NULL_TREE);

	      if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
		tmp = gfc_trans_dealloc_allocated (comp,
					CLASS_DATA (c)->attr.codimension, NULL);
	      else
		{
		  tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
							   CLASS_DATA (c)->ts);
		  gfc_add_expr_to_block (&tmpblock, tmp);
		  called_dealloc_with_status = true;

		  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
					 void_type_node, comp,
					 build_int_cst (TREE_TYPE (comp), 0));
		}
	      gfc_add_expr_to_block (&tmpblock, tmp);

	      /* Finally, reset the vptr to the declared type vtable and, if
		 necessary reset the _len field.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      tmp = gfc_class_vptr_get (comp);
	      if (UNLIMITED_POLY (c))
		{
		  /* Unlimited polymorphic: no declared vtable; clear vptr
		     and _len.  */
		  gfc_add_modify (&tmpblock, tmp,
				  build_int_cst (TREE_TYPE (tmp), 0));
		  tmp = gfc_class_len_get (comp);
		  gfc_add_modify (&tmpblock, tmp,
				  build_int_cst (TREE_TYPE (tmp), 0));
		}
	      else
		{
		  tree vtab;
		  gfc_symbol *vtable;
		  vtable = gfc_find_derived_vtab (c->ts.u.derived);
		  vtab = vtable->backend_decl;
		  if (vtab == NULL_TREE)
		    vtab = gfc_get_symbol_decl(vtable);
		  vtab = gfc_build_addr_expr (NULL, vtab);
		  vtab = fold_convert (TREE_TYPE (tmp), vtab);
		  gfc_add_modify (&tmpblock, tmp, vtab);
		}
	    }

	  if (cmp_has_alloc_comps
	      && !c->attr.pointer
	      && !called_dealloc_with_status)
	    {
	      /* Do not deallocate the components of ultimate pointer
		 components or iteratively call self if call has been made
		 to gfc_trans_dealloc_allocated.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      rank = c->as ? c->as->rank : 0;
	      tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
					   rank, purpose);
	      gfc_add_expr_to_block (&fnblock, tmp);
	    }

	  /* Now add the deallocation of this component.  */
	  gfc_add_block_to_block (&fnblock, &tmpblock);
	  break;

	case NULLIFY_ALLOC_COMP:
	  if (c->attr.pointer || c->attr.proc_pointer)
	    continue;
	  else if (c->attr.allocatable
		   && (c->attr.dimension|| c->attr.codimension))
	    {
	      /* Allocatable arrays: null the descriptor's data pointer.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
	    }
	  else if (c->attr.allocatable)
	    {
	      /* Allocatable scalar components.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
				     void_type_node, comp,
				     build_int_cst (TREE_TYPE (comp), 0));
	      gfc_add_expr_to_block (&fnblock, tmp);
	      /* For deferred-length characters also zero the length
		 component.  */
	      if (gfc_deferred_strlen (c, &comp))
		{
		  comp = fold_build3_loc (input_location, COMPONENT_REF,
					  TREE_TYPE (comp),
					  decl, comp, NULL_TREE);
		  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
					 TREE_TYPE (comp), comp,
					 build_int_cst (TREE_TYPE (comp), 0));
		  gfc_add_expr_to_block (&fnblock, tmp);
		}
	    }
	  else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
	    {
	      /* Allocatable CLASS components.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      /* Add reference to '_data' component.  */
	      tmp = CLASS_DATA (c)->backend_decl;
	      comp = fold_build3_loc (input_location, COMPONENT_REF,
				      TREE_TYPE (tmp), comp, tmp, NULL_TREE);
	      if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
		gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
	      else
		{
		  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
					 void_type_node, comp,
					 build_int_cst (TREE_TYPE (comp), 0));
		  gfc_add_expr_to_block (&fnblock, tmp);
		}
	    }
	  else if (cmp_has_alloc_comps)
	    {
	      /* Recurse into a derived-type component that itself has
		 allocatable components.  */
	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
				      decl, cdecl, NULL_TREE);
	      rank = c->as ? c->as->rank : 0;
	      tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
					   rank, purpose);
	      gfc_add_expr_to_block (&fnblock, tmp);
	    }
	  break;

	case COPY_ALLOC_COMP_CAF:
	  if (!c->attr.codimension
	      && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
	      && (c->ts.type != BT_DERIVED
		  || !c->ts.u.derived->attr.coarray_comp))
	    continue;

	  comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
				  cdecl, NULL_TREE);
	  dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
				  cdecl, NULL_TREE);

	  if (c->attr.codimension)
	    {
	      /* For a coarray component only transfer the data pointer.  */
	      if (c->ts.type == BT_CLASS)
		{
		  comp = gfc_class_data_get (comp);
		  dcmp = gfc_class_data_get (dcmp);
		}
	      gfc_conv_descriptor_data_set (&fnblock, dcmp,
					    gfc_conv_descriptor_data_get (comp));
	    }
	  else
	    {
	      tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
					   rank, purpose);
	      gfc_add_expr_to_block (&fnblock, tmp);

	    }
	  break;

	case COPY_ALLOC_COMP:
	  if (c->attr.pointer)
	    continue;

	  /* We need source and destination components.  */
	  comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
				  cdecl, NULL_TREE);
	  dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
				  cdecl, NULL_TREE);
	  dcmp = fold_convert (TREE_TYPE (comp), dcmp);

	  if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
	    {
	      tree ftn_tree;
	      tree size;
	      tree dst_data;
	      tree src_data;
	      tree null_data;

	      dst_data = gfc_class_data_get (dcmp);
	      src_data = gfc_class_data_get (comp);
	      size = fold_convert (size_type_node,
				   gfc_class_vtab_size_get (comp));

	      if (CLASS_DATA (c)->attr.dimension)
		{
		  nelems = gfc_conv_descriptor_size (src_data,
						     CLASS_DATA (c)->as->rank);
		  size = fold_build2_loc (input_location, MULT_EXPR,
					  size_type_node, size,
					  fold_convert (size_type_node,
							nelems));
		}
	      else
		nelems = build_int_cst (size_type_node, 1);

	      if (CLASS_DATA (c)->attr.dimension
		  || CLASS_DATA (c)->attr.codimension)
		{
		  src_data = gfc_conv_descriptor_data_get (src_data);
		  dst_data = gfc_conv_descriptor_data_get (dst_data);
		}

	      gfc_init_block (&tmpblock);

	      /* Coarray components have to have the same allocation status
		 and shape/type-parameter/effective-type on the LHS and RHS
		 of an intrinsic assignment.  Hence, we did not deallocate
		 them - and do not allocate them here.  */
	      if (!CLASS_DATA (c)->attr.codimension)
		{
		  ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
		  tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
		  gfc_add_modify (&tmpblock, dst_data,
				  fold_convert (TREE_TYPE (dst_data), tmp));
		}

	      tmp = gfc_copy_class_to_class (comp, dcmp, nelems,
					     UNLIMITED_POLY (c));
	      gfc_add_expr_to_block (&tmpblock, tmp);
	      tmp = gfc_finish_block (&tmpblock);

	      gfc_init_block (&tmpblock);
	      gfc_add_modify (&tmpblock, dst_data,
			      fold_convert (TREE_TYPE (dst_data),
					    null_pointer_node));
	      null_data = gfc_finish_block (&tmpblock);

	      /* Copy only when the source data is allocated; otherwise null
		 the destination.  */
	      null_cond = fold_build2_loc (input_location, NE_EXPR,
					   boolean_type_node, src_data,
					   null_pointer_node);

	      gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
							 tmp, null_data));
	      continue;
	    }

	  /* To implement guarded deep copy, i.e., deep copy only allocatable
	     components that are really allocated, the deep copy code has to
	     be generated first and then added to the if-block in
	     gfc_duplicate_allocatable ().  */
	  if (cmp_has_alloc_comps)
	    {
	      rank = c->as ? c->as->rank : 0;
	      tmp = fold_convert (TREE_TYPE (dcmp), comp);
	      gfc_add_modify (&fnblock, dcmp, tmp);
	      add_when_allocated = structure_alloc_comps (c->ts.u.derived,
							  comp, dcmp,
							  rank, purpose);
	    }
	  else
	    add_when_allocated = NULL_TREE;

	  if (gfc_deferred_strlen (c, &tmp))
	    {
	      /* Deferred-length character: copy the length component first,
		 then duplicate the data with the actual string size.  */
	      tree len, size;
	      len = tmp;
	      tmp = fold_build3_loc (input_location, COMPONENT_REF,
				     TREE_TYPE (len),
				     decl, len, NULL_TREE);
	      len = fold_build3_loc (input_location, COMPONENT_REF,
				     TREE_TYPE (len),
				     dest, len, NULL_TREE);
	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
				     TREE_TYPE (len), len, tmp);
	      gfc_add_expr_to_block (&fnblock, tmp);
	      size = size_of_string_in_bytes (c->ts.kind, len);
	      /* This component cannot have allocatable components,
		 therefore add_when_allocated of duplicate_allocatable ()
		 is always NULL.  */
	      tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
					   false, false, size, NULL_TREE);
	      gfc_add_expr_to_block (&fnblock, tmp);
	    }
	  else if (c->attr.allocatable && !c->attr.proc_pointer
		   && (!(cmp_has_alloc_comps && c->as)
		       || c->attr.codimension))
	    {
	      rank = c->as ? c->as->rank : 0;
	      if (c->attr.codimension)
		tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
	      else
		tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank,
						 add_when_allocated);
	      gfc_add_expr_to_block (&fnblock, tmp);
	    }
	  else
	    if (cmp_has_alloc_comps)
	      gfc_add_expr_to_block (&fnblock, add_when_allocated);

	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  return gfc_finish_block (&fnblock);
}

/* Recursively traverse an object of derived type, generating code to
   nullify allocatable components.
*/ 8125 8126tree 8127gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank) 8128{ 8129 return structure_alloc_comps (der_type, decl, NULL_TREE, rank, 8130 NULLIFY_ALLOC_COMP); 8131} 8132 8133 8134/* Recursively traverse an object of derived type, generating code to 8135 deallocate allocatable components. */ 8136 8137tree 8138gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank) 8139{ 8140 return structure_alloc_comps (der_type, decl, NULL_TREE, rank, 8141 DEALLOCATE_ALLOC_COMP); 8142} 8143 8144 8145/* Recursively traverse an object of derived type, generating code to 8146 deallocate allocatable components. But do not deallocate coarrays. 8147 To be used for intrinsic assignment, which may not change the allocation 8148 status of coarrays. */ 8149 8150tree 8151gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank) 8152{ 8153 return structure_alloc_comps (der_type, decl, NULL_TREE, rank, 8154 DEALLOCATE_ALLOC_COMP_NO_CAF); 8155} 8156 8157 8158tree 8159gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest) 8160{ 8161 return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF); 8162} 8163 8164 8165/* Recursively traverse an object of derived type, generating code to 8166 copy it and its allocatable components. */ 8167 8168tree 8169gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank) 8170{ 8171 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP); 8172} 8173 8174 8175/* Recursively traverse an object of derived type, generating code to 8176 copy only its allocatable components. */ 8177 8178tree 8179gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank) 8180{ 8181 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP); 8182} 8183 8184 8185/* Returns the value of LBOUND for an expression. This could be broken out 8186 from gfc_conv_intrinsic_bound but this seemed to be simpler. 
   This is
   called by gfc_alloc_allocatable_for_assignment.  */

/* EXPR is the source expression, DESC its backend descriptor (or variable),
   DIM the zero-based dimension and ASSUMED_SIZE whether EXPR has an
   assumed-size array spec.  */

static tree
get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
{
  tree lbound;
  tree ubound;
  tree stride;
  tree cond, cond1, cond3, cond4;
  tree tmp;
  gfc_ref *ref;

  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
    {
      tmp = gfc_rank_cst[dim];
      lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
      ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
      stride = gfc_conv_descriptor_stride_get (desc, tmp);
      /* cond1: the extent is non-empty (ubound >= lbound).  */
      cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
			       ubound, lbound);
      /* cond3: non-negative stride and non-empty extent.  */
      cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
			       stride, gfc_index_zero_node);
      cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
			       boolean_type_node, cond3, cond1);
      /* cond4: negative stride.  */
      cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
			       stride, gfc_index_zero_node);
      /* For an assumed-size array the last dimension always uses the
	 declared lower bound.  */
      if (assumed_size)
	cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
				tmp, build_int_cst (gfc_array_index_type,
						    expr->rank - 1));
      else
	cond = boolean_false_node;

      cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
			       boolean_type_node, cond3, cond4);
      cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
			      boolean_type_node, cond, cond1);

      /* Return the declared lbound when any of the conditions hold,
	 otherwise one.  */
      return fold_build3_loc (input_location, COND_EXPR,
			      gfc_array_index_type, cond,
			      lbound, gfc_index_one_node);
    }

  if (expr->expr_type == EXPR_FUNCTION)
    {
      /* A conversion function, so use the argument.  */
      gcc_assert (expr->value.function.isym
		  && expr->value.function.isym->conversion);
      expr = expr->value.function.actual->expr;
    }

  if (expr->expr_type == EXPR_VARIABLE)
    {
      /* Use the compile-time bound recorded in the array's backend type;
	 for a full array reference of a component, use the component's
	 type instead.  */
      tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
      for (ref = expr->ref; ref; ref = ref->next)
	{
	  if (ref->type == REF_COMPONENT
	      && ref->u.c.component->as
	      && ref->next
	      && ref->next->u.ar.type == AR_FULL)
	    tmp = TREE_TYPE (ref->u.c.component->backend_decl);
	}
      return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
    }

  return gfc_index_one_node;
}


/* Returns true if an expression represents an lhs that can be reallocated
   on assignment.  */

bool
gfc_is_reallocatable_lhs (gfc_expr *expr)
{
  gfc_ref * ref;

  if (!expr->ref)
    return false;

  /* An allocatable variable.  */
  if (expr->symtree->n.sym->attr.allocatable
      && expr->ref
      && expr->ref->type == REF_ARRAY
      && expr->ref->u.ar.type == AR_FULL)
    return true;

  /* All that can be left are allocatable components.  */
  if ((expr->symtree->n.sym->ts.type != BT_DERIVED
       && expr->symtree->n.sym->ts.type != BT_CLASS)
      || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
    return false;

  /* Find a component ref followed by an array reference.  */
  for (ref = expr->ref; ref; ref = ref->next)
    if (ref->next
	&& ref->type == REF_COMPONENT
	&& ref->next->type == REF_ARRAY
	&& !ref->next->next)
      break;

  if (!ref)
    return false;

  /* Return true if valid reallocatable lhs.  */
  if (ref->u.c.component->attr.allocatable
      && ref->next->u.ar.type == AR_FULL)
    return true;

  return false;
}


/* Determine at translate time the length of the character concatenation
   expression EXPR by adding the lengths of its two operands.  */

static tree
concat_str_length (gfc_expr* expr)
{
  tree type;
  tree len1;
  tree len2;
  gfc_se se;

  type = gfc_typenode_for_spec (&expr->value.op.op1->ts);
  len1 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
  if (len1 == NULL_TREE)
    {
      if (expr->value.op.op1->expr_type == EXPR_OP)
	len1 = concat_str_length (expr->value.op.op1);
      else if (expr->value.op.op1->expr_type == EXPR_CONSTANT)
	len1 = build_int_cst (gfc_charlen_type_node,
			      expr->value.op.op1->value.character.length);
      else if (expr->value.op.op1->ts.u.cl->length)
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr (&se, expr->value.op.op1->ts.u.cl->length);
	  len1 = se.expr;
	}
      else
	{
	  /* Last resort!  */
	  gfc_init_se (&se, NULL);
	  se.want_pointer = 1;
	  se.descriptor_only = 1;
	  gfc_conv_expr (&se, expr->value.op.op1);
	  len1 = se.string_length;
	}
    }

  type = gfc_typenode_for_spec (&expr->value.op.op2->ts);
  len2 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
  if (len2 == NULL_TREE)
    {
      if (expr->value.op.op2->expr_type == EXPR_OP)
	len2 = concat_str_length (expr->value.op.op2);
      else if (expr->value.op.op2->expr_type == EXPR_CONSTANT)
	len2 = build_int_cst (gfc_charlen_type_node,
			      expr->value.op.op2->value.character.length);
      else if (expr->value.op.op2->ts.u.cl->length)
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr (&se, expr->value.op.op2->ts.u.cl->length);
	  len2 = se.expr;
	}
      else
	{
	  /* Last resort!
*/ 8351 gfc_init_se (&se, NULL); 8352 se.want_pointer = 1; 8353 se.descriptor_only = 1; 8354 gfc_conv_expr (&se, expr->value.op.op2); 8355 len2 = se.string_length; 8356 } 8357 } 8358 8359 gcc_assert(len1 && len2); 8360 len1 = fold_convert (gfc_charlen_type_node, len1); 8361 len2 = fold_convert (gfc_charlen_type_node, len2); 8362 8363 return fold_build2_loc (input_location, PLUS_EXPR, 8364 gfc_charlen_type_node, len1, len2); 8365} 8366 8367 8368/* Allocate the lhs of an assignment to an allocatable array, otherwise 8369 reallocate it. */ 8370 8371tree 8372gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop, 8373 gfc_expr *expr1, 8374 gfc_expr *expr2) 8375{ 8376 stmtblock_t realloc_block; 8377 stmtblock_t alloc_block; 8378 stmtblock_t fblock; 8379 gfc_ss *rss; 8380 gfc_ss *lss; 8381 gfc_array_info *linfo; 8382 tree realloc_expr; 8383 tree alloc_expr; 8384 tree size1; 8385 tree size2; 8386 tree array1; 8387 tree cond_null; 8388 tree cond; 8389 tree tmp; 8390 tree tmp2; 8391 tree lbound; 8392 tree ubound; 8393 tree desc; 8394 tree old_desc; 8395 tree desc2; 8396 tree offset; 8397 tree jump_label1; 8398 tree jump_label2; 8399 tree neq_size; 8400 tree lbd; 8401 int n; 8402 int dim; 8403 gfc_array_spec * as; 8404 8405 /* x = f(...) with x allocatable. In this case, expr1 is the rhs. 8406 Find the lhs expression in the loop chain and set expr1 and 8407 expr2 accordingly. */ 8408 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL) 8409 { 8410 expr2 = expr1; 8411 /* Find the ss for the lhs. */ 8412 lss = loop->ss; 8413 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain) 8414 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE) 8415 break; 8416 if (lss == gfc_ss_terminator) 8417 return NULL_TREE; 8418 expr1 = lss->info->expr; 8419 } 8420 8421 /* Bail out if this is not a valid allocate on assignment. */ 8422 if (!gfc_is_reallocatable_lhs (expr1) 8423 || (expr2 && !expr2->rank)) 8424 return NULL_TREE; 8425 8426 /* Find the ss for the lhs. 
*/ 8427 lss = loop->ss; 8428 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain) 8429 if (lss->info->expr == expr1) 8430 break; 8431 8432 if (lss == gfc_ss_terminator) 8433 return NULL_TREE; 8434 8435 linfo = &lss->info->data.array; 8436 8437 /* Find an ss for the rhs. For operator expressions, we see the 8438 ss's for the operands. Any one of these will do. */ 8439 rss = loop->ss; 8440 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain) 8441 if (rss->info->expr != expr1 && rss != loop->temp_ss) 8442 break; 8443 8444 if (expr2 && rss == gfc_ss_terminator) 8445 return NULL_TREE; 8446 8447 gfc_start_block (&fblock); 8448 8449 /* Since the lhs is allocatable, this must be a descriptor type. 8450 Get the data and array size. */ 8451 desc = linfo->descriptor; 8452 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))); 8453 array1 = gfc_conv_descriptor_data_get (desc); 8454 8455 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is 8456 deallocated if expr is an array of different shape or any of the 8457 corresponding length type parameter values of variable and expr 8458 differ." This assures F95 compatibility. */ 8459 jump_label1 = gfc_build_label_decl (NULL_TREE); 8460 jump_label2 = gfc_build_label_decl (NULL_TREE); 8461 8462 /* Allocate if data is NULL. */ 8463 cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, 8464 array1, build_int_cst (TREE_TYPE (array1), 0)); 8465 8466 if (expr1->ts.deferred) 8467 cond_null = gfc_evaluate_now (boolean_true_node, &fblock); 8468 else 8469 cond_null= gfc_evaluate_now (cond_null, &fblock); 8470 8471 tmp = build3_v (COND_EXPR, cond_null, 8472 build1_v (GOTO_EXPR, jump_label1), 8473 build_empty_stmt (input_location)); 8474 gfc_add_expr_to_block (&fblock, tmp); 8475 8476 /* Get arrayspec if expr is a full array. 
*/ 8477 if (expr2 && expr2->expr_type == EXPR_FUNCTION 8478 && expr2->value.function.isym 8479 && expr2->value.function.isym->conversion) 8480 { 8481 /* For conversion functions, take the arg. */ 8482 gfc_expr *arg = expr2->value.function.actual->expr; 8483 as = gfc_get_full_arrayspec_from_expr (arg); 8484 } 8485 else if (expr2) 8486 as = gfc_get_full_arrayspec_from_expr (expr2); 8487 else 8488 as = NULL; 8489 8490 /* If the lhs shape is not the same as the rhs jump to setting the 8491 bounds and doing the reallocation....... */ 8492 for (n = 0; n < expr1->rank; n++) 8493 { 8494 /* Check the shape. */ 8495 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]); 8496 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]); 8497 tmp = fold_build2_loc (input_location, MINUS_EXPR, 8498 gfc_array_index_type, 8499 loop->to[n], loop->from[n]); 8500 tmp = fold_build2_loc (input_location, PLUS_EXPR, 8501 gfc_array_index_type, 8502 tmp, lbound); 8503 tmp = fold_build2_loc (input_location, MINUS_EXPR, 8504 gfc_array_index_type, 8505 tmp, ubound); 8506 cond = fold_build2_loc (input_location, NE_EXPR, 8507 boolean_type_node, 8508 tmp, gfc_index_zero_node); 8509 tmp = build3_v (COND_EXPR, cond, 8510 build1_v (GOTO_EXPR, jump_label1), 8511 build_empty_stmt (input_location)); 8512 gfc_add_expr_to_block (&fblock, tmp); 8513 } 8514 8515 /* ....else jump past the (re)alloc code. */ 8516 tmp = build1_v (GOTO_EXPR, jump_label2); 8517 gfc_add_expr_to_block (&fblock, tmp); 8518 8519 /* Add the label to start automatic (re)allocation. */ 8520 tmp = build1_v (LABEL_EXPR, jump_label1); 8521 gfc_add_expr_to_block (&fblock, tmp); 8522 8523 /* If the lhs has not been allocated, its bounds will not have been 8524 initialized and so its size is set to zero. 
*/ 8525 size1 = gfc_create_var (gfc_array_index_type, NULL); 8526 gfc_init_block (&alloc_block); 8527 gfc_add_modify (&alloc_block, size1, gfc_index_zero_node); 8528 gfc_init_block (&realloc_block); 8529 gfc_add_modify (&realloc_block, size1, 8530 gfc_conv_descriptor_size (desc, expr1->rank)); 8531 tmp = build3_v (COND_EXPR, cond_null, 8532 gfc_finish_block (&alloc_block), 8533 gfc_finish_block (&realloc_block)); 8534 gfc_add_expr_to_block (&fblock, tmp); 8535 8536 /* Get the rhs size and fix it. */ 8537 if (expr2) 8538 desc2 = rss->info->data.array.descriptor; 8539 else 8540 desc2 = NULL_TREE; 8541 8542 size2 = gfc_index_one_node; 8543 for (n = 0; n < expr2->rank; n++) 8544 { 8545 tmp = fold_build2_loc (input_location, MINUS_EXPR, 8546 gfc_array_index_type, 8547 loop->to[n], loop->from[n]); 8548 tmp = fold_build2_loc (input_location, PLUS_EXPR, 8549 gfc_array_index_type, 8550 tmp, gfc_index_one_node); 8551 size2 = fold_build2_loc (input_location, MULT_EXPR, 8552 gfc_array_index_type, 8553 tmp, size2); 8554 } 8555 size2 = gfc_evaluate_now (size2, &fblock); 8556 8557 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, 8558 size1, size2); 8559 8560 /* If the lhs is deferred length, assume that the element size 8561 changes and force a reallocation. */ 8562 if (expr1->ts.deferred) 8563 neq_size = gfc_evaluate_now (boolean_true_node, &fblock); 8564 else 8565 neq_size = gfc_evaluate_now (cond, &fblock); 8566 8567 /* Deallocation of allocatable components will have to occur on 8568 reallocation. Fix the old descriptor now. */ 8569 if ((expr1->ts.type == BT_DERIVED) 8570 && expr1->ts.u.derived->attr.alloc_comp) 8571 old_desc = gfc_evaluate_now (desc, &fblock); 8572 else 8573 old_desc = NULL_TREE; 8574 8575 /* Now modify the lhs descriptor and the associated scalarizer 8576 variables. 
F2003 7.4.1.3: "If variable is or becomes an 8577 unallocated allocatable variable, then it is allocated with each 8578 deferred type parameter equal to the corresponding type parameters 8579 of expr , with the shape of expr , and with each lower bound equal 8580 to the corresponding element of LBOUND(expr)." 8581 Reuse size1 to keep a dimension-by-dimension track of the 8582 stride of the new array. */ 8583 size1 = gfc_index_one_node; 8584 offset = gfc_index_zero_node; 8585 8586 for (n = 0; n < expr2->rank; n++) 8587 { 8588 tmp = fold_build2_loc (input_location, MINUS_EXPR, 8589 gfc_array_index_type, 8590 loop->to[n], loop->from[n]); 8591 tmp = fold_build2_loc (input_location, PLUS_EXPR, 8592 gfc_array_index_type, 8593 tmp, gfc_index_one_node); 8594 8595 lbound = gfc_index_one_node; 8596 ubound = tmp; 8597 8598 if (as) 8599 { 8600 lbd = get_std_lbound (expr2, desc2, n, 8601 as->type == AS_ASSUMED_SIZE); 8602 ubound = fold_build2_loc (input_location, 8603 MINUS_EXPR, 8604 gfc_array_index_type, 8605 ubound, lbound); 8606 ubound = fold_build2_loc (input_location, 8607 PLUS_EXPR, 8608 gfc_array_index_type, 8609 ubound, lbd); 8610 lbound = lbd; 8611 } 8612 8613 gfc_conv_descriptor_lbound_set (&fblock, desc, 8614 gfc_rank_cst[n], 8615 lbound); 8616 gfc_conv_descriptor_ubound_set (&fblock, desc, 8617 gfc_rank_cst[n], 8618 ubound); 8619 gfc_conv_descriptor_stride_set (&fblock, desc, 8620 gfc_rank_cst[n], 8621 size1); 8622 lbound = gfc_conv_descriptor_lbound_get (desc, 8623 gfc_rank_cst[n]); 8624 tmp2 = fold_build2_loc (input_location, MULT_EXPR, 8625 gfc_array_index_type, 8626 lbound, size1); 8627 offset = fold_build2_loc (input_location, MINUS_EXPR, 8628 gfc_array_index_type, 8629 offset, tmp2); 8630 size1 = fold_build2_loc (input_location, MULT_EXPR, 8631 gfc_array_index_type, 8632 tmp, size1); 8633 } 8634 8635 /* Set the lhs descriptor and scalarizer offsets. For rank > 1, 8636 the array offset is saved and the info.offset is used for a 8637 running offset. 
Use the saved_offset instead. */ 8638 tmp = gfc_conv_descriptor_offset (desc); 8639 gfc_add_modify (&fblock, tmp, offset); 8640 if (linfo->saved_offset 8641 && TREE_CODE (linfo->saved_offset) == VAR_DECL) 8642 gfc_add_modify (&fblock, linfo->saved_offset, tmp); 8643 8644 /* Now set the deltas for the lhs. */ 8645 for (n = 0; n < expr1->rank; n++) 8646 { 8647 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]); 8648 dim = lss->dim[n]; 8649 tmp = fold_build2_loc (input_location, MINUS_EXPR, 8650 gfc_array_index_type, tmp, 8651 loop->from[dim]); 8652 if (linfo->delta[dim] 8653 && TREE_CODE (linfo->delta[dim]) == VAR_DECL) 8654 gfc_add_modify (&fblock, linfo->delta[dim], tmp); 8655 } 8656 8657 /* Get the new lhs size in bytes. */ 8658 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred) 8659 { 8660 if (expr2->ts.deferred) 8661 { 8662 if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL) 8663 tmp = expr2->ts.u.cl->backend_decl; 8664 else 8665 tmp = rss->info->string_length; 8666 } 8667 else 8668 { 8669 tmp = expr2->ts.u.cl->backend_decl; 8670 if (!tmp && expr2->expr_type == EXPR_OP 8671 && expr2->value.op.op == INTRINSIC_CONCAT) 8672 { 8673 tmp = concat_str_length (expr2); 8674 expr2->ts.u.cl->backend_decl = gfc_evaluate_now (tmp, &fblock); 8675 } 8676 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp); 8677 } 8678 8679 if (expr1->ts.u.cl->backend_decl 8680 && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL) 8681 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp); 8682 else 8683 gfc_add_modify (&fblock, lss->info->string_length, tmp); 8684 } 8685 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl) 8686 { 8687 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts))); 8688 tmp = fold_build2_loc (input_location, MULT_EXPR, 8689 gfc_array_index_type, tmp, 8690 expr1->ts.u.cl->backend_decl); 8691 } 8692 else 8693 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts)); 8694 tmp = fold_convert 
(gfc_array_index_type, tmp); 8695 size2 = fold_build2_loc (input_location, MULT_EXPR, 8696 gfc_array_index_type, 8697 tmp, size2); 8698 size2 = fold_convert (size_type_node, size2); 8699 size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node, 8700 size2, size_one_node); 8701 size2 = gfc_evaluate_now (size2, &fblock); 8702 8703 /* For deferred character length, the 'size' field of the dtype might 8704 have changed so set the dtype. */ 8705 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)) 8706 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred) 8707 { 8708 tree type; 8709 tmp = gfc_conv_descriptor_dtype (desc); 8710 if (expr2->ts.u.cl->backend_decl) 8711 type = gfc_typenode_for_spec (&expr2->ts); 8712 else 8713 type = gfc_typenode_for_spec (&expr1->ts); 8714 8715 gfc_add_modify (&fblock, tmp, 8716 gfc_get_dtype_rank_type (expr1->rank,type)); 8717 } 8718 8719 /* Realloc expression. Note that the scalarizer uses desc.data 8720 in the array reference - (*desc.data)[<element>]. */ 8721 gfc_init_block (&realloc_block); 8722 8723 if ((expr1->ts.type == BT_DERIVED) 8724 && expr1->ts.u.derived->attr.alloc_comp) 8725 { 8726 tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc, 8727 expr1->rank); 8728 gfc_add_expr_to_block (&realloc_block, tmp); 8729 } 8730 8731 tmp = build_call_expr_loc (input_location, 8732 builtin_decl_explicit (BUILT_IN_REALLOC), 2, 8733 fold_convert (pvoid_type_node, array1), 8734 size2); 8735 gfc_conv_descriptor_data_set (&realloc_block, 8736 desc, tmp); 8737 8738 if ((expr1->ts.type == BT_DERIVED) 8739 && expr1->ts.u.derived->attr.alloc_comp) 8740 { 8741 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc, 8742 expr1->rank); 8743 gfc_add_expr_to_block (&realloc_block, tmp); 8744 } 8745 8746 realloc_expr = gfc_finish_block (&realloc_block); 8747 8748 /* Only reallocate if sizes are different. 
*/ 8749 tmp = build3_v (COND_EXPR, neq_size, realloc_expr, 8750 build_empty_stmt (input_location)); 8751 realloc_expr = tmp; 8752 8753 8754 /* Malloc expression. */ 8755 gfc_init_block (&alloc_block); 8756 tmp = build_call_expr_loc (input_location, 8757 builtin_decl_explicit (BUILT_IN_MALLOC), 8758 1, size2); 8759 gfc_conv_descriptor_data_set (&alloc_block, 8760 desc, tmp); 8761 8762 /* We already set the dtype in the case of deferred character 8763 length arrays. */ 8764 if (!(GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)) 8765 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)) 8766 { 8767 tmp = gfc_conv_descriptor_dtype (desc); 8768 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc))); 8769 } 8770 8771 if ((expr1->ts.type == BT_DERIVED) 8772 && expr1->ts.u.derived->attr.alloc_comp) 8773 { 8774 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc, 8775 expr1->rank); 8776 gfc_add_expr_to_block (&alloc_block, tmp); 8777 } 8778 alloc_expr = gfc_finish_block (&alloc_block); 8779 8780 /* Malloc if not allocated; realloc otherwise. */ 8781 tmp = build_int_cst (TREE_TYPE (array1), 0); 8782 cond = fold_build2_loc (input_location, EQ_EXPR, 8783 boolean_type_node, 8784 array1, tmp); 8785 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr); 8786 gfc_add_expr_to_block (&fblock, tmp); 8787 8788 /* Make sure that the scalarizer data pointer is updated. */ 8789 if (linfo->data 8790 && TREE_CODE (linfo->data) == VAR_DECL) 8791 { 8792 tmp = gfc_conv_descriptor_data_get (desc); 8793 gfc_add_modify (&fblock, linfo->data, tmp); 8794 } 8795 8796 /* Add the exit label. */ 8797 tmp = build1_v (LABEL_EXPR, jump_label2); 8798 gfc_add_expr_to_block (&fblock, tmp); 8799 8800 return gfc_finish_block (&fblock); 8801} 8802 8803 8804/* NULLIFY an allocatable/pointer array on function entry, free it on exit. 8805 Do likewise, recursively if necessary, with the allocatable components of 8806 derived types. 
*/

void
gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
{
  tree type;
  tree tmp;
  tree descriptor;
  stmtblock_t init;
  stmtblock_t cleanup;
  locus loc;
  int rank;
  bool sym_has_alloc_comp, has_finalizer;

  /* Derived/class types may carry allocatable components that need
     per-component treatment below.  */
  sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
			|| sym->ts.type == BT_CLASS)
			  && sym->ts.u.derived->attr.alloc_comp;
  has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
		   ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;

  /* Make sure the frontend gets these right.  */
  gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
	      || has_finalizer);

  gfc_save_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);
  gfc_init_block (&init);

  gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
		|| TREE_CODE (sym->backend_decl) == PARM_DECL);

  /* A non-constant character length must be evaluated into the init
     block before the descriptor can be sized.  */
  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
      gfc_trans_vla_type_sizes (sym, &init);
    }

  /* Dummy, use associated and result variables don't need anything special.  */
  if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
    {
      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  descriptor = sym->backend_decl;

  /* Although static, derived types with default initializers and
     allocatable components must not be nulled wholesale; instead they
     are treated component by component.  */
  if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
    {
      /* SAVEd variables are not freed on exit.  */
      gfc_trans_static_array_pointer (sym);

      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  /* Get the descriptor type.  */
  type = TREE_TYPE (sym->backend_decl);

  if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
      && !(sym->attr.pointer || sym->attr.allocatable))
    {
      if (!sym->attr.save
	  && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
	{
	  if (sym->value == NULL
	      || !gfc_has_default_initializer (sym->ts.u.derived))
	    {
	      rank = sym->as ? sym->as->rank : 0;
	      tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
					    descriptor, rank);
	      gfc_add_expr_to_block (&init, tmp);
	    }
	  else
	    gfc_init_default_dt (sym, &init, false);
	}
    }
  else if (!GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* If the backend_decl is not a descriptor, we must have a pointer
	 to one.  */
      descriptor = build_fold_indirect_ref_loc (input_location,
						sym->backend_decl);
      type = TREE_TYPE (descriptor);
    }

  /* NULLIFY the data pointer, for non-saved allocatables.  */
  if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save && sym->attr.allocatable)
    gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);

  gfc_restore_backend_locus (&loc);
  gfc_init_block (&cleanup);

  /* Allocatable arrays need to be freed when they go out of scope.
     The allocatable components of pointers must not be touched.  */
  if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
      && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
      && !sym->ns->proc_name->attr.is_main_program)
    {
      gfc_expr *e;
      sym->attr.referenced = 1;
      e = gfc_lval_expr_from_sym (sym);
      gfc_add_finalizer_call (&cleanup, e);
      gfc_free_expr (e);
    }
  else if ((!sym->attr.allocatable || !has_finalizer)
	   && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
	   && !sym->attr.pointer && !sym->attr.save
	   && !sym->ns->proc_name->attr.is_main_program)
    {
      int rank;
      rank = sym->as ? sym->as->rank : 0;
      tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
      && !sym->attr.save && !sym->attr.result
      && !sym->ns->proc_name->attr.is_main_program)
    {
      gfc_expr *e;
      e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
      tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
					 sym->attr.codimension, e);
      if (e)
	gfc_free_expr (e);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  gfc_add_init_cleanup (block, gfc_finish_block (&init),
			gfc_finish_block (&cleanup));
}

/************ Expression Walking Functions ******************/

/* Walk a variable reference.

   Possible extension - multiple component subscripts.
    x(:,:) = foo%a(:)%b(:)
   Transforms to
    forall (i=..., j=...)
      x(i,j) = foo%a(j)%b(i)
    end forall
   This adds a fair amount of complexity because you need to deal with more
   than one ref.  Maybe handle in a similar manner to vector subscripts.
   Maybe not worth the effort.
*/


static gfc_ss *
gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ref *ref;

  /* Find the first non-elemental array reference in the chain; any
     refs before it contribute no scalarization dimensions.  */
  for (ref = expr->ref; ref; ref = ref->next)
    if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
      break;

  return gfc_walk_array_ref (ss, expr, ref);
}


/* Walk the reference chain REF belonging to EXPR, adding an SS term for
   each array section, vector subscript and substring bound that requires
   scalarization.  Returns the new head of the chain.  */

gfc_ss *
gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
{
  gfc_array_ref *ar;
  gfc_ss *newss;
  int n;

  for (; ref; ref = ref->next)
    {
      if (ref->type == REF_SUBSTRING)
	{
	  ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
	  ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
	}

      /* We're only interested in array sections from now on.  */
      if (ref->type != REF_ARRAY)
	continue;

      ar = &ref->u.ar;

      switch (ar->type)
	{
	case AR_ELEMENT:
	  for (n = ar->dimen - 1; n >= 0; n--)
	    ss = gfc_get_scalar_ss (ss, ar->start[n]);
	  break;

	case AR_FULL:
	  newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
	  newss->info->data.array.ref = ref;

	  /* Make sure array is the same as array(:,:), this way
	     we don't need to special case all the time.  */
	  ar->dimen = ar->as->rank;
	  for (n = 0; n < ar->dimen; n++)
	    {
	      ar->dimen_type[n] = DIMEN_RANGE;

	      gcc_assert (ar->start[n] == NULL);
	      gcc_assert (ar->end[n] == NULL);
	      gcc_assert (ar->stride[n] == NULL);
	    }
	  ss = newss;
	  break;

	case AR_SECTION:
	  /* Rank 0 here; the dimensions are filled in one by one below.  */
	  newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
	  newss->info->data.array.ref = ref;

	  /* We add SS chains for all the subscripts in the section.  */
	  for (n = 0; n < ar->dimen; n++)
	    {
	      gfc_ss *indexss;

	      switch (ar->dimen_type[n])
		{
		case DIMEN_ELEMENT:
		  /* Add SS for elemental (scalar) subscripts.  */
		  gcc_assert (ar->start[n]);
		  indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
		  indexss->loop_chain = gfc_ss_terminator;
		  newss->info->data.array.subscript[n] = indexss;
		  break;

		case DIMEN_RANGE:
		  /* We don't add anything for sections, just remember this
		     dimension for later.  */
		  newss->dim[newss->dimen] = n;
		  newss->dimen++;
		  break;

		case DIMEN_VECTOR:
		  /* Create a GFC_SS_VECTOR index in which we can store
		     the vector's descriptor.  */
		  indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
					      1, GFC_SS_VECTOR);
		  indexss->loop_chain = gfc_ss_terminator;
		  newss->info->data.array.subscript[n] = indexss;
		  newss->dim[newss->dimen] = n;
		  newss->dimen++;
		  break;

		default:
		  /* We should know what sort of section it is by now.  */
		  gcc_unreachable ();
		}
	    }
	  /* We should have at least one non-elemental dimension,
	     unless we are creating a descriptor for a (scalar) coarray.  */
	  gcc_assert (newss->dimen > 0
		      || newss->info->data.array.ref->u.ar.as->corank > 0);
	  ss = newss;
	  break;

	default:
	  /* We should know what sort of section it is by now.  */
	  gcc_unreachable ();
	}

    }
  return ss;
}


/* Walk an expression operator. If only one operand of a binary expression is
   scalar, we must also add the scalar term to the SS chain.  */

static gfc_ss *
gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;
  gfc_ss *head2;

  head = gfc_walk_subexpr (ss, expr->value.op.op1);
  if (expr->value.op.op2 == NULL)
    head2 = head;
  else
    head2 = gfc_walk_subexpr (head, expr->value.op.op2);

  /* All operands are scalar.  Pass back and let the caller deal with it.  */
  if (head2 == ss)
    return head2;

  /* All operands require scalarization.  */
  if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
    return head2;

  /* One of the operands needs scalarization, the other is scalar.
     Create a gfc_ss for the scalar expression.  */
  if (head == ss)
    {
      /* First operand is scalar.  We build the chain in reverse order, so
	 add the scalar SS after the second operand.  */
      head = head2;
      while (head && head->next != ss)
	head = head->next;
      /* Check we haven't somehow broken the chain.  */
      gcc_assert (head);
      head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
    }
  else				/* head2 == head */
    {
      gcc_assert (head2 == head);
      /* Second operand is scalar.  */
      head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
    }

  return head2;
}


/* Reverse a SS chain.  */

gfc_ss *
gfc_reverse_ss (gfc_ss * ss)
{
  gfc_ss *next;
  gfc_ss *head;

  gcc_assert (ss != NULL);

  head = gfc_ss_terminator;
  while (ss != gfc_ss_terminator)
    {
      next = ss->next;
      /* Check we didn't somehow break the chain.  */
      gcc_assert (next != NULL);
      ss->next = head;
      head = ss;
      ss = next;
    }

  return (head);
}


/* Given an expression referring to a procedure, return the symbol of its
   interface.  We can't get the procedure symbol directly as we have to handle
   the case of (deferred) type-bound procedures.  */

gfc_symbol *
gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
{
  gfc_symbol *sym;
  gfc_ref *ref;

  if (procedure_ref == NULL)
    return NULL;

  /* Normal procedure case.  */
  if (procedure_ref->expr_type == EXPR_FUNCTION
      && procedure_ref->value.function.esym)
    sym = procedure_ref->value.function.esym;
  else
    sym = procedure_ref->symtree->n.sym;

  /* Typebound procedure case.  The last proc-pointer component ref wins;
     any other component ref clears the result.  */
  for (ref = procedure_ref->ref; ref; ref = ref->next)
    {
      if (ref->type == REF_COMPONENT
	  && ref->u.c.component->attr.proc_pointer)
	sym = ref->u.c.component->ts.interface;
      else
	sym = NULL;
    }

  return sym;
}


/* Walk the arguments of an elemental function.
   PROC_IFC is used to check whether an argument is permitted to be absent.  If
   it is NULL, we don't do the check and the argument is assumed to be present.
*/

gfc_ss *
gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
				  gfc_symbol *proc_ifc, gfc_ss_type type)
{
  gfc_formal_arglist *dummy_arg;
  int scalar;
  gfc_ss *head;
  gfc_ss *tail;
  gfc_ss *newss;

  head = gfc_ss_terminator;
  tail = NULL;

  if (proc_ifc)
    dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
  else
    dummy_arg = NULL;

  scalar = 1;
  for (; arg; arg = arg->next)
    {
      if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
	continue;

      newss = gfc_walk_subexpr (head, arg->expr);
      if (newss == head)
	{
	  /* Scalar argument.  */
	  gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
	  newss = gfc_get_scalar_ss (head, arg->expr);
	  newss->info->type = type;

	}
      else
	scalar = 0;

      /* An optional dummy whose actual could be absent at run time must
	 tolerate a NULL data reference.  */
      if (dummy_arg != NULL
	  && dummy_arg->sym->attr.optional
	  && arg->expr->expr_type == EXPR_VARIABLE
	  && (gfc_expr_attr (arg->expr).optional
	      || gfc_expr_attr (arg->expr).allocatable
	      || gfc_expr_attr (arg->expr).pointer))
	newss->info->can_be_null_ref = true;

      head = newss;
      if (!tail)
	{
	  tail = head;
	  while (tail->next != gfc_ss_terminator)
	    tail = tail->next;
	}

      if (dummy_arg != NULL)
	dummy_arg = dummy_arg->next;
    }

  if (scalar)
    {
      /* If all the arguments are scalar we don't need the argument SS.  */
      gfc_free_ss_chain (head);
      /* Pass it back.  */
      return ss;
    }

  /* Add it onto the existing chain.  */
  tail->next = ss;
  return head;
}


/* Walk a function call.  Scalar functions are passed back, and taken out of
   scalarization loops.  For elemental functions we walk their arguments.
   The result of functions returning arrays is stored in a temporary outside
   the loop, so that the function is only called once.  Hence we do not need
   to walk their arguments.  */

static gfc_ss *
gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_intrinsic_sym *isym;
  gfc_symbol *sym;
  gfc_component *comp = NULL;

  isym = expr->value.function.isym;

  /* Handle intrinsic functions separately.  */
  if (isym)
    return gfc_walk_intrinsic_function (ss, expr, isym);

  sym = expr->value.function.esym;
  if (!sym)
    sym = expr->symtree->n.sym;

  if (gfc_is_alloc_class_array_function (expr))
    return gfc_get_array_ss (ss, expr,
			     CLASS_DATA (expr->value.function.esym->result)->as->rank,
			     GFC_SS_FUNCTION);

  /* A function that returns arrays.  */
  comp = gfc_get_proc_ptr_comp (expr);
  if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
      || (comp && comp->attr.dimension))
    return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);

  /* Walk the parameters of an elemental function.  For now we always pass
     by reference.  */
  if (sym->attr.elemental || (comp && comp->attr.elemental))
    {
      gfc_ss *old_ss = ss;

      ss = gfc_walk_elemental_function_args (old_ss,
					     expr->value.function.actual,
					     gfc_get_proc_ifc_for_expr (expr),
					     GFC_SS_REFERENCE);
      if (ss != old_ss
	  && (comp
	      || sym->attr.proc_pointer
	      || sym->attr.if_source != IFSRC_DECL
	      || sym->attr.array_outer_dependency))
	ss->info->array_outer_dependency = 1;
    }

  /* Scalar functions are OK as these are evaluated outside the scalarization
     loop.  Pass back and let the caller deal with it.  */
  return ss;
}


/* An array temporary is constructed for array constructors.  */

static gfc_ss *
gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
{
  return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
}


/* Walk an expression.  Add walked expressions to the head of the SS chain.
   A wholly scalar expression will not be added.  */

gfc_ss *
gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;

  switch (expr->expr_type)
    {
    case EXPR_VARIABLE:
      head = gfc_walk_variable_expr (ss, expr);
      return head;

    case EXPR_OP:
      head = gfc_walk_op_expr (ss, expr);
      return head;

    case EXPR_FUNCTION:
      head = gfc_walk_function_expr (ss, expr);
      return head;

    case EXPR_CONSTANT:
    case EXPR_NULL:
    case EXPR_STRUCTURE:
      /* Pass back and let the caller deal with it.  */
      break;

    case EXPR_ARRAY:
      head = gfc_walk_array_constructor (ss, expr);
      return head;

    case EXPR_SUBSTRING:
      /* Pass back and let the caller deal with it.  */
      break;

    default:
      gfc_internal_error ("bad expression type during walk (%d)",
			  expr->expr_type);
    }
  return ss;
}


/* Entry point for expression walking.
9370 A return value equal to the passed chain means this is 9371 a scalar expression. It is up to the caller to take whatever action is 9372 necessary to translate these. */ 9373 9374gfc_ss * 9375gfc_walk_expr (gfc_expr * expr) 9376{ 9377 gfc_ss *res; 9378 9379 res = gfc_walk_subexpr (gfc_ss_terminator, expr); 9380 return gfc_reverse_ss (res); 9381} 9382