diff --git a/compile.c b/compile.c
index e97c61eb1022f0..22996ed8ac031c 100644
--- a/compile.c
+++ b/compile.c
@@ -6480,11 +6480,20 @@ compile_single_keyword_splat_mutable(rb_iseq_t *iseq, LINK_ANCHOR *const args, c
     ADD_INSN1(args, argn, newhash, INT2FIX(0));
     compile_hash(iseq, args, kwnode, TRUE, FALSE);
     ADD_SEND(args, argn, id_core_hash_merge_kwd, INT2FIX(2));
+
+    rb_category_warn(
+        RB_WARN_CATEGORY_PERFORMANCE,
+        "(Line %d) This method call implicitly allocates a potentially unnecessary hash for the keyword splat, " \
+        "because the block pass expression could cause an evaluation order issue if a hash is not " \
+        "allocated for the keyword splat. You can avoid this allocation by assigning the block pass " \
+        "expression to a local variable, and using that local variable.",
+        nd_line(RNODE(kwnode)));
 }
 
 #define SPLATARRAY_FALSE 0
 #define SPLATARRAY_TRUE 1
 #define DUP_SINGLE_KW_SPLAT 2
+#define MAYBE_UNNECESSARY_ALLOC_SPLAT 4
 
 static int
 setup_args_core(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
@@ -6605,10 +6614,21 @@ setup_args_core(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
 }
 
 static void
-setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest)
+setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest, const NODE *node)
 {
     if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
         *flag |= VM_CALL_ARGS_SPLAT_MUT;
+
+        if (dup_rest & MAYBE_UNNECESSARY_ALLOC_SPLAT) {
+            rb_category_warn(
+                RB_WARN_CATEGORY_PERFORMANCE,
+                "(Line %d) This method call implicitly allocates a potentially unnecessary array for the positional splat, " \
+                "because a keyword, keyword splat, or block pass expression could cause an evaluation order issue " \
+                "if an array is not allocated for the positional splat. You can avoid this allocation by assigning " \
+                "the related keyword, keyword splat, or block pass expression to a local variable and using that " \
+                "local variable.",
+                nd_line(RNODE(node)));
+        }
     }
 }
 
@@ -6680,14 +6700,14 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
             while (node) {
                 NODE *key_node = RNODE_LIST(node)->nd_head;
                 if (key_node && setup_args_dup_rest_p(key_node)) {
-                    dup_rest = SPLATARRAY_TRUE;
+                    dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
                     break;
                 }
 
                 node = RNODE_LIST(node)->nd_next;
                 NODE *value_node = RNODE_LIST(node)->nd_head;
                 if (setup_args_dup_rest_p(value_node)) {
-                    dup_rest = SPLATARRAY_TRUE;
+                    dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
                     break;
                 }
 
@@ -6702,7 +6722,8 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
             if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
                 // for block pass that may modify splatted argument, dup rest and kwrest if given
-                dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
+                if (dup_rest == SPLATARRAY_FALSE) dup_rest |= MAYBE_UNNECESSARY_ALLOC_SPLAT;
+                dup_rest |= SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
             }
         }
         initial_dup_rest = dup_rest;
 
@@ -6730,7 +6751,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
             *flag |= VM_CALL_FORWARDING;
 
             ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
-            setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
+            setup_args_splat_mut(flag, dup_rest, initial_dup_rest, argn);
             return INT2FIX(argc);
         }
         else {
@@ -6754,7 +6775,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
     else {
         ret = INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
     }
-    setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
+    setup_args_splat_mut(flag, dup_rest, initial_dup_rest, argn);
     return ret;
 }
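
The compile.c changes above only warn when the positional splat would otherwise have avoided an allocation. Here is a minimal sketch of the user-visible behavior, assuming a Ruby built with this patch; the names `x`, `kw`, `a`, and `blk` are illustrative and mirror the tests at the end of the patch. The warning is emitted at compile time, so the snippet wraps the calls in `eval` to make sure they compile after the performance category is enabled:

```ruby
Warning[:performance] = true

eval(<<~RUBY)
  def kw = {}
  def x(...) = nil

  a = []
  x(*a, &kw)  # warns: `kw` is a method call, so `a` must be copied into a new
              # array before the block pass expression is evaluated
  blk = kw
  x(*a, &blk) # no warning: a local variable cannot have side effects, so the
              # splatted array can be passed through without an extra copy
RUBY
```
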
diff --git a/prism_compile.c b/prism_compile.c
index 6de3f69c298492..19877a12421bc2 100644
--- a/prism_compile.c
+++ b/prism_compile.c
@@ -1564,6 +1564,7 @@ pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_l
 #define SPLATARRAY_FALSE 0
 #define SPLATARRAY_TRUE 1
 #define DUP_SINGLE_KW_SPLAT 2
+#define MAYBE_UNNECESSARY_ALLOC_SPLAT 4
 
 // This is details. Users should call pm_setup_args() instead.
 static int
@@ -1613,6 +1614,14 @@ pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *b
                 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
                 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
                 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
+
+                rb_category_warn(
+                    RB_WARN_CATEGORY_PERFORMANCE,
+                    "(Line %d) This method call implicitly allocates a potentially unnecessary hash for the keyword splat, " \
+                    "because the block pass expression could cause an evaluation order issue if a hash is not " \
+                    "allocated for the keyword splat. You can avoid this allocation by assigning the block pass " \
+                    "expression to a local variable, and using that local variable.",
+                    node_location->line);
             }
             else {
                 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
@@ -1892,7 +1901,7 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
     size_t arguments_size;
 
     // Calls like foo(1, *f, **hash) that use splat and kwsplat could be
-    // eligible for eliding duping the rest array (dup_reset=false).
+    // eligible for eliding duping the rest array (dup_rest=false).
     if (
         arguments_node != NULL &&
         (arguments = &arguments_node->arguments, arguments_size = arguments->size) >= 2 &&
@@ -1914,12 +1923,12 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
             switch (PM_NODE_TYPE(element)) {
               case PM_ASSOC_NODE: {
                 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
-                if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
+                if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
                 break;
               }
               case PM_ASSOC_SPLAT_NODE: {
                 const pm_assoc_splat_node_t *assoc = (const pm_assoc_splat_node_t *) element;
-                if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
+                if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
                 break;
               }
               default:
@@ -1939,7 +1948,13 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
         const pm_node_t *block_expr = ((const pm_block_argument_node_t *)block)->expression;
 
         if (block_expr && pm_setup_args_dup_rest_p(block_expr)) {
-            dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
+            if (dup_rest == SPLATARRAY_FALSE ||
+                (arguments_node != NULL &&
+                 !PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_MULTIPLE_SPLATS) &&
+                 PM_NODE_TYPE_P(arguments->nodes[arguments_size - 1], PM_SPLAT_NODE))) {
+                dup_rest |= MAYBE_UNNECESSARY_ALLOC_SPLAT;
+            }
+            dup_rest |= SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
             initial_dup_rest = dup_rest;
         }
 
@@ -1977,6 +1992,17 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
     // VM_CALL_ARGS_SPLAT_MUT flag.
     if (*flags & VM_CALL_ARGS_SPLAT && dup_rest != initial_dup_rest) {
         *flags |= VM_CALL_ARGS_SPLAT_MUT;
+
+        if (dup_rest & MAYBE_UNNECESSARY_ALLOC_SPLAT) {
+            rb_category_warn(
+                RB_WARN_CATEGORY_PERFORMANCE,
+                "(Line %d) This method call implicitly allocates a potentially unnecessary array for the positional splat, " \
+                "because a keyword, keyword splat, or block pass expression could cause an evaluation order issue " \
+                "if an array is not allocated for the positional splat. You can avoid this allocation by assigning " \
+                "the related keyword, keyword splat, or block pass expression to a local variable and using that " \
+                "local variable.",
+                node_location->line);
+        }
     }
 
     return argc;
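
Both compilers keep the new MAYBE_UNNECESSARY_ALLOC_SPLAT bit alongside SPLATARRAY_TRUE in dup_rest, so the warning only fires on the paths that actually set VM_CALL_ARGS_SPLAT_MUT. A companion sketch for the keyword splat warning, under the same assumptions as the earlier snippet (hypothetical names, a Ruby built with this patch, and `eval` because the warning is emitted at compile time):

```ruby
Warning[:performance] = true

eval(<<~RUBY)
  def kw = {}
  def x(...) = nil

  x(**kw, &kw)  # warns: the block pass is a method call, so a fresh hash must be
                # allocated for **kw to preserve left-to-right evaluation order
  blk = kw
  x(**kw, &blk) # no warning: with a local variable as the block pass, the hash
                # returned by `kw` does not need to be duplicated at the call site
RUBY
```
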
diff --git a/test/ruby/test_call.rb b/test/ruby/test_call.rb
index 7843f3b476e6c9..13dace435f564e 100644
--- a/test/ruby/test_call.rb
+++ b/test/ruby/test_call.rb
@@ -374,6 +374,69 @@ def o.foo(a, **h)= h[:splat_modified] = true
     assert_equal({splat_modified: false}, b)
   end
 
+  UNNECESSARY_POS_SPLAT_MESSAGE = "This method call implicitly allocates a potentially " \
+    "unnecessary array for the positional splat, because a keyword, keyword splat, or " \
+    "block pass expression could cause an evaluation order issue if an array is not " \
+    "allocated for the positional splat\. You can avoid this allocation by assigning " \
+    "the related keyword, keyword splat, or block pass expression to a local variable " \
+    "and using that local variable."
+
+  def test_unnecessary_positional_splat_alloc_due_to_kw_warning
+    assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
+      $VERBOSE = false
+      Warning[:performance] = true
+      eval(<<-RUBY)
+        def self.kw = {}
+        def self.x(...) = nil
+        a = []
+        x(*a, kw:)
+      RUBY
+    INPUT
+  end
+
+  def test_unnecessary_positional_splat_alloc_due_to_kw_splat_warning
+    assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
+      $VERBOSE = false
+      Warning[:performance] = true
+      eval(<<-RUBY)
+        def self.kw = {}
+        def self.x(...) = nil
+        a = []
+        x(*a, **kw)
+      RUBY
+    INPUT
+  end
+
+  def test_unnecessary_positional_splat_alloc_due_to_block_warning
+    assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
+      $VERBOSE = false
+      Warning[:performance] = true
+      eval(<<-RUBY)
+        def self.kw = {}
+        def self.x(...) = nil
+        a = []
+        x(*a, &kw)
+      RUBY
+    INPUT
+  end
+
+  def test_unnecessary_keyword_splat_alloc_due_to_block_warning
+    message = "This method call implicitly allocates a potentially " \
+      "unnecessary hash for the keyword splat, because the block pass expression could " \
+      "cause an evaluation order issue if a hash is not allocated for the keyword splat. " \
+      "You can avoid this allocation by assigning the block pass expression to a local " \
+      "variable, and using that local variable."
+    assert_in_out_err([], <<-INPUT, %w(), Regexp.new(message))
+      $VERBOSE = false
+      Warning[:performance] = true
+      eval(<<-RUBY)
+        def self.kw = {}
+        def self.x(...) = nil
+        h = {}
+        x(**kw, &kw)
+      RUBY
+    INPUT
+  end
+
   def test_anon_splat
     r2kh = Hash.ruby2_keywords_hash(kw: 2)
     r2kea = [r2kh]
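
Outside the test suite, these warnings are off by default and are only reported once `Warning[:performance]` is enabled; the tests set `$VERBOSE = false` merely to silence unrelated verbose-mode warnings. If a project wants to count or fail on them in CI, the standard `Warning.warn` hook can intercept the category. The following sketch is not part of this patch; it assumes Ruby 3.0+, where the hook receives the `category:` keyword, and reuses the hypothetical `x`/`kw` definitions from the tests:

```ruby
Warning[:performance] = true

# Count warnings by category by hooking Warning.warn; `super` keeps the
# default behavior of printing the message to $stderr.
module PerformanceWarningCounter
  COUNTS = Hash.new(0)

  def warn(message, category: nil)
    COUNTS[category] += 1
    super
  end
end
Warning.extend(PerformanceWarningCounter)

eval("def kw = {}; def x(...) = nil; a = []; x(*a, &kw)")
p PerformanceWarningCounter::COUNTS # e.g. {:performance=>1} if the warning fired
```
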