Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Show performance warnings for easily avoidable unnecessary implicit splat allocations #13135

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 27 additions & 6 deletions compile.c
Original file line number Diff line number Diff line change
Expand Up @@ -6480,11 +6480,20 @@ compile_single_keyword_splat_mutable(rb_iseq_t *iseq, LINK_ANCHOR *const args, c
ADD_INSN1(args, argn, newhash, INT2FIX(0));
compile_hash(iseq, args, kwnode, TRUE, FALSE);
ADD_SEND(args, argn, id_core_hash_merge_kwd, INT2FIX(2));

rb_category_warn(
RB_WARN_CATEGORY_PERFORMANCE,
"(Line %d) This method call implicitly allocates a potentially unnecessary hash for the keyword splat, " \
"because the block pass expression could cause an evaluation order issue if a hash is not " \
"allocated for the keyword splat. You can avoid this allocation by assigning the block pass " \
"expression to a local variable, and using that local variable.",
nd_line(RNODE(kwnode)));
}

#define SPLATARRAY_FALSE 0
#define SPLATARRAY_TRUE 1
#define DUP_SINGLE_KW_SPLAT 2
#define MAYBE_UNNECESSARY_ALLOC_SPLAT 4

static int
setup_args_core(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
Expand Down Expand Up @@ -6605,10 +6614,21 @@ setup_args_core(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
}

static void
setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest)
setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest, const NODE *node)
{
if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
*flag |= VM_CALL_ARGS_SPLAT_MUT;

if (dup_rest & MAYBE_UNNECESSARY_ALLOC_SPLAT) {
rb_category_warn(
RB_WARN_CATEGORY_PERFORMANCE,
"(Line %d) This method call implicitly allocates a potentially unnecessary array for the positional splat, " \
"because a keyword, keyword splat, or block pass expression could cause an evaluation order issue " \
"if an array is not allocated for the positional splat. You can avoid this allocation by assigning " \
"the related keyword, keyword splat, or block pass expression to a local variable and using that " \
"local variable.",
nd_line(RNODE(node)));
}
}
}

Expand Down Expand Up @@ -6680,14 +6700,14 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
while (node) {
NODE *key_node = RNODE_LIST(node)->nd_head;
if (key_node && setup_args_dup_rest_p(key_node)) {
dup_rest = SPLATARRAY_TRUE;
dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
break;
}

node = RNODE_LIST(node)->nd_next;
NODE *value_node = RNODE_LIST(node)->nd_head;
if (setup_args_dup_rest_p(value_node)) {
dup_rest = SPLATARRAY_TRUE;
dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
break;
}

Expand All @@ -6702,7 +6722,8 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,

if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
// for block pass that may modify splatted argument, dup rest and kwrest if given
dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
if (dup_rest == SPLATARRAY_FALSE) dup_rest |= MAYBE_UNNECESSARY_ALLOC_SPLAT;
dup_rest |= SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
}
}
initial_dup_rest = dup_rest;
Expand Down Expand Up @@ -6730,7 +6751,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
*flag |= VM_CALL_FORWARDING;

ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
setup_args_splat_mut(flag, dup_rest, initial_dup_rest, argn);
return INT2FIX(argc);
}
else {
Expand All @@ -6754,7 +6775,7 @@ setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
else {
ret = INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
}
setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
setup_args_splat_mut(flag, dup_rest, initial_dup_rest, argn);
return ret;
}

Expand Down
34 changes: 30 additions & 4 deletions prism_compile.c
Original file line number Diff line number Diff line change
Expand Up @@ -1564,6 +1564,7 @@ pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_l
#define SPLATARRAY_FALSE 0
#define SPLATARRAY_TRUE 1
#define DUP_SINGLE_KW_SPLAT 2
#define MAYBE_UNNECESSARY_ALLOC_SPLAT 4

// This is details. Users should call pm_setup_args() instead.
static int
Expand Down Expand Up @@ -1613,6 +1614,14 @@ pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *b
PUSH_INSN1(ret, location, newhash, INT2FIX(0));
pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));

rb_category_warn(
RB_WARN_CATEGORY_PERFORMANCE,
"(Line %d) This method call implicitly allocates a potentially unnecessary hash for the keyword splat, " \
"because the block pass expression could cause an evaluation order issue if a hash is not " \
"allocated for the keyword splat. You can avoid this allocation by assigning the block pass " \
"expression to a local variable, and using that local variable.",
node_location->line);
}
else {
pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
Expand Down Expand Up @@ -1892,7 +1901,7 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
size_t arguments_size;

// Calls like foo(1, *f, **hash) that use splat and kwsplat could be
// eligible for eliding duping the rest array (dup_reset=false).
// eligible for eliding duping the rest array (dup_rest=false).
if (
arguments_node != NULL &&
(arguments = &arguments_node->arguments, arguments_size = arguments->size) >= 2 &&
Expand All @@ -1914,12 +1923,12 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
switch (PM_NODE_TYPE(element)) {
case PM_ASSOC_NODE: {
const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
break;
}
case PM_ASSOC_SPLAT_NODE: {
const pm_assoc_splat_node_t *assoc = (const pm_assoc_splat_node_t *) element;
if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE | MAYBE_UNNECESSARY_ALLOC_SPLAT;
break;
}
default:
Expand All @@ -1939,7 +1948,13 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
const pm_node_t *block_expr = ((const pm_block_argument_node_t *)block)->expression;

if (block_expr && pm_setup_args_dup_rest_p(block_expr)) {
dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
if (dup_rest == SPLATARRAY_FALSE ||
(arguments_node != NULL &&
!PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_MULTIPLE_SPLATS) &&
PM_NODE_TYPE_P(arguments->nodes[arguments_size - 1], PM_SPLAT_NODE))) {
dup_rest |= MAYBE_UNNECESSARY_ALLOC_SPLAT;
}
dup_rest |= SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
initial_dup_rest = dup_rest;
}

Expand Down Expand Up @@ -1977,6 +1992,17 @@ pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block,
// VM_CALL_ARGS_SPLAT_MUT flag.
if (*flags & VM_CALL_ARGS_SPLAT && dup_rest != initial_dup_rest) {
*flags |= VM_CALL_ARGS_SPLAT_MUT;

if (dup_rest & MAYBE_UNNECESSARY_ALLOC_SPLAT) {
rb_category_warn(
RB_WARN_CATEGORY_PERFORMANCE,
"(Line %d) This method call implicitly allocates a potentially unnecessary array for the positional splat, " \
"because a keyword, keyword splat, or block pass expression could cause an evaluation order issue " \
"if an array is not allocated for the positional splat. You can avoid this allocation by assigning " \
"the related keyword, keyword splat, or block pass expression to a local variable and using that " \
"local variable.",
node_location->line);
}
}

return argc;
Expand Down
63 changes: 63 additions & 0 deletions test/ruby/test_call.rb
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,69 @@ def o.foo(a, **h)= h[:splat_modified] = true
assert_equal({splat_modified: false}, b)
end

UNNECESSARY_POS_SPLAT_MESSAGE = "This method call implicitly allocates a potentially " \
"unnecessary array for the positional splat, because a keyword, keyword splat, or " \
"block pass expression could cause an evaluation order issue if an array is not " \
"allocated for the positional splat. You can avoid this allocation by assigning " \
"the related keyword, keyword splat, or block pass expression to a local variable " \
"and using that local variable."
def test_unnecessary_positional_splat_alloc_due_to_kw_warning
assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
$VERBOSE = false
Warning[:performance] = true
eval(<<-RUBY)
def self.kw = {}
def self.x(...) = nil
a = []
x(*a, kw:)
RUBY
INPUT
end

def test_unnecessary_positional_splat_alloc_due_to_kw_splat_warning
assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
$VERBOSE = false
Warning[:performance] = true
eval(<<-RUBY)
def self.kw = {}
def self.x(...) = nil
a = []
x(*a, **kw)
RUBY
INPUT
end

def test_unnecessary_positional_splat_alloc_due_to_block_warning
assert_in_out_err([], <<-INPUT, %w(), Regexp.new(UNNECESSARY_POS_SPLAT_MESSAGE))
$VERBOSE = false
Warning[:performance] = true
eval(<<-RUBY)
def self.kw = {}
def self.x(...) = nil
a = []
x(*a, &kw)
RUBY
INPUT
end

def test_unnecessary_keyword_splat_alloc_due_to_block_warning
message = "This method call implicitly allocates a potentially " \
"unnecessary hash for the keyword splat, because the block pass expression could " \
"cause an evaluation order issue if a hash is not allocated for the keyword splat. " \
"You can avoid this allocation by assigning the block pass expression to a local " \
"variable, and using that local variable."
assert_in_out_err([], <<-INPUT, %w(), Regexp.new(message))
$VERBOSE = false
Warning[:performance] = true
eval(<<-RUBY)
def self.kw = {}
def self.x(...) = nil
h = {}
x(**h, &kw)
RUBY
INPUT
end

def test_anon_splat
r2kh = Hash.ruby2_keywords_hash(kw: 2)
r2kea = [r2kh]
Expand Down
Loading