1// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/compiler/frontend/kernel_to_il.h"
6
7#include <utility>
8
9#include "platform/assert.h"
10#include "platform/globals.h"
11#include "vm/class_id.h"
12#include "vm/compiler/aot/precompiler.h"
13#include "vm/compiler/backend/flow_graph_compiler.h"
14#include "vm/compiler/backend/il.h"
15#include "vm/compiler/backend/il_printer.h"
16#include "vm/compiler/backend/locations.h"
17#include "vm/compiler/backend/range_analysis.h"
18#include "vm/compiler/ffi/abi.h"
19#include "vm/compiler/ffi/marshaller.h"
20#include "vm/compiler/ffi/native_calling_convention.h"
21#include "vm/compiler/ffi/native_type.h"
22#include "vm/compiler/ffi/recognized_method.h"
23#include "vm/compiler/frontend/kernel_binary_flowgraph.h"
24#include "vm/compiler/frontend/kernel_translation_helper.h"
25#include "vm/compiler/frontend/prologue_builder.h"
26#include "vm/compiler/jit/compiler.h"
27#include "vm/compiler/runtime_api.h"
28#include "vm/kernel_isolate.h"
29#include "vm/kernel_loader.h"
30#include "vm/log.h"
31#include "vm/longjump.h"
32#include "vm/native_entry.h"
33#include "vm/object_store.h"
34#include "vm/report.h"
35#include "vm/resolver.h"
36#include "vm/runtime_entry.h"
37#include "vm/scopes.h"
38#include "vm/stack_frame.h"
39#include "vm/symbols.h"
40
41namespace dart {
42
// Diagnostic flag: when a method exceeds FLAG_huge_method_cutoff_in_ast_nodes,
// BuildGraph (below) prints a warning describing the affected method.
DEFINE_FLAG(bool,
            print_huge_methods,
            false,
            "Print huge methods (less optimized)");

// Testing/debugging knob that overrides the automatic choice of switch
// dispatch strategy; accepted values are listed in the help text.
DEFINE_FLAG(int,
            force_switch_dispatch_type,
            -1,
            "Force switch statements to use a particular dispatch type: "
            "-1=auto, 0=linear scan, 1=binary search, 2=jump table");
53
54namespace kernel {
55
56#define Z (zone_)
57#define H (translation_helper_)
58#define T (type_translator_)
59#define I Isolate::Current()
60#define IG IsolateGroup::Current()
61
62FlowGraphBuilder::FlowGraphBuilder(
63 ParsedFunction* parsed_function,
64 ZoneGrowableArray<const ICData*>* ic_data_array,
65 ZoneGrowableArray<intptr_t>* context_level_array,
66 InlineExitCollector* exit_collector,
67 bool optimizing,
68 intptr_t osr_id,
69 intptr_t first_block_id,
70 bool inlining_unchecked_entry)
71 : BaseFlowGraphBuilder(parsed_function,
72 first_block_id - 1,
73 osr_id,
74 context_level_array,
75 exit_collector,
76 inlining_unchecked_entry),
77 translation_helper_(Thread::Current()),
78 thread_(translation_helper_.thread()),
79 zone_(translation_helper_.zone()),
80 parsed_function_(parsed_function),
81 optimizing_(optimizing),
82 ic_data_array_(*ic_data_array),
83 next_function_id_(0),
84 loop_depth_(0),
85 try_depth_(0),
86 catch_depth_(0),
87 for_in_depth_(0),
88 block_expression_depth_(0),
89 graph_entry_(nullptr),
90 scopes_(nullptr),
91 breakable_block_(nullptr),
92 switch_block_(nullptr),
93 try_catch_block_(nullptr),
94 try_finally_block_(nullptr),
95 catch_block_(nullptr),
96 prepend_type_arguments_(Function::ZoneHandle(zone: zone_)),
97 throw_new_null_assertion_(Function::ZoneHandle(zone: zone_)) {
98 const auto& info = KernelProgramInfo::Handle(
99 Z, ptr: parsed_function->function().KernelProgramInfo());
100 H.InitFromKernelProgramInfo(info);
101}
102
103FlowGraphBuilder::~FlowGraphBuilder() {}
104
105Fragment FlowGraphBuilder::EnterScope(
106 intptr_t kernel_offset,
107 const LocalScope** context_scope /* = nullptr */) {
108 Fragment instructions;
109 const LocalScope* scope = scopes_->scopes.Lookup(key: kernel_offset);
110 if (scope->num_context_variables() > 0) {
111 instructions += PushContext(scope);
112 instructions += Drop();
113 }
114 if (context_scope != nullptr) {
115 *context_scope = scope;
116 }
117 return instructions;
118}
119
120Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
121 Fragment instructions;
122 const intptr_t context_size =
123 scopes_->scopes.Lookup(key: kernel_offset)->num_context_variables();
124 if (context_size > 0) {
125 instructions += PopContext();
126 }
127 return instructions;
128}
129
130Fragment FlowGraphBuilder::AdjustContextTo(int depth) {
131 ASSERT(depth <= context_depth_ && depth >= 0);
132 Fragment instructions;
133 if (depth < context_depth_) {
134 instructions += LoadContextAt(depth);
135 instructions += StoreLocal(TokenPosition::kNoSource,
136 parsed_function_->current_context_var());
137 instructions += Drop();
138 context_depth_ = depth;
139 }
140 return instructions;
141}
142
143Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
144 ASSERT(scope->num_context_variables() > 0);
145 Fragment instructions = AllocateContext(scope->context_slots());
146 LocalVariable* context = MakeTemporary();
147 instructions += LoadLocal(variable: context);
148 instructions += LoadLocal(variable: parsed_function_->current_context_var());
149 instructions += StoreNativeField(Slot::Context_parent(),
150 StoreFieldInstr::Kind::kInitializing);
151 instructions += StoreLocal(TokenPosition::kNoSource,
152 parsed_function_->current_context_var());
153 ++context_depth_;
154 return instructions;
155}
156
157Fragment FlowGraphBuilder::PopContext() {
158 return AdjustContextTo(depth: context_depth_ - 1);
159}
160
161Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
162 // TODO(27590): We could use `active_class_->IsGeneric()`.
163 Fragment instructions;
164 if (scopes_ != nullptr && scopes_->type_arguments_variable != nullptr) {
165#ifdef DEBUG
166 Function& function =
167 Function::Handle(Z, ptr: parsed_function_->function().ptr());
168 while (function.IsClosureFunction()) {
169 function = function.parent_function();
170 }
171 ASSERT(function.IsFactory());
172#endif
173 instructions += LoadLocal(variable: scopes_->type_arguments_variable);
174 } else if (parsed_function_->has_receiver_var() &&
175 active_class_.ClassNumTypeArguments() > 0) {
176 ASSERT(!parsed_function_->function().IsFactory());
177 instructions += LoadLocal(variable: parsed_function_->receiver_var());
178 instructions += LoadNativeField(
179 Slot::GetTypeArgumentsSlotFor(thread: thread_, cls: *active_class_.klass));
180 } else {
181 instructions += NullConstant();
182 }
183 return instructions;
184}
185
186// This function is responsible for pushing a type arguments vector which
187// contains all type arguments of enclosing functions prepended to the type
188// arguments of the current function.
189Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
190 Fragment instructions;
191
192 const Function& function = parsed_function_->function();
193
194 if (function.IsGeneric() || function.HasGenericParent()) {
195 ASSERT(parsed_function_->function_type_arguments() != nullptr);
196 instructions += LoadLocal(variable: parsed_function_->function_type_arguments());
197 } else {
198 instructions += NullConstant();
199 }
200
201 return instructions;
202}
203
204Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
205 const TypeArguments& type_arguments) {
206 Fragment instructions;
207
208 if (type_arguments.IsNull() || type_arguments.IsInstantiated()) {
209 // There are no type references to type parameters so we can just take it.
210 instructions += Constant(type_arguments);
211 } else {
212 // The [type_arguments] vector contains a type reference to a type
213 // parameter we need to resolve it.
214 if (type_arguments.CanShareInstantiatorTypeArguments(
215 instantiator_class: *active_class_.klass)) {
216 // If the instantiator type arguments are just passed on, we don't need to
217 // resolve the type parameters.
218 //
219 // This is for example the case here:
220 // class Foo<T> {
221 // newList() => new List<T>();
222 // }
223 // We just use the type argument vector from the [Foo] object and pass it
224 // directly to the `new List<T>()` factory constructor.
225 instructions += LoadInstantiatorTypeArguments();
226 } else if (type_arguments.CanShareFunctionTypeArguments(
227 function: parsed_function_->function())) {
228 instructions += LoadFunctionTypeArguments();
229 } else {
230 // Otherwise we need to resolve [TypeParameterType]s in the type
231 // expression based on the current instantiator type argument vector.
232 if (!type_arguments.IsInstantiated(genericity: kCurrentClass)) {
233 instructions += LoadInstantiatorTypeArguments();
234 } else {
235 instructions += NullConstant();
236 }
237 if (!type_arguments.IsInstantiated(genericity: kFunctions)) {
238 instructions += LoadFunctionTypeArguments();
239 } else {
240 instructions += NullConstant();
241 }
242 instructions += InstantiateTypeArguments(type_arguments);
243 }
244 }
245 return instructions;
246}
247
248Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
249 intptr_t handler_index,
250 bool needs_stacktrace,
251 bool is_synthesized) {
252 LocalVariable* exception_var = CurrentException();
253 LocalVariable* stacktrace_var = CurrentStackTrace();
254 LocalVariable* raw_exception_var = CurrentRawException();
255 LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();
256
257 CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
258 is_synthesized, // whether catch block was synthesized by FE compiler
259 AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
260 handler_index, needs_stacktrace, GetNextDeoptId(), exception_var,
261 stacktrace_var, raw_exception_var, raw_stacktrace_var);
262 graph_entry_->AddCatchEntry(entry);
263
264 Fragment instructions(entry);
265
266 // Auxiliary variables introduced by the try catch can be captured if we are
267 // inside a function with yield/resume points. In this case we first need
268 // to restore the context to match the context at entry into the closure.
269 const bool should_restore_closure_context =
270 CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
271 LocalVariable* context_variable = parsed_function_->current_context_var();
272 if (should_restore_closure_context) {
273 ASSERT(parsed_function_->function().IsClosureFunction());
274
275 LocalVariable* closure_parameter = parsed_function_->ParameterVariable(i: 0);
276 ASSERT(!closure_parameter->is_captured());
277 instructions += LoadLocal(variable: closure_parameter);
278 instructions += LoadNativeField(Slot::Closure_context());
279 instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
280 instructions += Drop();
281 }
282
283 if (exception_var->is_captured()) {
284 instructions += LoadLocal(variable: context_variable);
285 instructions += LoadLocal(variable: raw_exception_var);
286 instructions += StoreNativeField(
287 Slot::GetContextVariableSlotFor(thread: thread_, var: *exception_var));
288 }
289 if (stacktrace_var->is_captured()) {
290 instructions += LoadLocal(variable: context_variable);
291 instructions += LoadLocal(variable: raw_stacktrace_var);
292 instructions += StoreNativeField(
293 Slot::GetContextVariableSlotFor(thread: thread_, var: *stacktrace_var));
294 }
295
296 // :saved_try_context_var can be captured in the context of
297 // of the closure, in this case CatchBlockEntryInstr restores
298 // :current_context_var to point to closure context in the
299 // same way as normal function prologue does.
300 // Update current context depth to reflect that.
301 const intptr_t saved_context_depth = context_depth_;
302 ASSERT(!CurrentCatchContext()->is_captured() ||
303 CurrentCatchContext()->owner()->context_level() == 0);
304 context_depth_ = 0;
305 instructions += LoadLocal(variable: CurrentCatchContext());
306 instructions += StoreLocal(TokenPosition::kNoSource,
307 parsed_function_->current_context_var());
308 instructions += Drop();
309 context_depth_ = saved_context_depth;
310
311 return instructions;
312}
313
314Fragment FlowGraphBuilder::TryCatch(int try_handler_index) {
315 // The body of the try needs to have it's own block in order to get a new try
316 // index.
317 //
318 // => We therefore create a block for the body (fresh try index) and another
319 // join block (with current try index).
320 Fragment body;
321 JoinEntryInstr* entry = BuildJoinEntry(try_handler_index);
322 body += LoadLocal(variable: parsed_function_->current_context_var());
323 body += StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
324 body += Drop();
325 body += Goto(entry);
326 return Fragment(body.entry, entry);
327}
328
// Emits the prologue stack-overflow check; asserted to be emitted only
// outside of any loop (loop checks are handled separately).
Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
    TokenPosition position) {
  ASSERT(loop_depth_ == 0);
  return BaseFlowGraphBuilder::CheckStackOverflowInPrologue(position);
}
334
335Fragment FlowGraphBuilder::CloneContext(
336 const ZoneGrowableArray<const Slot*>& context_slots) {
337 LocalVariable* context_variable = parsed_function_->current_context_var();
338
339 Fragment instructions = LoadLocal(variable: context_variable);
340
341 CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
342 InstructionSource(), Pop(), context_slots, GetNextDeoptId());
343 instructions <<= clone_instruction;
344 Push(clone_instruction);
345
346 instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
347 instructions += Drop();
348 return instructions;
349}
350
351Fragment FlowGraphBuilder::InstanceCall(
352 TokenPosition position,
353 const String& name,
354 Token::Kind kind,
355 intptr_t type_args_len,
356 intptr_t argument_count,
357 const Array& argument_names,
358 intptr_t checked_argument_count,
359 const Function& interface_target,
360 const Function& tearoff_interface_target,
361 const InferredTypeMetadata* result_type,
362 bool use_unchecked_entry,
363 const CallSiteAttributesMetadata* call_site_attrs,
364 bool receiver_is_not_smi,
365 bool is_call_on_this) {
366 const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
367 InputsArray arguments = GetArguments(total_count);
368 InstanceCallInstr* call = new (Z) InstanceCallInstr(
369 InstructionSource(position), name, kind, std::move(arguments),
370 type_args_len, argument_names, checked_argument_count, ic_data_array_,
371 GetNextDeoptId(), interface_target, tearoff_interface_target);
372 if ((result_type != nullptr) && !result_type->IsTrivial()) {
373 call->SetResultType(Z, result_type->ToCompileType(Z));
374 }
375 if (use_unchecked_entry) {
376 call->set_entry_kind(Code::EntryKind::kUnchecked);
377 }
378 if (is_call_on_this) {
379 call->mark_as_call_on_this();
380 }
381 if (call_site_attrs != nullptr && call_site_attrs->receiver_type != nullptr &&
382 call_site_attrs->receiver_type->IsInstantiated()) {
383 call->set_receivers_static_type(call_site_attrs->receiver_type);
384 } else if (!interface_target.IsNull()) {
385 const Class& owner = Class::Handle(Z, ptr: interface_target.Owner());
386 const AbstractType& type =
387 AbstractType::ZoneHandle(Z, ptr: owner.DeclarationType());
388 call->set_receivers_static_type(&type);
389 }
390 call->set_receiver_is_not_smi(receiver_is_not_smi);
391 Push(call);
392 if (result_type != nullptr && result_type->IsConstant()) {
393 Fragment instructions(call);
394 instructions += Drop();
395 instructions += Constant(result_type->constant_value);
396 return instructions;
397 }
398 return Fragment(call);
399}
400
401Fragment FlowGraphBuilder::FfiCall(
402 const compiler::ffi::CallMarshaller& marshaller) {
403 Fragment body;
404
405 FfiCallInstr* const call = new (Z) FfiCallInstr(
406 GetNextDeoptId(), marshaller, parsed_function_->function().FfiIsLeaf());
407
408 for (intptr_t i = call->InputCount() - 1; i >= 0; --i) {
409 call->SetInputAt(i, value: Pop());
410 }
411
412 Push(call);
413 body <<= call;
414
415 return body;
416}
417
418Fragment FlowGraphBuilder::CCall(
419 const compiler::ffi::NativeCallingConvention& native_calling_convention) {
420 Fragment body;
421
422 const intptr_t num_arguments =
423 native_calling_convention.argument_locations().length() + 1;
424 InputsArray arguments(num_arguments);
425 arguments.FillWith(nullptr, 0, num_arguments);
426 for (intptr_t i = num_arguments - 1; i >= 0; --i) {
427 arguments[i] = Pop();
428 }
429 auto* const call =
430 new (Z) CCallInstr(native_calling_convention, std::move(arguments));
431
432 Push(definition: call);
433 body <<= call;
434
435 return body;
436}
437
438Fragment FlowGraphBuilder::CCall(intptr_t num_arguments,
439 Representation representation) {
440 const auto& native_function_type =
441 *compiler::ffi::NativeFunctionType::FromUnboxedRepresentation(
442 Z, num_arguments, representation);
443 const auto& native_calling_convention =
444 compiler::ffi::NativeCallingConvention::FromSignature(
445 Z, signature: native_function_type);
446 return CCall(native_calling_convention);
447}
448
449Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
450 int catch_try_index) {
451 Fragment instructions;
452 Value* stacktrace = Pop();
453 Value* exception = Pop();
454 instructions += Fragment(new (Z) ReThrowInstr(
455 InstructionSource(position), catch_try_index,
456 GetNextDeoptId(), exception, stacktrace))
457 .closed();
458 // Use its side effect of leaving a constant on the stack (does not change
459 // the graph).
460 NullConstant();
461
462 return instructions;
463}
464
465Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
466 // Captured 'this' is immutable, so within the outer method we don't need to
467 // load it from the context.
468 const ParsedFunction* pf = parsed_function_;
469 if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
470 variable == pf->receiver_var()) {
471 ASSERT(variable == pf->ParameterVariable(0));
472 variable = pf->RawParameterVariable(i: 0);
473 }
474 if (variable->is_captured()) {
475 Fragment instructions;
476 instructions += LoadContextAt(variable->owner()->context_level());
477 instructions +=
478 LoadNativeField(Slot::GetContextVariableSlotFor(thread: thread_, var: *variable));
479 return instructions;
480 } else {
481 return BaseFlowGraphBuilder::LoadLocal(variable);
482 }
483}
484
485IndirectGotoInstr* FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
486 Value* index = Pop();
487 return new (Z) IndirectGotoInstr(target_count, index);
488}
489
490Fragment FlowGraphBuilder::ThrowLateInitializationError(
491 TokenPosition position,
492 const char* throw_method_name,
493 const String& name) {
494 const Class& klass =
495 Class::ZoneHandle(Z, ptr: Library::LookupCoreClass(class_name: Symbols::LateError()));
496 ASSERT(!klass.IsNull());
497
498 const auto& error = klass.EnsureIsFinalized(thread: thread_);
499 ASSERT(error == Error::null());
500 const Function& throw_new =
501 Function::ZoneHandle(Z, ptr: klass.LookupStaticFunctionAllowPrivate(
502 H.DartSymbolObfuscate(content: throw_method_name)));
503 ASSERT(!throw_new.IsNull());
504
505 Fragment instructions;
506
507 // Call LateError._throwFoo.
508 instructions += Constant(name);
509 instructions += StaticCall(position, target: throw_new,
510 /* argument_count = */ 1, rebind_rule: ICData::kStatic);
511 instructions += Drop();
512
513 return instructions;
514}
515
516Fragment FlowGraphBuilder::StoreLateField(const Field& field,
517 LocalVariable* instance,
518 LocalVariable* setter_value) {
519 Fragment instructions;
520 TargetEntryInstr* is_uninitialized;
521 TargetEntryInstr* is_initialized;
522 const TokenPosition position = field.token_pos();
523 const bool is_static = field.is_static();
524 const bool is_final = field.is_final();
525
526 if (is_final) {
527 // Check whether the field has been initialized already.
528 if (is_static) {
529 instructions += LoadStaticField(field, /*calls_initializer=*/false);
530 } else {
531 instructions += LoadLocal(variable: instance);
532 instructions += LoadField(field, /*calls_initializer=*/false);
533 }
534 instructions += Constant(Object::sentinel());
535 instructions += BranchIfStrictEqual(&is_uninitialized, &is_initialized);
536 JoinEntryInstr* join = BuildJoinEntry();
537
538 {
539 // If the field isn't initialized, do nothing.
540 Fragment initialize(is_uninitialized);
541 initialize += Goto(join);
542 }
543
544 {
545 // If the field is already initialized, throw a LateInitializationError.
546 Fragment already_initialized(is_initialized);
547 already_initialized += ThrowLateInitializationError(
548 position, throw_method_name: "_throwFieldAlreadyInitialized",
549 name: String::ZoneHandle(Z, ptr: field.name()));
550 already_initialized += Goto(join);
551 }
552
553 instructions = Fragment(instructions.entry, join);
554 }
555
556 if (!is_static) {
557 instructions += LoadLocal(variable: instance);
558 }
559 instructions += LoadLocal(variable: setter_value);
560 if (is_static) {
561 instructions += StoreStaticField(position, field);
562 } else {
563 instructions += StoreFieldGuarded(field);
564 }
565
566 return instructions;
567}
568
569Fragment FlowGraphBuilder::NativeCall(const String& name,
570 const Function& function) {
571 InlineBailout("kernel::FlowGraphBuilder::NativeCall");
572 // +1 for result placeholder.
573 const intptr_t num_args =
574 function.NumParameters() + (function.IsGeneric() ? 1 : 0) + 1;
575
576 Fragment instructions;
577 instructions += NullConstant(); // Placeholder for the result.
578
579 InputsArray arguments = GetArguments(num_args);
580 NativeCallInstr* call = new (Z) NativeCallInstr(
581 name, function, FLAG_link_natives_lazily,
582 InstructionSource(function.end_token_pos()), std::move(arguments));
583 Push(call);
584 instructions <<= call;
585 return instructions;
586}
587
588Fragment FlowGraphBuilder::Return(TokenPosition position,
589 bool omit_result_type_check) {
590 Fragment instructions;
591 const Function& function = parsed_function_->function();
592
593 // Emit a type check of the return type in checked mode for all functions
594 // and in strong mode for native functions.
595 if (!omit_result_type_check && function.is_native()) {
596 const AbstractType& return_type =
597 AbstractType::Handle(Z, ptr: function.result_type());
598 instructions += CheckAssignable(dst_type: return_type, dst_name: Symbols::FunctionResult());
599 }
600
601 if (NeedsDebugStepCheck(function, position)) {
602 instructions += DebugStepCheck(position);
603 }
604
605 instructions += BaseFlowGraphBuilder::Return(position);
606
607 return instructions;
608}
609
610Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
611 const Function& target,
612 intptr_t argument_count,
613 ICData::RebindRule rebind_rule) {
614 return StaticCall(position, target, argument_count, argument_names: Array::null_array(),
615 rebind_rule);
616}
617
618void FlowGraphBuilder::SetResultTypeForStaticCall(
619 StaticCallInstr* call,
620 const Function& target,
621 intptr_t argument_count,
622 const InferredTypeMetadata* result_type) {
623 if (call->InitResultType(Z)) {
624 ASSERT((result_type == nullptr) || (result_type->cid == kDynamicCid) ||
625 (result_type->cid == call->result_cid()));
626 return;
627 }
628 if ((result_type != nullptr) && !result_type->IsTrivial()) {
629 call->SetResultType(Z, new_type: result_type->ToCompileType(Z));
630 }
631}
632
633Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
634 const Function& target,
635 intptr_t argument_count,
636 const Array& argument_names,
637 ICData::RebindRule rebind_rule,
638 const InferredTypeMetadata* result_type,
639 intptr_t type_args_count,
640 bool use_unchecked_entry) {
641 const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
642 InputsArray arguments = GetArguments(total_count);
643 StaticCallInstr* call = new (Z) StaticCallInstr(
644 InstructionSource(position), target, type_args_count, argument_names,
645 std::move(arguments), ic_data_array_, GetNextDeoptId(), rebind_rule);
646 SetResultTypeForStaticCall(call, target, argument_count, result_type);
647 if (use_unchecked_entry) {
648 call->set_entry_kind(Code::EntryKind::kUnchecked);
649 }
650 Push(call);
651 if (result_type != nullptr && result_type->IsConstant()) {
652 Fragment instructions(call);
653 instructions += Drop();
654 instructions += Constant(result_type->constant_value);
655 return instructions;
656 }
657 return Fragment(call);
658}
659
660Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
661 Fragment instructions;
662 instructions += StaticCall(
663 position, target: CompilerState::Current().StringBaseInterpolateSingle(),
664 /* argument_count = */ 1, rebind_rule: ICData::kStatic);
665 return instructions;
666}
667
668Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
669 Fragment instructions;
670 instructions +=
671 StaticCall(position, target: CompilerState::Current().StringBaseInterpolate(),
672 /* argument_count = */ 1, rebind_rule: ICData::kStatic);
673 return instructions;
674}
675
676Fragment FlowGraphBuilder::ThrowTypeError() {
677 const Class& klass =
678 Class::ZoneHandle(Z, ptr: Library::LookupCoreClass(class_name: Symbols::TypeError()));
679 ASSERT(!klass.IsNull());
680 GrowableHandlePtrArray<const String> pieces(Z, 3);
681 pieces.Add(Symbols::TypeError());
682 pieces.Add(Symbols::Dot());
683 pieces.Add(H.DartSymbolObfuscate(content: "_create"));
684
685 const Function& constructor = Function::ZoneHandle(
686 Z, ptr: klass.LookupConstructorAllowPrivate(
687 name: String::ZoneHandle(Z, ptr: Symbols::FromConcatAll(thread: thread_, strs: pieces))));
688 ASSERT(!constructor.IsNull());
689
690 const String& url = H.DartString(
691 content: parsed_function_->function().ToLibNamePrefixedQualifiedCString(),
692 space: Heap::kOld);
693
694 Fragment instructions;
695
696 // Create instance of _TypeError
697 instructions += AllocateObject(TokenPosition::kNoSource, klass, 0);
698 LocalVariable* instance = MakeTemporary();
699
700 // Call _TypeError._create constructor.
701 instructions += LoadLocal(variable: instance); // this
702 instructions += Constant(url); // url
703 instructions += NullConstant(); // line
704 instructions += IntConstant(0); // column
705 instructions += Constant(H.DartSymbolPlain(content: "Malformed type.")); // message
706
707 instructions += StaticCall(position: TokenPosition::kNoSource, target: constructor,
708 /* argument_count = */ 5, rebind_rule: ICData::kStatic);
709 instructions += Drop();
710
711 // Throw the exception
712 instructions += ThrowException(TokenPosition::kNoSource);
713
714 return instructions;
715}
716
717Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
718 const Function& target,
719 bool incompatible_arguments,
720 bool receiver_pushed) {
721 const Class& owner = Class::Handle(Z, ptr: target.Owner());
722 auto& receiver = Instance::ZoneHandle();
723 InvocationMirror::Kind kind = InvocationMirror::Kind::kMethod;
724 if (target.IsImplicitGetterFunction() || target.IsGetterFunction() ||
725 target.IsRecordFieldGetter()) {
726 kind = InvocationMirror::kGetter;
727 } else if (target.IsImplicitSetterFunction() || target.IsSetterFunction()) {
728 kind = InvocationMirror::kSetter;
729 }
730 InvocationMirror::Level level;
731 if (owner.IsTopLevel()) {
732 if (incompatible_arguments) {
733 receiver = target.UserVisibleSignature();
734 }
735 level = InvocationMirror::Level::kTopLevel;
736 } else {
737 receiver = owner.RareType();
738 if (target.kind() == UntaggedFunction::kConstructor) {
739 level = InvocationMirror::Level::kConstructor;
740 } else if (target.IsRecordFieldGetter()) {
741 level = InvocationMirror::Level::kDynamic;
742 } else {
743 level = InvocationMirror::Level::kStatic;
744 }
745 }
746
747 Fragment instructions;
748 if (!receiver_pushed) {
749 instructions += Constant(receiver); // receiver
750 }
751 instructions +=
752 ThrowNoSuchMethodError(position, selector: String::ZoneHandle(Z, ptr: target.name()),
753 level, kind, /*receiver_pushed*/ true);
754 return instructions;
755}
756
757Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
758 const String& selector,
759 InvocationMirror::Level level,
760 InvocationMirror::Kind kind,
761 bool receiver_pushed) {
762 const Class& klass = Class::ZoneHandle(
763 Z, ptr: Library::LookupCoreClass(class_name: Symbols::NoSuchMethodError()));
764 ASSERT(!klass.IsNull());
765 const auto& error = klass.EnsureIsFinalized(H.thread());
766 ASSERT(error == Error::null());
767 const Function& throw_function = Function::ZoneHandle(
768 Z, ptr: klass.LookupStaticFunctionAllowPrivate(name: Symbols::ThrowNew()));
769 ASSERT(!throw_function.IsNull());
770
771 Fragment instructions;
772 if (!receiver_pushed) {
773 instructions += NullConstant(); // receiver
774 }
775 instructions += Constant(selector);
776 instructions += IntConstant(InvocationMirror::EncodeType(level, kind));
777 instructions += IntConstant(0); // type arguments length
778 instructions += NullConstant(); // type arguments
779 instructions += NullConstant(); // arguments
780 instructions += NullConstant(); // argumentNames
781 instructions += StaticCall(position, target: throw_function, /* argument_count = */ 7,
782 rebind_rule: ICData::kNoRebind);
783 return instructions;
784}
785
786LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
787 LocalVariable* local = scopes_->locals.Lookup(key: kernel_offset);
788 ASSERT(local != nullptr);
789 ASSERT(local->kernel_offset() == kernel_offset);
790 return local;
791}
792
793FlowGraph* FlowGraphBuilder::BuildGraph() {
794 const Function& function = parsed_function_->function();
795
796#ifdef DEBUG
797 // Check that all functions that are explicitly marked as recognized with the
798 // vm:recognized annotation are in fact recognized. The check can't be done on
799 // function creation, since the recognized status isn't set until later.
800 if ((function.IsRecognized() !=
801 MethodRecognizer::IsMarkedAsRecognized(function)) &&
802 !function.IsDynamicInvocationForwarder()) {
803 if (function.IsRecognized()) {
804 FATAL("Recognized method %s is not marked with the vm:recognized pragma.",
805 function.ToQualifiedCString());
806 } else {
807 FATAL("Non-recognized method %s is marked with the vm:recognized pragma.",
808 function.ToQualifiedCString());
809 }
810 }
811#endif
812
813 auto& kernel_data = TypedDataView::Handle(Z, ptr: function.KernelLibrary());
814 intptr_t kernel_data_program_offset = function.KernelLibraryOffset();
815
816 StreamingFlowGraphBuilder streaming_flow_graph_builder(
817 this, kernel_data, kernel_data_program_offset);
818 auto result = streaming_flow_graph_builder.BuildGraph();
819
820 FinalizeCoverageArray();
821 result->set_coverage_array(coverage_array());
822
823 if (streaming_flow_graph_builder.num_ast_nodes() >
824 FLAG_huge_method_cutoff_in_ast_nodes) {
825 if (FLAG_print_huge_methods) {
826 OS::PrintErr(
827 format: "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
828 "been "
829 "disabled, and the compiler might run out of memory. "
830 "Consider refactoring this code into smaller components.\n",
831 function.QualifiedUserVisibleNameCString(),
832 String::Handle(Z, ptr: Library::Handle(
833 Z, ptr: Class::Handle(Z, ptr: function.Owner()).library())
834 .url())
835 .ToCString());
836 }
837 result->mark_huge_method();
838 }
839
840 return result;
841}
842
843Fragment FlowGraphBuilder::NativeFunctionBody(const Function& function,
844 LocalVariable* first_parameter) {
845 ASSERT(function.is_native());
846 ASSERT(!IsRecognizedMethodForFlowGraph(function));
847
848 Fragment body;
849 String& name = String::ZoneHandle(Z, ptr: function.native_name());
850 if (function.IsGeneric()) {
851 body += LoadLocal(variable: parsed_function_->RawTypeArgumentsVariable());
852 }
853 for (intptr_t i = 0; i < function.NumParameters(); ++i) {
854 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i));
855 }
856 body += NativeCall(name, function);
857 // We typecheck results of native calls for type safety.
858 body +=
859 Return(position: TokenPosition::kNoSource, /* omit_result_type_check = */ false);
860 return body;
861}
862
// X-macro table of recognized getter methods that compile down to a single
// native-field load: V(recognized-method entry, slot of the loaded field).
// (Comments cannot appear inside the macro itself — they would break the
// line continuations.)
#define LOAD_NATIVE_FIELD(V)                                                   \
  V(ByteDataViewLength, TypedDataBase_length)                                  \
  V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                  \
  V(ByteDataViewTypedData, TypedDataView_typed_data)                           \
  V(Finalizer_getCallback, Finalizer_callback)                                 \
  V(FinalizerBase_getAllEntries, FinalizerBase_all_entries)                    \
  V(FinalizerBase_getDetachments, FinalizerBase_detachments)                   \
  V(FinalizerEntry_getDetach, FinalizerEntry_detach)                           \
  V(FinalizerEntry_getNext, FinalizerEntry_next)                               \
  V(FinalizerEntry_getToken, FinalizerEntry_token)                             \
  V(FinalizerEntry_getValue, FinalizerEntry_value)                             \
  V(NativeFinalizer_getCallback, NativeFinalizer_callback)                     \
  V(GrowableArrayLength, GrowableObjectArray_length)                           \
  V(ReceivePort_getSendPort, ReceivePort_send_port)                            \
  V(ReceivePort_getHandler, ReceivePort_handler)                               \
  V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data)             \
  V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index)           \
  V(LinkedHashBase_getData, LinkedHashBase_data)                               \
  V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys)                \
  V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask)                      \
  V(LinkedHashBase_getIndex, LinkedHashBase_index)                             \
  V(LinkedHashBase_getUsedData, LinkedHashBase_used_data)                      \
  V(ObjectArrayLength, Array_length)                                           \
  V(Record_shape, Record_shape)                                                \
  V(SuspendState_getFunctionData, SuspendState_function_data)                  \
  V(SuspendState_getThenCallback, SuspendState_then_callback)                  \
  V(SuspendState_getErrorCallback, SuspendState_error_callback)                \
  V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                 \
  V(TypedDataViewTypedData, TypedDataView_typed_data)                          \
  V(TypedListBaseLength, TypedDataBase_length)                                 \
  V(WeakProperty_getKey, WeakProperty_key)                                     \
  V(WeakProperty_getValue, WeakProperty_value)                                 \
  V(WeakReference_getTarget, WeakReference_target)
896
897#define STORE_NATIVE_FIELD(V) \
898 V(Finalizer_setCallback, Finalizer_callback) \
899 V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
900 V(FinalizerBase_setDetachments, FinalizerBase_detachments) \
901 V(FinalizerEntry_setToken, FinalizerEntry_token) \
902 V(NativeFinalizer_setCallback, NativeFinalizer_callback) \
903 V(ReceivePort_setHandler, ReceivePort_handler) \
904 V(LinkedHashBase_setData, LinkedHashBase_data) \
905 V(LinkedHashBase_setIndex, LinkedHashBase_index) \
906 V(SuspendState_setFunctionData, SuspendState_function_data) \
907 V(SuspendState_setThenCallback, SuspendState_then_callback) \
908 V(SuspendState_setErrorCallback, SuspendState_error_callback) \
909 V(WeakProperty_setKey, WeakProperty_key) \
910 V(WeakProperty_setValue, WeakProperty_value) \
911 V(WeakReference_setTarget, WeakReference_target)
912
913#define STORE_NATIVE_FIELD_NO_BARRIER(V) \
914 V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys) \
915 V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask) \
916 V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)
917
// Returns whether [function] is a recognized method whose flow graph is
// built directly by BuildGraphOfRecognizedMethod (bypassing the kernel
// body). Keep this switch in sync with the one in
// BuildGraphOfRecognizedMethod.
bool FlowGraphBuilder::IsRecognizedMethodForFlowGraph(
    const Function& function) {
  const MethodRecognizer::Kind kind = function.recognized_kind();

  switch (kind) {
    // These methods are supported unconditionally.
    case MethodRecognizer::kRecord_fieldAt:
    case MethodRecognizer::kRecord_fieldNames:
    case MethodRecognizer::kRecord_numFields:
    case MethodRecognizer::kSuspendState_clone:
    case MethodRecognizer::kSuspendState_resume:
    // Typed data view factories.
    case MethodRecognizer::kTypedData_ByteDataView_factory:
    case MethodRecognizer::kTypedData_Int8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_Int16ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
    case MethodRecognizer::kTypedData_Int64ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
    // Unmodifiable typed data view factories.
    case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
    // Typed data array factories.
    case MethodRecognizer::kTypedData_Int8Array_factory:
    case MethodRecognizer::kTypedData_Uint8Array_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
    case MethodRecognizer::kTypedData_Int16Array_factory:
    case MethodRecognizer::kTypedData_Uint16Array_factory:
    case MethodRecognizer::kTypedData_Int32Array_factory:
    case MethodRecognizer::kTypedData_Uint32Array_factory:
    case MethodRecognizer::kTypedData_Int64Array_factory:
    case MethodRecognizer::kTypedData_Uint64Array_factory:
    case MethodRecognizer::kTypedData_Float32Array_factory:
    case MethodRecognizer::kTypedData_Float64Array_factory:
    case MethodRecognizer::kTypedData_Float32x4Array_factory:
    case MethodRecognizer::kTypedData_Int32x4Array_factory:
    case MethodRecognizer::kTypedData_Float64x2Array_factory:
    case MethodRecognizer::kMemCopy:
    // FFI loads/stores and related intrinsics.
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer:
    case MethodRecognizer::kFfiNativeCallbackFunction:
    case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer:
    case MethodRecognizer::kFfiFromAddress:
    case MethodRecognizer::kFfiGetAddress:
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble:
    case MethodRecognizer::kGetNativeField:
    // Finalizer support.
    case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
    case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
    case MethodRecognizer::kFinalizerBase_setIsolate:
    case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
    case MethodRecognizer::kFinalizerEntry_allocate:
    case MethodRecognizer::kFinalizerEntry_getExternalSize:
    case MethodRecognizer::kObjectEquals:
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
    case MethodRecognizer::kClassIDgetID:
    case MethodRecognizer::kGrowableArrayAllocateWithData:
    case MethodRecognizer::kGrowableArrayCapacity:
    case MethodRecognizer::kObjectArrayAllocate:
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
    case MethodRecognizer::kFfiAbi:
    case MethodRecognizer::kUtf8DecoderScan:
    case MethodRecognizer::kHas63BitSmis:
    case MethodRecognizer::kExtensionStreamHasListener:
    case MethodRecognizer::kSmi_hashCode:
    case MethodRecognizer::kMint_hashCode:
    case MethodRecognizer::kDouble_hashCode:
// Also include every recognized native-field load/store from the tables
// above.
#define CASE(method, slot) case MethodRecognizer::k##method:
      LOAD_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD_NO_BARRIER(CASE)
#undef CASE
      return true;
    // These double/math intrinsics are supported only when the compiler can
    // keep doubles unboxed.
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleMod:
    case MethodRecognizer::kDoubleRoundToDouble:
    case MethodRecognizer::kDoubleTruncateToDouble:
    case MethodRecognizer::kDoubleFloorToDouble:
    case MethodRecognizer::kDoubleCeilToDouble:
    case MethodRecognizer::kMathDoublePow:
    case MethodRecognizer::kMathSin:
    case MethodRecognizer::kMathCos:
    case MethodRecognizer::kMathTan:
    case MethodRecognizer::kMathAsin:
    case MethodRecognizer::kMathAcos:
    case MethodRecognizer::kMathAtan:
    case MethodRecognizer::kMathAtan2:
    case MethodRecognizer::kMathExp:
    case MethodRecognizer::kMathLog:
    case MethodRecognizer::kMathSqrt:
      return FlowGraphCompiler::SupportsUnboxedDoubles();
    // Ceil/floor-to-int additionally depend on the target architecture; on
    // X64 they are only used in AOT or when targeting an unknown CPU.
    case MethodRecognizer::kDoubleCeilToInt:
    case MethodRecognizer::kDoubleFloorToInt:
      if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
#if defined(TARGET_ARCH_X64)
      return CompilerState::Current().is_aot() || FLAG_target_unknown_cpu;
#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||            \
    defined(TARGET_ARCH_RISCV64)
      return true;
#else
      return false;
#endif
    default:
      return false;
  }
}
1075
1076FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
1077 const Function& function) {
1078 ASSERT(IsRecognizedMethodForFlowGraph(function));
1079
1080 graph_entry_ =
1081 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
1082
1083 auto normal_entry = BuildFunctionEntry(graph_entry_);
1084 graph_entry_->set_normal_entry(normal_entry);
1085
1086 PrologueInfo prologue_info(-1, -1);
1087 BlockEntryInstr* instruction_cursor =
1088 BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);
1089
1090 Fragment body(instruction_cursor);
1091 body += CheckStackOverflowInPrologue(position: function.token_pos());
1092
1093 const MethodRecognizer::Kind kind = function.recognized_kind();
1094 switch (kind) {
1095 case MethodRecognizer::kRecord_fieldAt:
1096 ASSERT_EQUAL(function.NumParameters(), 2);
1097 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1098 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1099 body += LoadIndexed(
1100 kRecordCid, /*index_scale*/ compiler::target::kCompressedWordSize);
1101 break;
1102 case MethodRecognizer::kRecord_fieldNames:
1103 body += LoadObjectStore();
1104 body += RawLoadField(
1105 offset: compiler::target::ObjectStore::record_field_names_offset());
1106 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1107 body += LoadNativeField(Slot::Record_shape());
1108 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexShift);
1109 body += SmiBinaryOp(Token::kSHR);
1110 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexMask);
1111 body += SmiBinaryOp(Token::kBIT_AND);
1112 body += LoadIndexed(
1113 kArrayCid, /*index_scale=*/compiler::target::kCompressedWordSize);
1114 break;
1115 case MethodRecognizer::kRecord_numFields:
1116 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1117 body += LoadNativeField(Slot::Record_shape());
1118 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
1119 body += SmiBinaryOp(Token::kBIT_AND);
1120 break;
1121 case MethodRecognizer::kSuspendState_clone: {
1122 ASSERT_EQUAL(function.NumParameters(), 1);
1123 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1124 body += Call1ArgStub(position: TokenPosition::kNoSource,
1125 stub_id: Call1ArgStubInstr::StubId::kCloneSuspendState);
1126 break;
1127 }
1128 case MethodRecognizer::kSuspendState_resume: {
1129 const Code& resume_stub =
1130 Code::ZoneHandle(Z, IG->object_store()->resume_stub());
1131 body += NullConstant();
1132 body += TailCall(resume_stub);
1133 break;
1134 }
1135#define CASE(name) \
1136 case MethodRecognizer::kTypedData_##name##_factory: \
1137 body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
1138 break; \
1139 case MethodRecognizer::kTypedData_##name##View_factory: \
1140 body += BuildTypedDataViewFactoryConstructor(function, \
1141 kTypedData##name##ViewCid); \
1142 break; \
1143 case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
1144 body += BuildTypedDataViewFactoryConstructor( \
1145 function, kUnmodifiableTypedData##name##ViewCid); \
1146 break;
1147 CLASS_LIST_TYPED_DATA(CASE)
1148#undef CASE
1149 case MethodRecognizer::kTypedData_ByteDataView_factory:
1150 body += BuildTypedDataViewFactoryConstructor(function, cid: kByteDataViewCid);
1151 break;
1152 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1153 body += BuildTypedDataViewFactoryConstructor(
1154 function, cid: kUnmodifiableByteDataViewCid);
1155 break;
1156 case MethodRecognizer::kObjectEquals:
1157 ASSERT_EQUAL(function.NumParameters(), 2);
1158 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1159 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1160 body += StrictCompare(Token::kEQ_STRICT);
1161 break;
1162 case MethodRecognizer::kStringBaseLength:
1163 case MethodRecognizer::kStringBaseIsEmpty:
1164 ASSERT_EQUAL(function.NumParameters(), 1);
1165 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1166 body += LoadNativeField(Slot::String_length());
1167 if (kind == MethodRecognizer::kStringBaseIsEmpty) {
1168 body += IntConstant(0);
1169 body += StrictCompare(Token::kEQ_STRICT);
1170 }
1171 break;
1172 case MethodRecognizer::kClassIDgetID:
1173 ASSERT_EQUAL(function.NumParameters(), 1);
1174 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1175 body += LoadClassId();
1176 break;
1177 case MethodRecognizer::kGrowableArrayAllocateWithData: {
1178 ASSERT(function.IsFactory());
1179 ASSERT_EQUAL(function.NumParameters(), 2);
1180 const Class& cls =
1181 Class::ZoneHandle(Z, ptr: compiler::GrowableObjectArrayClass().ptr());
1182 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1183 body += AllocateObject(TokenPosition::kNoSource, cls, 1);
1184 LocalVariable* object = MakeTemporary();
1185 body += LoadLocal(variable: object);
1186 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1187 body += StoreNativeField(Slot::GrowableObjectArray_data(),
1188 StoreFieldInstr::Kind::kInitializing,
1189 kNoStoreBarrier);
1190 body += LoadLocal(variable: object);
1191 body += IntConstant(0);
1192 body += StoreNativeField(Slot::GrowableObjectArray_length(),
1193 StoreFieldInstr::Kind::kInitializing,
1194 kNoStoreBarrier);
1195 break;
1196 }
1197 case MethodRecognizer::kGrowableArrayCapacity:
1198 ASSERT_EQUAL(function.NumParameters(), 1);
1199 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1200 body += LoadNativeField(Slot::GrowableObjectArray_data());
1201 body += LoadNativeField(Slot::Array_length());
1202 break;
1203 case MethodRecognizer::kObjectArrayAllocate:
1204 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
1205 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1206 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1207 body += CreateArray();
1208 break;
1209 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1210 ASSERT_EQUAL(function.NumParameters(), 5);
1211 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1212 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1213 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 2));
1214 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 3));
1215 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 4));
1216 body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
1217 /*unboxed_length=*/false);
1218 body += NullConstant();
1219 break;
1220 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1221 ASSERT_EQUAL(function.NumParameters(), 2);
1222 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1223 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1224 // Uses a store-release barrier so that other isolates will see the
1225 // contents of the index after seeing the index itself.
1226 body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
1227 StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
1228 compiler::Assembler::kRelease);
1229 body += NullConstant();
1230 break;
1231 case MethodRecognizer::kUtf8DecoderScan:
1232 ASSERT_EQUAL(function.NumParameters(), 5);
1233 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0)); // decoder
1234 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1)); // bytes
1235 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 2)); // start
1236 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1237 body += UnboxTruncate(to: kUnboxedIntPtr);
1238 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 3)); // end
1239 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1240 body += UnboxTruncate(to: kUnboxedIntPtr);
1241 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 4)); // table
1242 body += Utf8Scan();
1243 body += Box(kUnboxedIntPtr);
1244 break;
1245 case MethodRecognizer::kMemCopy: {
1246 // Keep consistent with inliner.cc (except boxed param).
1247 ASSERT_EQUAL(function.NumParameters(), 5);
1248 LocalVariable* arg_target = parsed_function_->RawParameterVariable(i: 0);
1249 LocalVariable* arg_target_offset_in_bytes =
1250 parsed_function_->RawParameterVariable(i: 1);
1251 LocalVariable* arg_source = parsed_function_->RawParameterVariable(i: 2);
1252 LocalVariable* arg_source_offset_in_bytes =
1253 parsed_function_->RawParameterVariable(i: 3);
1254 LocalVariable* arg_length_in_bytes =
1255 parsed_function_->RawParameterVariable(i: 4);
1256 body += LoadLocal(variable: arg_source);
1257 body += LoadLocal(variable: arg_target);
1258 body += LoadLocal(variable: arg_source_offset_in_bytes);
1259 body += LoadLocal(variable: arg_target_offset_in_bytes);
1260 body += LoadLocal(variable: arg_length_in_bytes);
1261 // Pointers and TypedData have the same layout.
1262 body += MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1263 /*unboxed_length=*/false);
1264 body += NullConstant();
1265 } break;
1266 case MethodRecognizer::kFfiAbi:
1267 ASSERT_EQUAL(function.NumParameters(), 0);
1268 body += IntConstant(static_cast<int64_t>(compiler::ffi::TargetAbi()));
1269 break;
1270 case MethodRecognizer::kFfiNativeCallbackFunction:
1271 case MethodRecognizer::kFfiNativeAsyncCallbackFunction: {
1272 const auto& error = String::ZoneHandle(
1273 Z, ptr: Symbols::New(thread: thread_,
1274 cstr: "This function should be handled on call site."));
1275 body += Constant(error);
1276 body += ThrowException(TokenPosition::kNoSource);
1277 break;
1278 }
1279 case MethodRecognizer::kFfiLoadInt8:
1280 case MethodRecognizer::kFfiLoadInt16:
1281 case MethodRecognizer::kFfiLoadInt32:
1282 case MethodRecognizer::kFfiLoadInt64:
1283 case MethodRecognizer::kFfiLoadUint8:
1284 case MethodRecognizer::kFfiLoadUint16:
1285 case MethodRecognizer::kFfiLoadUint32:
1286 case MethodRecognizer::kFfiLoadUint64:
1287 case MethodRecognizer::kFfiLoadFloat:
1288 case MethodRecognizer::kFfiLoadFloatUnaligned:
1289 case MethodRecognizer::kFfiLoadDouble:
1290 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1291 case MethodRecognizer::kFfiLoadPointer: {
1292 const classid_t ffi_type_arg_cid =
1293 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1294 const AlignmentType alignment =
1295 compiler::ffi::RecognizedMethodAlignment(kind);
1296 const classid_t typed_data_cid =
1297 compiler::ffi::ElementTypedDataCid(class_id: ffi_type_arg_cid);
1298
1299 ASSERT_EQUAL(function.NumParameters(), 2);
1300 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(i: 0);
1301 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(i: 1);
1302
1303 body += LoadLocal(variable: arg_offset);
1304 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1305 LocalVariable* arg_offset_not_null = MakeTemporary();
1306
1307 body += LoadLocal(variable: arg_pointer);
1308 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1309 // No GC from here til LoadIndexed.
1310 body += LoadUntagged(compiler::target::PointerBase::data_offset());
1311 body += LoadLocal(variable: arg_offset_not_null);
1312 body += UnboxTruncate(to: kUnboxedFfiIntPtr);
1313 body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
1314 /*index_unboxed=*/true, alignment);
1315 if (kind == MethodRecognizer::kFfiLoadFloat ||
1316 kind == MethodRecognizer::kFfiLoadFloatUnaligned) {
1317 body += FloatToDouble();
1318 }
1319 // Avoid any unnecessary (and potentially deoptimizing) int
1320 // conversions by using the representation returned from LoadIndexed.
1321 body +=
1322 Box(LoadIndexedInstr::RepresentationOfArrayElement(array_cid: typed_data_cid));
1323 if (kind == MethodRecognizer::kFfiLoadPointer) {
1324 const auto& pointer_class =
1325 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1326 const auto& type_arguments = TypeArguments::ZoneHandle(
1327 Z, IG->object_store()->type_argument_never());
1328
1329 // We do not reify Pointer type arguments
1330 ASSERT(function.NumTypeParameters() == 1);
1331 LocalVariable* address = MakeTemporary();
1332 body += Constant(type_arguments);
1333 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1334 LocalVariable* pointer = MakeTemporary();
1335 body += LoadLocal(variable: pointer);
1336 body += LoadLocal(variable: address);
1337 body += UnboxTruncate(to: kUnboxedIntPtr);
1338 body += StoreNativeField(Slot::PointerBase_data());
1339 body += DropTempsPreserveTop(1); // Drop [address] keep [pointer].
1340 }
1341 body += DropTempsPreserveTop(1); // Drop [arg_offset].
1342 } break;
1343 case MethodRecognizer::kFfiStoreInt8:
1344 case MethodRecognizer::kFfiStoreInt16:
1345 case MethodRecognizer::kFfiStoreInt32:
1346 case MethodRecognizer::kFfiStoreInt64:
1347 case MethodRecognizer::kFfiStoreUint8:
1348 case MethodRecognizer::kFfiStoreUint16:
1349 case MethodRecognizer::kFfiStoreUint32:
1350 case MethodRecognizer::kFfiStoreUint64:
1351 case MethodRecognizer::kFfiStoreFloat:
1352 case MethodRecognizer::kFfiStoreFloatUnaligned:
1353 case MethodRecognizer::kFfiStoreDouble:
1354 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1355 case MethodRecognizer::kFfiStorePointer: {
1356 const classid_t ffi_type_arg_cid =
1357 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1358 const AlignmentType alignment =
1359 compiler::ffi::RecognizedMethodAlignment(kind);
1360 const classid_t typed_data_cid =
1361 compiler::ffi::ElementTypedDataCid(class_id: ffi_type_arg_cid);
1362
1363 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(i: 0);
1364 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(i: 1);
1365 LocalVariable* arg_value = parsed_function_->RawParameterVariable(i: 2);
1366
1367 ASSERT_EQUAL(function.NumParameters(), 3);
1368 body += LoadLocal(variable: arg_offset);
1369 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1370 LocalVariable* arg_offset_not_null = MakeTemporary();
1371 body += LoadLocal(variable: arg_value);
1372 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1373 LocalVariable* arg_value_not_null = MakeTemporary();
1374
1375 body += LoadLocal(variable: arg_pointer); // Pointer.
1376 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1377 // No GC from here til StoreIndexed.
1378 body += LoadUntagged(compiler::target::PointerBase::data_offset());
1379 body += LoadLocal(variable: arg_offset_not_null);
1380 body += UnboxTruncate(to: kUnboxedFfiIntPtr);
1381 body += LoadLocal(variable: arg_value_not_null);
1382 if (kind == MethodRecognizer::kFfiStorePointer) {
1383 // This can only be Pointer, so it is always safe to LoadUntagged.
1384 body += LoadUntagged(compiler::target::PointerBase::data_offset());
1385 body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
1386 } else {
1387 // Avoid any unnecessary (and potentially deoptimizing) int
1388 // conversions by using the representation consumed by StoreIndexed.
1389 body += UnboxTruncate(
1390 to: StoreIndexedInstr::RepresentationOfArrayElement(array_cid: typed_data_cid));
1391 if (kind == MethodRecognizer::kFfiStoreFloat ||
1392 kind == MethodRecognizer::kFfiStoreFloatUnaligned) {
1393 body += DoubleToFloat();
1394 }
1395 }
1396 body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
1397 /*index_unboxed=*/true, alignment);
1398 body += Drop(); // Drop [arg_value].
1399 body += Drop(); // Drop [arg_offset].
1400 body += NullConstant();
1401 } break;
1402 case MethodRecognizer::kFfiFromAddress: {
1403 const auto& pointer_class =
1404 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1405 const auto& type_arguments = TypeArguments::ZoneHandle(
1406 Z, IG->object_store()->type_argument_never());
1407
1408 ASSERT(function.NumTypeParameters() == 1);
1409 ASSERT_EQUAL(function.NumParameters(), 1);
1410 body += Constant(type_arguments);
1411 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1412 body += LoadLocal(variable: MakeTemporary()); // Duplicate Pointer.
1413 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0)); // Address.
1414 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1415 body += UnboxTruncate(to: kUnboxedIntPtr);
1416 body += StoreNativeField(Slot::PointerBase_data());
1417 } break;
1418 case MethodRecognizer::kFfiGetAddress: {
1419 ASSERT_EQUAL(function.NumParameters(), 1);
1420 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0)); // Pointer.
1421 body += CheckNullOptimized(String::ZoneHandle(Z, ptr: function.name()));
1422 // This can only be Pointer, so it is always safe to LoadUntagged.
1423 body += LoadUntagged(compiler::target::PointerBase::data_offset());
1424 body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
1425 body += Box(kUnboxedFfiIntPtr);
1426 } break;
1427 case MethodRecognizer::kHas63BitSmis: {
1428#if defined(HAS_SMI_63_BITS)
1429 body += Constant(Bool::True());
1430#else
1431 body += Constant(Bool::False());
1432#endif // defined(ARCH_IS_64_BIT)
1433 } break;
1434 case MethodRecognizer::kExtensionStreamHasListener: {
1435#ifdef PRODUCT
1436 body += Constant(Bool::False());
1437#else
1438 body += LoadServiceExtensionStream();
1439 body += RawLoadField(offset: compiler::target::StreamInfo::enabled_offset());
1440 // StreamInfo::enabled_ is a std::atomic<intptr_t>. This is effectively
1441 // relaxed order access, which is acceptable for this use case.
1442 body += IntToBool();
1443#endif // PRODUCT
1444 } break;
1445 case MethodRecognizer::kSmi_hashCode: {
1446 // TODO(dartbug.com/38985): We should make this LoadLocal+Unbox+
1447 // IntegerHash+Box. Though this would make use of unboxed values on stack
1448 // which isn't allowed in unoptimized mode.
1449 // Once force-optimized functions can be inlined, we should change this
1450 // code to the above.
1451 ASSERT_EQUAL(function.NumParameters(), 1);
1452 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1453 body += BuildIntegerHashCode(/*smi=*/true);
1454 } break;
1455 case MethodRecognizer::kMint_hashCode: {
1456 ASSERT_EQUAL(function.NumParameters(), 1);
1457 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1458 body += BuildIntegerHashCode(/*smi=*/false);
1459 } break;
1460 case MethodRecognizer::kDouble_hashCode: {
1461 ASSERT_EQUAL(function.NumParameters(), 1);
1462 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1463 body += UnboxTruncate(to: kUnboxedDouble);
1464 body += BuildDoubleHashCode();
1465 body += Box(kUnboxedInt64);
1466 } break;
1467 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1468 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1469 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1470 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1471 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1472 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1473 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1474 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1475 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1476 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1477 const classid_t ffi_type_arg_cid =
1478 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1479 const classid_t external_typed_data_cid =
1480 compiler::ffi::ElementExternalTypedDataCid(class_id: ffi_type_arg_cid);
1481
1482 auto class_table = thread_->isolate_group()->class_table();
1483 ASSERT(class_table->HasValidClassAt(external_typed_data_cid));
1484 const auto& typed_data_class =
1485 Class::ZoneHandle(H.zone(), ptr: class_table->At(cid: external_typed_data_cid));
1486
1487 // We assume that the caller has checked that the arguments are non-null
1488 // and length is in the range [0, kSmiMax/elementSize].
1489 ASSERT_EQUAL(function.NumParameters(), 2);
1490 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(i: 0);
1491 LocalVariable* arg_length = parsed_function_->RawParameterVariable(i: 1);
1492
1493 body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1494 LocalVariable* typed_data_object = MakeTemporary();
1495
1496 // Initialize the result's length field.
1497 body += LoadLocal(variable: typed_data_object);
1498 body += LoadLocal(variable: arg_length);
1499 body += StoreNativeField(Slot::TypedDataBase_length(),
1500 StoreFieldInstr::Kind::kInitializing,
1501 kNoStoreBarrier);
1502
1503 // Initialize the result's data pointer field.
1504 body += LoadLocal(variable: typed_data_object);
1505 body += LoadLocal(variable: arg_pointer);
1506 body += LoadUntagged(compiler::target::PointerBase::data_offset());
1507 body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
1508 body += StoreNativeField(Slot::PointerBase_data(),
1509 StoreFieldInstr::Kind::kInitializing,
1510 kNoStoreBarrier);
1511 } break;
1512 case MethodRecognizer::kGetNativeField: {
1513 auto& name = String::ZoneHandle(Z, ptr: function.name());
1514 // Note: This method is force optimized so we can push untagged, etc.
1515 // Load TypedDataArray from Instance Handle implementing
1516 // NativeFieldWrapper.
1517 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0)); // Object.
1518 body += CheckNullOptimized(name);
1519 body += LoadNativeField(Slot::Instance_native_fields_array()); // Fields.
1520 body += CheckNullOptimized(name);
1521 // Load the native field at index.
1522 body += IntConstant(0); // Index.
1523 body += LoadIndexed(kIntPtrCid);
1524 body += Box(kUnboxedIntPtr);
1525 } break;
1526 case MethodRecognizer::kDoubleToInteger:
1527 case MethodRecognizer::kDoubleCeilToInt:
1528 case MethodRecognizer::kDoubleFloorToInt: {
1529 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1530 body += DoubleToInteger(kind);
1531 } break;
1532 case MethodRecognizer::kDoubleMod:
1533 case MethodRecognizer::kDoubleRoundToDouble:
1534 case MethodRecognizer::kDoubleTruncateToDouble:
1535 case MethodRecognizer::kDoubleFloorToDouble:
1536 case MethodRecognizer::kDoubleCeilToDouble:
1537 case MethodRecognizer::kMathDoublePow:
1538 case MethodRecognizer::kMathSin:
1539 case MethodRecognizer::kMathCos:
1540 case MethodRecognizer::kMathTan:
1541 case MethodRecognizer::kMathAsin:
1542 case MethodRecognizer::kMathAcos:
1543 case MethodRecognizer::kMathAtan:
1544 case MethodRecognizer::kMathAtan2:
1545 case MethodRecognizer::kMathExp:
1546 case MethodRecognizer::kMathLog: {
1547 for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
1548 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i));
1549 }
1550 if (!CompilerState::Current().is_aot() &&
1551 TargetCPUFeatures::double_truncate_round_supported() &&
1552 ((kind == MethodRecognizer::kDoubleTruncateToDouble) ||
1553 (kind == MethodRecognizer::kDoubleFloorToDouble) ||
1554 (kind == MethodRecognizer::kDoubleCeilToDouble))) {
1555 body += DoubleToDouble(kind);
1556 } else {
1557 body += InvokeMathCFunction(kind, function.NumParameters());
1558 }
1559 } break;
1560 case MethodRecognizer::kMathSqrt: {
1561 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1562 body += MathUnary(MathUnaryInstr::kSqrt);
1563 } break;
1564 case MethodRecognizer::kFinalizerBase_setIsolate:
1565 ASSERT_EQUAL(function.NumParameters(), 1);
1566 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1567 body += LoadIsolate();
1568 body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
1569 body += StoreNativeField(Slot::FinalizerBase_isolate());
1570 body += NullConstant();
1571 break;
1572 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1573 ASSERT_EQUAL(function.NumParameters(), 0);
1574 body += LoadIsolate();
1575 body += RawLoadField(offset: compiler::target::Isolate::finalizers_offset());
1576 break;
1577 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1578 ASSERT_EQUAL(function.NumParameters(), 1);
1579 body += LoadIsolate();
1580 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1581 body += RawStoreField(offset: compiler::target::Isolate::finalizers_offset());
1582 body += NullConstant();
1583 break;
1584 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1585 ASSERT_EQUAL(function.NumParameters(), 1);
1586 ASSERT(this->optimizing_);
1587 // This relies on being force-optimized to do an 'atomic' exchange w.r.t.
1588 // the GC.
1589 // As an alternative design we could introduce an ExchangeNativeFieldInstr
1590 // that uses the same machine code as std::atomic::exchange. Or we could
1591 // use an FfiNative to do that in C.
1592 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1593 // No GC from here til StoreNativeField.
1594 body += LoadNativeField(Slot::FinalizerBase_entries_collected());
1595 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1596 body += NullConstant();
1597 body += StoreNativeField(Slot::FinalizerBase_entries_collected());
1598 break;
1599 case MethodRecognizer::kFinalizerEntry_allocate: {
1600 // Object value, Object token, Object detach, FinalizerBase finalizer
1601 ASSERT_EQUAL(function.NumParameters(), 4);
1602
1603 const auto class_table = thread_->isolate_group()->class_table();
1604 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1605 const auto& finalizer_entry_class =
1606 Class::ZoneHandle(H.zone(), ptr: class_table->At(cid: kFinalizerEntryCid));
1607
1608 body +=
1609 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1610 LocalVariable* const entry = MakeTemporary("entry");
1611 // No GC from here to the end.
1612 body += LoadLocal(variable: entry);
1613 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1614 body += StoreNativeField(Slot::FinalizerEntry_value());
1615 body += LoadLocal(variable: entry);
1616 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 1));
1617 body += StoreNativeField(Slot::FinalizerEntry_token());
1618 body += LoadLocal(variable: entry);
1619 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 2));
1620 body += StoreNativeField(Slot::FinalizerEntry_detach());
1621 body += LoadLocal(variable: entry);
1622 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 3));
1623 body += StoreNativeField(Slot::FinalizerEntry_finalizer());
1624 body += LoadLocal(variable: entry);
1625 body += UnboxedIntConstant(0, kUnboxedIntPtr);
1626 body += StoreNativeField(Slot::FinalizerEntry_external_size());
1627 break;
1628 }
1629 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1630 ASSERT_EQUAL(function.NumParameters(), 1);
1631 body += LoadLocal(variable: parsed_function_->RawParameterVariable(i: 0));
1632 body += LoadNativeField(Slot::FinalizerEntry_external_size());
1633 body += Box(kUnboxedInt64);
1634 break;
1635#define IL_BODY(method, slot) \
1636 case MethodRecognizer::k##method: \
1637 ASSERT_EQUAL(function.NumParameters(), 1); \
1638 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1639 body += LoadNativeField(Slot::slot()); \
1640 break;
1641 LOAD_NATIVE_FIELD(IL_BODY)
1642#undef IL_BODY
1643#define IL_BODY(method, slot) \
1644 case MethodRecognizer::k##method: \
1645 ASSERT_EQUAL(function.NumParameters(), 2); \
1646 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1647 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1648 body += StoreNativeField(Slot::slot()); \
1649 body += NullConstant(); \
1650 break;
1651 STORE_NATIVE_FIELD(IL_BODY)
1652#undef IL_BODY
1653#define IL_BODY(method, slot) \
1654 case MethodRecognizer::k##method: \
1655 ASSERT_EQUAL(function.NumParameters(), 2); \
1656 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1657 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1658 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1659 kNoStoreBarrier); \
1660 body += NullConstant(); \
1661 break;
1662 STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
1663#undef IL_BODY
1664 default: {
1665 UNREACHABLE();
1666 break;
1667 }
1668 }
1669
1670 if (body.is_open()) {
1671 body +=
1672 Return(position: TokenPosition::kNoSource, /* omit_result_type_check = */ true);
1673 }
1674
1675 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
1676 prologue_info);
1677}
1678
// Builds the IL body of a typed-data view factory constructor for the view
// class `cid`. Allocates the view object, initializes its typed_data,
// offset_in_bytes and length fields, and finally derives the view's inner
// data pointer from the backing store's data pointer plus the offset.
Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
    const Function& function,
    classid_t cid) {
  auto token_pos = function.token_pos();
  auto class_table = Thread::Current()->isolate_group()->class_table();

  ASSERT(class_table->HasValidClassAt(cid));
  const auto& view_class = Class::ZoneHandle(H.zone(), ptr: class_table->At(cid));

  // Factory takes 4 parameters; parameters 1-3 are the backing typed data,
  // the byte offset into it, and the element count of the view.
  ASSERT(function.IsFactory() && (function.NumParameters() == 4));
  LocalVariable* typed_data = parsed_function_->RawParameterVariable(i: 1);
  LocalVariable* offset_in_bytes = parsed_function_->RawParameterVariable(i: 2);
  LocalVariable* length = parsed_function_->RawParameterVariable(i: 3);

  Fragment body;

  body += AllocateObject(token_pos, view_class, /*arg_count=*/0);
  LocalVariable* view_object = MakeTemporary();

  // view.typed_data = typed_data (stores a heap object, so keep the barrier).
  body += LoadLocal(variable: view_object);
  body += LoadLocal(variable: typed_data);
  body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
                           StoreFieldInstr::Kind::kInitializing);

  // view.offset_in_bytes = offset_in_bytes (a Smi, see UnboxSmiToIntptr
  // below, so no store barrier is needed).
  body += LoadLocal(variable: view_object);
  body += LoadLocal(variable: offset_in_bytes);
  body +=
      StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);

  // view.length = length (stored without a barrier).
  body += LoadLocal(variable: view_object);
  body += LoadLocal(variable: length);
  body +=
      StoreNativeField(token_pos, Slot::TypedDataBase_length(),
                       StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);

  // Update the inner pointer: view.data = typed_data.data + offset_in_bytes.
  //
  // WARNING: Notice that we assume here no GC happens between those 4
  // instructions!
  body += LoadLocal(variable: view_object);
  body += LoadLocal(variable: typed_data);
  body += LoadUntagged(compiler::target::PointerBase::data_offset());
  body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
  body += LoadLocal(variable: offset_in_bytes);
  body += UnboxSmiToIntptr();
  body += AddIntptrIntegers();
  body += StoreNativeField(Slot::PointerBase_data());

  return body;
}
1730
// Builds the IL body of a typed-data factory constructor: loads the length
// parameter and emits an AllocateTypedData instruction for class `cid`.
Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
    const Function& function,
    classid_t cid) {
  const auto token_pos = function.token_pos();
  ASSERT(
      Thread::Current()->isolate_group()->class_table()->HasValidClassAt(cid));

  // Factory takes 2 parameters; parameter 1 is the requested length.
  ASSERT(function.IsFactory() && (function.NumParameters() == 2));
  LocalVariable* length = parsed_function_->RawParameterVariable(i: 1);

  Fragment instructions;
  instructions += LoadLocal(variable: length);
  // AllocateTypedData instruction checks that length is valid (a non-negative
  // Smi below maximum allowed length).
  instructions += AllocateTypedData(token_pos, cid);
  return instructions;
}
1748
// Creates the scope used when building an implicit closure (method
// tear-off): a single scope at context level 0 containing one captured
// `this` variable typed as klass's declaration type.
static const LocalScope* MakeImplicitClosureScope(Zone* Z, const Class& klass) {
  ASSERT(!klass.IsNull());
  // Note that if klass is _Closure, DeclarationType will be _Closure,
  // and not the signature type.
  Type& klass_type = Type::ZoneHandle(Z, ptr: klass.DeclarationType());

  LocalVariable* receiver_variable = new (Z) LocalVariable(
      TokenPosition::kNoSource, TokenPosition::kNoSource, Symbols::This(),
      klass_type, LocalVariable::kNoKernelOffset, /*param_type=*/nullptr);

  // `this` is captured so it can be stored in the closure's context.
  receiver_variable->set_is_captured();
  // receiver_variable->set_is_final();
  LocalScope* scope = new (Z) LocalScope(nullptr, 0, 0);
  scope->set_context_level(0);
  // Register the receiver both as a scope variable and as a context variable.
  scope->AddVariable(variable: receiver_variable);
  scope->AddContextVariable(var: receiver_variable);
  return scope;
}
1767
// Builds IL that creates an implicit closure (tear-off) of `target`:
// allocates a one-variable context holding the receiver, then allocates the
// closure and initializes its instantiator/delayed type-argument fields
// where required. Leaves the allocated closure as the top temporary.
Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
    const Function& target) {
  // The function cannot be local and have parent generic functions.
  ASSERT(!target.HasGenericParent());

  Fragment fragment;
  // Push the target function (used by AllocateClosure below).
  fragment += Constant(target);

  // Allocate a context that closes over `this`.
  // Note: this must be kept in sync with ScopeBuilder::BuildScopes.
  const LocalScope* implicit_closure_scope =
      MakeImplicitClosureScope(Z, klass: Class::Handle(Z, ptr: target.Owner()));
  fragment += AllocateContext(implicit_closure_scope->context_slots());
  LocalVariable* context = MakeTemporary();

  // Store `this`. The context doesn't need a parent pointer because it doesn't
  // close over anything else.
  fragment += LoadLocal(variable: context);
  fragment += LoadLocal(variable: parsed_function_->receiver_var());
  fragment += StoreNativeField(
      Slot::GetContextVariableSlotFor(
          thread_, *implicit_closure_scope->context_variables()[0]),
      StoreFieldInstr::Kind::kInitializing);

  fragment += AllocateClosure();
  LocalVariable* closure = MakeTemporary();

  // The function signature can have uninstantiated class type parameters.
  if (!target.HasInstantiatedSignature(genericity: kCurrentClass)) {
    fragment += LoadLocal(variable: closure);
    fragment += LoadInstantiatorTypeArguments();
    fragment += StoreNativeField(Slot::Closure_instantiator_type_arguments(),
                                 StoreFieldInstr::Kind::kInitializing);
  }

  if (target.IsGeneric()) {
    // Only generic functions need to have properly initialized
    // delayed_type_arguments.
    fragment += LoadLocal(variable: closure);
    fragment += Constant(Object::empty_type_arguments());
    fragment += StoreNativeField(Slot::Closure_delayed_type_arguments(),
                                 StoreFieldInstr::Kind::kInitializing);
  }

  return fragment;
}
1814
1815Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
1816 const AbstractType& dst_type,
1817 const String& name_symbol) {
1818 return Fragment();
1819}
1820
1821bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
1822 TokenPosition position) {
1823 return position.IsDebugPause() && !function.is_native() &&
1824 function.is_debuggable();
1825}
1826
1827bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
1828 TokenPosition position) {
1829 if (!position.IsDebugPause()) {
1830 return false;
1831 }
1832 Definition* definition = value->definition();
1833 if (definition->IsConstant() || definition->IsLoadStaticField() ||
1834 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
1835 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
1836 return true;
1837 }
1838 if (auto const alloc = definition->AsAllocateClosure()) {
1839 return !alloc->known_function().IsNull();
1840 }
1841 return false;
1842}
1843
1844Fragment FlowGraphBuilder::EvaluateAssertion() {
1845 const Class& klass =
1846 Class::ZoneHandle(Z, ptr: Library::LookupCoreClass(class_name: Symbols::AssertionError()));
1847 ASSERT(!klass.IsNull());
1848 const auto& error = klass.EnsureIsFinalized(H.thread());
1849 ASSERT(error == Error::null());
1850 const Function& target = Function::ZoneHandle(
1851 Z, ptr: klass.LookupStaticFunctionAllowPrivate(name: Symbols::EvaluateAssertion()));
1852 ASSERT(!target.IsNull());
1853 return StaticCall(position: TokenPosition::kNoSource, target, /* argument_count = */ 1,
1854 rebind_rule: ICData::kStatic);
1855}
1856
1857Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
1858 Fragment instructions;
1859 LocalVariable* top_of_stack = MakeTemporary();
1860 instructions += LoadLocal(variable: top_of_stack);
1861 instructions += AssertBool(position);
1862 instructions += Drop();
1863 return instructions;
1864}
1865
1866Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
1867 const String& dst_name,
1868 AssertAssignableInstr::Kind kind,
1869 TokenPosition token_pos) {
1870 Fragment instructions;
1871 if (!dst_type.IsTopTypeForSubtyping()) {
1872 LocalVariable* top_of_stack = MakeTemporary();
1873 instructions += LoadLocal(variable: top_of_stack);
1874 instructions +=
1875 AssertAssignableLoadTypeArguments(position: token_pos, dst_type, dst_name, kind);
1876 instructions += Drop();
1877 }
1878 return instructions;
1879}
1880
// Emits an AssertAssignable check of the value currently on top of the
// stack: pushes dst_type, then the instantiator type arguments (or null if
// the type doesn't need them), then the function type arguments (or null),
// followed by the AssertAssignable instruction itself.
Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
    TokenPosition position,
    const AbstractType& dst_type,
    const String& dst_name,
    AssertAssignableInstr::Kind kind) {
  Fragment instructions;

  instructions += Constant(AbstractType::ZoneHandle(ptr: dst_type.ptr()));

  // Instantiator type arguments are only needed when dst_type is not yet
  // instantiated with respect to the current class.
  if (!dst_type.IsInstantiated(genericity: kCurrentClass)) {
    instructions += LoadInstantiatorTypeArguments();
  } else {
    instructions += NullConstant();
  }

  // Likewise for function type arguments.
  if (!dst_type.IsInstantiated(genericity: kFunctions)) {
    instructions += LoadFunctionTypeArguments();
  } else {
    instructions += NullConstant();
  }

  instructions += AssertAssignable(position, dst_name, kind);

  return instructions;
}
1906
// Emits an AssertSubtype(sub_type <: super_type) check for constant type
// operands. The five operands are pushed in exactly the order popped by the
// single-argument AssertSubtype overload below.
Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
                                         const AbstractType& sub_type_value,
                                         const AbstractType& super_type_value,
                                         const String& dst_name_value) {
  Fragment instructions;
  instructions += LoadInstantiatorTypeArguments();
  instructions += LoadFunctionTypeArguments();
  instructions += Constant(AbstractType::ZoneHandle(Z, ptr: sub_type_value.ptr()));
  instructions += Constant(AbstractType::ZoneHandle(Z, ptr: super_type_value.ptr()));
  instructions += Constant(String::ZoneHandle(Z, ptr: dst_name_value.ptr()));
  instructions += AssertSubtype(position);
  return instructions;
}
1920
// Emits an AssertSubtype instruction taking its five operands from the
// stack (popped in reverse of the push order used by the overload above).
Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
  Fragment instructions;

  // Pop order must stay the mirror image of the pushes in the other overload.
  Value* dst_name = Pop();
  Value* super_type = Pop();
  Value* sub_type = Pop();
  Value* function_type_args = Pop();
  Value* instantiator_type_args = Pop();

  AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
      InstructionSource(position), instantiator_type_args, function_type_args,
      sub_type, super_type, dst_name, GetNextDeoptId());
  instructions += Fragment(instr);

  return instructions;
}
1937
// Appends bound checks for the type parameters of the current function (or,
// for factories, of the owner class) to *implicit_checks. `mode` selects
// which parameters get checked (all, only covariant, or only non-covariant).
// Forwarding stubs check against the super target's type parameters instead.
void FlowGraphBuilder::BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
                                                   Fragment* implicit_checks) {
  const Function& dart_function = parsed_function_->function();

  const Function* forwarding_target = nullptr;
  if (parsed_function_->is_forwarding_stub()) {
    forwarding_target = parsed_function_->forwarding_stub_super_target();
    ASSERT(!forwarding_target->IsNull());
  }

  TypeParameters& type_parameters = TypeParameters::Handle(Z);
  if (dart_function.IsFactory()) {
    // Factories carry the type parameters of the class they construct.
    type_parameters = Class::Handle(Z, ptr: dart_function.Owner()).type_parameters();
  } else {
    type_parameters = dart_function.type_parameters();
  }
  const intptr_t num_type_params = type_parameters.Length();
  if (num_type_params == 0) return;
  if (forwarding_target != nullptr) {
    // Use the super target's type parameters; the count must agree.
    type_parameters = forwarding_target->type_parameters();
    ASSERT(type_parameters.Length() == num_type_params);
  }
  if (type_parameters.AllDynamicBounds()) {
    return;  // All bounds are dynamic.
  }
  TypeParameter& type_param = TypeParameter::Handle(Z);
  String& name = String::Handle(Z);
  AbstractType& bound = AbstractType::Handle(Z);
  Fragment check_bounds;
  for (intptr_t i = 0; i < num_type_params; ++i) {
    bound = type_parameters.BoundAt(index: i);
    if (bound.IsTopTypeForSubtyping()) {
      // A top-type bound is trivially satisfied; no check needed.
      continue;
    }

    // Filter parameters by covariance according to the requested mode.
    switch (mode) {
      case TypeChecksToBuild::kCheckAllTypeParameterBounds:
        break;
      case TypeChecksToBuild::kCheckCovariantTypeParameterBounds:
        if (!type_parameters.IsGenericCovariantImplAt(index: i)) {
          continue;
        }
        break;
      case TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds:
        if (type_parameters.IsGenericCovariantImplAt(index: i)) {
          continue;
        }
        break;
    }

    name = type_parameters.NameAt(index: i);

    if (forwarding_target != nullptr) {
      type_param = forwarding_target->TypeParameterAt(index: i);
    } else if (dart_function.IsFactory()) {
      type_param = Class::Handle(Z, ptr: dart_function.Owner()).TypeParameterAt(index: i);
    } else {
      type_param = dart_function.TypeParameterAt(index: i);
    }
    ASSERT(type_param.IsFinalized());
    check_bounds +=
        AssertSubtype(position: TokenPosition::kNoSource, sub_type_value: type_param, super_type_value: bound, dst_name_value: name);
  }

  // Type arguments passed through partial instantiation are guaranteed to be
  // bounds-checked at the point of partial instantiation, so we don't need to
  // check them again at the call-site.
  if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
      FLAG_eliminate_type_checks) {
    LocalVariable* closure = parsed_function_->ParameterVariable(i: 0);
    *implicit_checks += TestDelayedTypeArgs(closure, /*present=*/{},
                                            /*absent=*/check_bounds);
  } else {
    *implicit_checks += check_bounds;
  }
}
2014
// Emits type checks for the explicit parameters of the current function.
// Checks for explicitly covariant parameters go into *explicit_checks, all
// others into *implicit_checks. When optimizing and checking a
// non-covariant parameter, *implicit_redefinitions additionally receives a
// RedefinitionWithType that narrows the parameter's type.
void FlowGraphBuilder::BuildArgumentTypeChecks(
    Fragment* explicit_checks,
    Fragment* implicit_checks,
    Fragment* implicit_redefinitions) {
  const Function& dart_function = parsed_function_->function();

  const Function* forwarding_target = nullptr;
  if (parsed_function_->is_forwarding_stub()) {
    forwarding_target = parsed_function_->forwarding_stub_super_target();
    ASSERT(!forwarding_target->IsNull());
  }

  const intptr_t num_params = dart_function.NumParameters();
  for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
       ++i) {
    LocalVariable* param = parsed_function_->ParameterVariable(i);
    const String& name = param->name();
    if (!param->needs_type_check()) {
      continue;
    }
    if (param->is_captured()) {
      // For captured parameters, check the raw parameter variable instead.
      param = parsed_function_->RawParameterVariable(i);
    }

    // Forwarding stubs check against the super target's declared type.
    const AbstractType* target_type = &param->type();
    if (forwarding_target != nullptr) {
      // We add 1 to the parameter index to account for the receiver.
      target_type =
          &AbstractType::ZoneHandle(Z, ptr: forwarding_target->ParameterTypeAt(index: i));
    }

    // Assignments to a top type always succeed; no check needed.
    if (target_type->IsTopTypeForSubtyping()) continue;

    const bool is_covariant = param->is_explicit_covariant_parameter();
    Fragment* checks = is_covariant ? explicit_checks : implicit_checks;

    *checks += LoadLocal(variable: param);
    *checks += AssertAssignableLoadTypeArguments(
        position: param->token_pos(), dst_type: *target_type, dst_name: name,
        kind: AssertAssignableInstr::kParameterCheck);
    *checks += StoreLocal(param);
    *checks += Drop();

    if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
      // We generate slightly different code in optimized vs. un-optimized code,
      // which is ok since we don't allocate any deopt ids.
      AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);

      *implicit_redefinitions += LoadLocal(variable: param);
      *implicit_redefinitions += RedefinitionWithType(*target_type);
      *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
      *implicit_redefinitions += Drop();
    }
  }
}
2070
2071BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2072 PrologueInfo* prologue_info) {
2073 const bool compiling_for_osr = IsCompiledForOsr();
2074
2075 kernel::PrologueBuilder prologue_builder(
2076 parsed_function_, last_used_block_id_, compiling_for_osr, IsInlining());
2077 BlockEntryInstr* instruction_cursor =
2078 prologue_builder.BuildPrologue(entry: normal_entry, prologue_info);
2079
2080 last_used_block_id_ = prologue_builder.last_used_block_id();
2081
2082 return instruction_cursor;
2083}
2084
2085ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
2086 if (!function.HasOptionalNamedParameters()) {
2087 return Array::null();
2088 }
2089
2090 const intptr_t num_fixed_params = function.num_fixed_parameters();
2091 const intptr_t num_opt_params = function.NumOptionalNamedParameters();
2092 const auto& names = Array::Handle(Z, ptr: Array::New(len: num_opt_params, space: Heap::kOld));
2093 auto& name = String::Handle(Z);
2094 for (intptr_t i = 0; i < num_opt_params; ++i) {
2095 name = function.ParameterNameAt(index: num_fixed_params + i);
2096 names.SetAt(i, name);
2097 }
2098 return names.ptr();
2099}
2100
// Pushes the explicit (non-implicit) parameters of `function` onto the
// stack. When `target` is given and declares a parameter as unboxed, an
// Unbox instruction converting to the expected representation (int64 or
// double) is emitted for that parameter.
Fragment FlowGraphBuilder::PushExplicitParameters(
    const Function& function,
    const Function& target /* = Function::null_function()*/) {
  Fragment instructions;
  for (intptr_t i = function.NumImplicitParameters(),
                n = function.NumParameters();
       i < n; ++i) {
    Fragment push_param = LoadLocal(variable: parsed_function_->ParameterVariable(i));
    if (!target.IsNull() && target.is_unboxed_parameter_at(index: i)) {
      Representation to;
      if (target.is_unboxed_integer_parameter_at(index: i)) {
        to = kUnboxedInt64;
      } else {
        ASSERT(target.is_unboxed_double_parameter_at(i));
        to = kUnboxedDouble;
      }
      // Replace the just-pushed boxed value with its unboxed counterpart.
      const auto unbox = UnboxInstr::Create(to, value: Pop(), deopt_id: DeoptId::kNone,
                                            speculative_mode: Instruction::kNotSpeculative);
      Push(definition: unbox);
      push_param += Fragment(unbox);
    }
    instructions += push_param;
  }
  return instructions;
}
2126
// Builds the flow graph of a method extractor: the implicit getter that
// returns a tear-off closure of the extracted method.
FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
    const Function& method) {
  // A method extractor is the implicit getter for a method.
  const Function& function =
      Function::ZoneHandle(Z, ptr: method.extracted_method_closure());

  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);

  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);

  // Body: stack-overflow check, create the implicit closure, return it.
  Fragment body(normal_entry);
  body += CheckStackOverflowInPrologue(position: method.token_pos());
  body += BuildImplicitClosureCreation(target: function);
  body += Return(position: TokenPosition::kNoSource);

  // There is no prologue code for a method extractor.
  PrologueInfo prologue_info(-1, -1);
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
2149
// Builds the flow graph of a noSuchMethod dispatcher: packages the original
// call's name, arguments descriptor and arguments into an InvocationMirror,
// then invokes noSuchMethod on the receiver (falling back to
// Object.noSuchMethod if the receiver class doesn't define one).
FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
    const Function& function) {
  // This function is specialized for a receiver class, a method name, and
  // the arguments descriptor at a call site.
  const ArgumentsDescriptor descriptor(saved_args_desc_array());

  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);

  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);

  PrologueInfo prologue_info(-1, -1);
  BlockEntryInstr* instruction_cursor =
      BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);

  Fragment body(instruction_cursor);
  body += CheckStackOverflowInPrologue(position: function.token_pos());

  // The receiver is the first argument to noSuchMethod, and it is the first
  // argument passed to the dispatcher function.
  body += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));

  // The second argument to noSuchMethod is an invocation mirror. Push the
  // arguments for allocating the invocation mirror. First, the name.
  body += Constant(String::ZoneHandle(Z, ptr: function.name()));

  // Second, the arguments descriptor.
  body += Constant(value: saved_args_desc_array());

  // Third, an array containing the original arguments. Create it and fill
  // it in. When type arguments were passed, they occupy slot 0 of the array
  // and shift the remaining arguments by one.
  const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
  body += Constant(TypeArguments::ZoneHandle(Z, ptr: TypeArguments::null()));
  body += IntConstant(receiver_index + descriptor.Size());
  body += CreateArray();
  LocalVariable* array = MakeTemporary();
  if (receiver_index > 0) {
    // Store the function type arguments at index 0.
    LocalVariable* type_args = parsed_function_->function_type_arguments();
    ASSERT(type_args != nullptr);
    body += LoadLocal(variable: array);
    body += IntConstant(0);
    body += LoadLocal(variable: type_args);
    body += StoreIndexed(kArrayCid);
  }
  // Copy the positional arguments into the array.
  for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
    body += LoadLocal(variable: array);
    body += IntConstant(receiver_index + i);
    body += LoadLocal(variable: parsed_function_->ParameterVariable(i));
    body += StoreIndexed(kArrayCid);
  }
  // Copy the named arguments into their call-site positions.
  String& name = String::Handle(Z);
  for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
    const intptr_t parameter_index = descriptor.PositionAt(i);
    name = descriptor.NameAt(i);
    name = Symbols::New(H.thread(), str: name);
    body += LoadLocal(variable: array);
    body += IntConstant(receiver_index + parameter_index);
    body += LoadLocal(variable: parsed_function_->ParameterVariable(i: parameter_index));
    body += StoreIndexed(kArrayCid);
  }

  // Fourth, false indicating this is not a super NoSuchMethod.
  body += Constant(Bool::False());

  // Allocate the invocation mirror via the private core-library helper.
  const Class& mirror_class =
      Class::Handle(Z, ptr: Library::LookupCoreClass(class_name: Symbols::InvocationMirror()));
  ASSERT(!mirror_class.IsNull());
  const auto& error = mirror_class.EnsureIsFinalized(H.thread());
  ASSERT(error == Error::null());
  const Function& allocation_function = Function::ZoneHandle(
      Z, ptr: mirror_class.LookupStaticFunction(
             name: Library::PrivateCoreLibName(member: Symbols::AllocateInvocationMirror())));
  ASSERT(!allocation_function.IsNull());
  body += StaticCall(position: TokenPosition::kMinSource, target: allocation_function,
                     /* argument_count = */ 4, rebind_rule: ICData::kStatic);

  // Resolve noSuchMethod(receiver, mirror) on the receiver class.
  const int kTypeArgsLen = 0;
  ArgumentsDescriptor two_arguments(
      Array::Handle(Z, ptr: ArgumentsDescriptor::NewBoxed(type_args_len: kTypeArgsLen, num_arguments: 2)));
  Function& no_such_method =
      Function::ZoneHandle(Z, ptr: Resolver::ResolveDynamicForReceiverClass(
                                  receiver_class: Class::Handle(Z, ptr: function.Owner()),
                                  function_name: Symbols::NoSuchMethod(), args_desc: two_arguments));
  if (no_such_method.IsNull()) {
    // If noSuchMethod is not found on the receiver class, call
    // Object.noSuchMethod.
    no_such_method = Resolver::ResolveDynamicForReceiverClass(
        receiver_class: Class::Handle(Z, IG->object_store()->object_class()),
        function_name: Symbols::NoSuchMethod(), args_desc: two_arguments);
  }
  body += StaticCall(position: TokenPosition::kMinSource, target: no_such_method,
                     /* argument_count = */ 2, rebind_rule: ICData::kNSMDispatch);
  body += Return(position: TokenPosition::kNoSource);

  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
2248
2249FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2250 const Function& function) {
2251 graph_entry_ =
2252 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2253
2254 auto normal_entry = BuildFunctionEntry(graph_entry_);
2255 graph_entry_->set_normal_entry(normal_entry);
2256
2257 JoinEntryInstr* nsm = BuildJoinEntry();
2258 JoinEntryInstr* done = BuildJoinEntry();
2259
2260 Fragment body(normal_entry);
2261 body += CheckStackOverflowInPrologue(position: function.token_pos());
2262
2263 String& name = String::ZoneHandle(Z, ptr: function.name());
2264 ASSERT(Field::IsGetterName(name));
2265 name = Field::NameFromGetter(getter_name: name);
2266
2267 // Get an array of field names.
2268 const Class& cls = Class::Handle(Z, IG->class_table()->At(cid: kRecordCid));
2269 const auto& error = cls.EnsureIsFinalized(thread: thread_);
2270 ASSERT(error == Error::null());
2271 const Function& get_field_names_function = Function::ZoneHandle(
2272 Z, ptr: cls.LookupFunctionAllowPrivate(name: Symbols::Get_fieldNames()));
2273 ASSERT(!get_field_names_function.IsNull());
2274 body += LoadLocal(variable: parsed_function_->receiver_var());
2275 body += StaticCall(position: TokenPosition::kNoSource, target: get_field_names_function, argument_count: 1,
2276 rebind_rule: ICData::kNoRebind);
2277 LocalVariable* field_names = MakeTemporary("field_names");
2278
2279 body += LoadLocal(variable: field_names);
2280 body += LoadNativeField(Slot::Array_length());
2281 LocalVariable* num_named = MakeTemporary("num_named");
2282
2283 // num_positional = num_fields - field_names.length
2284 body += LoadLocal(variable: parsed_function_->receiver_var());
2285 body += LoadNativeField(Slot::Record_shape());
2286 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
2287 body += SmiBinaryOp(Token::kBIT_AND);
2288 body += LoadLocal(variable: num_named);
2289 body += SmiBinaryOp(Token::kSUB);
2290 LocalVariable* num_positional = MakeTemporary("num_positional");
2291
2292 const intptr_t field_index =
2293 Record::GetPositionalFieldIndexFromFieldName(field_name: name);
2294 if (field_index >= 0) {
2295 // Get positional record field by index.
2296 body += IntConstant(field_index);
2297 body += LoadLocal(variable: num_positional);
2298 body += SmiRelationalOp(Token::kLT);
2299 TargetEntryInstr* valid_index;
2300 TargetEntryInstr* invalid_index;
2301 body += BranchIfTrue(&valid_index, &invalid_index);
2302
2303 body.current = valid_index;
2304 body += LoadLocal(variable: parsed_function_->receiver_var());
2305 body += LoadNativeField(Slot::GetRecordFieldSlot(
2306 thread: thread_, offset_in_bytes: compiler::target::Record::field_offset(index: field_index)));
2307
2308 body += StoreLocal(TokenPosition::kNoSource,
2309 parsed_function_->expression_temp_var());
2310 body += Drop();
2311 body += Goto(done);
2312
2313 body.current = invalid_index;
2314 }
2315
2316 // Search field among named fields.
2317 body += IntConstant(0);
2318 body += LoadLocal(variable: num_named);
2319 body += SmiRelationalOp(Token::kLT);
2320 TargetEntryInstr* has_named_fields;
2321 TargetEntryInstr* no_named_fields;
2322 body += BranchIfTrue(&has_named_fields, &no_named_fields);
2323
2324 Fragment(no_named_fields) + Goto(nsm);
2325 body.current = has_named_fields;
2326
2327 LocalVariable* index = parsed_function_->expression_temp_var();
2328 body += IntConstant(0);
2329 body += StoreLocal(TokenPosition::kNoSource, index);
2330 body += Drop();
2331
2332 JoinEntryInstr* loop = BuildJoinEntry();
2333 body += Goto(loop);
2334 body.current = loop;
2335
2336 body += LoadLocal(variable: field_names);
2337 body += LoadLocal(variable: index);
2338 body += LoadIndexed(kArrayCid,
2339 /*index_scale*/ compiler::target::kCompressedWordSize);
2340 body += Constant(name);
2341 TargetEntryInstr* found;
2342 TargetEntryInstr* continue_search;
2343 body += BranchIfEqual(&found, &continue_search);
2344
2345 body.current = continue_search;
2346 body += LoadLocal(variable: index);
2347 body += IntConstant(1);
2348 body += SmiBinaryOp(Token::kADD);
2349 body += StoreLocal(TokenPosition::kNoSource, index);
2350 body += Drop();
2351
2352 body += LoadLocal(variable: index);
2353 body += LoadLocal(variable: num_named);
2354 body += SmiRelationalOp(Token::kLT);
2355 TargetEntryInstr* has_more_fields;
2356 TargetEntryInstr* no_more_fields;
2357 body += BranchIfTrue(&has_more_fields, &no_more_fields);
2358
2359 Fragment(has_more_fields) + Goto(loop);
2360 Fragment(no_more_fields) + Goto(nsm);
2361
2362 body.current = found;
2363
2364 body += LoadLocal(variable: parsed_function_->receiver_var());
2365
2366 body += LoadLocal(variable: num_positional);
2367 body += LoadLocal(variable: index);
2368 body += SmiBinaryOp(Token::kADD);
2369
2370 body += LoadIndexed(kRecordCid,
2371 /*index_scale*/ compiler::target::kCompressedWordSize);
2372
2373 body += StoreLocal(TokenPosition::kNoSource,
2374 parsed_function_->expression_temp_var());
2375 body += Drop();
2376 body += Goto(done);
2377
2378 body.current = done;
2379
2380 body += LoadLocal(variable: parsed_function_->expression_temp_var());
2381 body += DropTempsPreserveTop(3); // field_names, num_named, num_positional
2382 body += Return(position: TokenPosition::kNoSource);
2383
2384 Fragment throw_nsm(nsm);
2385 throw_nsm += LoadLocal(variable: parsed_function_->receiver_var());
2386 throw_nsm += ThrowNoSuchMethodError(position: TokenPosition::kNoSource, target: function,
2387 /*incompatible_arguments=*/false,
2388 /*receiver_pushed=*/true);
2389 throw_nsm += ThrowException(TokenPosition::kNoSource); // Close graph.
2390
2391 // There is no prologue code for a record field getter.
2392 PrologueInfo prologue_info(-1, -1);
2393 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2394 prologue_info);
2395}
2396
// Information used by the various dynamic closure call fragment builders.
struct FlowGraphBuilder::ClosureCallInfo {
  // All pointer arguments must be non-null (checked via ASSERT_NOTNULL).
  ClosureCallInfo(LocalVariable* closure,
                  JoinEntryInstr* throw_no_such_method,
                  const Array& arguments_descriptor_array,
                  ParsedFunction::DynamicClosureCallVars* const vars)
      : closure(ASSERT_NOTNULL(closure)),
        throw_no_such_method(ASSERT_NOTNULL(throw_no_such_method)),
        descriptor(arguments_descriptor_array),
        vars(ASSERT_NOTNULL(vars)) {}

  // Local holding the closure being invoked.
  LocalVariable* const closure;
  // Shared join that throws NoSuchMethodError; fragment builders branch here
  // whenever the provided arguments don't match the closure's signature.
  JoinEntryInstr* const throw_no_such_method;
  // Decoded view of the call's arguments descriptor array.
  const ArgumentsDescriptor descriptor;
  // Mutable locals reserved for the dynamic closure call checks.
  ParsedFunction::DynamicClosureCallVars* const vars;

  // Set up by BuildClosureCallDefaultTypeHandling() when needed. These values
  // are read-only, so they don't need real local variables and are created
  // using MakeTemporary().
  LocalVariable* signature = nullptr;
  LocalVariable* num_fixed_params = nullptr;
  LocalVariable* num_opt_params = nullptr;
  LocalVariable* num_max_params = nullptr;
  LocalVariable* has_named_params = nullptr;
  LocalVariable* named_parameter_names = nullptr;
  LocalVariable* parameter_types = nullptr;
  LocalVariable* type_parameters = nullptr;
  LocalVariable* num_type_parameters = nullptr;
  LocalVariable* type_parameter_flags = nullptr;
  LocalVariable* instantiator_type_args = nullptr;
  LocalVariable* parent_function_type_args = nullptr;
  LocalVariable* num_parent_type_args = nullptr;
};
2430
2431Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2432 const ClosureCallInfo& info,
2433 Fragment generic,
2434 Fragment not_generic) {
2435 JoinEntryInstr* after_branch = BuildJoinEntry();
2436
2437 Fragment check;
2438 check += LoadLocal(variable: info.type_parameters);
2439 TargetEntryInstr* is_not_generic;
2440 TargetEntryInstr* is_generic;
2441 check += BranchIfNull(&is_not_generic, &is_generic);
2442
2443 generic.Prepend(is_generic);
2444 generic += Goto(after_branch);
2445
2446 not_generic.Prepend(is_not_generic);
2447 not_generic += Goto(after_branch);
2448
2449 return Fragment(check.entry, after_branch);
2450}
2451
2452Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2453 const ClosureCallInfo& info,
2454 Fragment set,
2455 Fragment not_set) {
2456 // Required named arguments only exist if null_safety is enabled.
2457 if (!IG->use_strict_null_safety_checks()) return not_set;
2458
2459 Fragment check_required;
2460 // We calculate the index to dereference in the parameter names array.
2461 check_required += LoadLocal(variable: info.vars->current_param_index);
2462 check_required +=
2463 IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
2464 check_required += SmiBinaryOp(Token::kSHR);
2465 check_required += LoadLocal(variable: info.num_opt_params);
2466 check_required += SmiBinaryOp(Token::kADD);
2467 LocalVariable* flags_index = MakeTemporary("flags_index"); // Read-only.
2468
2469 // One read-only stack value (flag_index) that must be dropped
2470 // after we rejoin at after_check.
2471 JoinEntryInstr* after_check = BuildJoinEntry();
2472
2473 // Now we check to see if the flags index is within the bounds of the
2474 // parameters names array. If not, it cannot be required.
2475 check_required += LoadLocal(variable: flags_index);
2476 check_required += LoadLocal(variable: info.named_parameter_names);
2477 check_required += LoadNativeField(Slot::Array_length());
2478 check_required += SmiRelationalOp(Token::kLT);
2479 TargetEntryInstr* valid_index;
2480 TargetEntryInstr* invalid_index;
2481 check_required += BranchIfTrue(&valid_index, &invalid_index);
2482
2483 JoinEntryInstr* join_not_set = BuildJoinEntry();
2484
2485 Fragment(invalid_index) + Goto(join_not_set);
2486
2487 // Otherwise, we need to retrieve the value. We're guaranteed the Smis in
2488 // the flag slots are non-null, so after loading we can immediate check
2489 // the required flag bit for the given named parameter.
2490 check_required.current = valid_index;
2491 check_required += LoadLocal(variable: info.named_parameter_names);
2492 check_required += LoadLocal(variable: flags_index);
2493 check_required += LoadIndexed(
2494 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2495 check_required += LoadLocal(variable: info.vars->current_param_index);
2496 check_required +=
2497 IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
2498 check_required += SmiBinaryOp(Token::kBIT_AND);
2499 // If the below changes, we'll need to multiply by the number of parameter
2500 // flags before shifting.
2501 static_assert(compiler::target::kNumParameterFlags == 1,
2502 "IL builder assumes only one flag bit per parameter");
2503 check_required += SmiBinaryOp(Token::kSHR);
2504 check_required +=
2505 IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
2506 check_required += SmiBinaryOp(Token::kBIT_AND);
2507 check_required += IntConstant(0);
2508 TargetEntryInstr* is_not_set;
2509 TargetEntryInstr* is_set;
2510 check_required += BranchIfEqual(&is_not_set, &is_set);
2511
2512 Fragment(is_not_set) + Goto(join_not_set);
2513
2514 set.Prepend(is_set);
2515 set += Goto(after_check);
2516
2517 not_set.Prepend(join_not_set);
2518 not_set += Goto(after_check);
2519
2520 // After rejoining, drop the introduced temporaries.
2521 check_required.current = after_check;
2522 check_required += DropTemporary(&flags_index);
2523 return check_required;
2524}
2525
2526Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2527 const ClosureCallInfo& info) {
2528 if (info.descriptor.TypeArgsLen() > 0) {
2529 ASSERT(parsed_function_->function_type_arguments() != nullptr);
2530 // A TAV was provided, so we don't need default type argument handling
2531 // and can just take the arguments we were given.
2532 Fragment store_provided;
2533 store_provided += LoadLocal(variable: parsed_function_->function_type_arguments());
2534 store_provided += StoreLocal(info.vars->function_type_args);
2535 store_provided += Drop();
2536 return store_provided;
2537 }
2538
2539 // Load the defaults, instantiating or replacing them with the other type
2540 // arguments as appropriate.
2541 Fragment store_default;
2542 store_default += LoadLocal(variable: info.closure);
2543 store_default += LoadNativeField(Slot::Closure_function());
2544 store_default += LoadNativeField(Slot::Function_data());
2545 LocalVariable* closure_data = MakeTemporary("closure_data");
2546
2547 store_default += LoadLocal(variable: closure_data);
2548 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
2549 ClosureData::PackedDefaultTypeArgumentsKind>(
2550 Slot::ClosureData_packed_fields());
2551 LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
2552
2553 // Two locals to drop after join, closure_data and default_tav_kind.
2554 JoinEntryInstr* done = BuildJoinEntry();
2555
2556 store_default += LoadLocal(variable: default_tav_kind);
2557 TargetEntryInstr* is_instantiated;
2558 TargetEntryInstr* is_not_instantiated;
2559 store_default += IntConstant(static_cast<intptr_t>(
2560 ClosureData::DefaultTypeArgumentsKind::kIsInstantiated));
2561 store_default += BranchIfEqual(&is_instantiated, &is_not_instantiated);
2562 store_default.current = is_not_instantiated; // Check next case.
2563 store_default += LoadLocal(variable: default_tav_kind);
2564 TargetEntryInstr* needs_instantiation;
2565 TargetEntryInstr* can_share;
2566 store_default += IntConstant(static_cast<intptr_t>(
2567 ClosureData::DefaultTypeArgumentsKind::kNeedsInstantiation));
2568 store_default += BranchIfEqual(&needs_instantiation, &can_share);
2569 store_default.current = can_share; // Check next case.
2570 store_default += LoadLocal(variable: default_tav_kind);
2571 TargetEntryInstr* can_share_instantiator;
2572 TargetEntryInstr* can_share_function;
2573 store_default += IntConstant(static_cast<intptr_t>(
2574 ClosureData::DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments));
2575 store_default += BranchIfEqual(&can_share_instantiator, &can_share_function);
2576
2577 Fragment instantiated(is_instantiated);
2578 instantiated += LoadLocal(variable: info.type_parameters);
2579 instantiated += LoadNativeField(Slot::TypeParameters_defaults());
2580 instantiated += StoreLocal(info.vars->function_type_args);
2581 instantiated += Drop();
2582 instantiated += Goto(done);
2583
2584 Fragment do_instantiation(needs_instantiation);
2585 // Load the instantiator type arguments.
2586 do_instantiation += LoadLocal(variable: info.instantiator_type_args);
2587 // Load the parent function type arguments. (No local function type arguments
2588 // can be used within the defaults).
2589 do_instantiation += LoadLocal(variable: info.parent_function_type_args);
2590 // Load the default type arguments to instantiate.
2591 do_instantiation += LoadLocal(variable: info.type_parameters);
2592 do_instantiation += LoadNativeField(Slot::TypeParameters_defaults());
2593 do_instantiation += InstantiateDynamicTypeArguments();
2594 do_instantiation += StoreLocal(info.vars->function_type_args);
2595 do_instantiation += Drop();
2596 do_instantiation += Goto(done);
2597
2598 Fragment share_instantiator(can_share_instantiator);
2599 share_instantiator += LoadLocal(variable: info.instantiator_type_args);
2600 share_instantiator += StoreLocal(info.vars->function_type_args);
2601 share_instantiator += Drop();
2602 share_instantiator += Goto(done);
2603
2604 Fragment share_function(can_share_function);
2605 // Since the defaults won't have local type parameters, these must all be
2606 // from the parent function type arguments, so we can just use it.
2607 share_function += LoadLocal(variable: info.parent_function_type_args);
2608 share_function += StoreLocal(info.vars->function_type_args);
2609 share_function += Drop();
2610 share_function += Goto(done);
2611
2612 store_default.current = done; // Return here after branching.
2613 store_default += DropTemporary(&default_tav_kind);
2614 store_default += DropTemporary(&closure_data);
2615
2616 Fragment store_delayed;
2617 store_delayed += LoadLocal(variable: info.closure);
2618 store_delayed += LoadNativeField(Slot::Closure_delayed_type_arguments());
2619 store_delayed += StoreLocal(info.vars->function_type_args);
2620 store_delayed += Drop();
2621
2622 // Use the delayed type args if present, else the default ones.
2623 return TestDelayedTypeArgs(info.closure, store_delayed, store_default);
2624}
2625
2626Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
2627 const ClosureCallInfo& info) {
2628 // When no named arguments are provided, we just need to check for possible
2629 // required named arguments.
2630 if (info.descriptor.NamedCount() == 0) {
2631 // No work to do if there are no possible required named parameters.
2632 if (!IG->use_strict_null_safety_checks()) {
2633 return Fragment();
2634 }
2635 // If the below changes, we can no longer assume that flag slots existing
2636 // means there are required parameters.
2637 static_assert(compiler::target::kNumParameterFlags == 1,
2638 "IL builder assumes only one flag bit per parameter");
2639 // No named args were provided, so check for any required named params.
2640 // Here, we assume that the only parameter flag saved is the required bit
2641 // for named parameters. If this changes, we'll need to check each flag
2642 // entry appropriately for any set required bits.
2643 Fragment has_any;
2644 has_any += LoadLocal(variable: info.num_opt_params);
2645 has_any += LoadLocal(variable: info.named_parameter_names);
2646 has_any += LoadNativeField(Slot::Array_length());
2647 TargetEntryInstr* no_required;
2648 TargetEntryInstr* has_required;
2649 has_any += BranchIfEqual(&no_required, &has_required);
2650
2651 Fragment(has_required) + Goto(info.throw_no_such_method);
2652
2653 return Fragment(has_any.entry, no_required);
2654 }
2655
2656 // Otherwise, we need to loop through the parameter names to check the names
2657 // of named arguments for validity (and possibly missing required ones).
2658 Fragment check_names;
2659 check_names += LoadLocal(variable: info.vars->current_param_index);
2660 LocalVariable* old_index = MakeTemporary("old_index"); // Read-only.
2661 check_names += LoadLocal(variable: info.vars->current_num_processed);
2662 LocalVariable* old_processed = MakeTemporary("old_processed"); // Read-only.
2663
2664 // Two local stack values (old_index, old_processed) to drop after rejoining
2665 // at done.
2666 JoinEntryInstr* loop = BuildJoinEntry();
2667 JoinEntryInstr* done = BuildJoinEntry();
2668
2669 check_names += IntConstant(0);
2670 check_names += StoreLocal(info.vars->current_num_processed);
2671 check_names += Drop();
2672 check_names += IntConstant(0);
2673 check_names += StoreLocal(info.vars->current_param_index);
2674 check_names += Drop();
2675 check_names += Goto(loop);
2676
2677 Fragment loop_check(loop);
2678 loop_check += LoadLocal(variable: info.vars->current_param_index);
2679 loop_check += LoadLocal(variable: info.num_opt_params);
2680 loop_check += SmiRelationalOp(Token::kLT);
2681 TargetEntryInstr* no_more;
2682 TargetEntryInstr* more;
2683 loop_check += BranchIfTrue(&more, &no_more);
2684
2685 Fragment(no_more) + Goto(done);
2686
2687 Fragment loop_body(more);
2688 // First load the name we need to check against.
2689 loop_body += LoadLocal(variable: info.named_parameter_names);
2690 loop_body += LoadLocal(variable: info.vars->current_param_index);
2691 loop_body += LoadIndexed(
2692 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2693 LocalVariable* param_name = MakeTemporary("param_name"); // Read only.
2694
2695 // One additional local value on the stack within the loop body (param_name)
2696 // that should be dropped after rejoining at loop_incr.
2697 JoinEntryInstr* loop_incr = BuildJoinEntry();
2698
2699 // Now iterate over the ArgumentsDescriptor names and check for a match.
2700 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
2701 const auto& name = String::ZoneHandle(Z, ptr: info.descriptor.NameAt(i));
2702 loop_body += Constant(name);
2703 loop_body += LoadLocal(variable: param_name);
2704 TargetEntryInstr* match;
2705 TargetEntryInstr* mismatch;
2706 loop_body += BranchIfEqual(&match, &mismatch);
2707 loop_body.current = mismatch;
2708
2709 // We have a match, so go to the next name after storing the corresponding
2710 // parameter index on the stack and incrementing the number of matched
2711 // arguments. (No need to check the required bit for provided parameters.)
2712 Fragment matched(match);
2713 matched += LoadLocal(variable: info.vars->current_param_index);
2714 matched += LoadLocal(variable: info.num_fixed_params);
2715 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
2716 matched += StoreLocal(info.vars->named_argument_parameter_indices.At(index: i));
2717 matched += Drop();
2718 matched += LoadLocal(variable: info.vars->current_num_processed);
2719 matched += IntConstant(1);
2720 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
2721 matched += StoreLocal(info.vars->current_num_processed);
2722 matched += Drop();
2723 matched += Goto(loop_incr);
2724 }
2725
2726 // None of the names in the arguments descriptor matched, so check if this
2727 // is a required parameter.
2728 loop_body += TestClosureFunctionNamedParameterRequired(
2729 info,
2730 /*set=*/Goto(info.throw_no_such_method),
2731 /*not_set=*/{});
2732
2733 loop_body += Goto(loop_incr);
2734
2735 Fragment incr_index(loop_incr);
2736 incr_index += DropTemporary(&param_name);
2737 incr_index += LoadLocal(variable: info.vars->current_param_index);
2738 incr_index += IntConstant(1);
2739 incr_index += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
2740 incr_index += StoreLocal(info.vars->current_param_index);
2741 incr_index += Drop();
2742 incr_index += Goto(loop);
2743
2744 Fragment check_processed(done);
2745 check_processed += LoadLocal(variable: info.vars->current_num_processed);
2746 check_processed += IntConstant(info.descriptor.NamedCount());
2747 TargetEntryInstr* all_processed;
2748 TargetEntryInstr* bad_name;
2749 check_processed += BranchIfEqual(&all_processed, &bad_name);
2750
2751 // Didn't find a matching parameter name for at least one argument name.
2752 Fragment(bad_name) + Goto(info.throw_no_such_method);
2753
2754 // Drop the temporaries at the end of the fragment.
2755 check_names.current = all_processed;
2756 check_names += LoadLocal(variable: old_processed);
2757 check_names += StoreLocal(info.vars->current_num_processed);
2758 check_names += Drop();
2759 check_names += DropTemporary(&old_processed);
2760 check_names += LoadLocal(variable: old_index);
2761 check_names += StoreLocal(info.vars->current_param_index);
2762 check_names += Drop();
2763 check_names += DropTemporary(&old_index);
2764 return check_names;
2765}
2766
2767Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
2768 const ClosureCallInfo& info) {
2769 Fragment check_entry;
2770 // We only need to check the length of any explicitly provided type arguments.
2771 if (info.descriptor.TypeArgsLen() > 0) {
2772 Fragment check_type_args_length;
2773 check_type_args_length += LoadLocal(variable: info.type_parameters);
2774 TargetEntryInstr* null;
2775 TargetEntryInstr* not_null;
2776 check_type_args_length += BranchIfNull(&null, &not_null);
2777 check_type_args_length.current = not_null; // Continue in non-error case.
2778 check_type_args_length += LoadLocal(variable: info.signature);
2779 check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
2780 UntaggedFunctionType::PackedNumTypeParameters>(
2781 Slot::FunctionType_packed_type_parameter_counts());
2782 check_type_args_length += IntConstant(info.descriptor.TypeArgsLen());
2783 TargetEntryInstr* equal;
2784 TargetEntryInstr* not_equal;
2785 check_type_args_length += BranchIfEqual(&equal, &not_equal);
2786 check_type_args_length.current = equal; // Continue in non-error case.
2787
2788 // The function is not generic.
2789 Fragment(null) + Goto(info.throw_no_such_method);
2790
2791 // An incorrect number of type arguments were passed.
2792 Fragment(not_equal) + Goto(info.throw_no_such_method);
2793
2794 // Type arguments should not be provided if there are delayed type
2795 // arguments, as then the closure itself is not generic.
2796 check_entry += TestDelayedTypeArgs(
2797 closure: info.closure, /*present=*/Goto(info.throw_no_such_method),
2798 /*absent=*/check_type_args_length);
2799 }
2800
2801 check_entry += LoadLocal(variable: info.has_named_params);
2802 TargetEntryInstr* has_named;
2803 TargetEntryInstr* has_positional;
2804 check_entry += BranchIfTrue(&has_named, &has_positional);
2805 JoinEntryInstr* join_after_optional = BuildJoinEntry();
2806 check_entry.current = join_after_optional;
2807
2808 if (info.descriptor.NamedCount() > 0) {
2809 // No reason to continue checking, as this function doesn't take named args.
2810 Fragment(has_positional) + Goto(info.throw_no_such_method);
2811 } else {
2812 Fragment check_pos(has_positional);
2813 check_pos += LoadLocal(variable: info.num_fixed_params);
2814 check_pos += IntConstant(info.descriptor.PositionalCount());
2815 check_pos += SmiRelationalOp(Token::kLTE);
2816 TargetEntryInstr* enough;
2817 TargetEntryInstr* too_few;
2818 check_pos += BranchIfTrue(&enough, &too_few);
2819 check_pos.current = enough;
2820
2821 Fragment(too_few) + Goto(info.throw_no_such_method);
2822
2823 check_pos += IntConstant(info.descriptor.PositionalCount());
2824 check_pos += LoadLocal(variable: info.num_max_params);
2825 check_pos += SmiRelationalOp(Token::kLTE);
2826 TargetEntryInstr* valid;
2827 TargetEntryInstr* too_many;
2828 check_pos += BranchIfTrue(&valid, &too_many);
2829 check_pos.current = valid;
2830
2831 Fragment(too_many) + Goto(info.throw_no_such_method);
2832
2833 check_pos += Goto(join_after_optional);
2834 }
2835
2836 Fragment check_named(has_named);
2837
2838 TargetEntryInstr* same;
2839 TargetEntryInstr* different;
2840 check_named += LoadLocal(variable: info.num_fixed_params);
2841 check_named += IntConstant(info.descriptor.PositionalCount());
2842 check_named += BranchIfEqual(&same, &different);
2843 check_named.current = same;
2844
2845 Fragment(different) + Goto(info.throw_no_such_method);
2846
2847 if (info.descriptor.NamedCount() > 0) {
2848 check_named += IntConstant(info.descriptor.NamedCount());
2849 check_named += LoadLocal(variable: info.num_opt_params);
2850 check_named += SmiRelationalOp(Token::kLTE);
2851 TargetEntryInstr* valid;
2852 TargetEntryInstr* too_many;
2853 check_named += BranchIfTrue(&valid, &too_many);
2854 check_named.current = valid;
2855
2856 Fragment(too_many) + Goto(info.throw_no_such_method);
2857 }
2858
2859 // Check the names for optional arguments. If applicable, also check that all
2860 // required named parameters are provided.
2861 check_named += BuildClosureCallNamedArgumentsCheck(info);
2862 check_named += Goto(join_after_optional);
2863
2864 check_entry.current = join_after_optional;
2865 return check_entry;
2866}
2867
2868Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
2869 const ClosureCallInfo& info) {
2870 JoinEntryInstr* done = BuildJoinEntry();
2871 JoinEntryInstr* loop = BuildJoinEntry();
2872
2873 // We assume that the value stored in :t_type_parameters is not null (i.e.,
2874 // the function stored in :t_function is generic).
2875 Fragment loop_init;
2876
2877 // A null bounds vector represents a vector of dynamic and no check is needed.
2878 loop_init += LoadLocal(variable: info.type_parameters);
2879 loop_init += LoadNativeField(Slot::TypeParameters_bounds());
2880 TargetEntryInstr* null_bounds;
2881 TargetEntryInstr* non_null_bounds;
2882 loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
2883
2884 Fragment(null_bounds) + Goto(done);
2885
2886 loop_init.current = non_null_bounds;
2887 // Loop over the type parameters array.
2888 loop_init += IntConstant(0);
2889 loop_init += StoreLocal(info.vars->current_param_index);
2890 loop_init += Drop();
2891 loop_init += Goto(loop);
2892
2893 Fragment loop_check(loop);
2894 loop_check += LoadLocal(variable: info.vars->current_param_index);
2895 loop_check += LoadLocal(variable: info.num_type_parameters);
2896 loop_check += SmiRelationalOp(Token::kLT);
2897 TargetEntryInstr* more;
2898 TargetEntryInstr* no_more;
2899 loop_check += BranchIfTrue(&more, &no_more);
2900
2901 Fragment(no_more) + Goto(done);
2902
2903 Fragment loop_test_flag(more);
2904 JoinEntryInstr* next = BuildJoinEntry();
2905 JoinEntryInstr* check = BuildJoinEntry();
2906 loop_test_flag += LoadLocal(variable: info.type_parameter_flags);
2907 TargetEntryInstr* null_flags;
2908 TargetEntryInstr* non_null_flags;
2909 loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
2910
2911 Fragment(null_flags) + Goto(check); // Check type if null (non-covariant).
2912
2913 loop_test_flag.current = non_null_flags; // Test flags if not null.
2914 loop_test_flag += LoadLocal(variable: info.type_parameter_flags);
2915 loop_test_flag += LoadLocal(variable: info.vars->current_param_index);
2916 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
2917 loop_test_flag += SmiBinaryOp(Token::kSHR);
2918 loop_test_flag += LoadIndexed(
2919 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2920 loop_test_flag += LoadLocal(variable: info.vars->current_param_index);
2921 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
2922 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
2923 loop_test_flag += SmiBinaryOp(Token::kSHR);
2924 loop_test_flag += IntConstant(1);
2925 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
2926 loop_test_flag += IntConstant(0);
2927 TargetEntryInstr* is_noncovariant;
2928 TargetEntryInstr* is_covariant;
2929 loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
2930
2931 Fragment(is_covariant) + Goto(next); // Continue if covariant.
2932 Fragment(is_noncovariant) + Goto(check); // Check type if non-covariant.
2933
2934 Fragment loop_prep_type_param(check);
2935 JoinEntryInstr* dynamic_type_param = BuildJoinEntry();
2936 JoinEntryInstr* call = BuildJoinEntry();
2937
2938 // Load type argument already stored in function_type_args if non null.
2939 loop_prep_type_param += LoadLocal(variable: info.vars->function_type_args);
2940 TargetEntryInstr* null_ftav;
2941 TargetEntryInstr* non_null_ftav;
2942 loop_prep_type_param += BranchIfNull(&null_ftav, &non_null_ftav);
2943
2944 Fragment(null_ftav) + Goto(dynamic_type_param);
2945
2946 loop_prep_type_param.current = non_null_ftav;
2947 loop_prep_type_param += LoadLocal(variable: info.vars->function_type_args);
2948 loop_prep_type_param += LoadLocal(variable: info.vars->current_param_index);
2949 loop_prep_type_param += LoadLocal(variable: info.num_parent_type_args);
2950 loop_prep_type_param += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
2951 loop_prep_type_param += LoadIndexed(
2952 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2953 loop_prep_type_param += StoreLocal(info.vars->current_type_param);
2954 loop_prep_type_param += Drop();
2955 loop_prep_type_param += Goto(call);
2956
2957 Fragment loop_dynamic_type_param(dynamic_type_param);
2958 // If function_type_args is null, the instantiated type param is dynamic.
2959 loop_dynamic_type_param += Constant(Type::ZoneHandle(ptr: Type::DynamicType()));
2960 loop_dynamic_type_param += StoreLocal(info.vars->current_type_param);
2961 loop_dynamic_type_param += Drop();
2962 loop_dynamic_type_param += Goto(call);
2963
2964 Fragment loop_call_check(call);
2965 // Load instantiators.
2966 loop_call_check += LoadLocal(variable: info.instantiator_type_args);
2967 loop_call_check += LoadLocal(variable: info.vars->function_type_args);
2968 // Load instantiated type parameter.
2969 loop_call_check += LoadLocal(variable: info.vars->current_type_param);
2970 // Load bound from type parameters.
2971 loop_call_check += LoadLocal(variable: info.type_parameters);
2972 loop_call_check += LoadNativeField(Slot::TypeParameters_bounds());
2973 loop_call_check += LoadLocal(variable: info.vars->current_param_index);
2974 loop_call_check += LoadIndexed(
2975 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2976 // Load (canonicalized) name of type parameter in signature.
2977 loop_call_check += LoadLocal(variable: info.type_parameters);
2978 loop_call_check += LoadNativeField(Slot::TypeParameters_names());
2979 loop_call_check += LoadLocal(variable: info.vars->current_param_index);
2980 loop_call_check += LoadIndexed(
2981 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2982 // Assert that the passed-in type argument is consistent with the bound of
2983 // the corresponding type parameter.
2984 loop_call_check += AssertSubtype(position: TokenPosition::kNoSource);
2985 loop_call_check += Goto(next);
2986
2987 Fragment loop_incr(next);
2988 loop_incr += LoadLocal(variable: info.vars->current_param_index);
2989 loop_incr += IntConstant(1);
2990 loop_incr += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
2991 loop_incr += StoreLocal(info.vars->current_param_index);
2992 loop_incr += Drop();
2993 loop_incr += Goto(loop);
2994
2995 return Fragment(loop_init.entry, done);
2996}
2997
2998Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
2999 const ClosureCallInfo& info,
3000 LocalVariable* param_index,
3001 intptr_t arg_index,
3002 const String& arg_name) {
3003 Fragment instructions;
3004
3005 // Load value.
3006 instructions += LoadLocal(variable: parsed_function_->ParameterVariable(i: arg_index));
3007 // Load destination type.
3008 instructions += LoadLocal(variable: info.parameter_types);
3009 instructions += LoadLocal(variable: param_index);
3010 instructions += LoadIndexed(
3011 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3012 // Load instantiator type arguments.
3013 instructions += LoadLocal(variable: info.instantiator_type_args);
3014 // Load the full set of function type arguments.
3015 instructions += LoadLocal(variable: info.vars->function_type_args);
3016 // Check that the value has the right type.
3017 instructions += AssertAssignable(TokenPosition::kNoSource, arg_name,
3018 AssertAssignableInstr::kParameterCheck);
3019 // Make sure to store the result to keep data dependencies accurate.
3020 instructions += StoreLocal(parsed_function_->ParameterVariable(i: arg_index));
3021 instructions += Drop();
3022
3023 return instructions;
3024}
3025
3026Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
3027 const ClosureCallInfo& info) {
3028 Fragment instructions;
3029
3030 // Only check explicit arguments (i.e., skip the receiver), as the receiver
3031 // is always assignable to its type (stored as dynamic).
3032 for (intptr_t i = 1; i < info.descriptor.PositionalCount(); i++) {
3033 instructions += IntConstant(i);
3034 LocalVariable* param_index = MakeTemporary("param_index");
3035 // We don't have a compile-time name, so this symbol signals the runtime
3036 // that it should recreate the type check using info from the stack.
3037 instructions += BuildClosureCallArgumentTypeCheck(
3038 info, param_index, arg_index: i, arg_name: Symbols::dynamic_assert_assignable_stc_check());
3039 instructions += DropTemporary(&param_index);
3040 }
3041
3042 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3043 const intptr_t arg_index = info.descriptor.PositionAt(i);
3044 auto const param_index = info.vars->named_argument_parameter_indices.At(index: i);
3045 // We have a compile-time name available, but we still want the runtime to
3046 // detect that the generated AssertAssignable instruction is dynamic.
3047 instructions += BuildClosureCallArgumentTypeCheck(
3048 info, param_index, arg_index,
3049 arg_name: Symbols::dynamic_assert_assignable_stc_check());
3050 }
3051
3052 return instructions;
3053}
3054
3055Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
3056 LocalVariable* closure) {
3057 ClosureCallInfo info(closure, BuildThrowNoSuchMethod(),
3058 saved_args_desc_array(),
3059 parsed_function_->dynamic_closure_call_vars());
3060
3061 Fragment body;
3062 body += LoadLocal(variable: info.closure);
3063 body += LoadNativeField(Slot::Closure_function());
3064 body += LoadNativeField(Slot::Function_signature());
3065 info.signature = MakeTemporary("signature");
3066
3067 body += LoadLocal(variable: info.signature);
3068 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3069 FunctionType::PackedNumFixedParameters>(
3070 Slot::FunctionType_packed_parameter_counts());
3071 info.num_fixed_params = MakeTemporary("num_fixed_params");
3072
3073 body += LoadLocal(variable: info.signature);
3074 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3075 FunctionType::PackedNumOptionalParameters>(
3076 Slot::FunctionType_packed_parameter_counts());
3077 info.num_opt_params = MakeTemporary("num_opt_params");
3078
3079 body += LoadLocal(variable: info.num_fixed_params);
3080 body += LoadLocal(variable: info.num_opt_params);
3081 body += SmiBinaryOp(Token::kADD);
3082 info.num_max_params = MakeTemporary("num_max_params");
3083
3084 body += LoadLocal(variable: info.signature);
3085 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3086 FunctionType::PackedHasNamedOptionalParameters>(
3087 Slot::FunctionType_packed_parameter_counts());
3088
3089 body += IntConstant(0);
3090 body += StrictCompare(Token::kNE_STRICT);
3091 info.has_named_params = MakeTemporary("has_named_params");
3092
3093 body += LoadLocal(variable: info.signature);
3094 body += LoadNativeField(Slot::FunctionType_named_parameter_names());
3095 info.named_parameter_names = MakeTemporary("named_parameter_names");
3096
3097 body += LoadLocal(variable: info.signature);
3098 body += LoadNativeField(Slot::FunctionType_parameter_types());
3099 info.parameter_types = MakeTemporary("parameter_types");
3100
3101 body += LoadLocal(variable: info.signature);
3102 body += LoadNativeField(Slot::FunctionType_type_parameters());
3103 info.type_parameters = MakeTemporary("type_parameters");
3104
3105 body += LoadLocal(variable: info.closure);
3106 body += LoadNativeField(Slot::Closure_instantiator_type_arguments());
3107 info.instantiator_type_args = MakeTemporary("instantiator_type_args");
3108
3109 body += LoadLocal(variable: info.closure);
3110 body += LoadNativeField(Slot::Closure_function_type_arguments());
3111 info.parent_function_type_args = MakeTemporary("parent_function_type_args");
3112
3113 // At this point, all the read-only temporaries stored in the ClosureCallInfo
3114 // should be either loaded or still nullptr, if not needed for this function.
3115 // Now we check that the arguments to the closure call have the right shape.
3116 body += BuildClosureCallArgumentsValidCheck(info);
3117
3118 // If the closure function is not generic, there are no local function type
3119 // args. Thus, use whatever was stored for the parent function type arguments,
3120 // which has already been checked against any parent type parameter bounds.
3121 Fragment not_generic;
3122 not_generic += LoadLocal(variable: info.parent_function_type_args);
3123 not_generic += StoreLocal(info.vars->function_type_args);
3124 not_generic += Drop();
3125
3126 // If the closure function is generic, then we first need to calculate the
3127 // full set of function type arguments, then check the local function type
3128 // arguments against the closure function's type parameter bounds.
3129 Fragment generic;
3130 // Calculate the number of parent type arguments and store them in
3131 // info.num_parent_type_args.
3132 generic += LoadLocal(variable: info.signature);
3133 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3134 UntaggedFunctionType::PackedNumParentTypeArguments>(
3135 Slot::FunctionType_packed_type_parameter_counts());
3136 info.num_parent_type_args = MakeTemporary("num_parent_type_args");
3137
3138 // Hoist number of type parameters.
3139 generic += LoadLocal(variable: info.signature);
3140 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3141 UntaggedFunctionType::PackedNumTypeParameters>(
3142 Slot::FunctionType_packed_type_parameter_counts());
3143 info.num_type_parameters = MakeTemporary("num_type_parameters");
3144
3145 // Hoist type parameter flags.
3146 generic += LoadLocal(variable: info.type_parameters);
3147 generic += LoadNativeField(Slot::TypeParameters_flags());
3148 info.type_parameter_flags = MakeTemporary("type_parameter_flags");
3149
3150 // Calculate the local function type arguments and store them in
3151 // info.vars->function_type_args.
3152 generic += BuildClosureCallDefaultTypeHandling(info);
3153
3154 // Load the local function type args.
3155 generic += LoadLocal(variable: info.vars->function_type_args);
3156 // Load the parent function type args.
3157 generic += LoadLocal(variable: info.parent_function_type_args);
3158 // Load the number of parent type parameters.
3159 generic += LoadLocal(variable: info.num_parent_type_args);
3160 // Load the number of total type parameters.
3161 generic += LoadLocal(variable: info.num_parent_type_args);
3162 generic += LoadLocal(variable: info.num_type_parameters);
3163 generic += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3164
3165 // Call the static function for prepending type arguments.
3166 generic += StaticCall(position: TokenPosition::kNoSource,
3167 target: PrependTypeArgumentsFunction(), argument_count: 4, rebind_rule: ICData::kStatic);
3168 generic += StoreLocal(info.vars->function_type_args);
3169 generic += Drop();
3170
3171 // Now that we have the full set of function type arguments, check them
3172 // against the type parameter bounds. However, if the local function type
3173 // arguments are delayed type arguments, they have already been checked by
3174 // the type system and need not be checked again at the call site.
3175 auto const check_bounds = BuildClosureCallTypeArgumentsTypeCheck(info);
3176 if (FLAG_eliminate_type_checks) {
3177 generic += TestDelayedTypeArgs(closure: info.closure, /*present=*/{},
3178 /*absent=*/check_bounds);
3179 } else {
3180 generic += check_bounds;
3181 }
3182 generic += DropTemporary(&info.type_parameter_flags);
3183 generic += DropTemporary(&info.num_type_parameters);
3184 generic += DropTemporary(&info.num_parent_type_args);
3185
3186 // Call the appropriate fragment for setting up the function type arguments
3187 // and performing any needed type argument checking.
3188 body += TestClosureFunctionGeneric(info, generic, not_generic);
3189
3190 // Check that the values provided as arguments are assignable to the types
3191 // of the corresponding closure function parameters.
3192 body += BuildClosureCallArgumentTypeChecks(info);
3193
3194 // Drop all the read-only temporaries at the end of the fragment.
3195 body += DropTemporary(&info.parent_function_type_args);
3196 body += DropTemporary(&info.instantiator_type_args);
3197 body += DropTemporary(&info.type_parameters);
3198 body += DropTemporary(&info.parameter_types);
3199 body += DropTemporary(&info.named_parameter_names);
3200 body += DropTemporary(&info.has_named_params);
3201 body += DropTemporary(&info.num_max_params);
3202 body += DropTemporary(&info.num_opt_params);
3203 body += DropTemporary(&info.num_fixed_params);
3204 body += DropTemporary(&info.signature);
3205
3206 return body;
3207}
3208
3209FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
3210 const Function& function) {
3211 const ArgumentsDescriptor descriptor(saved_args_desc_array());
3212 // Find the name of the field we should dispatch to.
3213 const Class& owner = Class::Handle(Z, ptr: function.Owner());
3214 ASSERT(!owner.IsNull());
3215 auto& field_name = String::Handle(Z, ptr: function.name());
3216 // If the field name has a dyn: tag, then remove it. We don't add dynamic
3217 // invocation forwarders for field getters used for invoking, we just use
3218 // the tag in the name of the invoke field dispatcher to detect dynamic calls.
3219 const bool is_dynamic_call =
3220 Function::IsDynamicInvocationForwarderName(name: field_name);
3221 if (is_dynamic_call) {
3222 field_name = Function::DemangleDynamicInvocationForwarderName(name: field_name);
3223 }
3224 const String& getter_name = String::ZoneHandle(
3225 Z, ptr: Symbols::New(thread: thread_,
3226 str: String::Handle(Z, ptr: Field::GetterSymbol(field_name))));
3227
3228 // Determine if this is `class Closure { get call => this; }`
3229 const Class& closure_class =
3230 Class::Handle(Z, IG->object_store()->closure_class());
3231 const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
3232 field_name.Equals(str: Symbols::call());
3233
3234 graph_entry_ =
3235 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3236
3237 auto normal_entry = BuildFunctionEntry(graph_entry_);
3238 graph_entry_->set_normal_entry(normal_entry);
3239
3240 PrologueInfo prologue_info(-1, -1);
3241 BlockEntryInstr* instruction_cursor =
3242 BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);
3243
3244 Fragment body(instruction_cursor);
3245 body += CheckStackOverflowInPrologue(position: function.token_pos());
3246
3247 // Build any dynamic closure call checks before pushing arguments to the
3248 // final call on the stack to make debugging easier.
3249 LocalVariable* closure = nullptr;
3250 if (is_closure_call) {
3251 closure = parsed_function_->ParameterVariable(i: 0);
3252 if (is_dynamic_call) {
3253 // The whole reason for making this invoke field dispatcher is that
3254 // this closure call needs checking, so we shouldn't inline a call to an
3255 // unchecked entry that can't tail call NSM.
3256 InlineBailout(
3257 "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
3258
3259 body += BuildDynamicClosureCallChecks(closure);
3260 }
3261 }
3262
3263 if (descriptor.TypeArgsLen() > 0) {
3264 LocalVariable* type_args = parsed_function_->function_type_arguments();
3265 ASSERT(type_args != nullptr);
3266 body += LoadLocal(variable: type_args);
3267 }
3268
3269 if (is_closure_call) {
3270 // The closure itself is the first argument.
3271 body += LoadLocal(variable: closure);
3272 } else {
3273 // Invoke the getter to get the field value.
3274 body += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));
3275 const intptr_t kTypeArgsLen = 0;
3276 const intptr_t kNumArgsChecked = 1;
3277 body += InstanceCall(position: TokenPosition::kMinSource, name: getter_name, kind: Token::kGET,
3278 type_args_len: kTypeArgsLen, argument_count: 1, argument_names: Array::null_array(), checked_argument_count: kNumArgsChecked);
3279 }
3280
3281 // Push all arguments onto the stack.
3282 for (intptr_t pos = 1; pos < descriptor.Count(); pos++) {
3283 body += LoadLocal(variable: parsed_function_->ParameterVariable(i: pos));
3284 }
3285
3286 // Construct argument names array if necessary.
3287 const Array* argument_names = &Object::null_array();
3288 if (descriptor.NamedCount() > 0) {
3289 const auto& array_handle =
3290 Array::ZoneHandle(Z, ptr: Array::New(len: descriptor.NamedCount(), space: Heap::kNew));
3291 String& string_handle = String::Handle(Z);
3292 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
3293 const intptr_t named_arg_index =
3294 descriptor.PositionAt(i) - descriptor.PositionalCount();
3295 string_handle = descriptor.NameAt(i);
3296 array_handle.SetAt(named_arg_index, string_handle);
3297 }
3298 argument_names = &array_handle;
3299 }
3300
3301 if (is_closure_call) {
3302 body += LoadLocal(variable: closure);
3303 if (!FLAG_precompiled_mode) {
3304 // Lookup the function in the closure.
3305 body += LoadNativeField(Slot::Closure_function());
3306 }
3307 body += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
3308 descriptor.TypeArgsLen(), descriptor.Count(),
3309 *argument_names);
3310 } else {
3311 const intptr_t kNumArgsChecked = 1;
3312 body +=
3313 InstanceCall(position: TokenPosition::kMinSource,
3314 name: is_dynamic_call ? Symbols::DynamicCall() : Symbols::call(),
3315 kind: Token::kILLEGAL, type_args_len: descriptor.TypeArgsLen(),
3316 argument_count: descriptor.Count(), argument_names: *argument_names, checked_argument_count: kNumArgsChecked);
3317 }
3318
3319 body += Return(position: TokenPosition::kNoSource);
3320
3321 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3322 prologue_info);
3323}
3324
3325FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
3326 const Function& function,
3327 bool is_implicit_closure_function,
3328 bool throw_no_such_method_error) {
3329 graph_entry_ =
3330 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3331
3332 auto normal_entry = BuildFunctionEntry(graph_entry_);
3333 graph_entry_->set_normal_entry(normal_entry);
3334
3335 PrologueInfo prologue_info(-1, -1);
3336 BlockEntryInstr* instruction_cursor =
3337 BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);
3338
3339 Fragment body(instruction_cursor);
3340 body += CheckStackOverflowInPrologue(position: function.token_pos());
3341
3342 // If we are inside the tearoff wrapper function (implicit closure), we need
3343 // to extract the receiver from the context. We just replace it directly on
3344 // the stack to simplify the rest of the code.
3345 if (is_implicit_closure_function && !function.is_static()) {
3346 if (parsed_function_->has_arg_desc_var()) {
3347 body += LoadArgDescriptor();
3348 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3349 } else {
3350 ASSERT(function.NumOptionalParameters() == 0);
3351 body += IntConstant(function.NumParameters());
3352 }
3353 body += LoadLocal(variable: parsed_function_->current_context_var());
3354 body += LoadNativeField(Slot::GetContextVariableSlotFor(
3355 thread: thread_, var: *parsed_function_->receiver_var()));
3356 body += StoreFpRelativeSlot(
3357 kWordSize * compiler::target::frame_layout.param_end_from_fp);
3358 }
3359
3360 if (function.NeedsTypeArgumentTypeChecks()) {
3361 BuildTypeArgumentTypeChecks(mode: TypeChecksToBuild::kCheckAllTypeParameterBounds,
3362 implicit_checks: &body);
3363 }
3364
3365 if (function.NeedsArgumentTypeChecks()) {
3366 BuildArgumentTypeChecks(explicit_checks: &body, implicit_checks: &body, implicit_redefinitions: nullptr);
3367 }
3368
3369 body += MakeTemp();
3370 LocalVariable* result = MakeTemporary();
3371
3372 // Do "++argument_count" if any type arguments were passed.
3373 LocalVariable* argument_count_var = parsed_function_->expression_temp_var();
3374 body += IntConstant(0);
3375 body += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3376 body += Drop();
3377 if (function.IsGeneric()) {
3378 Fragment then;
3379 Fragment otherwise;
3380 otherwise += IntConstant(1);
3381 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3382 otherwise += Drop();
3383 body += TestAnyTypeArgs(then, otherwise);
3384 }
3385
3386 if (function.HasOptionalParameters()) {
3387 body += LoadArgDescriptor();
3388 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3389 } else {
3390 body += IntConstant(function.NumParameters());
3391 }
3392 body += LoadLocal(variable: argument_count_var);
3393 body += SmiBinaryOp(Token::kADD, /* truncate= */ true);
3394 LocalVariable* argument_count = MakeTemporary();
3395
3396 // We are generating code like the following:
3397 //
3398 // var arguments = new Array<dynamic>(argument_count);
3399 //
3400 // int i = 0;
3401 // if (any type arguments are passed) {
3402 // arguments[0] = function_type_arguments;
3403 // ++i;
3404 // }
3405 //
3406 // for (; i < argument_count; ++i) {
3407 // arguments[i] = LoadFpRelativeSlot(
3408 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3409 // }
3410 body += Constant(TypeArguments::ZoneHandle(Z, ptr: TypeArguments::null()));
3411 body += LoadLocal(variable: argument_count);
3412 body += CreateArray();
3413 LocalVariable* arguments = MakeTemporary();
3414
3415 {
3416 // int i = 0
3417 LocalVariable* index = parsed_function_->expression_temp_var();
3418 body += IntConstant(0);
3419 body += StoreLocal(TokenPosition::kNoSource, index);
3420 body += Drop();
3421
3422 // if (any type arguments are passed) {
3423 // arguments[0] = function_type_arguments;
3424 // i = 1;
3425 // }
3426 if (function.IsGeneric()) {
3427 Fragment store;
3428 store += LoadLocal(variable: arguments);
3429 store += IntConstant(0);
3430 store += LoadFunctionTypeArguments();
3431 store += StoreIndexed(kArrayCid);
3432 store += IntConstant(1);
3433 store += StoreLocal(TokenPosition::kNoSource, index);
3434 store += Drop();
3435 body += TestAnyTypeArgs(store, Fragment());
3436 }
3437
3438 TargetEntryInstr* body_entry;
3439 TargetEntryInstr* loop_exit;
3440
3441 Fragment condition;
3442 // i < argument_count
3443 condition += LoadLocal(variable: index);
3444 condition += LoadLocal(variable: argument_count);
3445 condition += SmiRelationalOp(Token::kLT);
3446 condition += BranchIfTrue(&body_entry, &loop_exit, /*negate=*/false);
3447
3448 Fragment loop_body(body_entry);
3449
3450 // arguments[i] = LoadFpRelativeSlot(
3451 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3452 loop_body += LoadLocal(variable: arguments);
3453 loop_body += LoadLocal(variable: index);
3454 loop_body += LoadLocal(variable: argument_count);
3455 loop_body += LoadLocal(variable: index);
3456 loop_body += SmiBinaryOp(Token::kSUB, /*truncate=*/true);
3457 loop_body +=
3458 LoadFpRelativeSlot(compiler::target::kWordSize *
3459 compiler::target::frame_layout.param_end_from_fp,
3460 CompileType::Dynamic());
3461 loop_body += StoreIndexed(kArrayCid);
3462
3463 // ++i
3464 loop_body += LoadLocal(variable: index);
3465 loop_body += IntConstant(1);
3466 loop_body += SmiBinaryOp(Token::kADD, /*truncate=*/true);
3467 loop_body += StoreLocal(TokenPosition::kNoSource, index);
3468 loop_body += Drop();
3469
3470 JoinEntryInstr* join = BuildJoinEntry();
3471 loop_body += Goto(join);
3472
3473 Fragment loop(join);
3474 loop += condition;
3475
3476 Instruction* entry =
3477 new (Z) GotoInstr(join, CompilerState::Current().GetNextDeoptId());
3478 body += Fragment(entry, loop_exit);
3479 }
3480
3481 // Load receiver.
3482 if (is_implicit_closure_function) {
3483 if (throw_no_such_method_error) {
3484 const Function& parent =
3485 Function::ZoneHandle(Z, ptr: function.parent_function());
3486 const Class& owner = Class::ZoneHandle(Z, ptr: parent.Owner());
3487 AbstractType& type = AbstractType::ZoneHandle(Z);
3488 type = Type::New(clazz: owner, arguments: Object::null_type_arguments());
3489 type = ClassFinalizer::FinalizeType(type);
3490 body += Constant(type);
3491 } else {
3492 body += LoadLocal(variable: parsed_function_->current_context_var());
3493 body += LoadNativeField(Slot::GetContextVariableSlotFor(
3494 thread: thread_, var: *parsed_function_->receiver_var()));
3495 }
3496 } else {
3497 body += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));
3498 }
3499
3500 body += Constant(String::ZoneHandle(Z, ptr: function.name()));
3501
3502 if (!parsed_function_->has_arg_desc_var()) {
3503 // If there is no variable for the arguments descriptor (this function's
3504 // signature doesn't require it), then we need to create one.
3505 Array& args_desc = Array::ZoneHandle(
3506 Z, ptr: ArgumentsDescriptor::NewBoxed(type_args_len: 0, num_arguments: function.NumParameters()));
3507 body += Constant(args_desc);
3508 } else {
3509 body += LoadArgDescriptor();
3510 }
3511
3512 body += LoadLocal(variable: arguments);
3513
3514 if (throw_no_such_method_error) {
3515 const Function& parent =
3516 Function::ZoneHandle(Z, ptr: function.parent_function());
3517 const Class& owner = Class::ZoneHandle(Z, ptr: parent.Owner());
3518 InvocationMirror::Level im_level = owner.IsTopLevel()
3519 ? InvocationMirror::kTopLevel
3520 : InvocationMirror::kStatic;
3521 InvocationMirror::Kind im_kind;
3522 if (function.IsImplicitGetterFunction() || function.IsGetterFunction()) {
3523 im_kind = InvocationMirror::kGetter;
3524 } else if (function.IsImplicitSetterFunction() ||
3525 function.IsSetterFunction()) {
3526 im_kind = InvocationMirror::kSetter;
3527 } else {
3528 im_kind = InvocationMirror::kMethod;
3529 }
3530 body += IntConstant(InvocationMirror::EncodeType(level: im_level, kind: im_kind));
3531 } else {
3532 body += NullConstant();
3533 }
3534
3535 // Push the number of delayed type arguments.
3536 if (function.IsClosureFunction()) {
3537 LocalVariable* closure = parsed_function_->ParameterVariable(i: 0);
3538 Fragment then;
3539 then += IntConstant(function.NumTypeParameters());
3540 then += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3541 then += Drop();
3542 Fragment otherwise;
3543 otherwise += IntConstant(0);
3544 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3545 otherwise += Drop();
3546 body += TestDelayedTypeArgs(closure, then, otherwise);
3547 body += LoadLocal(variable: argument_count_var);
3548 } else {
3549 body += IntConstant(0);
3550 }
3551
3552 const Class& mirror_class =
3553 Class::Handle(Z, ptr: Library::LookupCoreClass(class_name: Symbols::InvocationMirror()));
3554 ASSERT(!mirror_class.IsNull());
3555 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
3556 ASSERT(error == Error::null());
3557 const Function& allocation_function = Function::ZoneHandle(
3558 Z, ptr: mirror_class.LookupStaticFunction(name: Library::PrivateCoreLibName(
3559 member: Symbols::AllocateInvocationMirrorForClosure())));
3560 ASSERT(!allocation_function.IsNull());
3561 body += StaticCall(position: TokenPosition::kMinSource, target: allocation_function,
3562 /* argument_count = */ 5, rebind_rule: ICData::kStatic);
3563
3564 if (throw_no_such_method_error) {
3565 const Class& klass = Class::ZoneHandle(
3566 Z, ptr: Library::LookupCoreClass(class_name: Symbols::NoSuchMethodError()));
3567 ASSERT(!klass.IsNull());
3568 const auto& error = klass.EnsureIsFinalized(H.thread());
3569 ASSERT(error == Error::null());
3570 const Function& throw_function = Function::ZoneHandle(
3571 Z,
3572 ptr: klass.LookupStaticFunctionAllowPrivate(name: Symbols::ThrowNewInvocation()));
3573 ASSERT(!throw_function.IsNull());
3574 body += StaticCall(position: TokenPosition::kNoSource, target: throw_function, argument_count: 2,
3575 rebind_rule: ICData::kStatic);
3576 } else {
3577 body += InstanceCall(
3578 position: TokenPosition::kNoSource, name: Symbols::NoSuchMethod(), kind: Token::kILLEGAL,
3579 /*type_args_len=*/0, /*argument_count=*/2, argument_names: Array::null_array(),
3580 /*checked_argument_count=*/1);
3581 }
3582 body += StoreLocal(TokenPosition::kNoSource, result);
3583 body += Drop();
3584
3585 body += Drop(); // arguments
3586 body += Drop(); // argument count
3587
3588 AbstractType& return_type = AbstractType::Handle(ptr: function.result_type());
3589 if (!return_type.IsTopTypeForSubtyping()) {
3590 body += AssertAssignableLoadTypeArguments(position: TokenPosition::kNoSource,
3591 dst_type: return_type, dst_name: Symbols::Empty());
3592 }
3593 body += Return(position: TokenPosition::kNoSource);
3594
3595 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3596 prologue_info);
3597}
3598
3599Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
3600 if (function.IsGeneric()) {
3601 auto& default_types =
3602 TypeArguments::ZoneHandle(Z, ptr: function.InstantiateToBounds(thread: thread_));
3603
3604 if (!default_types.IsNull()) {
3605 Fragment then;
3606 Fragment otherwise;
3607
3608 otherwise += TranslateInstantiatedTypeArguments(type_arguments: default_types);
3609 otherwise += StoreLocal(TokenPosition::kNoSource,
3610 parsed_function_->function_type_arguments());
3611 otherwise += Drop();
3612 return TestAnyTypeArgs(then, otherwise);
3613 }
3614 }
3615 return Fragment();
3616}
3617
3618FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
3619 Fragment shared_prologue_linked_in,
3620 Fragment skippable_checks,
3621 Fragment redefinitions_if_skipped,
3622 Fragment body) {
3623 ASSERT(shared_prologue_linked_in.entry == graph_entry_->normal_entry());
3624 ASSERT(parsed_function_->has_entry_points_temp_var());
3625 Instruction* prologue_start = shared_prologue_linked_in.entry->next();
3626
3627 auto* join_entry = BuildJoinEntry();
3628
3629 Fragment normal_entry(shared_prologue_linked_in.entry);
3630 normal_entry +=
3631 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
3632 normal_entry += StoreLocal(TokenPosition::kNoSource,
3633 parsed_function_->entry_points_temp_var());
3634 normal_entry += Drop();
3635 normal_entry += Goto(destination: join_entry);
3636
3637 auto* extra_target_entry = BuildFunctionEntry(graph_entry_);
3638 Fragment extra_entry(extra_target_entry);
3639 extra_entry += IntConstant(
3640 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
3641 extra_entry += StoreLocal(TokenPosition::kNoSource,
3642 parsed_function_->entry_points_temp_var());
3643 extra_entry += Drop();
3644 extra_entry += Goto(destination: join_entry);
3645
3646 if (prologue_start != nullptr) {
3647 join_entry->LinkTo(prologue_start);
3648 } else {
3649 // Prologue is empty.
3650 shared_prologue_linked_in.current = join_entry;
3651 }
3652
3653 TargetEntryInstr* do_checks;
3654 TargetEntryInstr* skip_checks;
3655 shared_prologue_linked_in +=
3656 LoadLocal(variable: parsed_function_->entry_points_temp_var());
3657 shared_prologue_linked_in += BuildEntryPointsIntrospection();
3658 shared_prologue_linked_in +=
3659 LoadLocal(variable: parsed_function_->entry_points_temp_var());
3660 shared_prologue_linked_in += IntConstant(
3661 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
3662 shared_prologue_linked_in +=
3663 BranchIfEqual(&skip_checks, &do_checks, /*negate=*/false);
3664
3665 JoinEntryInstr* rest_entry = BuildJoinEntry();
3666
3667 Fragment(do_checks) + skippable_checks + Goto(rest_entry);
3668 Fragment(skip_checks) + redefinitions_if_skipped + Goto(rest_entry);
3669 Fragment(rest_entry) + body;
3670
3671 return extra_target_entry;
3672}
3673
3674FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
3675 BlockEntryInstr* normal_entry,
3676 Fragment normal_prologue,
3677 Fragment extra_prologue,
3678 Fragment shared_prologue,
3679 Fragment body) {
3680 auto* join_entry = BuildJoinEntry();
3681 auto* extra_entry = BuildFunctionEntry(graph_entry_);
3682
3683 Fragment normal(normal_entry);
3684 normal += IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
3685 normal += BuildEntryPointsIntrospection();
3686 normal += normal_prologue;
3687 normal += Goto(destination: join_entry);
3688
3689 Fragment extra(extra_entry);
3690 extra +=
3691 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kSeparate));
3692 extra += BuildEntryPointsIntrospection();
3693 extra += extra_prologue;
3694 extra += Goto(destination: join_entry);
3695
3696 Fragment(join_entry) + shared_prologue + body;
3697 return extra_entry;
3698}
3699
3700FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
3701 const Function& function) {
3702 const Function& parent = Function::ZoneHandle(Z, ptr: function.parent_function());
3703 Function& target = Function::ZoneHandle(Z, ptr: function.ImplicitClosureTarget(Z));
3704
3705 if (target.IsNull() ||
3706 (parent.num_fixed_parameters() != target.num_fixed_parameters())) {
3707 return BuildGraphOfNoSuchMethodForwarder(function, is_implicit_closure_function: true,
3708 throw_no_such_method_error: parent.is_static());
3709 }
3710
3711 graph_entry_ =
3712 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3713
3714 auto normal_entry = BuildFunctionEntry(graph_entry_);
3715 graph_entry_->set_normal_entry(normal_entry);
3716
3717 PrologueInfo prologue_info(-1, -1);
3718 BlockEntryInstr* instruction_cursor =
3719 BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);
3720
3721 Fragment closure(instruction_cursor);
3722 closure += CheckStackOverflowInPrologue(position: function.token_pos());
3723 closure += BuildDefaultTypeHandling(function);
3724
3725 // For implicit closure functions, any non-covariant checks are either
3726 // performed by the type system or a dynamic invocation layer (dynamic closure
3727 // call dispatcher, mirror, etc.). Static targets never have covariant
3728 // arguments, and for non-static targets, they already perform the covariant
3729 // checks internally. Thus, no checks are needed and we just need to invoke
3730 // the target with the right receiver (unless static).
3731 //
3732 // TODO(dartbug.com/44195): Consider replacing the argument pushes + static
3733 // call with stack manipulation and a tail call instead.
3734
3735 intptr_t type_args_len = 0;
3736 if (function.IsGeneric()) {
3737 if (target.IsConstructor()) {
3738 const auto& result_type = AbstractType::Handle(Z, ptr: function.result_type());
3739 ASSERT(result_type.IsFinalized());
3740 // Instantiate a flattened type arguments vector which
3741 // includes type arguments corresponding to superclasses.
3742 // TranslateInstantiatedTypeArguments is smart enough to
3743 // avoid instantiation and reuse passed function type arguments
3744 // if there are no extra type arguments in the flattened vector.
3745 const auto& instantiated_type_arguments = TypeArguments::ZoneHandle(
3746 Z, ptr: Type::Cast(obj: result_type).GetInstanceTypeArguments(H.thread()));
3747 closure +=
3748 TranslateInstantiatedTypeArguments(type_arguments: instantiated_type_arguments);
3749 } else {
3750 type_args_len = function.NumTypeParameters();
3751 ASSERT(parsed_function_->function_type_arguments() != nullptr);
3752 closure += LoadLocal(variable: parsed_function_->function_type_arguments());
3753 }
3754 } else if (target.IsFactory()) {
3755 // Factories always take an extra implicit argument for
3756 // type arguments even if their classes don't have type parameters.
3757 closure += NullConstant();
3758 }
3759
3760 // Push receiver.
3761 if (target.IsGenerativeConstructor()) {
3762 const Class& cls = Class::ZoneHandle(Z, ptr: target.Owner());
3763 if (cls.NumTypeArguments() > 0) {
3764 if (!function.IsGeneric()) {
3765 closure += Constant(TypeArguments::ZoneHandle(
3766 Z, ptr: cls.GetDeclarationInstanceTypeArguments()));
3767 }
3768 closure += AllocateObject(function.token_pos(), cls, 1);
3769 } else {
3770 ASSERT(!function.IsGeneric());
3771 closure += AllocateObject(function.token_pos(), cls, 0);
3772 }
3773 LocalVariable* receiver = MakeTemporary();
3774 closure += LoadLocal(variable: receiver);
3775 } else if (!target.is_static()) {
3776 // The context has a fixed shape: a single variable which is the
3777 // closed-over receiver.
3778 closure += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));
3779 closure += LoadNativeField(Slot::Closure_context());
3780 closure += LoadNativeField(Slot::GetContextVariableSlotFor(
3781 thread: thread_, var: *parsed_function_->receiver_var()));
3782 }
3783
3784 closure += PushExplicitParameters(function);
3785
3786 // Forward parameters to the target.
3787 intptr_t argument_count = function.NumParameters() -
3788 function.NumImplicitParameters() +
3789 target.NumImplicitParameters();
3790 ASSERT(argument_count == target.NumParameters());
3791
3792 Array& argument_names =
3793 Array::ZoneHandle(Z, ptr: GetOptionalParameterNames(function));
3794
3795 closure += StaticCall(position: TokenPosition::kNoSource, target, argument_count,
3796 argument_names, rebind_rule: ICData::kNoRebind,
3797 /* result_type = */ nullptr, type_args_count: type_args_len);
3798
3799 if (target.IsGenerativeConstructor()) {
3800 // Drop result of constructor invocation, leave receiver
3801 // instance on the stack.
3802 closure += Drop();
3803 }
3804
3805 // Return the result.
3806 closure += Return(position: function.end_token_pos());
3807
3808 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3809 prologue_info);
3810}
3811
// Builds the flow graph for an implicit field getter/setter, or for a
// dynamic invocation forwarder whose target is one. Rather than calling the
// underlying accessor, the field load/store is inlined directly.
FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
    const Function& function) {
  ASSERT(function.IsImplicitGetterOrSetter() ||
         function.IsDynamicInvocationForwarder());

  // Instead of building a dynamic invocation forwarder that checks argument
  // type and then invokes original setter we simply generate the type check
  // and inlined field store. Scope builder takes care of setting correct
  // type check mode in this case.
  const auto& target = Function::Handle(
      Z, ptr: function.IsDynamicInvocationForwarder() ? function.ForwardingTarget()
                                                : function.ptr());
  ASSERT(target.IsImplicitGetterOrSetter());

  const bool is_method = !function.IsStaticFunction();
  const bool is_setter = target.IsImplicitSetterFunction();
  const bool is_getter = target.IsImplicitGetterFunction() ||
                         target.IsImplicitStaticGetterFunction();
  ASSERT(is_setter || is_getter);

  const auto& field = Field::ZoneHandle(Z, ptr: target.accessor_field());

  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);

  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);

  Fragment body(normal_entry);
  if (is_setter) {
    // For a method, parameter 0 is the receiver and parameter 1 the value to
    // store; for a static setter the value is parameter 0.
    auto const setter_value =
        parsed_function_->ParameterVariable(i: is_method ? 1 : 0);
    if (is_method) {
      body += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));
    }
    body += LoadLocal(variable: setter_value);

    // The dyn:* forwarder has to check the parameters that the
    // actual target will not check.
    // Though here we manually inline the target, so the dyn:* forwarder has to
    // check all parameters.
    const bool needs_type_check = function.IsDynamicInvocationForwarder() ||
                                  setter_value->needs_type_check();
    if (needs_type_check) {
      body += CheckAssignable(dst_type: setter_value->type(), dst_name: setter_value->name(),
                              kind: AssertAssignableInstr::kParameterCheck,
                              token_pos: field.token_pos());
    }
    body += BuildNullAssertions();
    if (field.is_late()) {
      // StoreLateField takes the variables directly, so drop the operands
      // that were already pushed.
      if (is_method) {
        body += Drop();
      }
      body += Drop();
      body += StoreLateField(
          field, instance: is_method ? parsed_function_->ParameterVariable(i: 0) : nullptr,
          setter_value);
    } else {
      if (is_method) {
        body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
      } else {
        body += StoreStaticField(TokenPosition::kNoSource, field);
      }
    }
    // Setters return null.
    body += NullConstant();
  } else {
    ASSERT(is_getter);
    if (is_method) {
      // Instance getter: load the field off the receiver.
      body += LoadLocal(variable: parsed_function_->ParameterVariable(i: 0));
      body += LoadField(
          field, /*calls_initializer=*/field.NeedsInitializationCheckOnLoad());
    } else if (field.is_const()) {
      // Constant static field: embed the evaluated constant value directly.
      const auto& value = Object::Handle(Z, ptr: field.StaticConstFieldValue());
      if (value.IsError()) {
        Report::LongJump(error: Error::Cast(obj: value));
      }
      body += Constant(Instance::ZoneHandle(Z, ptr: Instance::RawCast(raw: value.ptr())));
    } else {
      // Static fields
      //  - with trivial initializer
      //  - without initializer if they are not late
      // are initialized eagerly and do not have implicit getters.
      // Static fields with non-trivial initializer need getter to perform
      // lazy initialization. Late fields without initializer need getter
      // to make sure they are already initialized.
      ASSERT(field.has_nontrivial_initializer() ||
             (field.is_late() && !field.has_initializer()));
      body += LoadStaticField(field, /*calls_initializer=*/true);
    }

    if (is_method || !field.is_const()) {
#if defined(PRODUCT)
      RELEASE_ASSERT(!field.needs_load_guard());
#else
      // Always build fragment for load guard to maintain stable deopt_id
      // numbering, but link it into the graph only if field actually
      // needs load guard.
      Fragment load_guard = CheckAssignable(
          dst_type: AbstractType::Handle(Z, ptr: field.type()), dst_name: Symbols::FunctionResult());
      if (field.needs_load_guard()) {
        ASSERT(IG->HasAttemptedReload());
        body += load_guard;
      }
#endif
    }
  }
  body += Return(position: TokenPosition::kNoSource);

  PrologueInfo prologue_info(-1, -1);
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
3924
// Builds the flow graph for a dynamic invocation forwarder (dyn:*): performs
// the argument checks the target itself will not perform, then forwards the
// call to the target. Field accessors and method extractors are delegated to
// specialized graph builders.
FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
    const Function& function) {
  auto& name = String::Handle(Z, ptr: function.name());
  name = Function::DemangleDynamicInvocationForwarderName(name);
  const auto& target = Function::ZoneHandle(Z, ptr: function.ForwardingTarget());
  ASSERT(!target.IsNull());

  if (target.IsImplicitSetterFunction() || target.IsImplicitGetterFunction()) {
    return BuildGraphOfFieldAccessor(function);
  }
  if (target.IsMethodExtractor()) {
    return BuildGraphOfMethodExtractor(method: target);
  }

  graph_entry_ = new (Z) GraphEntryInstr(*parsed_function_, osr_id_);

  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);

  PrologueInfo prologue_info(-1, -1);
  auto instruction_cursor = BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);

  Fragment body;
  if (!function.is_native()) {
    body += CheckStackOverflowInPrologue(position: function.token_pos());
  }

  ASSERT(parsed_function_->scope()->num_context_variables() == 0);

  // Should never build a dynamic invocation forwarder for equality
  // operator.
  ASSERT(function.name() != Symbols::EqualOperator().ptr());

  // Even if the caller did not pass argument vector we would still
  // call the target with instantiate-to-bounds type arguments.
  body += BuildDefaultTypeHandling(function);

  // Build argument type checks that complement those that are emitted in the
  // target.
  BuildTypeArgumentTypeChecks(
      mode: TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds, implicit_checks: &body);
  BuildArgumentTypeChecks(explicit_checks: &body, implicit_checks: &body, implicit_redefinitions: nullptr);

  // Push all arguments and invoke the original method.

  intptr_t type_args_len = 0;
  if (function.IsGeneric()) {
    type_args_len = function.NumTypeParameters();
    ASSERT(parsed_function_->function_type_arguments() != nullptr);
    body += LoadLocal(variable: parsed_function_->function_type_arguments());
  }

  // Push receiver.
  ASSERT(function.NumImplicitParameters() == 1);
  body += LoadLocal(variable: parsed_function_->receiver_var());
  body += PushExplicitParameters(function, target);

  const intptr_t argument_count = function.NumParameters();
  const auto& argument_names =
      Array::ZoneHandle(Z, ptr: GetOptionalParameterNames(function));

  body += StaticCall(position: TokenPosition::kNoSource, target, argument_count,
                     argument_names, rebind_rule: ICData::kNoRebind, result_type: nullptr, type_args_count: type_args_len);

  // Box unboxed return values before handing them to the dynamic caller.
  if (target.has_unboxed_integer_return()) {
    body += Box(kUnboxedInt64);
  } else if (target.has_unboxed_double_return()) {
    body += Box(kUnboxedDouble);
  } else if (target.has_unboxed_record_return()) {
    // Handled in SelectRepresentations pass in optimized mode.
    ASSERT(optimizing_);
  }

  // Later optimization passes assume that result of a x.[]=(...) call is not
  // used. We must guarantee this invariant because violation will lead to an
  // illegal IL once we replace x.[]=(...) with a sequence that does not
  // actually produce any value. See http://dartbug.com/29135 for more details.
  if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
    body += Drop();
    body += NullConstant();
  }

  body += Return(position: TokenPosition::kNoSource);

  instruction_cursor->LinkTo(body.entry);

  // When compiling for OSR, use a depth first search to find the OSR
  // entry and make graph entry jump to it instead of normal entry.
  // Catch entries are always considered reachable, even if they
  // become unreachable after OSR.
  if (IsCompiledForOsr()) {
    graph_entry_->RelinkToOsrEntry(Z, max_block_id: last_used_block_id_ + 1);
  }
  return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
                           prologue_info);
}
4021
4022void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
4023 const Fragment& fragment,
4024 int64_t min,
4025 int64_t max) {
4026 ASSERT(fragment.current->IsDefinition());
4027 Range range(RangeBoundary::FromConstant(val: min),
4028 RangeBoundary::FromConstant(val: max));
4029 fragment.current->AsDefinition()->set_range(range);
4030}
4031
4032static classid_t TypedDataCidUnboxed(Representation unboxed_representation) {
4033 switch (unboxed_representation) {
4034 case kUnboxedFloat:
4035 // Note kTypedDataFloat32ArrayCid loads kUnboxedDouble.
4036 UNREACHABLE();
4037 return kTypedDataFloat32ArrayCid;
4038 case kUnboxedInt32:
4039 return kTypedDataInt32ArrayCid;
4040 case kUnboxedUint32:
4041 return kTypedDataUint32ArrayCid;
4042 case kUnboxedInt64:
4043 return kTypedDataInt64ArrayCid;
4044 case kUnboxedDouble:
4045 return kTypedDataFloat64ArrayCid;
4046 default:
4047 UNREACHABLE();
4048 }
4049 UNREACHABLE();
4050}
4051
4052Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
4053 Representation unboxed_representation,
4054 intptr_t index_scale,
4055 bool index_unboxed) {
4056 ASSERT(unboxed_representation == kUnboxedInt32 ||
4057 unboxed_representation == kUnboxedUint32 ||
4058 unboxed_representation == kUnboxedInt64 ||
4059 unboxed_representation == kUnboxedFloat ||
4060 unboxed_representation == kUnboxedDouble);
4061 Fragment fragment;
4062 if (unboxed_representation == kUnboxedFloat) {
4063 fragment += BitCast(from: kUnboxedFloat, to: kUnboxedInt32);
4064 unboxed_representation = kUnboxedInt32;
4065 }
4066 fragment += StoreIndexedTypedData(TypedDataCidUnboxed(unboxed_representation),
4067 index_scale, index_unboxed);
4068 return fragment;
4069}
4070
4071Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
4072 Representation unboxed_representation,
4073 intptr_t index_scale,
4074 bool index_unboxed) {
4075 ASSERT(unboxed_representation == kUnboxedInt32 ||
4076 unboxed_representation == kUnboxedUint32 ||
4077 unboxed_representation == kUnboxedInt64 ||
4078 unboxed_representation == kUnboxedFloat ||
4079 unboxed_representation == kUnboxedDouble);
4080 Representation representation_for_load = unboxed_representation;
4081 if (unboxed_representation == kUnboxedFloat) {
4082 representation_for_load = kUnboxedInt32;
4083 }
4084 Fragment fragment;
4085 fragment += LoadIndexed(TypedDataCidUnboxed(unboxed_representation: representation_for_load),
4086 index_scale, index_unboxed);
4087 if (unboxed_representation == kUnboxedFloat) {
4088 fragment += BitCast(from: kUnboxedInt32, to: kUnboxedFloat);
4089 }
4090 return fragment;
4091}
4092
4093Fragment FlowGraphBuilder::EnterHandleScope() {
4094 Fragment body;
4095 body += LoadThread();
4096 body += ConvertUntaggedToUnboxed(kUnboxedIntPtr); // argument.
4097
4098 // LoadThread again, we can't store it in a temp because it will end up
4099 // in the environment of the FfiCall as untagged then.
4100 body += LoadThread();
4101 body += LoadUntagged(compiler::target::Thread::OffsetFromThread(
4102 runtime_entry: &kEnterHandleScopeRuntimeEntry));
4103 body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
4104
4105 body += CCall(/*num_arguments=*/1);
4106
4107 return body;
4108}
4109
4110Fragment FlowGraphBuilder::GetTopHandleScope() {
4111 Fragment body;
4112 body += LoadThread();
4113 body += LoadUntagged(compiler::target::Thread::api_top_scope_offset());
4114 body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
4115 return body;
4116}
4117
4118Fragment FlowGraphBuilder::ExitHandleScope() {
4119 Fragment code;
4120 code += LoadThread();
4121 code += ConvertUntaggedToUnboxed(kUnboxedIntPtr); // argument.
4122
4123 code += LoadThread();
4124 code += LoadUntagged(compiler::target::Thread::OffsetFromThread(
4125 runtime_entry: &kExitHandleScopeRuntimeEntry));
4126 code += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
4127
4128 code += CCall(/*num_arguments=*/1);
4129
4130 code += Drop();
4131 return code;
4132}
4133
4134Fragment FlowGraphBuilder::AllocateHandle() {
4135 Fragment code;
4136 // Get a reference to the top handle scope.
4137 code += GetTopHandleScope();
4138
4139 code += LoadThread();
4140 code += LoadUntagged(
4141 compiler::target::Thread::OffsetFromThread(runtime_entry: &kAllocateHandleRuntimeEntry));
4142 code += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
4143
4144 code += CCall(/*num_arguments=*/1);
4145
4146 return code;
4147}
4148
4149Fragment FlowGraphBuilder::RawLoadField(int32_t offset) {
4150 Fragment code;
4151 code += UnboxedIntConstant(offset, kUnboxedIntPtr);
4152 code += LoadIndexed(kArrayCid, /*index_scale=*/1, /*index_unboxed=*/true);
4153 return code;
4154}
4155
4156Fragment FlowGraphBuilder::RawStoreField(int32_t offset) {
4157 Fragment code;
4158 Value* value = Pop();
4159 Value* base = Pop();
4160 auto* instr = new (Z) RawStoreFieldInstr(base, value, offset);
4161 code <<= instr;
4162 return code;
4163}
4164
4165Fragment FlowGraphBuilder::WrapHandle() {
4166 Fragment code;
4167 LocalVariable* object = MakeTemporary();
4168 code += AllocateHandle();
4169
4170 code += LoadLocal(variable: MakeTemporary()); // Duplicate handle pointer.
4171 code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
4172 code += LoadLocal(variable: object);
4173 code += RawStoreField(offset: compiler::target::LocalHandle::ptr_offset());
4174
4175 code += DropTempsPreserveTop(1); // Drop object below handle.
4176 return code;
4177}
4178
4179Fragment FlowGraphBuilder::UnwrapHandle() {
4180 Fragment code;
4181 code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
4182 code += RawLoadField(offset: compiler::target::LocalHandle::ptr_offset());
4183 return code;
4184}
4185
4186Fragment FlowGraphBuilder::UnhandledException() {
4187 const auto class_table = thread_->isolate_group()->class_table();
4188 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
4189 const auto& klass =
4190 Class::ZoneHandle(H.zone(), ptr: class_table->At(cid: kUnhandledExceptionCid));
4191 ASSERT(!klass.IsNull());
4192 Fragment body;
4193 body += AllocateObject(TokenPosition::kNoSource, klass, 0);
4194 LocalVariable* error_instance = MakeTemporary();
4195
4196 body += LoadLocal(variable: error_instance);
4197 body += LoadLocal(variable: CurrentException());
4198 body +=
4199 StoreNativeField(Slot::UnhandledException_exception(),
4200 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
4201
4202 body += LoadLocal(variable: error_instance);
4203 body += LoadLocal(variable: CurrentStackTrace());
4204 body +=
4205 StoreNativeField(Slot::UnhandledException_stacktrace(),
4206 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
4207
4208 return body;
4209}
4210
4211Fragment FlowGraphBuilder::UnboxTruncate(Representation to) {
4212 auto* unbox = UnboxInstr::Create(to, value: Pop(), deopt_id: DeoptId::kNone,
4213 speculative_mode: Instruction::kNotSpeculative);
4214 Push(definition: unbox);
4215 return Fragment(unbox);
4216}
4217
4218Fragment FlowGraphBuilder::LoadThread() {
4219 LoadThreadInstr* instr = new (Z) LoadThreadInstr();
4220 Push(instr);
4221 return Fragment(instr);
4222}
4223
4224Fragment FlowGraphBuilder::LoadIsolate() {
4225 Fragment body;
4226 body += LoadThread();
4227 body += LoadUntagged(compiler::target::Thread::isolate_offset());
4228 return body;
4229}
4230
4231Fragment FlowGraphBuilder::LoadIsolateGroup() {
4232 Fragment body;
4233 body += LoadThread();
4234 body += LoadUntagged(compiler::target::Thread::isolate_group_offset());
4235 return body;
4236}
4237
4238Fragment FlowGraphBuilder::LoadObjectStore() {
4239 Fragment body;
4240 body += LoadIsolateGroup();
4241 body += LoadUntagged(compiler::target::IsolateGroup::object_store_offset());
4242 return body;
4243}
4244
4245Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
4246 Fragment body;
4247 body += LoadThread();
4248 body +=
4249 LoadUntagged(compiler::target::Thread::service_extension_stream_offset());
4250 return body;
4251}
4252
// TODO(http://dartbug.com/47487): Support unboxed output value.
// Converts the boolean on top of the stack into the integer 1 (true) or
// 0 (false), using a branch/join and the expression temp variable.
Fragment FlowGraphBuilder::BoolToInt() {
  // TODO(http://dartbug.com/36855) Build IfThenElseInstr, instead of letting
  // the optimizer turn this into that.

  LocalVariable* expression_temp = parsed_function_->expression_temp_var();

  Fragment instructions;
  TargetEntryInstr* is_true;
  TargetEntryInstr* is_false;

  instructions += BranchIfTrue(&is_true, &is_false);
  JoinEntryInstr* join = BuildJoinEntry();

  {
    // True branch: store 1 into the temp, then jump to the join.
    Fragment store_1(is_true);
    store_1 += IntConstant(1);
    store_1 += StoreLocal(TokenPosition::kNoSource, expression_temp);
    store_1 += Drop();
    store_1 += Goto(join);
  }

  {
    // False branch: store 0 into the temp, then jump to the join.
    Fragment store_0(is_false);
    store_0 += IntConstant(0);
    store_0 += StoreLocal(TokenPosition::kNoSource, expression_temp);
    store_0 += Drop();
    store_0 += Goto(join);
  }

  // Continue at the join and load the stored value as the result.
  instructions = Fragment(instructions.entry, join);
  instructions += LoadLocal(variable: expression_temp);
  return instructions;
}
4287
4288Fragment FlowGraphBuilder::IntToBool() {
4289 Fragment body;
4290 body += IntConstant(0);
4291 body += StrictCompare(Token::kNE_STRICT);
4292 return body;
4293}
4294
// Emits an integer relational comparison (<, >, <=, >=) of the two values
// on top of the stack. In AOT mode a RelationalOpInstr on Mint operands is
// emitted directly; in JIT mode an instance call to the corresponding
// user-visible operator is emitted instead.
Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
                                           Token::Kind kind) {
  if (CompilerState::Current().is_aot()) {
    Value* right = Pop();
    Value* left = Pop();
    RelationalOpInstr* instr = new (Z) RelationalOpInstr(
        InstructionSource(position), kind, left, right, kMintCid,
        GetNextDeoptId(), Instruction::SpeculativeMode::kNotSpeculative);
    Push(instr);
    return Fragment(instr);
  }
  // JIT: pick the operator symbol matching the token kind.
  const String* name = nullptr;
  switch (kind) {
    case Token::kLT:
      name = &Symbols::LAngleBracket();
      break;
    case Token::kGT:
      name = &Symbols::RAngleBracket();
      break;
    case Token::kLTE:
      name = &Symbols::LessEqualOperator();
      break;
    case Token::kGTE:
      name = &Symbols::GreaterEqualOperator();
      break;
    default:
      UNREACHABLE();
  }
  return InstanceCall(
      position, name: *name, kind, /*type_args_len=*/0, /*argument_count=*/2,
      /*argument_names=*/Array::null_array(), /*checked_argument_count=*/2);
}
4327
4328Fragment FlowGraphBuilder::NativeReturn(
4329 const compiler::ffi::CallbackMarshaller& marshaller) {
4330 auto* instr = new (Z)
4331 NativeReturnInstr(InstructionSource(), Pop(), marshaller, DeoptId::kNone);
4332 return Fragment(instr).closed();
4333}
4334
// Wraps the address on top of the stack in a newly allocated Pointer
// object (type argument Never). Uses the expression temp variable to hold
// the result while the address temporary is dropped.
Fragment FlowGraphBuilder::FfiPointerFromAddress() {
  LocalVariable* address = MakeTemporary();
  LocalVariable* result = parsed_function_->expression_temp_var();

  Class& result_class =
      Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
  // This class might only be instantiated as a return type of ffi calls.
  result_class.EnsureIsFinalized(thread: thread_);

  TypeArguments& args =
      TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());

  // A kernel transform for FFI in the front-end ensures that type parameters
  // do not appear in the type arguments to a any Pointer classes in an FFI
  // signature.
  ASSERT(args.IsNull() || args.IsInstantiated());
  args = args.Canonicalize(thread: thread_);

  Fragment code;
  code += Constant(args);
  code += AllocateObject(TokenPosition::kNoSource, result_class, 1);
  LocalVariable* pointer = MakeTemporary();
  // pointer.data = <unboxed address>
  code += LoadLocal(variable: pointer);
  code += LoadLocal(variable: address);
  code += UnboxTruncate(to: kUnboxedIntPtr);
  code += StoreNativeField(Slot::PointerBase_data());
  // Stash the pointer in the expression temp, drop the temporaries, and
  // reload it as the result.
  code += StoreLocal(TokenPosition::kNoSource, result);
  code += Drop();  // StoreLocal^
  code += Drop();  // address
  code += LoadLocal(variable: result);

  return code;
}
4368
4369Fragment FlowGraphBuilder::BitCast(Representation from, Representation to) {
4370 BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
4371 Push(instr);
4372 return Fragment(instr);
4373}
4374
4375Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
4376 Call1ArgStubInstr::StubId stub_id) {
4377 Call1ArgStubInstr* instr = new (Z) Call1ArgStubInstr(
4378 InstructionSource(position), stub_id, Pop(), GetNextDeoptId());
4379 Push(instr);
4380 return Fragment(instr);
4381}
4382
// Emits a SuspendInstr for the given stub. Pops the operand and, for
// kAwaitWithTypeCheck only, the type arguments pushed above it. Allocates
// two deopt ids for the instruction.
Fragment FlowGraphBuilder::Suspend(TokenPosition position,
                                   SuspendInstr::StubId stub_id) {
  Value* type_args =
      (stub_id == SuspendInstr::StubId::kAwaitWithTypeCheck) ? Pop() : nullptr;
  Value* operand = Pop();
  SuspendInstr* instr =
      new (Z) SuspendInstr(InstructionSource(position), stub_id, operand,
                           type_args, GetNextDeoptId(), GetNextDeoptId());
  Push(instr);
  return Fragment(instr);
}
4394
// Wraps the TypedData base on top of the stack in a new instance of the
// given compound (struct/union) subclass by storing it into the private
// _typedDataBase field. Leaves the compound instance on the stack.
Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
    const AbstractType& compound_type) {
  const auto& compound_sub_class =
      Class::ZoneHandle(Z, ptr: compound_type.type_class());
  compound_sub_class.EnsureIsFinalized(thread: thread_);
  // Look up ffi's private Compound._typedDataBase field.
  const auto& lib_ffi = Library::Handle(Z, ptr: Library::FfiLibrary());
  const auto& compound_class =
      Class::Handle(Z, ptr: lib_ffi.LookupClassAllowPrivate(name: Symbols::Compound()));
  const auto& compound_typed_data_base =
      Field::ZoneHandle(Z, ptr: compound_class.LookupInstanceFieldAllowPrivate(
                               name: Symbols::_typedDataBase()));
  ASSERT(!compound_typed_data_base.IsNull());

  Fragment body;
  LocalVariable* typed_data = MakeTemporary("typed_data_base");
  body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
  body += LoadLocal(variable: MakeTemporary("compound"));  // Duplicate Struct or Union.
  body += LoadLocal(variable: typed_data);
  body += StoreField(compound_typed_data_base,
                     StoreFieldInstr::Kind::kInitializing);
  body += DropTempsPreserveTop(1);  // Drop TypedData.
  return body;
}
4418
4419Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {
4420 const auto& lib_ffi = Library::Handle(Z, ptr: Library::FfiLibrary());
4421 const auto& compound_class =
4422 Class::Handle(Z, ptr: lib_ffi.LookupClassAllowPrivate(name: Symbols::Compound()));
4423 const auto& compound_typed_data_base =
4424 Field::ZoneHandle(Z, ptr: compound_class.LookupInstanceFieldAllowPrivate(
4425 name: Symbols::_typedDataBase()));
4426 ASSERT(!compound_typed_data_base.IsNull());
4427
4428 Fragment body;
4429 body += LoadField(compound_typed_data_base, /*calls_initializer=*/false);
4430 return body;
4431}
4432
// Pushes the contents of the compound stored in |variable| onto the stack
// as a sequence of unboxed values, one per entry in |representations|,
// reading consecutive chunks from the compound's backing typed data.
Fragment FlowGraphBuilder::CopyFromCompoundToStack(
    LocalVariable* variable,
    const GrowableArray<Representation>& representations) {
  Fragment body;
  const intptr_t num_defs = representations.length();
  int offset_in_bytes = 0;
  for (intptr_t i = 0; i < num_defs; i++) {
    body += LoadLocal(variable);
    body += LoadTypedDataBaseFromCompound();
    body += LoadUntagged(compiler::target::PointerBase::data_offset());
    body += IntConstant(offset_in_bytes);
    const Representation representation = representations[i];
    // Advance by the byte size of this representation.
    offset_in_bytes += RepresentationUtils::ValueSize(rep: representation);
    body += LoadIndexedTypedDataUnboxed(unboxed_representation: representation, /*index_scale=*/1,
                                        /*index_unboxed=*/false);
  }
  return body;
}
4451
// Stores the |num_defs| unboxed values sitting below the TypedData on top
// of the stack into that TypedData at consecutive offsets, then drops the
// values while keeping the TypedData on top.
Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
    ZoneGrowableArray<LocalVariable*>* definitions,
    const GrowableArray<Representation>& representations) {
  Fragment body;
  const intptr_t num_defs = representations.length();
  ASSERT(definitions->length() == num_defs);

  LocalVariable* uint8_list = MakeTemporary("uint8_list");
  int offset_in_bytes = 0;
  for (intptr_t i = 0; i < num_defs; i++) {
    const Representation representation = representations[i];
    body += LoadLocal(variable: uint8_list);
    body += LoadUntagged(compiler::target::PointerBase::data_offset());
    body += IntConstant(offset_in_bytes);
    body += LoadLocal(variable: definitions->At(index: i));
    body += StoreIndexedTypedDataUnboxed(unboxed_representation: representation, /*index_scale=*/1,
                                         /*index_unboxed=*/false);
    // Advance by the byte size of this representation.
    offset_in_bytes += RepresentationUtils::ValueSize(rep: representation);
  }
  body += DropTempsPreserveTop(num_defs);  // Drop chunk defs keep TypedData.
  return body;
}
4474
4475static intptr_t chunk_size(intptr_t bytes_left) {
4476 ASSERT(bytes_left >= 1);
4477 if (bytes_left >= 8 && compiler::target::kWordSize == 8) {
4478 return 8;
4479 }
4480 if (bytes_left >= 4) {
4481 return 4;
4482 }
4483 if (bytes_left >= 2) {
4484 return 2;
4485 }
4486 return 1;
4487}
4488
4489static classid_t typed_data_cid(intptr_t chunk_size) {
4490 switch (chunk_size) {
4491 case 8:
4492 return kTypedDataInt64ArrayCid;
4493 case 4:
4494 return kTypedDataInt32ArrayCid;
4495 case 2:
4496 return kTypedDataInt16ArrayCid;
4497 case 1:
4498 return kTypedDataInt8ArrayCid;
4499 }
4500 UNREACHABLE();
4501}
4502
// Copies |length_in_bytes| from the typed data base (second on the stack)
// to the unboxed address on top of the stack, in the largest chunks that
// fit. Drops both inputs.
Fragment FlowGraphBuilder::CopyFromTypedDataBaseToUnboxedAddress(
    intptr_t length_in_bytes) {
  Fragment body;
  // Name both inputs as temporaries (address is on top, typed data below).
  Value* unboxed_address_value = Pop();
  LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
  Push(unboxed_address_value->definition());
  LocalVariable* unboxed_address = MakeTemporary("unboxed_address");

  intptr_t offset_in_bytes = 0;
  while (offset_in_bytes < length_in_bytes) {
    const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
    const intptr_t chunk_sizee = chunk_size(bytes_left);
    const classid_t typed_data_cidd = typed_data_cid(chunk_size: chunk_sizee);

    // Load a chunk from the typed data.
    body += LoadLocal(variable: typed_data_base);
    body += LoadUntagged(compiler::target::PointerBase::data_offset());
    body += IntConstant(offset_in_bytes);
    body += LoadIndexed(typed_data_cidd, /*index_scale=*/1,
                        /*index_unboxed=*/false);
    LocalVariable* chunk_value = MakeTemporary("chunk_value");

    // Store the chunk at the same offset from the target address.
    body += LoadLocal(variable: unboxed_address);
    body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
    body += IntConstant(offset_in_bytes);
    body += LoadLocal(variable: chunk_value);
    body += StoreIndexedTypedData(typed_data_cidd, /*index_scale=*/1,
                                  /*index_unboxed=*/false);
    body += DropTemporary(&chunk_value);

    offset_in_bytes += chunk_sizee;
  }
  ASSERT(offset_in_bytes == length_in_bytes);

  body += DropTemporary(&unboxed_address);
  body += DropTemporary(&typed_data_base);
  return body;
}
4540
// Copies |length_in_bytes| from the unboxed address (second on the stack)
// into the typed data base on top of the stack, in the largest chunks that
// fit. Drops both inputs. Mirror of CopyFromTypedDataBaseToUnboxedAddress.
Fragment FlowGraphBuilder::CopyFromUnboxedAddressToTypedDataBase(
    intptr_t length_in_bytes) {
  Fragment body;
  // Name both inputs as temporaries (typed data is on top, address below).
  Value* typed_data_base_value = Pop();
  LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
  Push(typed_data_base_value->definition());
  LocalVariable* typed_data_base = MakeTemporary("typed_data_base");

  intptr_t offset_in_bytes = 0;
  while (offset_in_bytes < length_in_bytes) {
    const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
    const intptr_t chunk_sizee = chunk_size(bytes_left);
    const classid_t typed_data_cidd = typed_data_cid(chunk_size: chunk_sizee);

    // Load a chunk from the source address.
    body += LoadLocal(variable: unboxed_address);
    body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
    body += IntConstant(offset_in_bytes);
    body += LoadIndexed(typed_data_cidd, /*index_scale=*/1,
                        /*index_unboxed=*/false);
    LocalVariable* chunk_value = MakeTemporary("chunk_value");

    // Store the chunk at the same offset in the typed data.
    body += LoadLocal(variable: typed_data_base);
    body += LoadUntagged(compiler::target::PointerBase::data_offset());
    body += IntConstant(offset_in_bytes);
    body += LoadLocal(variable: chunk_value);
    body += StoreIndexedTypedData(typed_data_cidd, /*index_scale=*/1,
                                  /*index_unboxed=*/false);
    body += DropTemporary(&chunk_value);

    offset_in_bytes += chunk_sizee;
  }
  ASSERT(offset_in_bytes == length_in_bytes);

  body += DropTemporary(&typed_data_base);
  body += DropTemporary(&unboxed_address);
  return body;
}
4578
// Converts a compound (struct/union) FFI-call argument held in |variable|
// into what its native location expects: separate unboxed definitions for
// stack/multiple locations, or just the backing typed data for
// pointer-to-memory locations (the copy happens in the FFI call's machine
// code).
Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
    LocalVariable* variable,
    const compiler::ffi::BaseMarshaller& marshaller,
    intptr_t arg_index) {
  Fragment body;
  const auto& native_loc = marshaller.Location(arg_index);
  if (native_loc.IsStack() || native_loc.IsMultiple()) {
    // Break struct in pieces to separate IL definitions to pass those
    // separate definitions into the FFI call.
    GrowableArray<Representation> representations;
    marshaller.RepsInFfiCall(arg_index, out: &representations);
    body += CopyFromCompoundToStack(variable, representations);
  } else {
    ASSERT(native_loc.IsPointerToMemory());
    // Only load the typed data, do copying in the FFI call machine code.
    body += LoadLocal(variable);  // User-defined struct.
    body += LoadTypedDataBaseFromCompound();
  }
  return body;
}
4599
4600Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
4601 const compiler::ffi::BaseMarshaller& marshaller,
4602 intptr_t arg_index) {
4603 Fragment body;
4604 // The typed data is allocated before the FFI call, and is populated in
4605 // machine code. So, here, it only has to be wrapped in the struct class.
4606 const auto& compound_type =
4607 AbstractType::Handle(Z, ptr: marshaller.CType(arg_index));
4608 body += WrapTypedDataBaseInCompound(compound_type);
4609 return body;
4610}
4611
4612Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
4613 const compiler::ffi::BaseMarshaller& marshaller,
4614 intptr_t arg_index,
4615 ZoneGrowableArray<LocalVariable*>* definitions) {
4616 const intptr_t length_in_bytes =
4617 marshaller.Location(arg_index).payload_type().SizeInBytes();
4618
4619 Fragment body;
4620 if ((marshaller.Location(arg_index).IsMultiple() ||
4621 marshaller.Location(arg_index).IsStack())) {
4622 // Allocate and populate a TypedData from the individual NativeParameters.
4623 body += IntConstant(length_in_bytes);
4624 body +=
4625 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
4626 GrowableArray<Representation> representations;
4627 marshaller.RepsInFfiCall(arg_index, out: &representations);
4628 body += PopFromStackToTypedDataBase(definitions, representations);
4629 } else {
4630 ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
4631 // Allocate a TypedData and copy contents pointed to by an address into it.
4632 LocalVariable* address_of_compound = MakeTemporary("address_of_compound");
4633 body += IntConstant(length_in_bytes);
4634 body +=
4635 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
4636 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
4637 body += LoadLocal(variable: address_of_compound);
4638 body += LoadLocal(variable: typed_data_base);
4639 body += CopyFromUnboxedAddressToTypedDataBase(length_in_bytes);
4640 body += DropTempsPreserveTop(1); // address_of_compound.
4641 }
4642 // Wrap typed data in compound class.
4643 const auto& compound_type =
4644 AbstractType::Handle(Z, ptr: marshaller.CType(arg_index));
4645 body += WrapTypedDataBaseInCompound(compound_type);
4646 return body;
4647}
4648
4649Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
4650 const compiler::ffi::CallbackMarshaller& marshaller,
4651 intptr_t arg_index) {
4652 Fragment body;
4653 const auto& native_loc = marshaller.Location(arg_index);
4654 if (native_loc.IsMultiple()) {
4655 // We pass in typed data to native return instruction, and do the copying
4656 // in machine code.
4657 body += LoadTypedDataBaseFromCompound();
4658 } else {
4659 ASSERT(native_loc.IsPointerToMemory());
4660 // We copy the data into the right location in IL.
4661 const intptr_t length_in_bytes =
4662 marshaller.Location(arg_index).payload_type().SizeInBytes();
4663
4664 body += LoadTypedDataBaseFromCompound();
4665 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
4666
4667 auto* pointer_to_return =
4668 new (Z) NativeParameterInstr(marshaller, compiler::ffi::kResultIndex);
4669 Push(pointer_to_return); // Address where return value should be stored.
4670 body <<= pointer_to_return;
4671 body += UnboxTruncate(to: kUnboxedFfiIntPtr);
4672 LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
4673
4674 body += LoadLocal(variable: typed_data_base);
4675 body += LoadLocal(variable: unboxed_address);
4676 body += CopyFromTypedDataBaseToUnboxedAddress(length_in_bytes);
4677 body += DropTempsPreserveTop(1); // Keep address, drop typed_data_base.
4678 }
4679 return body;
4680}
4681
4682Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
4683 const compiler::ffi::BaseMarshaller& marshaller,
4684 intptr_t arg_index) {
4685 ASSERT(!marshaller.IsCompound(arg_index));
4686
4687 Fragment body;
4688 if (marshaller.IsPointer(arg_index)) {
4689 body += Box(kUnboxedFfiIntPtr);
4690 body += FfiPointerFromAddress();
4691 } else if (marshaller.IsHandle(arg_index)) {
4692 body += UnwrapHandle();
4693 } else if (marshaller.IsVoid(arg_index)) {
4694 body += Drop();
4695 body += NullConstant();
4696 } else {
4697 if (marshaller.RequiresBitCast(index: arg_index)) {
4698 body += BitCast(
4699 from: marshaller.RepInFfiCall(def_index_global: marshaller.FirstDefinitionIndex(arg_index)),
4700 to: marshaller.RepInDart(arg_index));
4701 }
4702
4703 body += Box(marshaller.RepInDart(arg_index));
4704
4705 if (marshaller.IsBool(arg_index)) {
4706 body += IntToBool();
4707 }
4708 }
4709 return body;
4710}
4711
4712Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
4713 const compiler::ffi::BaseMarshaller& marshaller,
4714 intptr_t arg_index) {
4715 ASSERT(!marshaller.IsCompound(arg_index));
4716
4717 Fragment body;
4718 if (marshaller.IsPointer(arg_index)) {
4719 // This can only be Pointer, so it is always safe to LoadUntagged.
4720 body += LoadUntagged(compiler::target::PointerBase::data_offset());
4721 body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
4722 } else if (marshaller.IsHandle(arg_index)) {
4723 body += WrapHandle();
4724 } else {
4725 if (marshaller.IsBool(arg_index)) {
4726 body += BoolToInt();
4727 }
4728
4729 body += UnboxTruncate(to: marshaller.RepInDart(arg_index));
4730 }
4731
4732 if (marshaller.RequiresBitCast(index: arg_index)) {
4733 body += BitCast(
4734 from: marshaller.RepInDart(arg_index),
4735 to: marshaller.RepInFfiCall(def_index_global: marshaller.FirstDefinitionIndex(arg_index)));
4736 }
4737
4738 return body;
4739}
4740
4741FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
4742 const Function& function) {
4743 switch (function.GetFfiTrampolineKind()) {
4744 case FfiTrampolineKind::kSyncCallback:
4745 return BuildGraphOfSyncFfiCallback(function);
4746 case FfiTrampolineKind::kAsyncCallback:
4747 return BuildGraphOfAsyncFfiCallback(function);
4748 case FfiTrampolineKind::kCall:
4749 return BuildGraphOfFfiNative(function);
4750 }
4751 UNREACHABLE();
4752 return nullptr;
4753}
4754
4755FlowGraph* FlowGraphBuilder::BuildGraphOfFfiNative(const Function& function) {
4756 const intptr_t kClosureParameterOffset = 0;
4757 const intptr_t kFirstArgumentParameterOffset = kClosureParameterOffset + 1;
4758
4759 graph_entry_ =
4760 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4761
4762 auto normal_entry = BuildFunctionEntry(graph_entry_);
4763 graph_entry_->set_normal_entry(normal_entry);
4764
4765 PrologueInfo prologue_info(-1, -1);
4766
4767 BlockEntryInstr* instruction_cursor =
4768 BuildPrologue(normal_entry: normal_entry, prologue_info: &prologue_info);
4769
4770 Fragment function_body(instruction_cursor);
4771 function_body += CheckStackOverflowInPrologue(position: function.token_pos());
4772
4773 const char* error = nullptr;
4774 const auto marshaller_ptr =
4775 compiler::ffi::CallMarshaller::FromFunction(Z, function, error: &error);
4776 // AbiSpecific integers can be incomplete causing us to not know the calling
4777 // convention. However, this is caught in asFunction in both JIT/AOT.
4778 RELEASE_ASSERT(error == nullptr);
4779 RELEASE_ASSERT(marshaller_ptr != nullptr);
4780 const auto& marshaller = *marshaller_ptr;
4781
4782 const bool signature_contains_handles = marshaller.ContainsHandles();
4783
4784 // FFI trampolines are accessed via closures, so non-covariant argument types
4785 // and type arguments are either statically checked by the type system or
4786 // dynamically checked via dynamic closure call dispatchers.
4787
4788 // Null check arguments before we go into the try catch, so that we don't
4789 // catch our own null errors.
4790 const intptr_t num_args = marshaller.num_args();
4791 for (intptr_t i = 0; i < num_args; i++) {
4792 if (marshaller.IsHandle(arg_index: i)) {
4793 continue;
4794 }
4795 function_body += LoadLocal(
4796 variable: parsed_function_->ParameterVariable(i: kFirstArgumentParameterOffset + i));
4797 // TODO(http://dartbug.com/47486): Support entry without checking for null.
4798 // Check for 'null'.
4799 function_body += CheckNullOptimized(
4800 String::ZoneHandle(
4801 Z, ptr: function.ParameterNameAt(index: kFirstArgumentParameterOffset + i)),
4802 CheckNullInstr::kArgumentError);
4803 function_body += StoreLocal(
4804 TokenPosition::kNoSource,
4805 parsed_function_->ParameterVariable(i: kFirstArgumentParameterOffset + i));
4806 function_body += Drop();
4807 }
4808
4809 Fragment body;
4810 intptr_t try_handler_index = -1;
4811 if (signature_contains_handles) {
4812 // Wrap in Try catch to transition from Native to Generated on a throw from
4813 // the dart_api.
4814 try_handler_index = AllocateTryIndex();
4815 body += TryCatch(try_handler_index);
4816 ++try_depth_;
4817 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
4818 // need it.
4819 // We no longer need the scope for passing in Handle arguments, but the
4820 // native function might for instance be relying on this scope for Dart API.
4821 body += EnterHandleScope();
4822 }
4823
4824 // Allocate typed data before FfiCall and pass it in to ffi call if needed.
4825 LocalVariable* typed_data = nullptr;
4826 if (marshaller.PassTypedData()) {
4827 body += IntConstant(marshaller.TypedDataSizeInBytes());
4828 body +=
4829 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
4830 typed_data = MakeTemporary();
4831 }
4832
4833 // Unbox and push the arguments.
4834 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
4835 if (marshaller.IsCompound(arg_index: i)) {
4836 body += FfiCallConvertCompoundArgumentToNative(
4837 variable: parsed_function_->ParameterVariable(i: kFirstArgumentParameterOffset +
4838 i),
4839 marshaller, arg_index: i);
4840 } else {
4841 body += LoadLocal(variable: parsed_function_->ParameterVariable(
4842 i: kFirstArgumentParameterOffset + i));
4843 // FfiCallInstr specifies all handle locations as Stack, and will pass a
4844 // pointer to the stack slot as the native handle argument.
4845 // Therefore we do not need to wrap handles.
4846 if (!marshaller.IsHandle(arg_index: i)) {
4847 body += FfiConvertPrimitiveToNative(marshaller, arg_index: i);
4848 }
4849 }
4850 }
4851
4852 // Push the function pointer, which is stored (as Pointer object) in the
4853 // first slot of the context.
4854 body +=
4855 LoadLocal(variable: parsed_function_->ParameterVariable(i: kClosureParameterOffset));
4856 body += LoadNativeField(Slot::Closure_context());
4857 body += LoadNativeField(Slot::GetContextVariableSlotFor(
4858 thread_, *MakeImplicitClosureScope(
4859 Z, Class::Handle(IG->object_store()->ffi_pointer_class()))
4860 ->context_variables()[0]));
4861
4862 // This can only be Pointer, so it is always safe to LoadUntagged.
4863 body += LoadUntagged(compiler::target::PointerBase::data_offset());
4864 body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
4865
4866 if (marshaller.PassTypedData()) {
4867 body += LoadLocal(variable: typed_data);
4868 }
4869
4870 body += FfiCall(marshaller);
4871
4872 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
4873 if (marshaller.IsPointer(arg_index: i)) {
4874 body += LoadLocal(variable: parsed_function_->ParameterVariable(
4875 i: kFirstArgumentParameterOffset + i));
4876 body += ReachabilityFence();
4877 }
4878 }
4879
4880 const intptr_t num_defs = marshaller.NumReturnDefinitions();
4881 ASSERT(num_defs >= 1);
4882 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
4883 LocalVariable* def = MakeTemporary();
4884 defs->Add(value: def);
4885
4886 if (marshaller.PassTypedData()) {
4887 // Drop call result, typed data with contents is already on the stack.
4888 body += Drop();
4889 }
4890
4891 if (marshaller.IsCompound(arg_index: compiler::ffi::kResultIndex)) {
4892 body += FfiCallConvertCompoundReturnToDart(marshaller,
4893 arg_index: compiler::ffi::kResultIndex);
4894 } else {
4895 body += FfiConvertPrimitiveToDart(marshaller, arg_index: compiler::ffi::kResultIndex);
4896 }
4897
4898 if (signature_contains_handles) {
4899 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
4900 // need it.
4901 body += DropTempsPreserveTop(1); // Drop api_local_scope.
4902 body += ExitHandleScope();
4903 }
4904
4905 body += Return(position: TokenPosition::kNoSource);
4906
4907 if (signature_contains_handles) {
4908 --try_depth_;
4909 }
4910
4911 function_body += body;
4912
4913 if (signature_contains_handles) {
4914 ++catch_depth_;
4915 Fragment catch_body =
4916 CatchBlockEntry(handler_types: Array::empty_array(), handler_index: try_handler_index,
4917 /*needs_stacktrace=*/true, /*is_synthesized=*/true);
4918
4919 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
4920 // need it.
4921 // TODO(41984): If we want to pass in the handle scope, move it out
4922 // of the try catch.
4923 catch_body += ExitHandleScope();
4924
4925 catch_body += LoadLocal(variable: CurrentException());
4926 catch_body += LoadLocal(variable: CurrentStackTrace());
4927 catch_body += RethrowException(position: TokenPosition::kNoSource, catch_try_index: try_handler_index);
4928 --catch_depth_;
4929 }
4930
4931 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4932 prologue_info);
4933}
4934
4935Fragment FlowGraphBuilder::LoadNativeArg(
4936 const compiler::ffi::CallbackMarshaller& marshaller,
4937 intptr_t arg_index) {
4938 const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
4939 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
4940
4941 Fragment fragment;
4942 for (intptr_t j = 0; j < num_defs; j++) {
4943 const intptr_t def_index = marshaller.DefinitionIndex(def_index_in_arg: j, arg_index);
4944 auto* parameter = new (Z) NativeParameterInstr(marshaller, def_index);
4945 Push(parameter);
4946 fragment <<= parameter;
4947 LocalVariable* def = MakeTemporary();
4948 defs->Add(value: def);
4949 }
4950
4951 if (marshaller.IsCompound(arg_index)) {
4952 fragment +=
4953 FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, definitions: defs);
4954 } else {
4955 fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);
4956 }
4957 return fragment;
4958}
4959
4960FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
4961 const Function& function) {
4962 const char* error = nullptr;
4963 const auto marshaller_ptr =
4964 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, error: &error);
4965 // AbiSpecific integers can be incomplete causing us to not know the calling
4966 // convention. However, this is caught fromFunction in both JIT/AOT.
4967 RELEASE_ASSERT(error == nullptr);
4968 RELEASE_ASSERT(marshaller_ptr != nullptr);
4969 const auto& marshaller = *marshaller_ptr;
4970
4971 graph_entry_ =
4972 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4973
4974 auto* const native_entry =
4975 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
4976 CurrentTryIndex(), GetNextDeoptId());
4977
4978 graph_entry_->set_normal_entry(native_entry);
4979
4980 Fragment function_body(native_entry);
4981 function_body += CheckStackOverflowInPrologue(position: function.token_pos());
4982
4983 // Wrap the entire method in a big try/catch. This is important to ensure that
4984 // the VM does not crash if the callback throws an exception.
4985 const intptr_t try_handler_index = AllocateTryIndex();
4986 Fragment body = TryCatch(try_handler_index);
4987 ++try_depth_;
4988
4989 // Box and push the arguments.
4990 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
4991 body += LoadNativeArg(marshaller, arg_index: i);
4992 }
4993
4994 // Call the target.
4995 //
4996 // TODO(36748): Determine the hot-reload semantics of callbacks and update the
4997 // rebind-rule accordingly.
4998 body += StaticCall(position: TokenPosition::kNoSource,
4999 target: Function::ZoneHandle(Z, ptr: function.FfiCallbackTarget()),
5000 argument_count: marshaller.num_args(), argument_names: Array::empty_array(),
5001 rebind_rule: ICData::kNoRebind);
5002 if (marshaller.IsVoid(arg_index: compiler::ffi::kResultIndex)) {
5003 body += Drop();
5004 body += IntConstant(0);
5005 } else if (!marshaller.IsHandle(arg_index: compiler::ffi::kResultIndex)) {
5006 body += CheckNullOptimized(
5007 String::ZoneHandle(Z, ptr: Symbols::New(H.thread(), cstr: "return_value")),
5008 CheckNullInstr::kArgumentError);
5009 }
5010
5011 if (marshaller.IsCompound(arg_index: compiler::ffi::kResultIndex)) {
5012 body += FfiCallbackConvertCompoundReturnToNative(
5013 marshaller, arg_index: compiler::ffi::kResultIndex);
5014 } else {
5015 body +=
5016 FfiConvertPrimitiveToNative(marshaller, arg_index: compiler::ffi::kResultIndex);
5017 }
5018
5019 body += NativeReturn(marshaller);
5020
5021 --try_depth_;
5022 function_body += body;
5023
5024 ++catch_depth_;
5025 Fragment catch_body = CatchBlockEntry(handler_types: Array::empty_array(), handler_index: try_handler_index,
5026 /*needs_stacktrace=*/false,
5027 /*is_synthesized=*/true);
5028
5029 // Return the "exceptional return" value given in 'fromFunction'.
5030 //
5031 // For pointer and void return types, the exceptional return is always null --
5032 // return 0 instead.
5033 if (marshaller.IsPointer(arg_index: compiler::ffi::kResultIndex) ||
5034 marshaller.IsVoid(arg_index: compiler::ffi::kResultIndex)) {
5035 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5036 catch_body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
5037 } else if (marshaller.IsHandle(arg_index: compiler::ffi::kResultIndex)) {
5038 catch_body += UnhandledException();
5039 catch_body +=
5040 FfiConvertPrimitiveToNative(marshaller, arg_index: compiler::ffi::kResultIndex);
5041
5042 } else if (marshaller.IsCompound(arg_index: compiler::ffi::kResultIndex)) {
5043 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5044 // Manufacture empty result.
5045 const intptr_t size =
5046 Utils::RoundUp(x: marshaller.Location(arg_index: compiler::ffi::kResultIndex)
5047 .payload_type()
5048 .SizeInBytes(),
5049 alignment: compiler::target::kWordSize);
5050 catch_body += IntConstant(size);
5051 catch_body +=
5052 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5053 catch_body += WrapTypedDataBaseInCompound(
5054 compound_type: AbstractType::Handle(Z, ptr: marshaller.CType(arg_index: compiler::ffi::kResultIndex)));
5055 catch_body += FfiCallbackConvertCompoundReturnToNative(
5056 marshaller, arg_index: compiler::ffi::kResultIndex);
5057
5058 } else {
5059 catch_body += Constant(
5060 Instance::ZoneHandle(Z, ptr: function.FfiCallbackExceptionalReturn()));
5061 catch_body +=
5062 FfiConvertPrimitiveToNative(marshaller, arg_index: compiler::ffi::kResultIndex);
5063 }
5064
5065 catch_body += NativeReturn(marshaller);
5066 --catch_depth_;
5067
5068 PrologueInfo prologue_info(-1, -1);
5069 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5070 prologue_info);
5071}
5072
5073FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
5074 const Function& function) {
5075 const char* error = nullptr;
5076 const auto marshaller_ptr =
5077 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, error: &error);
5078 // AbiSpecific integers can be incomplete causing us to not know the calling
5079 // convention. However, this is caught fromFunction in both JIT/AOT.
5080 RELEASE_ASSERT(error == nullptr);
5081 RELEASE_ASSERT(marshaller_ptr != nullptr);
5082 const auto& marshaller = *marshaller_ptr;
5083
5084 // Currently all async FFI callbacks return void. This is enforced by the
5085 // frontend.
5086 ASSERT(marshaller.IsVoid(compiler::ffi::kResultIndex));
5087
5088 graph_entry_ =
5089 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5090
5091 auto* const native_entry =
5092 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5093 CurrentTryIndex(), GetNextDeoptId());
5094
5095 graph_entry_->set_normal_entry(native_entry);
5096
5097 Fragment function_body(native_entry);
5098 function_body += CheckStackOverflowInPrologue(position: function.token_pos());
5099
5100 // Wrap the entire method in a big try/catch. This is important to ensure that
5101 // the VM does not crash if the callback throws an exception.
5102 const intptr_t try_handler_index = AllocateTryIndex();
5103 Fragment body = TryCatch(try_handler_index);
5104 ++try_depth_;
5105
5106 // Box and push the arguments into an array, to be sent to the target.
5107 body += Constant(TypeArguments::ZoneHandle(Z, ptr: TypeArguments::null()));
5108 body += IntConstant(marshaller.num_args());
5109 body += CreateArray();
5110 LocalVariable* array = MakeTemporary();
5111 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5112 body += LoadLocal(variable: array);
5113 body += IntConstant(i);
5114 body += LoadNativeArg(marshaller, arg_index: i);
5115 body += StoreIndexed(kArrayCid);
5116 }
5117
5118 // Send the arg array to the target. The arg array is still on the stack.
5119 body += Call1ArgStub(position: TokenPosition::kNoSource,
5120 stub_id: Call1ArgStubInstr::StubId::kFfiAsyncCallbackSend);
5121
5122 // All async FFI callbacks return void, so just return 0.
5123 body += Drop();
5124 body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
5125 body += NativeReturn(marshaller);
5126
5127 --try_depth_;
5128 function_body += body;
5129
5130 ++catch_depth_;
5131 Fragment catch_body = CatchBlockEntry(handler_types: Array::empty_array(), handler_index: try_handler_index,
5132 /*needs_stacktrace=*/false,
5133 /*is_synthesized=*/true);
5134
5135 // This catch indicates there's been some sort of error, but async callbacks
5136 // are fire-and-forget, and we don't guarantee delivery. So just return 0.
5137 catch_body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
5138 catch_body += NativeReturn(marshaller);
5139 --catch_depth_;
5140
5141 PrologueInfo prologue_info(-1, -1);
5142 return new (Z) FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5143 prologue_info);
5144}
5145
5146void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
5147 try_catch_block_ = try_catch_block;
5148 SetCurrentTryIndex(try_catch_block == nullptr ? kInvalidTryIndex
5149 : try_catch_block->try_index());
5150}
5151
5152Fragment FlowGraphBuilder::NullAssertion(LocalVariable* variable) {
5153 Fragment code;
5154 if (!variable->type().NeedsNullAssertion()) {
5155 return code;
5156 }
5157
5158 TargetEntryInstr* then;
5159 TargetEntryInstr* otherwise;
5160
5161 code += LoadLocal(variable);
5162 code += NullConstant();
5163 code += BranchIfEqual(&then, &otherwise);
5164
5165 const Script& script =
5166 Script::Handle(Z, ptr: parsed_function_->function().script());
5167 intptr_t line = -1;
5168 intptr_t column = -1;
5169 script.GetTokenLocation(token_pos: variable->token_pos(), line: &line, column: &column);
5170
5171 // Build equivalent of `throw _AssertionError._throwNewNullAssertion(name)`
5172 // expression. We build throw (even through _throwNewNullAssertion already
5173 // throws) because call is not a valid last instruction for the block.
5174 // Blocks can only terminate with explicit control flow instructions
5175 // (Branch, Goto, Return or Throw).
5176 Fragment null_code(then);
5177 null_code += Constant(variable->name());
5178 null_code += IntConstant(line);
5179 null_code += IntConstant(column);
5180 null_code += StaticCall(position: variable->token_pos(),
5181 target: ThrowNewNullAssertionFunction(), argument_count: 3, rebind_rule: ICData::kStatic);
5182 null_code += ThrowException(TokenPosition::kNoSource);
5183 null_code += Drop();
5184
5185 return Fragment(code.entry, otherwise);
5186}
5187
5188Fragment FlowGraphBuilder::BuildNullAssertions() {
5189 Fragment code;
5190 if (IG->null_safety() || !IG->asserts() || !FLAG_null_assertions) {
5191 return code;
5192 }
5193
5194 const Function& dart_function = parsed_function_->function();
5195 for (intptr_t i = dart_function.NumImplicitParameters(),
5196 n = dart_function.NumParameters();
5197 i < n; ++i) {
5198 LocalVariable* variable = parsed_function_->ParameterVariable(i);
5199 code += NullAssertion(variable);
5200 }
5201 return code;
5202}
5203
5204const Function& FlowGraphBuilder::ThrowNewNullAssertionFunction() {
5205 if (throw_new_null_assertion_.IsNull()) {
5206 const Class& klass = Class::ZoneHandle(
5207 Z, ptr: Library::LookupCoreClass(class_name: Symbols::AssertionError()));
5208 ASSERT(!klass.IsNull());
5209 const auto& error = klass.EnsureIsFinalized(H.thread());
5210 ASSERT(error == Error::null());
5211 throw_new_null_assertion_ = klass.LookupStaticFunctionAllowPrivate(
5212 name: Symbols::ThrowNewNullAssertion());
5213 ASSERT(!throw_new_null_assertion_.IsNull());
5214 }
5215 return throw_new_null_assertion_;
5216}
5217
5218const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
5219 if (prepend_type_arguments_.IsNull()) {
5220 const auto& dart_internal = Library::Handle(Z, ptr: Library::InternalLibrary());
5221 prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
5222 name: Symbols::PrependTypeArguments());
5223 ASSERT(!prepend_type_arguments_.IsNull());
5224 }
5225 return prepend_type_arguments_;
5226}
5227
5228Fragment FlowGraphBuilder::BuildIntegerHashCode(bool smi) {
5229 Fragment body;
5230 Value* unboxed_value = Pop();
5231 HashIntegerOpInstr* hash =
5232 new HashIntegerOpInstr(unboxed_value, smi, DeoptId::kNone);
5233 Push(hash);
5234 body <<= hash;
5235 return body;
5236}
5237
5238Fragment FlowGraphBuilder::BuildDoubleHashCode() {
5239 Fragment body;
5240 Value* double_value = Pop();
5241 HashDoubleOpInstr* hash = new HashDoubleOpInstr(double_value, DeoptId::kNone);
5242 Push(hash);
5243 body <<= hash;
5244 body += Box(kUnboxedInt64);
5245 return body;
5246}
5247
5248SwitchHelper::SwitchHelper(Zone* zone,
5249 TokenPosition position,
5250 bool is_exhaustive,
5251 const AbstractType& expression_type,
5252 SwitchBlock* switch_block,
5253 intptr_t case_count)
5254 : zone_(zone),
5255 position_(position),
5256 is_exhaustive_(is_exhaustive),
5257 expression_type_(expression_type),
5258 switch_block_(switch_block),
5259 case_count_(case_count),
5260 case_bodies_(case_count),
5261 case_expression_counts_(case_count),
5262 expressions_(case_count),
5263 sorted_expressions_(case_count) {
5264 case_expression_counts_.FillWith(0, 0, case_count);
5265
5266 if (expression_type.nullability() == Nullability::kNonNullable) {
5267 if (expression_type.IsIntType() || expression_type.IsSmiType()) {
5268 is_optimizable_ = true;
5269 } else if (expression_type.HasTypeClass() &&
5270 Class::Handle(zone: zone_, ptr: expression_type.type_class())
5271 .is_enum_class()) {
5272 is_optimizable_ = true;
5273 is_enum_switch_ = true;
5274 }
5275 }
5276}
5277
5278int64_t SwitchHelper::ExpressionRange() const {
5279 const int64_t min = expression_min().AsInt64Value();
5280 const int64_t max = expression_max().AsInt64Value();
5281 ASSERT(min <= max);
5282 const uint64_t diff = static_cast<uint64_t>(max) - static_cast<uint64_t>(min);
5283 // Saturate to avoid overflow.
5284 if (diff > static_cast<uint64_t>(kMaxInt64 - 1)) {
5285 return kMaxInt64;
5286 }
5287 return static_cast<int64_t>(diff + 1);
5288}
5289
5290bool SwitchHelper::RequiresLowerBoundCheck() const {
5291 if (is_enum_switch()) {
5292 if (expression_min().IsZero()) {
5293 // Enum indexes are always positive.
5294 return false;
5295 }
5296 }
5297 return true;
5298}
5299
5300bool SwitchHelper::RequiresUpperBoundCheck() const {
5301 if (is_enum_switch()) {
5302 return has_default() || !is_exhaustive();
5303 }
5304 return true;
5305}
5306
5307SwitchDispatch SwitchHelper::SelectDispatchStrategy() {
5308 // For small to medium-sized switches, binary search is faster than a
5309 // jump table.
5310 // Please update runtime/tests/vm/dart/optimized_switch_test.dart
5311 // when changing this constant.
5312 const intptr_t kJumpTableMinExpressions = 16;
5313 // This limit comes from IndirectGotoInstr.
5314 // Realistically, the current limit should never be hit by any code.
5315 const intptr_t kJumpTableMaxSize = kMaxInt32;
5316 // Sometimes the switch expressions don't cover a contiguous range.
5317 // If the ratio of holes to expressions is too great we fall back to a
5318 // binary search to avoid code size explosion.
5319 const double kJumpTableMaxHolesRatio = 1.0;
5320
5321 if (!is_optimizable() || expressions().is_empty()) {
5322 // The switch is not optimizable, so we can only use linear scan.
5323 return kSwitchDispatchLinearScan;
5324 }
5325
5326 if (!CompilerState::Current().is_aot()) {
5327 // JIT mode supports hot-reload, which currently prevents us from
5328 // enabling optimized switches.
5329 return kSwitchDispatchLinearScan;
5330 }
5331
5332 if (FLAG_force_switch_dispatch_type == kSwitchDispatchLinearScan) {
5333 return kSwitchDispatchLinearScan;
5334 }
5335
5336 PrepareForOptimizedSwitch();
5337
5338 if (!is_optimizable()) {
5339 // While preparing for an optimized switch we might have discovered that
5340 // the switch is not optimizable after all.
5341 return kSwitchDispatchLinearScan;
5342 }
5343
5344 if (FLAG_force_switch_dispatch_type == kSwitchDispatchBinarySearch) {
5345 return kSwitchDispatchBinarySearch;
5346 }
5347
5348 const int64_t range = ExpressionRange();
5349 if (range > kJumpTableMaxSize) {
5350 return kSwitchDispatchBinarySearch;
5351 }
5352
5353 const intptr_t num_expressions = expressions().length();
5354 ASSERT(num_expressions <= range);
5355
5356 const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
5357 const int64_t holes = range - num_expressions;
5358
5359 if (FLAG_force_switch_dispatch_type != kSwitchDispatchJumpTable) {
5360 if (num_expressions < kJumpTableMinExpressions) {
5361 return kSwitchDispatchBinarySearch;
5362 }
5363
5364 if (holes > max_holes) {
5365 return kSwitchDispatchBinarySearch;
5366 }
5367 }
5368
5369 // After this point we will use a jump table.
5370
5371 // In the general case, bounds checks are required before a jump table
5372 // to handle all possible integer values.
5373 // For enums, the set of possible index values is known and much smaller
5374 // than the set of all possible integer values. A jump table that covers
5375 // either or both bounds of the range of index values requires only one or
5376 // no bounds checks.
5377 // If the expressions of an enum switch don't cover the full range of
5378 // values we can try to extend the jump table to cover the full range, but
5379 // not beyond kJumpTableMaxHolesRatio.
5380 // The count of enum values is not available when the flow graph is
5381 // constructed. The lower bound is always 0 so eliminating the lower
5382 // bound check is still possible by extending expression_min to 0.
5383 //
5384 // In the case of an integer switch we try to extend expression_min to 0
5385 // for a different reason.
5386 // If the range starts at zero it directly maps to the jump table
5387 // and we don't need to adjust the switch variable before the
5388 // jump table.
5389 if (expression_min().AsInt64Value() > 0) {
5390 const intptr_t holes_budget = Utils::Minimum(
5391 // Holes still available.
5392 x: max_holes - holes,
5393 // Entries left in the jump table.
5394 y: kJumpTableMaxSize - range);
5395
5396 const int64_t required_holes = expression_min().AsInt64Value();
5397 if (required_holes <= holes_budget) {
5398 expression_min_ = &Object::smi_zero();
5399 }
5400 }
5401
5402 return kSwitchDispatchJumpTable;
5403}
5404
5405void SwitchHelper::PrepareForOptimizedSwitch() {
5406 // Find the min and max of integer representations of expressions.
5407 // We also populate SwitchExpressions.integer for later use.
5408 const Field* enum_index_field = nullptr;
5409 for (intptr_t i = 0; i < expressions_.length(); ++i) {
5410 SwitchExpression& expression = expressions_[i];
5411 sorted_expressions_.Add(&expression);
5412
5413 const Instance& value = expression.value();
5414 const Integer* integer = nullptr;
5415 if (is_enum_switch()) {
5416 if (enum_index_field == nullptr) {
5417 enum_index_field =
5418 &Field::Handle(zone: zone_, IG->object_store()->enum_index_field());
5419 }
5420 integer = &Integer::ZoneHandle(
5421 zone: zone_, ptr: Integer::RawCast(raw: value.GetField(field: *enum_index_field)));
5422 } else {
5423 integer = &Integer::Cast(obj: value);
5424 }
5425 expression.set_integer(*integer);
5426 if (i == 0) {
5427 expression_min_ = integer;
5428 expression_max_ = integer;
5429 } else {
5430 if (expression_min_->CompareWith(other: *integer) > 0) {
5431 expression_min_ = integer;
5432 }
5433 if (expression_max_->CompareWith(other: *integer) < 0) {
5434 expression_max_ = integer;
5435 }
5436 }
5437 }
5438
5439 // Sort expressions by their integer value.
5440 sorted_expressions_.Sort(
5441 [](SwitchExpression* const* a, SwitchExpression* const* b) {
5442 return (*a)->integer().CompareWith((*b)->integer());
5443 });
5444
5445 // Check that there are no duplicate case expressions.
5446 // Duplicate expressions are allowed in switch statements, but
5447 // optimized switches don't implemented them.
5448 for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
5449 const SwitchExpression& a = *sorted_expressions_.At(i);
5450 const SwitchExpression& b = *sorted_expressions_.At(i + 1);
5451 if (a.integer().Equals(other: b.integer())) {
5452 is_optimizable_ = false;
5453 break;
5454 }
5455 }
5456}
5457
5458void SwitchHelper::AddExpression(intptr_t case_index,
5459 TokenPosition position,
5460 const Instance& value) {
5461 case_expression_counts_[case_index]++;
5462
5463 expressions_.Add(SwitchExpression(case_index, position, value));
5464
5465 if (is_optimizable_) {
5466 // Check the type of the case expression for use in an optimized switch.
5467 if (!value.IsInstanceOf(other: expression_type_, other_instantiator_type_arguments: Object::null_type_arguments(),
5468 other_function_type_arguments: Object::null_type_arguments())) {
5469 is_optimizable_ = false;
5470 }
5471 }
5472}
5473
5474} // namespace kernel
5475
5476} // namespace dart
5477

source code of dart_sdk/runtime/vm/compiler/frontend/kernel_to_il.cc