
Lines Matching refs:node

6 #include "src/compiler/node-matchers.h"
7 #include "src/compiler/node-properties.h"
33 InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
34 if (CanBeImmediate(node, mode)) {
35 return UseImmediate(node);
37 return UseRegister(node);
40 // Use the zero register if the node has the immediate value zero, otherwise
42 InstructionOperand UseRegisterOrImmediateZero(Node* node) {
43 if (IsIntegerConstant(node) && (GetIntegerConstantValue(node) == 0)) {
44 return UseImmediate(node);
46 return UseRegister(node);
49 // Use the provided node if it has the required value, or create a
51 InstructionOperand UseImmediateOrTemp(Node* node, int32_t value) {
52 if (GetIntegerConstantValue(node) == value) {
53 return UseImmediate(node);
58 bool IsIntegerConstant(Node* node) {
59 return (node->opcode() == IrOpcode::kInt32Constant) ||
60 (node->opcode() == IrOpcode::kInt64Constant);
63 int64_t GetIntegerConstantValue(Node* node) {
64 if (node->opcode() == IrOpcode::kInt32Constant) {
65 return OpParameter<int32_t>(node);
67 DCHECK(node->opcode() == IrOpcode::kInt64Constant);
68 return OpParameter<int64_t>(node);
71 bool CanBeImmediate(Node* node, ImmediateMode mode) {
72 return IsIntegerConstant(node) &&
73 CanBeImmediate(GetIntegerConstantValue(node), mode);
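
The helpers above classify integer constants and defer to CanBeImmediate, whose ImmediateMode argument names the ARM64 encoding class (arithmetic, logical, shift, load/store immediates). As a hedged sketch, the kArithmeticImm case roughly comes down to the add/sub encoding of an unsigned 12-bit value, optionally shifted left by 12; the function name below is illustrative, not V8's:

    #include <cstdint>

    // Sketch only: true if `value` could be an ARM64 add/sub immediate,
    // i.e. 12 unsigned bits, optionally shifted left by 12.
    bool FitsArithmeticImmediate(int64_t value) {
      if (value < 0) return false;
      if ((value >> 12) == 0) return true;                 // imm12
      return (value & 0xFFF) == 0 && (value >> 24) == 0;   // imm12, LSL #12
    }
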
119 void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
121 selector->Emit(opcode, g.DefineAsRegister(node),
122 g.UseRegister(node->InputAt(0)));
126 void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
128 selector->Emit(opcode, g.DefineAsRegister(node),
129 g.UseRegister(node->InputAt(0)),
130 g.UseRegister(node->InputAt(1)));
134 void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
137 selector->Emit(opcode, g.DefineAsRegister(node),
138 g.UseRegister(node->InputAt(0)),
139 g.UseOperand(node->InputAt(1), operand_mode));
143 bool TryMatchAnyShift(InstructionSelector* selector, Node* node,
144 Node* input_node, InstructionCode* opcode, bool try_ror) {
147 if (!selector->CanCover(node, input_node)) return false;
178 Node* node, Node* left_node, Node* right_node,
181 if (!selector->CanCover(node, right_node)) return false;
190 *right_op = g->UseRegister(mright.left().node());
197 if (selector->CanCover(mright.node(), mright.left().node()) &&
199 Int32BinopMatcher mleft_of_right(mright.left().node());
204 *right_op = g->UseRegister(mleft_of_right.left().node());
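
TryMatchAnyShift and TryMatchAnyExtend exist because ARM64's second operand is flexible: an arithmetic or logical instruction can shift its right operand, and add/sub can sign- or zero-extend it, at no extra cost, so a shift or extend node that the selector can cover is folded into the operand rather than emitted separately. A small illustration of the equivalences being targeted (plain C++, not V8 API):

    #include <cstdint>

    // "add w0, w1, w2, lsl #2" computes a + (b << 2) in one instruction.
    uint32_t add_shifted(uint32_t a, uint32_t b) { return a + (b << 2); }

    // "add x0, x1, w2, sxtw" computes a + sign_extend(b) in one instruction.
    int64_t add_extended(int64_t a, int32_t b) { return a + (int64_t)b; }
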
217 void VisitBinop(InstructionSelector* selector, Node* node,
221 Matcher m(node);
237 Node* left_node = m.left().node();
238 Node* right_node = m.right().node();
248 TryMatchAnyExtend(&g, selector, node, left_node, right_node,
252 TryMatchAnyExtend(&g, selector, node, right_node, left_node,
256 } else if (TryMatchAnyShift(selector, node, right_node, &opcode,
260 inputs[input_count++] = g.UseRegister(m_shift.left().node());
261 inputs[input_count++] = g.UseImmediate(m_shift.right().node());
262 } else if (can_commute && TryMatchAnyShift(selector, node, left_node, &opcode,
267 inputs[input_count++] = g.UseRegister(m_shift.left().node());
268 inputs[input_count++] = g.UseImmediate(m_shift.right().node());
280 outputs[output_count++] = g.DefineAsRegister(node);
299 void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
302 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
307 void VisitAddSub(InstructionSelector* selector, Node* node, ArchOpcode opcode,
310 Matcher m(node);
313 selector->Emit(negate_opcode, g.DefineAsRegister(node),
314 g.UseRegister(m.left().node()),
317 VisitBinop<Matcher>(selector, node, opcode, kArithmeticImm);
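
VisitAddSub appears to handle a right-hand constant whose value does not encode but whose negation does, by switching to the opposite (negate) opcode, so x + c with an awkward negative c is emitted as x - (-c). A self-contained sketch of that check, reusing the encoding rule from the earlier sketch:

    #include <cstdint>

    // Sketch only: emit x - (-c) instead of x + c when -c is encodable.
    bool EmitAsSubOfNegation(int64_t c) {
      if (c >= 0 || c == INT64_MIN) return false;  // avoid negation overflow
      int64_t n = -c;
      return (n >> 12) == 0 || ((n & 0xFFF) == 0 && (n >> 24) == 0);
    }
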
340 void InstructionSelector::VisitLoad(Node* node) {
341 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
343 Node* base = node->InputAt(0);
344 Node* index = node->InputAt(1);
380 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
383 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
388 void InstructionSelector::VisitStore(Node* node) {
390 Node* base = node->InputAt(0);
391 Node* index = node->InputAt(1);
392 Node* value = node->InputAt(2);
394 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
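
VisitLoad (and VisitStore) choose between an immediate-offset and a register-offset addressing mode depending on whether the index is a constant the encoding can absorb. As a hedged sketch, the scaled unsigned-offset form of LDR/STR requires the offset to be non-negative, a multiple of the access size, and at most 12 bits once scaled (the unscaled signed 9-bit LDUR/STUR form is ignored here; function name illustrative):

    #include <cstdint>

    // Sketch only: scaled unsigned-offset rule for LDR/STR immediates.
    bool FitsLoadStoreImmediate(int64_t offset, int size_log2) {
      if (offset < 0) return false;
      if (offset & ((int64_t{1} << size_log2) - 1)) return false;  // alignment
      return (offset >> size_log2) < (int64_t{1} << 12);           // imm12 after scaling
    }
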
473 void InstructionSelector::VisitCheckedLoad(Node* node) {
474 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
476 Node* const buffer = node->InputAt(0);
477 Node* const offset = node->InputAt(1);
478 Node* const length = node->InputAt(2);
505 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
510 void InstructionSelector::VisitCheckedStore(Node* node) {
511 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
513 Node* const buffer = node->InputAt(0);
514 Node* const offset = node->InputAt(1);
515 Node* const length = node->InputAt(2);
516 Node* const value = node->InputAt(3);
549 static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
581 Matcher mleft(m->left().node());
584 selector->Emit(inv_opcode, g.DefineAsRegister(node),
585 g.UseRegister(m->right().node()),
586 g.UseRegister(mleft.left().node()));
594 Matcher mright(m->right().node());
597 selector->Emit(inv_opcode, g.DefineAsRegister(node),
598 g.UseRegister(m->left().node()),
599 g.UseRegister(mright.left().node()));
605 selector->Emit(kArm64Not32, g.DefineAsRegister(node),
606 g.UseRegister(m->left().node()));
608 selector->Emit(kArm64Not, g.DefineAsRegister(node),
609 g.UseRegister(m->left().node()));
611 VisitBinop<Matcher>(selector, node, opcode, imm_mode);
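
VisitLogical selects the inverted-operand forms when one input is already an inversion (the inv_opcode paths above), and a bare inversion of a single value becomes kArm64Not32/kArm64Not. The identities behind those rewrites, as a sketch:

    #include <cstdint>

    uint32_t bic(uint32_t a, uint32_t b) { return a & ~b; }   // AND with inverted operand
    uint32_t orn(uint32_t a, uint32_t b) { return a | ~b; }   // ORR with inverted operand
    uint32_t eon(uint32_t a, uint32_t b) { return a ^ ~b; }   // EOR with inverted operand
    uint32_t mvn(uint32_t a)             { return a ^ ~0u; }  // x ^ -1 == ~x (NOT)
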
616 void InstructionSelector::VisitWord32And(Node* node) {
618 Int32BinopMatcher m(node);
619 if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
630 Int32BinopMatcher mleft(m.left().node());
641 Emit(kArm64Ubfx32, g.DefineAsRegister(node),
642 g.UseRegister(mleft.left().node()),
643 g.UseImmediateOrTemp(mleft.right().node(), lsb),
651 this, node, &m, kArm64And32, CanCover(node, m.left().node()),
652 CanCover(node, m.right().node()), kLogical32Imm);
656 void InstructionSelector::VisitWord64And(Node* node) {
658 Int64BinopMatcher m(node);
659 if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
670 Int64BinopMatcher mleft(m.left().node());
681 Emit(kArm64Ubfx, g.DefineAsRegister(node),
682 g.UseRegister(mleft.left().node()),
683 g.UseImmediateOrTemp(mleft.right().node(), lsb),
691 this, node, &m, kArm64And, CanCover(node, m.left().node()),
692 CanCover(node, m.right().node()), kLogical64Imm);
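
Both And visitors recognise a right shift whose result is masked with a contiguous run of low bits and collapse the pair into a single unsigned bitfield extract (kArm64Ubfx32/kArm64Ubfx). The arithmetic being matched, as a sketch:

    #include <cstdint>

    // Sketch only: UBFX extracts `width` bits starting at `lsb`
    // (callers must keep lsb + width <= 32).
    uint32_t ubfx32(uint32_t x, unsigned lsb, unsigned width) {
      uint32_t mask = (width < 32) ? ((uint32_t{1} << width) - 1) : ~uint32_t{0};
      return (x >> lsb) & mask;
    }
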
696 void InstructionSelector::VisitWord32Or(Node* node) {
697 Int32BinopMatcher m(node);
699 this, node, &m, kArm64Or32, CanCover(node, m.left().node()),
700 CanCover(node, m.right().node()), kLogical32Imm);
704 void InstructionSelector::VisitWord64Or(Node* node) {
705 Int64BinopMatcher m(node);
707 this, node, &m, kArm64Or, CanCover(node, m.left().node()),
708 CanCover(node, m.right().node()), kLogical64Imm);
712 void InstructionSelector::VisitWord32Xor(Node* node) {
713 Int32BinopMatcher m(node);
715 this, node, &m, kArm64Eor32, CanCover(node, m.left().node()),
716 CanCover(node, m.right().node()), kLogical32Imm);
720 void InstructionSelector::VisitWord64Xor(Node* node) {
721 Int64BinopMatcher m(node);
723 this, node, &m, kArm64Eor, CanCover(node, m.left().node()),
724 CanCover(node, m.right().node()), kLogical64Imm);
728 void InstructionSelector::VisitWord32Shl(Node* node) {
729 Int32BinopMatcher m(node);
730 if (m.left().IsWord32And() && CanCover(node, m.left().node()) &&
733 Int32BinopMatcher mleft(m.left().node());
746 Emit(kArm64Lsl32, g.DefineAsRegister(node),
747 g.UseRegister(mleft.left().node()),
748 g.UseImmediate(m.right().node()));
753 Emit(kArm64Ubfiz32, g.DefineAsRegister(node),
754 g.UseRegister(mleft.left().node()),
755 g.UseImmediate(m.right().node()), g.TempImmediate(mask_width));
761 VisitRRO(this, kArm64Lsl32, node, kShift32Imm);
765 void InstructionSelector::VisitWord64Shl(Node* node) {
767 Int64BinopMatcher m(node);
772 Emit(kArm64Lsl, g.DefineAsRegister(node),
773 g.UseRegister(m.left().node()->InputAt(0)),
774 g.UseImmediate(m.right().node()));
777 VisitRRO(this, kArm64Lsl, node, kShift64Imm);
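
VisitWord32Shl similarly spots a left shift of a low-bit mask: when the shift pushes every masked-off bit out of the word the And is redundant and a plain kArm64Lsl32 suffices; otherwise the pair becomes kArm64Ubfiz32, which inserts the low mask_width bits of the source at the shift position in a zeroed register. A sketch of the UBFIZ case:

    #include <cstdint>

    // Sketch only: UBFIZ == (x & low_mask(width)) << shift
    // (callers must keep shift + width <= 32).
    uint32_t ubfiz32(uint32_t x, unsigned shift, unsigned width) {
      uint32_t mask = (width < 32) ? ((uint32_t{1} << width) - 1) : ~uint32_t{0};
      return (x & mask) << shift;
    }
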
783 bool TryEmitBitfieldExtract32(InstructionSelector* selector, Node* node) {
785 Int32BinopMatcher m(node);
786 if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
789 Int32BinopMatcher mleft(m.left().node());
798 selector->Emit(opcode, g.DefineAsRegister(node),
799 g.UseRegister(mleft.left().node()), g.TempImmediate(0),
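
TryEmitBitfieldExtract32 looks for a right shift that exactly undoes an inner left shift, (x << k) >> k, which is just an extraction of the low 32 - k bits starting at bit 0 (hence the TempImmediate(0) above): zero-extended for a logical shift, sign-extended for an arithmetic one. Sketch:

    #include <cstdint>

    // Sketch only: valid for k in [1, 31]; arithmetic >> on int32_t assumed.
    uint32_t low_bits_zext(uint32_t x, unsigned k) { return (x << k) >> k; }           // UBFX #0, #32-k
    int32_t  low_bits_sext(uint32_t x, unsigned k) { return (int32_t)(x << k) >> k; }  // SBFX #0, #32-k
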
810 void InstructionSelector::VisitWord32Shr(Node* node) {
811 Int32BinopMatcher m(node);
814 Int32BinopMatcher mleft(m.left().node());
824 Emit(kArm64Ubfx32, g.DefineAsRegister(node),
825 g.UseRegister(mleft.left().node()),
826 g.UseImmediateOrTemp(m.right().node(), lsb),
831 } else if (TryEmitBitfieldExtract32(this, node)) {
836 CanCover(node, node->InputAt(0))) {
840 Node* left = m.left().node();
845 Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand,
850 VisitRRO(this, kArm64Lsr32, node, kShift32Imm);
854 void InstructionSelector::VisitWord64Shr(Node* node) {
855 Int64BinopMatcher m(node);
858 Int64BinopMatcher mleft(m.left().node());
868 Emit(kArm64Ubfx, g.DefineAsRegister(node),
869 g.UseRegister(mleft.left().node()),
870 g.UseImmediateOrTemp(m.right().node(), lsb),
876 VisitRRO(this, kArm64Lsr, node, kShift64Imm);
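
The Word32Shr path also emits a 64-bit kArm64Lsr on an smull_operand; assuming this is the usual shift-of-MulHigh case, the fusion rests on the identity that shifting the high half of a 64-bit product right by k is the same as shifting the whole product right by 32 + k. Sketch of that identity:

    #include <cstdint>

    // Sketch only: k in [0, 31].
    uint32_t mulhigh_then_shr(uint32_t a, uint32_t b, unsigned k) {
      return (uint32_t)(((uint64_t)a * b) >> (32 + k));
    }
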
880 void InstructionSelector::VisitWord32Sar(Node* node) {
881 if (TryEmitBitfieldExtract32(this, node)) {
885 Int32BinopMatcher m(node);
887 CanCover(node, node->InputAt(0))) {
891 Node* left = m.left().node();
896 Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand,
902 CanCover(node, node->InputAt(0))) {
903 Node* add_node = m.left().node();
906 CanCover(add_node, madd_node.left().node())) {
912 Node* mul_node = madd_node.left().node();
923 Emit(kArm64Asr32, g.DefineAsRegister(node), add_operand,
924 g.UseImmediate(node->InputAt(1)));
929 VisitRRO(this, kArm64Asr32, node, kShift32Imm);
933 void InstructionSelector::VisitWord64Sar(Node* node) {
934 VisitRRO(this, kArm64Asr, node, kShift64Imm);
938 void InstructionSelector::VisitWord32Ror(Node* node) {
939 VisitRRO(this, kArm64Ror32, node, kShift32Imm);
943 void InstructionSelector::VisitWord64Ror(Node* node) {
944 VisitRRO(this, kArm64Ror, node, kShift64Imm);
948 void InstructionSelector::VisitWord64Clz(Node* node) {
950 Emit(kArm64Clz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
954 void InstructionSelector::VisitWord32Clz(Node* node) {
956 Emit(kArm64Clz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
960 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
963 void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
966 void InstructionSelector::VisitWord32Popcnt(Node* node) { UNREACHABLE(); }
969 void InstructionSelector::VisitWord64Popcnt(Node* node) { UNREACHABLE(); }
972 void InstructionSelector::VisitInt32Add(Node* node) {
974 Int32BinopMatcher m(node);
976 if (m.left().IsInt32Mul() && CanCover(node, m.left().node())) {
977 Int32BinopMatcher mleft(m.left().node());
980 Emit(kArm64Madd32, g.DefineAsRegister(node),
981 g.UseRegister(mleft.left().node()),
982 g.UseRegister(mleft.right().node()),
983 g.UseRegister(m.right().node()));
988 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
989 Int32BinopMatcher mright(m.right().node());
992 Emit(kArm64Madd32, g.DefineAsRegister(node),
993 g.UseRegister(mright.left().node()),
994 g.UseRegister(mright.right().node()),
995 g.UseRegister(m.left().node()));
999 VisitAddSub<Int32BinopMatcher>(this, node, kArm64Add32, kArm64Sub32);
1003 void InstructionSelector::VisitInt64Add(Node* node) {
1005 Int64BinopMatcher m(node);
1007 if (m.left().IsInt64Mul() && CanCover(node, m.left().node())) {
1008 Int64BinopMatcher mleft(m.left().node());
1011 Emit(kArm64Madd, g.DefineAsRegister(node),
1012 g.UseRegister(mleft.left().node()),
1013 g.UseRegister(mleft.right().node()),
1014 g.UseRegister(m.right().node()));
1019 if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
1020 Int64BinopMatcher mright(m.right().node());
1023 Emit(kArm64Madd, g.DefineAsRegister(node),
1024 g.UseRegister(mright.left().node()),
1025 g.UseRegister(mright.right().node()),
1026 g.UseRegister(m.left().node()));
1030 VisitAddSub<Int64BinopMatcher>(this, node, kArm64Add, kArm64Sub);
1034 void InstructionSelector::VisitInt32Sub(Node* node) {
1036 Int32BinopMatcher m(node);
1039 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
1040 Int32BinopMatcher mright(m.right().node());
1043 Emit(kArm64Msub32, g.DefineAsRegister(node),
1044 g.UseRegister(mright.left().node()),
1045 g.UseRegister(mright.right().node()),
1046 g.UseRegister(m.left().node()));
1051 VisitAddSub<Int32BinopMatcher>(this, node, kArm64Sub32, kArm64Add32);
1055 void InstructionSelector::VisitInt64Sub(Node* node) {
1057 Int64BinopMatcher m(node);
1060 if (m.right().IsInt64Mul() && CanCover(node, m.right().node())) {
1061 Int64BinopMatcher mright(m.right().node());
1064 Emit(kArm64Msub, g.DefineAsRegister(node),
1065 g.UseRegister(mright.left().node()),
1066 g.UseRegister(mright.right().node()),
1067 g.UseRegister(m.left().node()));
1072 VisitAddSub<Int64BinopMatcher>(this, node, kArm64Sub, kArm64Add);
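
The Add and Sub visitors fold a covered multiply into a multiply-accumulate: a + b*c on either side becomes kArm64Madd32/kArm64Madd, and a - b*c (multiply on the right only, since subtraction does not commute) becomes kArm64Msub32/kArm64Msub. The equivalences, with wraparound assumed:

    #include <cstdint>

    // Sketch only; ARM64 MADD/MSUB compute acc +/- (b * c).
    int32_t madd32(int32_t b, int32_t c, int32_t acc) { return acc + b * c; }
    int32_t msub32(int32_t b, int32_t c, int32_t acc) { return acc - b * c; }
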
1076 void InstructionSelector::VisitInt32Mul(Node* node) {
1078 Int32BinopMatcher m(node);
1085 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
1086 g.UseRegister(m.left().node()), g.TempImmediate(shift));
1090 if (m.left().IsInt32Sub() && CanCover(node, m.left().node())) {
1091 Int32BinopMatcher mleft(m.left().node());
1095 Emit(kArm64Mneg32, g.DefineAsRegister(node),
1096 g.UseRegister(mleft.right().node()),
1097 g.UseRegister(m.right().node()));
1102 if (m.right().IsInt32Sub() && CanCover(node, m.right().node())) {
1103 Int32BinopMatcher mright(m.right().node());
1107 Emit(kArm64Mneg32, g.DefineAsRegister(node),
1108 g.UseRegister(m.left().node()),
1109 g.UseRegister(mright.right().node()));
1114 VisitRRR(this, kArm64Mul32, node);
1118 void InstructionSelector::VisitInt64Mul(Node* node) {
1120 Int64BinopMatcher m(node);
1127 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
1128 g.UseRegister(m.left().node()), g.TempImmediate(shift));
1132 if (m.left().IsInt64Sub() && CanCover(node, m.left().node())) {
1133 Int64BinopMatcher mleft(m.left().node());
1137 Emit(kArm64Mneg, g.DefineAsRegister(node),
1138 g.UseRegister(mleft.right().node()),
1139 g.UseRegister(m.right().node()));
1144 if (m.right().IsInt64Sub() && CanCover(node, m.right().node())) {
1145 Int64BinopMatcher mright(m.right().node());
1149 Emit(kArm64Mneg, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
1150 g.UseRegister(mright.right().node()));
1155 VisitRRR(this, kArm64Mul, node);
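
The Mul visitors apply two strength reductions: the repeated m.left() register next to a TempImmediate(shift) suggests that a multiply by a constant of the form 2^k + 1 becomes an add of the value with a shifted copy of itself, and a multiply where one operand is a subtraction from zero becomes kArm64Mneg32/kArm64Mneg, a negated multiply. Sketch (two's-complement wraparound assumed):

    #include <cstdint>

    int32_t mul_pow2_plus_one(int32_t x, unsigned k) { return x + (x << k); }  // x * (2^k + 1)
    int32_t mneg32(int32_t x, int32_t y)             { return -(x * y); }      // (0 - x) * y
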
1159 void InstructionSelector::VisitInt32MulHigh(Node* node) {
1162 Emit(kArm64Smull, smull_operand, g.UseRegister(node->InputAt(0)),
1163 g.UseRegister(node->InputAt(1)));
1164 Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
1168 void InstructionSelector::VisitUint32MulHigh(Node* node) {
1171 Emit(kArm64Umull, smull_operand, g.UseRegister(node->InputAt(0)),
1172 g.UseRegister(node->InputAt(1)));
1173 Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32));
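
MulHigh has no single ARM64 instruction, so both visitors widen to a full 64-bit product in a temporary (SMULL/UMULL) and shift it right by 32, arithmetically for the signed case and logically for the unsigned one. Equivalent C++ sketch:

    #include <cstdint>

    int32_t  int32_mul_high (int32_t a,  int32_t b)  { return (int32_t)(((int64_t)a  * b) >> 32); }
    uint32_t uint32_mul_high(uint32_t a, uint32_t b) { return (uint32_t)(((uint64_t)a * b) >> 32); }
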
1177 void InstructionSelector::VisitInt32Div(Node* node) {
1178 VisitRRR(this, kArm64Idiv32, node);
1182 void InstructionSelector::VisitInt64Div(Node* node) {
1183 VisitRRR(this, kArm64Idiv, node);
1187 void InstructionSelector::VisitUint32Div(Node* node) {
1188 VisitRRR(this, kArm64Udiv32, node);
1192 void InstructionSelector::VisitUint64Div(Node* node) {
1193 VisitRRR(this, kArm64Udiv, node);
1197 void InstructionSelector::VisitInt32Mod(Node* node) {
1198 VisitRRR(this, kArm64Imod32, node);
1202 void InstructionSelector::VisitInt64Mod(Node* node) {
1203 VisitRRR(this, kArm64Imod, node);
1207 void InstructionSelector::VisitUint32Mod(Node* node) {
1208 VisitRRR(this, kArm64Umod32, node);
1212 void InstructionSelector::VisitUint64Mod(Node* node) {
1213 VisitRRR(this, kArm64Umod, node);
1217 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
1218 VisitRR(this, kArm64Float32ToFloat64, node);
1222 void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
1223 VisitRR(this, kArm64Int32ToFloat64, node);
1227 void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
1228 VisitRR(this, kArm64Uint32ToFloat64, node);
1232 void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
1233 VisitRR(this, kArm64Float64ToInt32, node);
1237 void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1238 VisitRR(this, kArm64Float64ToUint32, node);
1242 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1245 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1248 outputs[output_count++] = g.DefineAsRegister(node);
1250 Node* success_output = NodeProperties::FindProjection(node, 1);
1259 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1262 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1265 outputs[output_count++] = g.DefineAsRegister(node);
1267 Node* success_output = NodeProperties::FindProjection(node, 1);
1276 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1279 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1282 outputs[output_count++] = g.DefineAsRegister(node);
1284 Node* success_output = NodeProperties::FindProjection(node, 1);
1293 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1296 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
1299 outputs[output_count++] = g.DefineAsRegister(node);
1301 Node* success_output = NodeProperties::FindProjection(node, 1);
1310 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1311 VisitRR(this, kArm64Sxtw, node);
1315 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1317 Node* value = node->InputAt(0);
1345 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
1351 Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(value));
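
VisitChangeUint32ToUint64 either renames the input (kArchNop defined same-as-first) or emits kArm64Mov32; a W-to-W move is enough because any ARM64 write to a 32-bit register clears the upper 32 bits of the X register, so the move itself is the zero-extension, and values already produced that way need no instruction at all. In C++ the operation is simply:

    #include <cstdint>

    // Sketch only: zero-extension of a 32-bit value.
    uint64_t zero_extend(uint32_t x) { return (uint64_t)x; }
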
1355 void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1356 VisitRR(this, kArm64Float64ToFloat32, node);
1360 void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
1361 switch (TruncationModeOf(node->op())) {
1363 return VisitRR(this, kArchTruncateDoubleToI, node);
1365 return VisitRR(this, kArm64Float64ToInt32, node);
1371 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1373 Node* value = node->InputAt(0);
1374 if (CanCover(node, value) && value->InputCount() >= 2) {
1379 Emit(kArm64Lsr, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
1380 g.UseImmediate(m.right().node()));
1385 Emit(kArm64Mov32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
1389 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1390 VisitRR(this, kArm64Int64ToFloat32, node);
1394 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1395 VisitRR(this, kArm64Int64ToFloat64, node);
1399 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1400 VisitRR(this, kArm64Uint64ToFloat32, node);
1404 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1405 VisitRR(this, kArm64Uint64ToFloat64, node);
1409 void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1410 VisitRR(this, kArm64Float64ExtractLowWord32, node);
1414 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1415 VisitRR(this, kArm64U64MoveFloat64, node);
1419 void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1420 VisitRR(this, kArm64Float64MoveU64, node);
1424 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1425 VisitRR(this, kArm64Float64MoveU64, node);
1429 void InstructionSelector::VisitFloat32Add(Node* node) {
1430 VisitRRR(this, kArm64Float32Add, node);
1434 void InstructionSelector::VisitFloat64Add(Node* node) {
1435 VisitRRR(this, kArm64Float64Add, node);
1439 void InstructionSelector::VisitFloat32Sub(Node* node) {
1440 VisitRRR(this, kArm64Float32Sub, node);
1444 void InstructionSelector::VisitFloat64Sub(Node* node) {
1446 Float64BinopMatcher m(node);
1449 CanCover(m.node(), m.right().node())) {
1451 CanCover(m.right().node(), m.right().InputAt(0))) {
1454 Emit(kArm64Float64RoundUp, g.DefineAsRegister(node),
1455 g.UseRegister(mright0.right().node()));
1460 Emit(kArm64Float64Neg, g.DefineAsRegister(node),
1461 g.UseRegister(m.right().node()));
1464 VisitRRR(this, kArm64Float64Sub, node);
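
VisitFloat64Sub emits kArm64Float64RoundUp in one matched case and kArm64Float64Neg in another before falling back to a plain FSUB; the usual shapes here are a subtraction from -0.0 (a negation) and a subtraction from -0.0 of a rounded-down negation, which is a round toward plus infinity since ceil(x) == -floor(-x). Sketch of the identities, not of the matcher:

    #include <cmath>

    double neg_via_sub(double x)    { return -0.0 - x; }         // == -x           (FNEG)
    double ceil_via_floor(double x) { return -std::floor(-x); }  // == std::ceil(x) (FRINTP)
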
1468 void InstructionSelector::VisitFloat32Mul(Node* node) {
1469 VisitRRR(this, kArm64Float32Mul, node);
1473 void InstructionSelector::VisitFloat64Mul(Node* node) {
1474 VisitRRR(this, kArm64Float64Mul, node);
1478 void InstructionSelector::VisitFloat32Div(Node* node) {
1479 VisitRRR(this, kArm64Float32Div, node);
1483 void InstructionSelector::VisitFloat64Div(Node* node) {
1484 VisitRRR(this, kArm64Float64Div, node);
1488 void InstructionSelector::VisitFloat64Mod(Node* node) {
1490 Emit(kArm64Float64Mod, g.DefineAsFixed(node, d0),
1491 g.UseFixed(node->InputAt(0), d0),
1492 g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
1496 void InstructionSelector::VisitFloat32Max(Node* node) {
1497 VisitRRR(this, kArm64Float32Max, node);
1501 void InstructionSelector::VisitFloat64Max(Node* node) {
1502 VisitRRR(this, kArm64Float64Max, node);
1506 void InstructionSelector::VisitFloat32Min(Node* node) {
1507 VisitRRR(this, kArm64Float32Min, node);
1511 void InstructionSelector::VisitFloat64Min(Node* node) {
1512 VisitRRR(this, kArm64Float64Min, node);
1516 void InstructionSelector::VisitFloat32Abs(Node* node) {
1517 VisitRR(this, kArm64Float32Abs, node);
1521 void InstructionSelector::VisitFloat64Abs(Node* node) {
1522 VisitRR(this, kArm64Float64Abs, node);
1526 void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1527 VisitRR(this, kArm64Float32Sqrt, node);
1531 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1532 VisitRR(this, kArm64Float64Sqrt, node);
1536 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1537 VisitRR(this, kArm64Float32RoundDown, node);
1541 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1542 VisitRR(this, kArm64Float64RoundDown, node);
1546 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1547 VisitRR(this, kArm64Float32RoundUp, node);
1551 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1552 VisitRR(this, kArm64Float64RoundUp, node);
1556 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1557 VisitRR(this, kArm64Float32RoundTruncate, node);
1561 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1562 VisitRR(this, kArm64Float64RoundTruncate, node);
1566 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1567 VisitRR(this, kArm64Float64RoundTiesAway, node);
1571 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1572 VisitRR(this, kArm64Float32RoundTiesEven, node);
1576 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1577 VisitRR(this, kArm64Float64RoundTiesEven, node);
1583 Node* node) {
1609 Emit(kArm64Poke, g.NoOutput(), g.UseRegister((*arguments)[slot].node()),
1643 void VisitWordCompare(InstructionSelector* selector, Node* node,
1647 Node* left = node->InputAt(0);
1648 Node* right = node->InputAt(1);
1665 void VisitWord32Compare(InstructionSelector* selector, Node* node,
1667 Int32BinopMatcher m(node);
1672 Node* sub = m.right().node();
1675 bool can_cover = selector->CanCover(node, sub);
1676 node->ReplaceInput(1, msub.right().node());
1677 // Even if the comparison node covers the subtraction, after the input
1678 // replacement above, the node still won't cover the input to the
1681 // input to the subtraction, as TryMatchAnyShift requires this node to
1684 // any other node.
1685 if (can_cover) sub->ReplaceInput(1, msub.left().node());
1689 VisitBinop<Int32BinopMatcher>(selector, node, opcode, kArithmeticImm, cont);
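
The Word32Compare helper folds a right operand that is a subtraction from zero into the compare itself (the ReplaceInput calls above); on ARM64 this is the classic CMP-to-CMN rewrite, valid at the value level because x - (0 - y) equals x + y. The opcode switch is not visible in this excerpt, and the carry-flag subtleties for unsigned conditions are out of scope; a sketch of the value identity only:

    #include <cstdint>

    uint32_t cmp_form(uint32_t x, uint32_t y) { return x - (0u - y); }  // what CMP would compute
    uint32_t cmn_form(uint32_t x, uint32_t y) { return x + y; }         // what CMN computes
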
1693 void VisitWordTest(InstructionSelector* selector, Node* node,
1696 VisitCompare(selector, opcode, g.UseRegister(node), g.UseRegister(node),
1701 void VisitWord32Test(InstructionSelector* selector, Node* node,
1703 VisitWordTest(selector, node, kArm64Tst32, cont);
1707 void VisitWord64Test(InstructionSelector* selector, Node* node,
1709 VisitWordTest(selector, node, kArm64Tst, cont);
1714 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1717 Float32BinopMatcher m(node);
1719 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1720 g.UseImmediate(m.right().node()), cont);
1723 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.right().node()),
1724 g.UseImmediate(m.left().node()), cont);
1726 VisitCompare(selector, kArm64Float32Cmp, g.UseRegister(m.left().node()),
1727 g.UseRegister(m.right().node()), cont);
1733 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1736 Float64BinopMatcher m(node);
1738 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1739 g.UseImmediate(m.right().node()), cont);
1742 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.right().node()),
1743 g.UseImmediate(m.left().node()), cont);
1745 VisitCompare(selector, kArm64Float64Cmp, g.UseRegister(m.left().node()),
1746 g.UseRegister(m.right().node()), cont);
1753 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1756 Node* user = branch;
1757 Node* value = branch->InputAt(0);
1766 value = m.left().node();
1831 // <Operation>WithOverflow node.
1838 Node* const node = value->InputAt(0);
1839 Node* const result = NodeProperties::FindProjection(node, 0);
1841 switch (node->opcode()) {
1844 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
1848 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
1852 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add,
1856 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub,
1877 g.UseRegister(m.left().node()),
1894 g.UseRegister(m.left().node()),
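
When the branched-on value is the overflow projection of an Add/SubWithOverflow (the kArm64Add32/Sub32/Add/Sub cases above), VisitBranch re-emits the binop with a flags continuation so the branch tests the V flag of the flag-setting instruction directly instead of a materialised boolean. A rough C++ model of the overflow bit, using a GCC/Clang builtin:

    #include <cstdint>

    // Sketch only: 'true' corresponds to the V flag of a 32-bit ADDS.
    bool add_overflows(int32_t a, int32_t b, int32_t* out) {
      return __builtin_add_overflow(a, b, out);
    }
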
1915 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1917 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1943 void InstructionSelector::VisitWord32Equal(Node* const node) {
1944 Node* const user = node;
1945 FlagsContinuation cont(kEqual, node);
1948 Node* const value = m.left().node();
1963 node->ReplaceInput(0, mequal.left().node());
1964 node->ReplaceInput(1, mequal.right().node());
1966 return VisitWord32Compare(this, node, &cont);
1974 VisitWord32Compare(this, node, &cont);
1978 void InstructionSelector::VisitInt32LessThan(Node* node) {
1979 FlagsContinuation cont(kSignedLessThan, node);
1980 VisitWord32Compare(this, node, &cont);
1984 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1985 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1986 VisitWord32Compare(this, node, &cont);
1990 void InstructionSelector::VisitUint32LessThan(Node* node) {
1991 FlagsContinuation cont(kUnsignedLessThan, node);
1992 VisitWord32Compare(this, node, &cont);
1996 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1997 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1998 VisitWord32Compare(this, node, &cont);
2002 void InstructionSelector::VisitWord64Equal(Node* const node) {
2003 Node* const user = node;
2004 FlagsContinuation cont(kEqual, node);
2007 Node* const value = m.left().node();
2019 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2023 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
2024 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2026 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
2030 VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, &cont);
2034 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
2035 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2037 return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
2041 VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, &cont);
2045 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
2046 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2048 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm,
2052 VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm, &cont);
2056 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
2057 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
2059 return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm,
2063 VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, &cont);
2067 void InstructionSelector::VisitInt64LessThan(Node* node) {
2068 FlagsContinuation cont(kSignedLessThan, node);
2069 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2073 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
2074 FlagsContinuation cont(kSignedLessThanOrEqual, node);
2075 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2079 void InstructionSelector::VisitUint64LessThan(Node* node) {
2080 FlagsContinuation cont(kUnsignedLessThan, node);
2081 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2085 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
2086 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
2087 VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
2091 void InstructionSelector::VisitFloat32Equal(Node* node) {
2092 FlagsContinuation cont(kEqual, node);
2093 VisitFloat32Compare(this, node, &cont);
2097 void InstructionSelector::VisitFloat32LessThan(Node* node) {
2098 FlagsContinuation cont(kFloatLessThan, node);
2099 VisitFloat32Compare(this, node, &cont);
2103 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
2104 FlagsContinuation cont(kFloatLessThanOrEqual, node);
2105 VisitFloat32Compare(this, node, &cont);
2109 void InstructionSelector::VisitFloat64Equal(Node* node) {
2110 FlagsContinuation cont(kEqual, node);
2111 VisitFloat64Compare(this, node, &cont);
2115 void InstructionSelector::VisitFloat64LessThan(Node* node) {
2116 FlagsContinuation cont(kFloatLessThan, node);
2117 VisitFloat64Compare(this, node, &cont);
2121 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
2122 FlagsContinuation cont(kFloatLessThanOrEqual, node);
2123 VisitFloat64Compare(this, node, &cont);
2127 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
2129 Emit(kArm64Float64ExtractLowWord32, g.DefineAsRegister(node),
2130 g.UseRegister(node->InputAt(0)));
2134 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
2136 Emit(kArm64Float64ExtractHighWord32, g.DefineAsRegister(node),
2137 g.UseRegister(node->InputAt(0)));
2141 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
2143 Node* left = node->InputAt(0);
2144 Node* right = node->InputAt(1);
2146 CanCover(node, left)) {
2147 Node* right_of_left = left->InputAt(1);
2151 Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(right));
2154 Emit(kArm64Float64InsertLowWord32, g.DefineAsRegister(node),
2159 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
2161 Node* left = node->InputAt(0);
2162 Node* right = node->InputAt(1);
2164 CanCover(node, left)) {
2165 Node* right_of_left = left->InputAt(1);
2168 Emit(kArm64Float64MoveU64, g.DefineAsRegister(node), g.UseRegister(left));
2171 Emit(kArm64Float64InsertHighWord32, g.DefineAsRegister(node),
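
Both insert-word visitors fall back to a single kArm64Float64MoveU64 when the covered pattern shows the double being rebuilt from both 32-bit halves: the halves are apparently assembled in an integer register and moved to the FP register once, instead of performing two field inserts. A sketch of the value being produced (helper name is illustrative):

    #include <cstdint>
    #include <cstring>

    // Sketch only: build a double from its low and high 32-bit words.
    double make_double(uint32_t lo, uint32_t hi) {
      uint64_t bits = ((uint64_t)hi << 32) | lo;
      double d;
      std::memcpy(&d, &bits, sizeof(d));  // bit-cast, like Float64MoveU64
      return d;
    }
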