
Lines Matching defs:scheduler_

219 scheduler_(scheduler),
233 Queue(scheduler_->graph_->end());
259 scheduler_->equivalence_->Run(exit);
291 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
348 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
352 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
356 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
360 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
364 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
368 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
377 scheduler_->UpdatePlacement(node, Scheduler::kFixed);
550 node == scheduler_->graph_->end()->InputAt(0));
554 size_t entry_class = scheduler_->equivalence_->ClassOf(entry);
555 size_t exit_class = scheduler_->equivalence_->ClassOf(exit);
566 Scheduler* scheduler_;
1152 : scheduler_(scheduler), schedule_(scheduler->schedule_) {}
1155 if (scheduler_->GetPlacement(node) == Scheduler::kFixed) {
1157 scheduler_->schedule_root_nodes_.push_back(node);
1178 DCHECK_NE(Scheduler::kFixed, scheduler_->GetPlacement(from));
1179 scheduler_->IncrementUnscheduledUseCount(to, index, from);
1184 Scheduler* scheduler_;
1225 : scheduler_(scheduler), schedule_(scheduler->schedule_), queue_(zone) {}
1242 Scheduler::SchedulerData* data = scheduler_->GetData(node);
1245 if (scheduler_->GetPlacement(node) == Scheduler::kFixed) {
1267 Scheduler::SchedulerData* data = scheduler_->GetData(node);
1270 if (scheduler_->GetPlacement(node) == Scheduler::kFixed) return;
1273 if (scheduler_->GetPlacement(node) == Scheduler::kCoupled) {
1299 Scheduler* scheduler_;
1329 : scheduler_(scheduler),
1330 schedule_(scheduler_->schedule_),
1343 ZoneQueue<Node*>* queue = &(scheduler_->schedule_queue_);
1346 if (scheduler_->GetPlacement(node) == Scheduler::kCoupled) {
1351 if (scheduler_->GetData(node)->unscheduled_count_ != 0) continue;
1366 DCHECK_EQ(0, scheduler_->GetData(node)->unscheduled_count_);
1370 DCHECK_EQ(Scheduler::kSchedulable, scheduler_->GetPlacement(node));
1379 BasicBlock* min_block = scheduler_->GetData(node)->minimum_block_;
1400 } else if (scheduler_->flags_ & Scheduler::kSplitNodes) {
1484 ZoneMap<BasicBlock*, Node*> dominators(scheduler_->zone_);
1504 scheduler_->schedule_queue_.push(use_node);
1521 scheduler_->special_rpo_->GetOutgoingBlocks(header_block)) {
1545 return scheduler_->control_flow_builder_->FindPredecessorBlock(node);
1555 if (scheduler_->GetPlacement(use) == Scheduler::kCoupled) {
1564 if (scheduler_->GetPlacement(use) == Scheduler::kFixed) {
1575 if (scheduler_->GetPlacement(use) == Scheduler::kFixed) {
1589 scheduler_->FuseFloatingControl(block, node);
1604 DCHECK_EQ(0, scheduler_->GetData(node)->unscheduled_count_);
1616 DCHECK_EQ(0, scheduler_->GetData(node)->unscheduled_count_);
1622 scheduler_->scheduled_nodes_[block->id().ToSize()].push_back(node);
1623 scheduler_->UpdatePlacement(node, Scheduler::kScheduled);
1630 scheduler_->IncrementUnscheduledUseCount(input, index, node);
1632 Node* const copy = scheduler_->graph_->CloneNode(node);
1635 scheduler_->node_data_.resize(copy->id() + 1,
1636 scheduler_->DefaultSchedulerData());
1637 scheduler_->node_data_[copy->id()] = scheduler_->node_data_[node->id()];
1641 Scheduler* scheduler_;