538   if (TII->isDS(Inst) && (Inst.mayStore() || Inst.mayLoad())) {
540         AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::addr);
544       setExpScore(&Inst, TII, TRI, MRI, AddrOpIdx, CurrScore);
547     if (Inst.mayStore()) {
548       if (AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
551             &Inst, TII, TRI, MRI,
552             AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data0),
555       if (AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
557         setExpScore(&Inst, TII, TRI, MRI,
558                     AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
562     } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1 &&
563                Inst.getOpcode() != AMDGPU::DS_GWS_INIT &&
564                Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_V &&
565                Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_BR &&
566                Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_P &&
567                Inst.getOpcode() != AMDGPU::DS_GWS_BARRIER &&
568                Inst.getOpcode() != AMDGPU::DS_APPEND &&
569                Inst.getOpcode() != AMDGPU::DS_CONSUME &&
570                Inst.getOpcode() != AMDGPU::DS_ORDERED_COUNT) {
571       for (unsigned I = 0, E = Inst.getNumOperands(); I != E; ++I) {
572         const MachineOperand &Op = Inst.getOperand(I);
574           setExpScore(&Inst, TII, TRI, MRI, I, CurrScore);
578   } else if (TII->isFLAT(Inst)) {
579     if (Inst.mayStore()) {
581           &Inst, TII, TRI, MRI,
582           AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
584     } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
586           &Inst, TII, TRI, MRI,
587           AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
590   } else if (TII->isMIMG(Inst)) {
591     if (Inst.mayStore()) {
592       setExpScore(&Inst, TII, TRI, MRI, 0, CurrScore);
593     } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
595           &Inst, TII, TRI, MRI,
596           AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
599   } else if (TII->isMTBUF(Inst)) {
600     if (Inst.mayStore()) {
601       setExpScore(&Inst, TII, TRI, MRI, 0, CurrScore);
603   } else if (TII->isMUBUF(Inst)) {
604     if (Inst.mayStore()) {
605       setExpScore(&Inst, TII, TRI, MRI, 0, CurrScore);
606     } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
608           &Inst, TII, TRI, MRI,
609           AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
613     if (TII->isEXP(Inst)) {
618       for (unsigned I = 0, E = Inst.getNumOperands(); I != E; ++I) {
619         MachineOperand &DefMO = Inst.getOperand(I);
627     for (unsigned I = 0, E = Inst.getNumOperands(); I != E; ++I) {
628       MachineOperand &MO = Inst.getOperand(I);
630         setExpScore(&Inst, TII, TRI, MRI, I, CurrScore);
648     for (unsigned I = 0, E = Inst.getNumOperands(); I != E; ++I) {
649       RegInterval Interval = getRegInterval(&Inst, TII, MRI, TRI, I, true);
656   if (TII->isDS(Inst) && Inst.mayStore()) {
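The excerpted lines above appear to come from the EXP_CNT scoring logic in LLVM's AMDGPU waitcnt-insertion pass: a dispatch over DS, FLAT, MIMG, MTBUF, MUBUF, and EXP instructions that decides which operand's VGPR receives the current expcnt score via setExpScore. The following is a minimal, self-contained sketch of that dispatch shape only, not the LLVM API; InstKind, Inst, and pickExpScoreOperand are hypothetical names, and the DS case is simplified to the address operand.

    // Simplified model: which operand index (if any) gets the EXP_CNT score.
    #include <cstdio>
    #include <optional>

    enum class InstKind { DS, FLAT, MIMG, MTBUF, MUBUF, EXP, Other };

    struct Inst {
      InstKind Kind;
      bool MayStore;
      bool IsAtomicNoRet; // models getAtomicNoRetOp(...) != -1
      int AddrOpIdx;      // -1 if the instruction has no addr operand
      int DataOpIdx;      // -1 if the instruction has no data operand
    };

    // Returns the operand index that should receive the EXP_CNT score,
    // or std::nullopt when this rule scores no single operand.
    std::optional<int> pickExpScoreOperand(const Inst &I) {
      switch (I.Kind) {
      case InstKind::DS:
        // The real DS handling also scores data0/data1 for stores and walks
        // all VGPR uses for no-return atomics; simplified to addr here.
        if (I.AddrOpIdx >= 0)
          return I.AddrOpIdx;
        return std::nullopt;
      case InstKind::FLAT:
        if (I.MayStore || I.IsAtomicNoRet)
          return I.DataOpIdx; // data operand carries the stored VGPRs
        return std::nullopt;
      case InstKind::MIMG:
      case InstKind::MUBUF:
        if (I.MayStore)
          return 0;           // stores score operand 0 (vdata)
        if (I.IsAtomicNoRet)
          return I.DataOpIdx; // no-return atomics score the data operand
        return std::nullopt;
      case InstKind::MTBUF:
        if (I.MayStore)
          return 0;
        return std::nullopt;
      default:
        return std::nullopt;  // EXP and others walk every VGPR use instead
      }
    }

    int main() {
      Inst FlatStore{InstKind::FLAT, /*MayStore=*/true, /*IsAtomicNoRet=*/false,
                     /*AddrOpIdx=*/1, /*DataOpIdx=*/2};
      if (std::optional<int> Idx = pickExpScoreOperand(FlatStore))
        std::printf("score EXP_CNT on operand %d\n", *Idx);
      return 0;
    }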