Lines matching refs:VG_IS_16_ALIGNED (restricted to m_scheduler)

717  vg_assert(VG_IS_16_ALIGNED(sz_vex));
718  vg_assert(VG_IS_16_ALIGNED(sz_vexsh1));
719  vg_assert(VG_IS_16_ALIGNED(sz_vexsh2));
720  vg_assert(VG_IS_16_ALIGNED(sz_spill));

722  vg_assert(VG_IS_16_ALIGNED(a_vex));
723  vg_assert(VG_IS_16_ALIGNED(a_vexsh1));
724  vg_assert(VG_IS_16_ALIGNED(a_vexsh2));
725  vg_assert(VG_IS_16_ALIGNED(a_spill));

749  vg_assert(VG_IS_16_ALIGNED(offsetof(VexGuestX86State,guest_XMM0)));

764  vg_assert(VG_IS_16_ALIGNED(offsetof(VexGuestAMD64State,guest_YMM0)));

774  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex.guest_VSR0));
775  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow1.guest_VSR0));
776  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow2.guest_VSR0));

778  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex.guest_VSR1));
779  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow1.guest_VSR1));
780  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow2.guest_VSR1));

786  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex.guest_D0));
787  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow1.guest_D0));
788  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow2.guest_D0));

799  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex.guest_Q0));
800  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow1.guest_Q0));
801  vg_assert(VG_IS_16_ALIGNED(& tst->arch.vex_shadow2.guest_Q0));
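
All of the asserts above rely on the same idiom: a size, struct offset, or address is 16-byte aligned exactly when its low four bits are zero, and VG_IS_16_ALIGNED presumably reduces to that mask test on Valgrind's Addr type. Below is a minimal, self-contained C sketch of the pattern; the IS_16_ALIGNED macro and GuestState struct are hypothetical stand-ins for illustration, not Valgrind's actual definitions.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for VG_IS_16_ALIGNED: true iff the low 4 bits of
   the value (cast to an integer wide enough to hold a pointer) are zero. */
#define IS_16_ALIGNED(x)  (0 == (((uintptr_t)(x)) & (uintptr_t)0xF))

/* Toy guest-state block whose SIMD register file must start on a 16-byte
   boundary, mirroring the guest_XMM0/YMM0/VSR0/D0/Q0 checks above.
   (GCC/Clang attribute syntax, as Valgrind itself uses.) */
typedef struct {
   uint64_t scalar_regs[17];     /* 136 bytes, not a multiple of 16        */
   uint8_t  simd_regs[16][16] __attribute__((aligned(16)));  /* padded to 144 */
} GuestState;

int main(void)
{
   GuestState st;   /* 16-aligned, since the struct's alignment is now 16 */

   /* Offset check, like the offsetof(VexGuest...State, guest_XMM0) asserts. */
   assert(IS_16_ALIGNED(offsetof(GuestState, simd_regs)));

   /* Address check, like the &tst->arch.vex.guest_VSR0 / guest_Q0 asserts. */
   assert(IS_16_ALIGNED(&st.simd_regs[0]));

   printf("offsetof(GuestState, simd_regs) = %zu\n",
          offsetof(GuestState, simd_regs));
   return 0;
}

Checking both forms matches what the listing shows: an aligned offset within the guest state only yields an aligned register address if the state block (and its shadow copies) is itself placed on a 16-byte boundary, so the scheduler asserts offsets, sizes, and absolute addresses separately.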