Lines Matching refs:tmp_vp
15 void* tmp_vp;
18 // XSAVE: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
24 (void)__builtin_ia32_xsave(tmp_vp, tmp_ULLi);
26 // XSAVE: [[tmp_vp_2:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
32 (void)__builtin_ia32_xsave64(tmp_vp, tmp_ULLi);
34 // XSAVE: [[tmp_vp_3:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
40 (void)__builtin_ia32_xrstor(tmp_vp, tmp_ULLi);
42 // XSAVE: [[tmp_vp_4:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
48 (void)__builtin_ia32_xrstor64(tmp_vp, tmp_ULLi);
52 // XSAVEOPT: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
58 (void)__builtin_ia32_xsaveopt(tmp_vp, tmp_ULLi);
60 // XSAVEOPT: [[tmp_vp_2:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
66 (void)__builtin_ia32_xsaveopt64(tmp_vp, tmp_ULLi);
70 // XSAVEC: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
76 (void)__builtin_ia32_xsavec(tmp_vp, tmp_ULLi);
78 // XSAVEC: [[tmp_vp_2:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
84 (void)__builtin_ia32_xsavec64(tmp_vp, tmp_ULLi);
88 // XSAVES: [[tmp_vp_1:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
94 (void)__builtin_ia32_xsaves(tmp_vp, tmp_ULLi);
96 // XSAVES: [[tmp_vp_2:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
102 (void)__builtin_ia32_xsaves64(tmp_vp, tmp_ULLi);
104 // XSAVES: [[tmp_vp_3:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
110 (void)__builtin_ia32_xrstors(tmp_vp, tmp_ULLi);
112 // XSAVES: [[tmp_vp_4:%[0-9a-zA-Z]+]] = load i8*, i8** %tmp_vp, align 8
118 (void)__builtin_ia32_xrstors64(tmp_vp, tmp_ULLi);