/** @file
  IA32, X64 and IPF Specific relocation fixups

  Copyright (c) 2004 - 2014, Intel Corporation. All rights reserved.<BR>
  Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
  This program and the accompanying materials
  are licensed and made available under the terms and conditions of the BSD License
  which accompanies this distribution.  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

--*/

#include <Common/UefiBaseTypes.h>
#include <IndustryStandard/PeImage.h>
#include "PeCoffLib.h"
#include "CommonLib.h"
#include "EfiUtilityMsgs.h"

//
// EXT_IMM64: extract a Size-bit-wide field found at bit InstPos of the 32-bit
// instruction word at Address, and OR it into Value at bit ValPos.  Used to
// reassemble the 64-bit immediate of an IPF movl from the fields scattered
// across its instruction bundle.  (Arguments are multiply-evaluated; callers
// below pass only side-effect-free expressions.)
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos)  \
    Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// INS_IMM64: inverse of EXT_IMM64 — take the Size-bit field of Value that
// starts at bit ValPos and store it at bit InstPos of the 32-bit instruction
// word at Address, preserving every other bit of the instruction word.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos)  \
    *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
          ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

//
// Field descriptors for the IA-64 movl IMM64 encoding, consumed by the
// EXT_IMM64/INS_IMM64 macros above.  For each field of the immediate:
//   *_INST_WORD_X      32-bit word index of the field within the 16-byte bundle
//   *_SIZE_X           width of the field in bits
//   *_INST_WORD_POS_X  bit position of the field inside that instruction word
//   *_VAL_POS_X        bit position of the field inside the 64-bit value
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
57 #define IMM64_IMM41b_INST_WORD_POS_X 24 58 #define IMM64_IMM41b_VAL_POS_X 32 59 60 #define IMM64_IMM41c_INST_WORD_X 2 61 #define IMM64_IMM41c_SIZE_X 23 62 #define IMM64_IMM41c_INST_WORD_POS_X 0 63 #define IMM64_IMM41c_VAL_POS_X 40 64 65 #define IMM64_SIGN_INST_WORD_X 3 66 #define IMM64_SIGN_SIZE_X 1 67 #define IMM64_SIGN_INST_WORD_POS_X 27 68 #define IMM64_SIGN_VAL_POS_X 63 69 70 RETURN_STATUS 71 PeCoffLoaderRelocateIa32Image ( 72 IN UINT16 *Reloc, 73 IN OUT CHAR8 *Fixup, 74 IN OUT CHAR8 **FixupData, 75 IN UINT64 Adjust 76 ) 77 /*++ 78 79 Routine Description: 80 81 Performs an IA-32 specific relocation fixup 82 83 Arguments: 84 85 Reloc - Pointer to the relocation record 86 87 Fixup - Pointer to the address to fix up 88 89 FixupData - Pointer to a buffer to log the fixups 90 91 Adjust - The offset to adjust the fixup 92 93 Returns: 94 95 EFI_UNSUPPORTED - Unsupported now 96 97 --*/ 98 { 99 return RETURN_UNSUPPORTED; 100 } 101 102 RETURN_STATUS 103 PeCoffLoaderRelocateIpfImage ( 104 IN UINT16 *Reloc, 105 IN OUT CHAR8 *Fixup, 106 IN OUT CHAR8 **FixupData, 107 IN UINT64 Adjust 108 ) 109 /*++ 110 111 Routine Description: 112 113 Performs an Itanium-based specific relocation fixup 114 115 Arguments: 116 117 Reloc - Pointer to the relocation record 118 119 Fixup - Pointer to the address to fix up 120 121 FixupData - Pointer to a buffer to log the fixups 122 123 Adjust - The offset to adjust the fixup 124 125 Returns: 126 127 Status code 128 129 --*/ 130 { 131 UINT64 *F64; 132 UINT64 FixupVal; 133 134 switch ((*Reloc) >> 12) { 135 136 case EFI_IMAGE_REL_BASED_IA64_IMM64: 137 138 // 139 // Align it to bundle address before fixing up the 140 // 64-bit immediate value of the movl instruction. 
141 // 142 143 Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15)); 144 FixupVal = (UINT64)0; 145 146 // 147 // Extract the lower 32 bits of IMM64 from bundle 148 // 149 EXT_IMM64(FixupVal, 150 (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X, 151 IMM64_IMM7B_SIZE_X, 152 IMM64_IMM7B_INST_WORD_POS_X, 153 IMM64_IMM7B_VAL_POS_X 154 ); 155 156 EXT_IMM64(FixupVal, 157 (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X, 158 IMM64_IMM9D_SIZE_X, 159 IMM64_IMM9D_INST_WORD_POS_X, 160 IMM64_IMM9D_VAL_POS_X 161 ); 162 163 EXT_IMM64(FixupVal, 164 (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X, 165 IMM64_IMM5C_SIZE_X, 166 IMM64_IMM5C_INST_WORD_POS_X, 167 IMM64_IMM5C_VAL_POS_X 168 ); 169 170 EXT_IMM64(FixupVal, 171 (UINT32 *)Fixup + IMM64_IC_INST_WORD_X, 172 IMM64_IC_SIZE_X, 173 IMM64_IC_INST_WORD_POS_X, 174 IMM64_IC_VAL_POS_X 175 ); 176 177 EXT_IMM64(FixupVal, 178 (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X, 179 IMM64_IMM41a_SIZE_X, 180 IMM64_IMM41a_INST_WORD_POS_X, 181 IMM64_IMM41a_VAL_POS_X 182 ); 183 184 // 185 // Update 64-bit address 186 // 187 FixupVal += Adjust; 188 189 // 190 // Insert IMM64 into bundle 191 // 192 INS_IMM64(FixupVal, 193 ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X), 194 IMM64_IMM7B_SIZE_X, 195 IMM64_IMM7B_INST_WORD_POS_X, 196 IMM64_IMM7B_VAL_POS_X 197 ); 198 199 INS_IMM64(FixupVal, 200 ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X), 201 IMM64_IMM9D_SIZE_X, 202 IMM64_IMM9D_INST_WORD_POS_X, 203 IMM64_IMM9D_VAL_POS_X 204 ); 205 206 INS_IMM64(FixupVal, 207 ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X), 208 IMM64_IMM5C_SIZE_X, 209 IMM64_IMM5C_INST_WORD_POS_X, 210 IMM64_IMM5C_VAL_POS_X 211 ); 212 213 INS_IMM64(FixupVal, 214 ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X), 215 IMM64_IC_SIZE_X, 216 IMM64_IC_INST_WORD_POS_X, 217 IMM64_IC_VAL_POS_X 218 ); 219 220 INS_IMM64(FixupVal, 221 ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X), 222 IMM64_IMM41a_SIZE_X, 223 IMM64_IMM41a_INST_WORD_POS_X, 224 IMM64_IMM41a_VAL_POS_X 225 ); 226 227 INS_IMM64(FixupVal, 228 ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X), 
229 IMM64_IMM41b_SIZE_X, 230 IMM64_IMM41b_INST_WORD_POS_X, 231 IMM64_IMM41b_VAL_POS_X 232 ); 233 234 INS_IMM64(FixupVal, 235 ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X), 236 IMM64_IMM41c_SIZE_X, 237 IMM64_IMM41c_INST_WORD_POS_X, 238 IMM64_IMM41c_VAL_POS_X 239 ); 240 241 INS_IMM64(FixupVal, 242 ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X), 243 IMM64_SIGN_SIZE_X, 244 IMM64_SIGN_INST_WORD_POS_X, 245 IMM64_SIGN_VAL_POS_X 246 ); 247 248 F64 = (UINT64 *) Fixup; 249 if (*FixupData != NULL) { 250 *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64)); 251 *(UINT64 *)(*FixupData) = *F64; 252 *FixupData = *FixupData + sizeof(UINT64); 253 } 254 break; 255 256 default: 257 return RETURN_UNSUPPORTED; 258 } 259 260 return RETURN_SUCCESS; 261 } 262 263 /** 264 Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and 265 return the immediate data encoded in the instruction 266 267 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction 268 269 @return Immediate address encoded in the instruction 270 271 **/ 272 UINT16 273 ThumbMovtImmediateAddress ( 274 IN UINT16 *Instruction 275 ) 276 { 277 UINT32 Movt; 278 UINT16 Address; 279 280 // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction 281 // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000 282 Movt = (*Instruction << 16) | (*(Instruction + 1)); 283 284 // imm16 = imm4:i:imm3:imm8 285 // imm4 -> Bit19:Bit16 286 // i -> Bit26 287 // imm3 -> Bit14:Bit12 288 // imm8 -> Bit7:Bit0 289 Address = (UINT16)(Movt & 0x000000ff); // imm8 290 Address |= (UINT16)((Movt >> 4) & 0x0000f700); // imm4 imm3 291 Address |= (((Movt & BIT26) != 0) ? BIT11 : 0); // i 292 return Address; 293 } 294 295 296 /** 297 Update an ARM MOVT or MOVW immediate instruction immediate data. 
298 299 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction 300 @param Address New addres to patch into the instruction 301 **/ 302 VOID 303 ThumbMovtImmediatePatch ( 304 IN OUT UINT16 *Instruction, 305 IN UINT16 Address 306 ) 307 { 308 UINT16 Patch; 309 310 // First 16-bit chunk of instruciton 311 Patch = ((Address >> 12) & 0x000f); // imm4 312 Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i 313 *Instruction = (*Instruction & ~0x040f) | Patch; 314 315 // Second 16-bit chunk of instruction 316 Patch = Address & 0x000000ff; // imm8 317 Patch |= ((Address << 4) & 0x00007000); // imm3 318 Instruction++; 319 *Instruction = (*Instruction & ~0x70ff) | Patch; 320 } 321 322 /** 323 Pass in a pointer to an ARM MOVW/MOVT instruciton pair and 324 return the immediate data encoded in the two` instruction 325 326 @param Instructions Pointer to ARM MOVW/MOVT insturction pair 327 328 @return Immediate address encoded in the instructions 329 330 **/ 331 UINT32 332 EFIAPI 333 ThumbMovwMovtImmediateAddress ( 334 IN UINT16 *Instructions 335 ) 336 { 337 UINT16 *Word; 338 UINT16 *Top; 339 340 Word = Instructions; // MOVW 341 Top = Word + 2; // MOVT 342 343 return (ThumbMovtImmediateAddress (Top) << 16) + ThumbMovtImmediateAddress (Word); 344 } 345 346 347 /** 348 Update an ARM MOVW/MOVT immediate instruction instruction pair. 349 350 @param Instructions Pointer to ARM MOVW/MOVT instruction pair 351 @param Address New addres to patch into the instructions 352 **/ 353 VOID 354 EFIAPI 355 ThumbMovwMovtImmediatePatch ( 356 IN OUT UINT16 *Instructions, 357 IN UINT32 Address 358 ) 359 { 360 UINT16 *Word; 361 UINT16 *Top; 362 363 Word = (UINT16 *)Instructions; // MOVW 364 Top = Word + 2; // MOVT 365 366 ThumbMovtImmediatePatch (Word, (UINT16)(Address & 0xffff)); 367 ThumbMovtImmediatePatch (Top, (UINT16)(Address >> 16)); 368 } 369 370 371 /** 372 Performs an ARM-based specific relocation fixup and is a no-op on other 373 instruction sets. 
374 375 @param Reloc Pointer to the relocation record. 376 @param Fixup Pointer to the address to fix up. 377 @param FixupData Pointer to a buffer to log the fixups. 378 @param Adjust The offset to adjust the fixup. 379 380 @return Status code. 381 382 **/ 383 RETURN_STATUS 384 PeCoffLoaderRelocateArmImage ( 385 IN UINT16 **Reloc, 386 IN OUT CHAR8 *Fixup, 387 IN OUT CHAR8 **FixupData, 388 IN UINT64 Adjust 389 ) 390 { 391 UINT16 *Fixup16; 392 UINT32 FixupVal; 393 394 Fixup16 = (UINT16 *) Fixup; 395 396 switch ((**Reloc) >> 12) { 397 398 case EFI_IMAGE_REL_BASED_ARM_MOV32T: 399 FixupVal = ThumbMovwMovtImmediateAddress (Fixup16) + (UINT32)Adjust; 400 ThumbMovwMovtImmediatePatch (Fixup16, FixupVal); 401 402 403 if (*FixupData != NULL) { 404 *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64)); 405 CopyMem (*FixupData, Fixup16, sizeof (UINT64)); 406 *FixupData = *FixupData + sizeof(UINT64); 407 } 408 break; 409 410 case EFI_IMAGE_REL_BASED_ARM_MOV32A: 411 // break omitted - ARM instruction encoding not implemented 412 default: 413 return RETURN_UNSUPPORTED; 414 } 415 416 return RETURN_SUCCESS; 417 } 418