
/*++
Copyright (c) 2017 Minoca Corp.
    This file is licensed under the terms of the GNU General Public License
    version 3. Alternative licensing terms are available. Contact
    info@minocacorp.com for details. See the LICENSE file at the root of this
    project for complete licensing information.
Module Name:
    archsup.S
Abstract:
    This module implements AMD64 processor architecture features not
    implementable in C.
Author:
    Evan Green 8-Jun-2017
Environment:
    Kernel
--*/
//
// ------------------------------------------------------------------ Includes
//
#include <minoca/kernel/x64.inc>
//
// ---------------------------------------------------------------------- Code
//
ASSEMBLY_FILE_HEADER
//
// KERNEL_API
// BOOL
// ArAreInterruptsEnabled (
//     VOID
//     )
//
/*++
Routine Description:
    This routine determines whether or not interrupts are currently enabled
    on the processor.
Arguments:
    None.
Return Value:
    TRUE if interrupts are enabled in the processor.
    FALSE if interrupts are globally disabled.
--*/
PROTECTED_FUNCTION(ArAreInterruptsEnabled)
    xorl %eax, %eax # Clear RAX.
    pushfq # Get Rflags.
    popq %rdi # Rflags in rdi.
    andl $IA32_EFLAG_IF, %edi # Isolate the Interrupt flag.
    setnz %al # Set eax to 1 if non-zero.
    ret #
END_FUNCTION(ArAreInterruptsEnabled)
//
// KERNEL_API
// BOOL
// ArDisableInterrupts (
//     VOID
//     )
//
/*++
Routine Description:
    This routine disables all interrupts on the current processor.
Arguments:
    None.
Return Value:
    TRUE if interrupts were previously enabled.
    FALSE if interrupts were previously disabled.
--*/
PROTECTED_FUNCTION(ArDisableInterrupts)
    xorl %eax, %eax # Zero eax.
    pushfq # Push flags.
    cli # Clear the interrupt flag.
    popq %rdi # Pop flags into rdi.
    andl $IA32_EFLAG_IF, %edi # Isolate the Interrupt flag.
    setnz %al # Set eax to 1 if non-zero.
    ret
END_FUNCTION(ArDisableInterrupts)
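//
// A minimal usage sketch for the two interrupt routines above, assuming only
// the prototypes documented in this module: because ArDisableInterrupts
// returns the previous interrupt state, a caller can nest critical sections
// by saving and conditionally restoring it (DoCriticalWork is a hypothetical
// helper).
//
//     BOOL Enabled;
//
//     Enabled = ArDisableInterrupts();
//     DoCriticalWork();
//     if (Enabled != FALSE) {
//         ArEnableInterrupts();
//     }
//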
//
// KERNEL_API
// VOID
// ArEnableInterrupts (
//     VOID
//     )
//
/*++
Routine Description:
    This routine enables interrupts on the current processor.
Arguments:
    None.
Return Value:
    None.
--*/
PROTECTED_FUNCTION(ArEnableInterrupts)
    sti # Set the interrupt flag.
    ret #
END_FUNCTION(ArEnableInterrupts)
//
// VOID
// ArLoadKernelDataSegments (
//     VOID
//     )
//
/*++
Routine Description:
    This routine switches the data segments DS and ES to the kernel data
    segment selectors.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArLoadKernelDataSegments)
    LOAD_KERNEL_DATA_SEGMENTS # Load the kernel data segments.
    ret #
END_FUNCTION(ArLoadKernelDataSegments)
//
// ULONG
// ArGetProcessorFlags (
//     VOID
//     )
//
/*++
Routine Description:
    This routine gets the current processor's flags register.
Arguments:
    None.
Return Value:
    Returns the current flags.
--*/
FUNCTION(ArGetProcessorFlags)
    pushfq # Push the flags onto the stack.
    popq %rax # Pop them into the return value.
    ret #
END_FUNCTION(ArGetProcessorFlags)
//
// VOID
// ArLoadTr (
//     USHORT TssSegment
//     )
//
/*++
Routine Description:
    This routine loads a TSS (Task State Segment).
Arguments:
    TssSegment - Supplies the segment selector in the GDT that describes the
        TSS.
Return Value:
    None.
--*/
FUNCTION(ArLoadTr)
    ltr %di # Load the Task Register.
    ret # That's it!
END_FUNCTION(ArLoadTr)
//
// VOID
// ArStoreTr (
//     PULONG TssSegment
//     )
//
/*++
Routine Description:
    This routine retrieves the current TSS (Task State Segment) register.
Arguments:
    TssSegment - Supplies a pointer where the current TSS segment register will
        be returned.
Return Value:
    None.
--*/
FUNCTION(ArStoreTr)
    str (%rdi) # Store the TR register.
    ret # Return.
END_FUNCTION(ArStoreTr)
//
// VOID
// ArLoadIdtr (
//     PVOID IdtBase
//     )
//
/*++
Routine Description:
    This routine loads the given Interrupt Descriptor Table.
Arguments:
    IdtBase - Supplies a pointer to the base of the IDT.
Return Value:
    None.
--*/
FUNCTION(ArLoadIdtr)
    lidt (%rdi) # Load the IDT register.
    ret # That's it!
END_FUNCTION(ArLoadIdtr)
//
// VOID
// ArStoreIdtr (
//     PTABLE_REGISTER IdtRegister
//     )
//
/*++
Routine Description:
    This routine stores the interrupt descriptor table register into the given
    value.
Arguments:
    IdtRegister - Supplies a pointer that will receive the value.
Return Value:
    None.
--*/
FUNCTION(ArStoreIdtr)
    sidt (%rdi) # Store the IDT register.
    ret # Return politely.
END_FUNCTION(ArStoreIdtr)
//
// VOID
// ArLoadGdtr (
//     PTABLE_REGISTER Gdt
//     )
//
/*++
Routine Description:
    This routine loads a global descriptor table.
Arguments:
    Gdt - Supplies a pointer to the Gdt pointer, which contains the base and
        limit for the GDT.
Return Value:
    None.
--*/
FUNCTION(ArLoadGdtr)
    lgdt (%rdi) # Load the GDT.
    //
    // In order to load the new GDT, a long jump of some kind is needed. Use a
    // far return for this purpose, returning from this routine in the process.
    //
    popq %rax # Pop the return address into a register.
    pushq $KERNEL_CS # Push the return segment.
    pushq %rax # Push the return address.
    retfq # Do a 64-bit far return, loading the GDT.
END_FUNCTION(ArLoadGdtr)
//
// VOID
// ArStoreGdtr (
//     PTABLE_REGISTER GdtRegister
//     )
//
/*++
Routine Description:
    This routine stores the GDT register into the given value.
Arguments:
    GdtRegister - Supplies a pointer that will receive the value.
Return Value:
    None.
--*/
FUNCTION(ArStoreGdtr)
    sgdt (%rdi) # Store the GDT register.
    ret # Return politely.
END_FUNCTION(ArStoreGdtr)
//
// PVOID
// ArGetFaultingAddress (
//     VOID
//     )
//
/*++
Routine Description:
    This routine determines which address caused a page fault.
Arguments:
    None.
Return Value:
    Returns the faulting address.
--*/
FUNCTION(ArGetFaultingAddress)
    movq %cr2, %rax # Return CR2.
    ret #
END_FUNCTION(ArGetFaultingAddress)
//
// VOID
// ArSetFaultingAddress (
//     PVOID Value
//     )
//
/*++
Routine Description:
    This routine sets the CR2 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetFaultingAddress)
    movq %rdi, %cr2
    ret
END_FUNCTION(ArSetFaultingAddress)
//
// UINTN
// ArGetCurrentPageDirectory (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the active page directory.
Arguments:
    None.
Return Value:
    Returns the page directory currently in use by the system.
--*/
FUNCTION(ArGetCurrentPageDirectory)
    movq %cr3, %rax # Return CR3.
    ret #
END_FUNCTION(ArGetCurrentPageDirectory)
//
// VOID
// ArSetCurrentPageDirectory (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the CR3 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetCurrentPageDirectory)
    movq %rdi, %cr3
    ret
END_FUNCTION(ArSetCurrentPageDirectory)
//
// VOID
// ArInvalidateTlbEntry (
//     PVOID Address
//     )
//
/*++
Routine Description:
    This routine invalidates one TLB entry corresponding to the given virtual
    address.
Arguments:
    Address - Supplies the virtual address whose associated TLB entry will be
        invalidated.
Return Value:
    None.
--*/
FUNCTION(ArInvalidateTlbEntry)
    invlpg (%rdi) # Invalidate the TLB entry.
    ret #
END_FUNCTION(ArInvalidateTlbEntry)
//
// VOID
// ArCleanEntireCache (
//     VOID
//     )
//
/*++
Routine Description:
    This routine cleans the entire data cache.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArCleanEntireCache)
    wbinvd # Write back invalidate cache.
    ret
END_FUNCTION(ArCleanEntireCache)
//
// VOID
// ArInvalidateEntireTlb (
//     VOID
//     )
//
/*++
Routine Description:
    This routine invalidates the entire TLB.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArInvalidateEntireTlb)
    movq %cr3, %rax # Reloading CR3 causes the entire TLB to
    movq %rax, %cr3 # be flushed.
    ret
END_FUNCTION(ArInvalidateEntireTlb)
//
// VOID
// ArProcessorYield (
//     VOID
//     )
//
/*++
Routine Description:
    This routine executes a short processor yield in hardware.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArProcessorYield)
    pause
    ret
END_FUNCTION(ArProcessorYield)
//
// KERNEL_API
// VOID
// ArWaitForInterrupt (
//     VOID
//     )
//
/*++
Routine Description:
    This routine halts the processor until the next interrupt comes in. This
    routine should be called with interrupts disabled, and will return with
    interrupts enabled.
Arguments:
    None.
Return Value:
    None.
--*/
PROTECTED_FUNCTION(ArWaitForInterrupt)
    sti # Enables interrupts one instruction later.
    hlt # Simultaneously halt and enable interrupts.
    ret
END_FUNCTION(ArWaitForInterrupt)
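//
// A minimal idle-loop sketch for the routine above, honoring its requirement
// that it be entered with interrupts disabled (WorkIsPending is a
// hypothetical per-processor check):
//
//     ArDisableInterrupts();
//     if (WorkIsPending() == FALSE) {
//         ArWaitForInterrupt();       // Comes back with interrupts enabled.
//     } else {
//         ArEnableInterrupts();
//     }
//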
//
// VOID
// ArSerializeExecution (
//     VOID
//     )
//
/*++
Routine Description:
    This routine acts as a serializing instruction, preventing the processor
    from speculatively executing beyond this point.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArSerializeExecution)
    movq %cr2, %rax # Control register accesses are
    movq %rax, %cr2 # cheap (ish) and serializing.
    ret
END_FUNCTION(ArSerializeExecution)
//
// VOID
// ArInvalidateInstructionCache (
//     VOID
//     )
//
/*++
Routine Description:
    This routine invalidates the processor's instruction cache, indicating
    that a page containing code has changed.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArInvalidateInstructionCache)
    ret
END_FUNCTION(ArInvalidateInstructionCache)
//
// VOID
// ArCpuid (
//     PULONG Eax,
//     PULONG Ebx,
//     PULONG Ecx,
//     PULONG Edx
//     )
//
/*++
Routine Description:
    This routine executes the CPUID instruction to get processor architecture
    information.
Arguments:
    Eax - Supplies a pointer to the value that EAX should be set to when the
        CPUID instruction is executed. On output, contains the contents of
        EAX immediately after the CPUID instruction.
    Ebx - Supplies a pointer to the value that EBX should be set to when the
        CPUID instruction is executed. On output, contains the contents of
        EBX immediately after the CPUID instruction.
    Ecx - Supplies a pointer to the value that ECX should be set to when the
        CPUID instruction is executed. On output, contains the contents of
        ECX immediately after the CPUID instruction.
    Edx - Supplies a pointer to the value that EDX should be set to when the
        CPUID instruction is executed. On output, contains the contents of
        EDX immediately after the CPUID instruction.
Return Value:
    None.
--*/
FUNCTION(ArCpuid)
    pushq %rbx # Save the only non-volatile involved.
    movq %rdx, %r8 # Save rdx (pointer to Ecx) in r8.
    movq %rcx, %r9 # Save rcx (pointer to Edx) in r9.
    movl (%rdi), %eax # Dereference to get eax.
    movl (%rsi), %ebx # Dereference to get ebx.
    movl (%r8), %ecx # Dereference to get ecx.
    movl (%r9), %edx # Dereference to get edx.
    cpuid # Fire off the CPUID instruction.
    movl %edx, (%r9) # Save the resulting edx.
    movl %ecx, (%r8) # Save the resulting ecx.
    movl %ebx, (%rsi) # Save the resulting ebx.
    movl %eax, (%rdi) # Save the resulting eax.
    popq %rbx # Restore the non-volatile.
    ret
END_FUNCTION(ArCpuid)
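//
// A minimal usage sketch for ArCpuid, assuming only the prototype above: set
// Eax to the desired leaf (and Ecx to the sub-leaf when one applies) before
// the call, then read all four values back out afterward.
//
//     ULONG Eax, Ebx, Ecx, Edx;
//
//     Eax = 1;                        // Leaf 1: processor feature bits.
//     Ebx = 0;
//     Ecx = 0;
//     Edx = 0;
//     ArCpuid(&Eax, &Ebx, &Ecx, &Edx);
//     // Feature flags are now in Ecx and Edx per the leaf 1 layout.
//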
//
// UINTN
// ArGetControlRegister0 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of CR0.
Arguments:
    None.
Return Value:
    Returns CR0.
--*/
FUNCTION(ArGetControlRegister0)
    movq %cr0, %rax
    ret
END_FUNCTION(ArGetControlRegister0)
//
// VOID
// ArSetControlRegister0 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the CR0 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetControlRegister0)
    movq %rdi, %cr0
    ret
END_FUNCTION(ArSetControlRegister0)
//
// UINTN
// ArGetControlRegister4 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of CR4.
Arguments:
    None.
Return Value:
    Returns CR4.
--*/
FUNCTION(ArGetControlRegister4)
    movq %cr4, %rax
    ret
END_FUNCTION(ArGetControlRegister4)
//
// VOID
// ArSetControlRegister4 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the CR4 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetControlRegister4)
    movq %rdi, %cr4
    ret
END_FUNCTION(ArSetControlRegister4)
//
// UINTN
// ArGetDebugRegister0 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR0.
Arguments:
    None.
Return Value:
    Returns DR0.
--*/
FUNCTION(ArGetDebugRegister0)
    movq %dr0, %rax
    ret
END_FUNCTION(ArGetDebugRegister0)
//
// VOID
// ArSetDebugRegister0 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR0 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister0)
    movq %rdi, %dr0
    ret
END_FUNCTION(ArSetDebugRegister0)
//
// UINTN
// ArGetDebugRegister1 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR1.
Arguments:
    None.
Return Value:
    Returns DR1.
--*/
FUNCTION(ArGetDebugRegister1)
    movq %dr1, %rax
    ret
END_FUNCTION(ArGetDebugRegister1)
//
// VOID
// ArSetDebugRegister1 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR1 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister1)
    movq %rdi, %dr1
    ret
END_FUNCTION(ArSetDebugRegister1)
//
// UINTN
// ArGetDebugRegister2 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR2.
Arguments:
    None.
Return Value:
    Returns DR2.
--*/
FUNCTION(ArGetDebugRegister2)
    movq %dr2, %rax
    ret
END_FUNCTION(ArGetDebugRegister2)
//
// VOID
// ArSetDebugRegister2 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR2 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister2)
    movq %rdi, %dr2
    ret
END_FUNCTION(ArSetDebugRegister2)
//
// UINTN
// ArGetDebugRegister3 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR3.
Arguments:
    None.
Return Value:
    Returns DR3.
--*/
FUNCTION(ArGetDebugRegister3)
    movq %dr3, %rax
    ret
END_FUNCTION(ArGetDebugRegister3)
//
// VOID
// ArSetDebugRegister3 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR3 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister3)
    movq %rdi, %dr3
    ret
END_FUNCTION(ArSetDebugRegister3)
//
// UINTN
// ArGetDebugRegister6 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR6.
Arguments:
    None.
Return Value:
    Returns DR6.
--*/
FUNCTION(ArGetDebugRegister6)
    movq %dr6, %rax
    ret
END_FUNCTION(ArGetDebugRegister6)
//
// VOID
// ArSetDebugRegister6 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR6 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister6)
    movq %rdi, %dr6
    ret
END_FUNCTION(ArSetDebugRegister6)
//
// UINTN
// ArGetDebugRegister7 (
//     VOID
//     )
//
/*++
Routine Description:
    This routine returns the current value of DR7.
Arguments:
    None.
Return Value:
    Returns DR7.
--*/
FUNCTION(ArGetDebugRegister7)
    movq %dr7, %rax
    ret
END_FUNCTION(ArGetDebugRegister7)
//
// VOID
// ArSetDebugRegister7 (
//     UINTN Value
//     )
//
/*++
Routine Description:
    This routine sets the DR7 register.
Arguments:
    Value - Supplies the value to set.
Return Value:
    None.
--*/
FUNCTION(ArSetDebugRegister7)
    movq %rdi, %dr7
    ret
END_FUNCTION(ArSetDebugRegister7)
//
// VOID
// ArFxSave (
//     PFPU_CONTEXT Buffer
//     )
//
/*++
Routine Description:
    This routine saves the current x87 FPU, MMX, XMM, and MXCSR registers to a
    512-byte memory location.
Arguments:
    Buffer - Supplies a pointer to the buffer where the information will be
        saved. This buffer must be 16-byte aligned.
Return Value:
    None.
--*/
FUNCTION(ArFxSave)
    addq $0xF, %rdi # Round up to nearest alignment requirement.
    andq $~0xF, %rdi # Align.
    clts # Clear the TS flag, enabling FPU access.
    fxsave (%rdi) # Save the state into there.
    ret
END_FUNCTION(ArFxSave)
//
// VOID
// ArFxRestore (
//     PFPU_CONTEXT Buffer
//     )
//
/*++
Routine Description:
    This routine restores the current x87 FPU, MMX, XMM, and MXCSR registers
    from a 512-byte memory location.
Arguments:
    Buffer - Supplies a pointer to the buffer where the information will be
        loaded from. This buffer must be 16-byte aligned.
Return Value:
    None.
--*/
FUNCTION(ArFxRestore)
    addq $0xF, %rdi # Round up to nearest alignment requirement.
    andq $~0xF, %rdi # Align.
    clts # Clear the TS flag, enabling FPU access.
    fxrstor (%rdi) # Load the state from there.
    ret
END_FUNCTION(ArFxRestore)
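//
// A minimal usage sketch for the FXSAVE/FXRSTOR pair above, assuming an
// FPU_CONTEXT type that covers the 512-byte FXSAVE area plus enough slack for
// the 16-byte round-up the routines perform (UseFloatingPoint is a
// hypothetical helper that clobbers FPU/XMM state):
//
//     FPU_CONTEXT Context;
//
//     ArFxSave(&Context);             // Capture the current FPU/XMM state.
//     UseFloatingPoint();
//     ArFxRestore(&Context);          // Put the saved state back.
//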
//
// VOID
// ArEnableFpu (
//     VOID
//     )
//
/*++
Routine Description:
    This routine clears the TS bit of CR0, allowing access to the FPU.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArEnableFpu)
    clts # Use the dedicated instruction for this.
    ret # Return.
END_FUNCTION(ArEnableFpu)
//
// VOID
// ArDisableFpu (
//     VOID
//     )
//
/*++
Routine Description:
    This routine sets the TS bit of CR0, disallowing access to the FPU.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArDisableFpu)
    movq %cr0, %rax # Get CR0.
    testq $CR0_TASK_SWITCHED, %rax # See if it's already disabled.
    jnz ArDisableFpuReturn # Jump out without writing if it's already off.
    orq $CR0_TASK_SWITCHED, %rax # Turn on that bit.
    movq %rax, %cr0 # Write CR0.
ArDisableFpuReturn:
    ret # Return.
END_FUNCTION(ArDisableFpu)
//
// VOID
// ArInitializeFpu (
//     VOID
//     )
//
/*++
Routine Description:
    This routine resets the FPU state.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArInitializeFpu)
    fninit # Reset the FPU state.
    ret # Return.
END_FUNCTION(ArInitializeFpu)
//
// ULONGLONG
// ArReadTimeStampCounter (
//     VOID
//     )
//
/*++
Routine Description:
    This routine reads the time stamp counter from the current processor. It
    is essential that callers of this function understand that this returns
    instruction cycles, which does not always translate directly into units
    of time. For example, some processors halt the timestamp counter during
    performance and CPU idle state transitions. In other cases, the timestamp
    counters of all processors are not in sync, so as execution of a thread
    bounces unpredictably from one core to another, different timelines may be
    observed. Additionally, one must understand that this intrinsic is not a
    serializing instruction to the hardware, so the processor may decide to
    execute any number of instructions after this one before actually snapping
    the timestamp counter. To all those who choose to continue to use this
    primitive to measure time, you have been warned.
Arguments:
    None.
Return Value:
    Returns the current instruction cycle count since the processor was started.
--*/
FUNCTION(ArReadTimeStampCounter)
    rdtsc # Store the timestamp counter in EDX:EAX.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    ret # And return!
END_FUNCTION(ArReadTimeStampCounter)
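//
// A minimal cycle-measurement sketch using routines from this module, with
// the caveats above in mind: the result is cycles rather than time, and since
// RDTSC is not serializing, ArSerializeExecution is used here to limit
// reordering around the reads (MeasuredWork is a hypothetical helper).
//
//     ULONGLONG Begin, End;
//
//     ArSerializeExecution();
//     Begin = ArReadTimeStampCounter();
//     MeasuredWork();
//     ArSerializeExecution();
//     End = ArReadTimeStampCounter();
//     // End - Begin approximates the cycles spent in MeasuredWork.
//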
//
// ULONGLONG
// ArReadMsr (
//     ULONG Msr
//     )
//
/*++
Routine Description:
    This routine reads the requested Model Specific Register.
Arguments:
    Msr - Supplies the MSR to read.
Return Value:
    Returns the 64-bit MSR value.
--*/
FUNCTION(ArReadMsr)
    movq %rdi, %rcx # Load the MSR number into ecx.
    xorq %rax, %rax # Clear high bits of rax.
    rdmsr # Read the MSR into EDX:EAX.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    ret # Return.
END_FUNCTION(ArReadMsr)
//
// VOID
// ArWriteMsr (
//     ULONG Msr,
//     ULONGLONG Value
//     )
//
/*++
Routine Description:
    This routine writes the requested Model Specific Register.
Arguments:
    Msr - Supplies the MSR to write.
    Value - Supplies the 64-bit value to write.
Return Value:
    None.
--*/
FUNCTION(ArWriteMsr)
    movq %rdi, %rcx # Load the MSR number into ECX.
    movq %rsi, %rdx # Load rdx with the whole 64-bit word.
    shrq $32, %rdx # Shift rdx right to put the high bits in edx.
    movl %esi, %eax # Put the low bits into eax.
    wrmsr # Write the MSR.
    ret # Return.
END_FUNCTION(ArWriteMsr)
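//
// A minimal read-modify-write sketch for the MSR accessors above. MsrNumber
// and NewBit are caller-supplied placeholders; real MSR constants (such as
// the X86_MSR_EFER value referenced later in this file) come from the kernel
// headers rather than from this module.
//
//     ULONGLONG Value;
//
//     Value = ArReadMsr(MsrNumber);
//     Value |= NewBit;
//     ArWriteMsr(MsrNumber, Value);
//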
//
// PVOID
// ArReadFsbase (
//     VOID
//     )
//
/*++
Routine Description:
    This routine reads the fs: base register.
Arguments:
    None.
Return Value:
    Returns the fsbase pointer.
--*/
FUNCTION(ArReadFsbase)
    rdfsbase %rax # Read fsbase into rax.
    ret # Return.
END_FUNCTION(ArReadFsbase)
//
// VOID
// ArWriteFsbase (
//     PVOID Fsbase
//     )
//
/*++
Routine Description:
    This routine writes the fs: base register.
Arguments:
    Fsbase - Supplies the new fsbase value to write.
Return Value:
    None.
--*/
FUNCTION(ArWriteFsbase)
    wrfsbase %rdi # Write fsbase.
    ret # Return.
END_FUNCTION(ArWriteFsbase)
//
// PVOID
// ArReadGsbase (
//     VOID
//     )
//
/*++
Routine Description:
    This routine reads the gs: base register.
Arguments:
    None.
Return Value:
    Returns the gsbase pointer.
--*/
FUNCTION(ArReadGsbase)
    rdgsbase %rax # Read gsbase into rax.
    ret # Return.
END_FUNCTION(ArReadGsbase)
//
// VOID
// ArWriteGsbase (
//     PVOID Gsbase
//     )
//
/*++
Routine Description:
    This routine writes the gs: base register.
Arguments:
    Gsbase - Supplies the new gsbase value to write.
Return Value:
    None.
--*/
FUNCTION(ArWriteGsbase)
    wrgsbase %rdi # Write gsbase.
    ret # Return.
END_FUNCTION(ArWriteGsbase)
//
// VOID
// ArSwapGs (
//     VOID
//     )
//
/*++
Routine Description:
    This routine exchanges the GS base hidden register with the kernel GS base
    MSR.
Arguments:
    None.
Return Value:
    None.
--*/
FUNCTION(ArSwapGs)
    swapgs # Swap gs.
    ret # Return.
END_FUNCTION(ArSwapGs)
//
// KERNEL_API
// VOID
// ArMonitor (
//     PVOID Address,
//     UINTN Ecx,
//     UINTN Edx
//     )
//
/*++
Routine Description:
    This routine arms the monitoring hardware in preparation for an mwait
    instruction.
Arguments:
    Address - Supplies the address pointer to monitor.
    Ecx - Supplies the contents to load into the ECX (RCX in 64-bit) register
        when executing the monitor instruction. These are defined as hints.
    Edx - Supplies the contents to load into the EDX/RDX register. These are
        also hints.
Return Value:
    None.
--*/
PROTECTED_FUNCTION(ArMonitor)
    movq %rdi, %rax # Load the address to rax.
    movq %rsi, %rcx # Load the first set of hints.
    # The second set of hints is already in rdx.
    monitor # Arm the monitoring hardware.
    ret # Return.
END_FUNCTION(ArMonitor)
//
// KERNEL_API
// VOID
// ArMwait (
//     UINTN Eax,
//     UINTN Ecx
//     )
//
/*++
Routine Description:
    This routine executes the mwait instruction, which is used to halt the
    processor until a specified memory location is written to. It is also used
    on Intel processors to enter C-states. A monitor instruction must have
    been executed prior to this to set up the monitoring region.
Arguments:
    Eax - Supplies the contents to load into EAX/RAX when executing the mwait
        instruction. This is a set of hints, including which C-state to enter
        on Intel processors.
    Ecx - Supplies the contents to load into the ECX (RCX in 64-bit) register
        when executing the mwait instruction. This is 1 when entering a C-state
        with interrupts disabled to indicate that an interrupt should still
        break out.
Return Value:
    None.
--*/
PROTECTED_FUNCTION(ArMwait)
    movq %rdi, %rax # Load eax.
    movq %rsi, %rcx # Load ecx.
    mwait # Go down.
    ret # Return.
END_FUNCTION(ArMwait)
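//
// A minimal pairing sketch for the two routines above, assuming a
// caller-owned flag at the monitored address: arm the monitor first, confirm
// the wake condition has not already been satisfied, and only then issue the
// mwait (zero hints shown for simplicity).
//
//     volatile ULONG WakeFlag = 0;
//
//     ArMonitor((PVOID)&WakeFlag, 0, 0);
//     if (WakeFlag == 0) {
//         ArMwait(0, 0);
//     }
//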
//
// KERNEL_API
// VOID
// ArIoReadAndHalt (
//     USHORT IoPort
//     )
//
/*++
Routine Description:
    This routine performs a single 8-bit I/O port read and then halts the
    processor until the next interrupt comes in. This routine should be called
    with interrupts disabled, and will return with interrupts enabled.
Arguments:
    IoPort - Supplies the I/O port to read from.
Return Value:
    None.
--*/
PROTECTED_FUNCTION(ArIoReadAndHalt)
    movq %rdi, %rdx # Move the first parameter into dx.
    inb %dx, %al # Perform the I/O port read.
    sti # Enables interrupts one instruction later.
    hlt # Simultaneously halt and enable interrupts.
    ret
END_FUNCTION(ArIoReadAndHalt)
//
// UINTN
// ArSaveProcessorContext (
//     PPROCESSOR_CONTEXT Context
//     )
//
/*++
Routine Description:
    This routine saves the current processor context, including the
    non-volatile general registers and the system level control registers. This
    function appears to return twice, once when the context is saved and then
    again when the context is restored. Because the stack pointer is restored,
    the caller of this function may not return without either abandoning the
    context or calling restore. Returning and then calling restore would almost
    certainly result in stack corruption.
Arguments:
    Context - Supplies a pointer to the context area to save into.
Return Value:
    Returns 0 after the context was successfully saved (first time).
    Returns the value in the context return address register when the restore
    function is called (the second time). By default this value is 1, though it
    can be manipulated after the initial save is complete.
--*/
FUNCTION(ArSaveProcessorContext)
    movq %rdi, %r10 # Save rdi into a volatile register.
    movl $1, %eax # Get default return value.
    cld # Increment rdi upon stosq.
    stosq # Save it.
    movq (%rsp), %rax # Get the return address.
    stosq # Save it.
    xorl %eax, %eax # Zero out rax.
    movw %cs, %ax # Get CS.
    stosq # Save it.
    pushfq # Push rflags.
    popq %rax # Get rflags in rax.
    stosq # Save it.
    movq %r10, (%rdi) # Save original rdi.
    movq %rsi, 8(%rdi) # Save rsi.
    movq %rdx, 16(%rdi) # Save rdx.
    movq %rcx, 24(%rdi) # Save rcx.
    movq %r8, 32(%rdi) # Save r8.
    movq %r9, 40(%rdi) # Save r9.
    addq $48, %rdi # Advance over arguments.
    movq %rbx, %rax # Get rbx.
    stosq # Save it.
    movq %rbp, %rax # Get rbp.
    stosq # Save it.
    movq %rsp, %rax # Get rsp.
    addq $8, %rax # Pop off return address.
    stosq # Save it.
    movq %r12, (%rdi) # Save r12.
    movq %r13, 8(%rdi) # Save r13.
    movq %r14, 16(%rdi) # Save r14.
    movq %r15, 24(%rdi) # Save r15.
    addq $32, %rdi # Advance pointer for r12-r15.
    movq %dr7, %rax # Get dr7.
    stosq # Save it.
    movq %dr6, %rax # Get dr6.
    stosq # Save it.
    movq %dr0, %rax # Get dr0.
    stosq # Save it.
    movq %dr1, %rax # Get dr1.
    stosq # Save it.
    movq %dr2, %rax # Get dr2.
    stosq # Save it.
    movq %dr3, %rax # Get dr3.
    stosq # Save it.
    movq %rdi, (%rdi) # Save VA of this structure member.
    addq $8, %rdi # Advance over VA member.
    movq %cr0, %rax # Get cr0.
    stosq # Save it.
    movq %cr2, %rax # Get cr2. Why is there no cr1?
    stosq # Save it.
    movq %cr3, %rax # Get the all-important cr3.
    stosq # Save it.
    movq %cr4, %rax # Get cr4.
    stosq # Save it.
    movl $X86_MSR_FSBASE, %ecx # Load MSR number.
    rdmsr # Read FSBASE.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    stosq # Save it.
    movl $X86_MSR_GSBASE, %ecx # Load MSR number.
    rdmsr # Read GSBASE.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    stosq # Save it.
    movl $X86_MSR_KERNEL_GSBASE, %ecx # Load MSR number.
    rdmsr # Read kernel GSBASE.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    stosq # Save it.
    movl $X86_MSR_EFER, %ecx # Load MSR number.
    rdmsr # Read EFER.
    shlq $32, %rdx # Shift rdx into its high word.
    orq %rdx, %rax # OR rdx into rax.
    stosq # Save it.
    xor %eax, %eax # Clear out upper bits of rax.
    str %ax # Get the TR register.
    stosq # Save it.
    pushq %rdi # Save rdi temporarily.
    movq %rax, %rdi # Set the TSS segment as the first param.
    call ArClearTssBusyBit # Clear the busy bit from the TSS.
    popq %rdi # Restore rdi.
    sidt (%rdi) # Save IDT base/limit.
    addq $10, %rdi # Advance over IDT area.
    sgdt (%rdi) # Save GDT base/limit.
    xor %eax, %eax # Zero out return value.
    ret
END_FUNCTION(ArSaveProcessorContext)
//
// VOID
// ArRestoreProcessorContext (
//     PPROCESSOR_CONTEXT Context
//     )
//
/*++
Routine Description:
    This routine restores the current processor context, including the
    non-volatile general registers and the system level control registers. This
    function does not return, but instead jumps to the return address from
    the caller of the save context function.
Arguments:
    Context - Supplies a pointer to the context to restore.
Return Value:
    Does not return, at least not conventionally.
--*/
FUNCTION(ArRestoreProcessorContext)
    //
    // Note that the processor must already be running in 64-bit mode. This
    // function cannot be used to trampoline from 32-bit to 64-bit mode.
    //
    movq %rdi, %rsi # Load the pointer into rsi (for lodsq).
    movq 16(%rdi), %rax # Load CS into rax.
    addq $(PROCESSOR_CONTEXT_SIZE - 20), %rsi # Get to the IDT location.
    std # Decrement rsi during lodsq.
    //
    // After loading the GDT, a far return is needed for it to take effect.
    // This also loads CS.
    //
    pushq %rax # Push CS.
    movq ArRestoreProcessorContextJump@GOTPCREL(%rip), %rax
    pushq %rax # Push "return" address (just below).
    lgdt 10(%rsi) # Load the new GDT.
    retfq # Far return to load the new GDT.
ArRestoreProcessorContextJump:
    lidt (%rsi) # Load the IDT.
    subq $8, %rsi # Retreat to tr.
    lodsq # Pop a value and retreat.
    ltr %ax # Load the task segment.
    lodsq # Pop a value and retreat.
    movl $X86_MSR_EFER, %ecx # Load the MSR number.
    movq %rax, %rdx # Load rdx with the whole 64-bit word.
    shrq $32, %rdx # Put the high bits in edx.
    wrmsr # Write the MSR.
    lodsq # Pop a value and retreat.
    movl $X86_MSR_KERNEL_GSBASE, %ecx # Load the MSR number.
    movq %rax, %rdx # Load rdx with the whole 64-bit word.
    shrq $32, %rdx # Put the high bits in edx.
    wrmsr # Write the MSR.
    lodsq # Pop a value and retreat.
    movl $X86_MSR_GSBASE, %ecx # Load the MSR number.
    movq %rax, %rdx # Load rdx with the whole 64-bit word.
    shrq $32, %rdx # Put the high bits in edx.
    wrmsr # Write the MSR.
    lodsq # Pop a value and retreat.
    movl $X86_MSR_FSBASE, %ecx # Load the MSR number.
    movq %rax, %rdx # Load rdx with the whole 64-bit word.
    shrq $32, %rdx # Put the high bits in edx.
    wrmsr # Write the MSR.
    lodsq # Pop a value and retreat.
    movq %rax, %cr4 # Load CR4.
    lodsq # Pop a value and retreat.
    movq %rax, %cr3 # Load CR3.
    lodsq # Pop a value and retreat.
    movq %rax, %cr2 # Load CR2.
    lodsq # Pop a value and retreat.
    movq %rax, %cr0 # Load CR0.
    lodsq # Pop a value and retreat.
    movq %rax, %rsi # Load the new VA.
    subq $8, %rsi # Retreat beyond the VA.
    lodsq # Pop a value and retreat.
    movq %rax, %dr3 # Restore a debug register.
    lodsq # Pop a value and retreat.
    movq %rax, %dr2 # Restore a debug register.
    lodsq # Pop a value and retreat.
    movq %rax, %dr1 # Restore a debug register.
    lodsq # Pop a value and retreat.
    movq %rax, %dr0 # Restore a debug register.
    lodsq # Pop a value and retreat.
    movq %rax, %dr6 # Restore a debug register.
    lodsq # Pop a value and retreat.
    movq %rax, %dr7 # Restore a debug register.
    movq (%rsi), %r15 # Restore a general register.
    movq -8(%rsi), %r14 # Restore a general register.
    movq -16(%rsi), %r13 # Restore a general register.
    movq -24(%rsi), %r12 # Restore a general register.
    movq -32(%rsi), %rax # Get RSP.
    movq %rax, %rsp # Restore rsp.
    movq -40(%rsi), %rbp # Restore a general register.
    movq -48(%rsi), %rbx # Restore a general register.
    movq -56(%rsi), %r9 # Restore an argument register.
    movq -64(%rsi), %r8 # Restore an argument register.
    movq -72(%rsi), %rcx # Restore an argument register.
    movq -80(%rsi), %rdx # Restore an argument register.
    movq -88(%rsi), %r10 # Restore RSI into R10 (volatile) for now.
    movq -96(%rsi), %rdi # Restore an argument register.
    movq -104(%rsi), %rax # Get RFLAGS.
    subq $120, %rsi # Retreat past the saved registers and CS.
    pushq %rax # Push rflags.
    lodsq # Pop a value and retreat.
    movq %rax, %r11 # Save rip in r11 (a volatile register).
    lodsq # Pop rax, the return value, into place.
    movq %r10, %rsi # Restore RSI (held in R10 from above).
    popfq # Pop rflags (including direction flag).
    jmp *%r11 # Jump off to the return address.
END_FUNCTION(ArRestoreProcessorContext)
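//
// A minimal usage sketch for the save/restore pair above, assuming a
// caller-allocated PROCESSOR_CONTEXT: the save routine returns twice, like
// setjmp, so its return value distinguishes the original save (0) from the
// later restore (nonzero). EnterDeepSleep is a hypothetical helper that does
// not return through the normal path.
//
//     PROCESSOR_CONTEXT Context;
//
//     if (ArSaveProcessorContext(&Context) == 0) {
//         EnterDeepSleep();
//     }
//
//     // Execution resumes here once some other code (such as a resume
//     // trampoline) calls ArRestoreProcessorContext(&Context).
//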
//
// --------------------------------------------------------- Internal Functions
//