@@ -21,20 +21,89 @@ use hyperlight_common::flatbuffer_wrappers::guest_error::ErrorCode;
21
21
use hyperlight_common:: outb:: Exception ;
22
22
use hyperlight_guest:: exit:: abort_with_code_and_message;
23
23
24
+ use crate :: paging;
25
+
26
/// The interrupt/exception stack frame, together with the error code.
///
/// See AMD64 Architecture Programmer's Manual, Volume 2,
/// §8.9.3 Interrupt Stack Frame, pp. 283--284:
/// Figure 8-14: Long-Mode Stack After Interrupt---Same Privilege,
/// Figure 8-15: Long-Mode Stack After Interrupt---Higher Privilege.
///
/// Subject to the proviso that we push a dummy error code of 0 for
/// exceptions for which the processor does not provide one, so every
/// vector sees the same uniform layout.
#[repr(C)]
pub struct ExceptionInfo {
    /// Error code (real, or the dummy 0); lowest address in the frame.
    pub error_code: u64,
    /// Address of the faulting/interrupted instruction.
    pub rip: u64,
    /// Saved code-segment selector.
    pub cs: u64,
    /// Saved RFLAGS.
    pub rflags: u64,
    /// Interrupted stack pointer.
    pub rsp: u64,
    /// Saved stack-segment selector.
    pub ss: u64,
}

// Compile-time guards: the exception entry code and the hardware frame
// both rely on these exact offsets.
const _: () = assert!(core::mem::offset_of!(ExceptionInfo, rip) == 8);
const _: () = assert!(core::mem::offset_of!(ExceptionInfo, rsp) == 32);
43
+
44
/// Saved context, pushed onto the stack by the exception entry code.
///
/// Sits immediately below the [`ExceptionInfo`] frame on the stack
/// (the handler locates the frame at `stack_pointer + size_of::<Context>()`).
#[repr(C)]
pub struct Context {
    /// Segment selectors, in order: gs, fs, es.
    pub segments: [u64; 3],
    /// 512-byte FXSAVE area (x87/SSE state).
    pub fxsave: [u8; 512],
    /// Saved ds selector.
    pub ds: u64,
    /// General-purpose registers. No `rsp`, since the processor saved
    /// it; `rax` is at the top, `r15` the bottom.
    pub gprs: [u64; 15],
}

// 3*8 (segments) + 512 (fxsave) + 8 (ds) + 15*8 (gprs) = 152 + 512 bytes.
const _: () = assert!(core::mem::size_of::<Context>() == 152 + 512);
56
+
57
+ // TODO: This will eventually need to end up in a per-thread context,
58
+ // when there are threads.
59
+ pub static handlers: [ core:: sync:: atomic:: AtomicU64 ; 31 ] =
60
+ [ const { core:: sync:: atomic:: AtomicU64 :: new ( 0 ) } ; 31 ] ;
61
+ type handler_t = fn ( n : u64 , info : * mut ExceptionInfo , ctx : * mut Context , pf_addr : u64 ) -> bool ;
62
+
24
63
/// Exception handler
25
64
#[ unsafe( no_mangle) ]
26
65
pub extern "C" fn hl_exception_handler (
27
66
stack_pointer : u64 ,
28
67
exception_number : u64 ,
29
68
page_fault_address : u64 ,
30
69
) {
70
+ let ctx = stack_pointer as * mut Context ;
71
+ let exn_info = ( stack_pointer + size_of :: < Context > ( ) as u64 ) as * mut ExceptionInfo ;
72
+
31
73
let exception = Exception :: try_from ( exception_number as u8 ) . expect ( "Invalid exception number" ) ;
74
+
75
+ let saved_rip = unsafe { ( & raw const ( * exn_info) . rip ) . read_volatile ( ) } ;
76
+ let error_code = unsafe { ( & raw const ( * exn_info) . error_code ) . read_volatile ( ) } ;
77
+
32
78
let msg = format ! (
33
- "Page Fault Address: {:#x}\n \
34
- Stack Pointer: {:#x}",
35
- page_fault_address, stack_pointer
79
+ "Exception vector: {:#}\n \
80
+ Faulting Instruction: {:#x}\n \
81
+ Page Fault Address: {:#x}\n \
82
+ Error code: {:#x}\n \
83
+ Stack Pointer: {:#x}",
84
+ exception_number, saved_rip, page_fault_address, error_code, stack_pointer
36
85
) ;
37
86
87
+ // We don't presently have any need for user-defined interrupts,
88
+ // so we only support handlers for the architecture-defined
89
+ // vectors (0-31)
90
+ if exception_number < 31 {
91
+ let handler =
92
+ handlers[ exception_number as usize ] . load ( core:: sync:: atomic:: Ordering :: Acquire ) ;
93
+ if handler != 0
94
+ && unsafe {
95
+ core:: mem:: transmute :: < _ , handler_t > ( handler) (
96
+ exception_number,
97
+ exn_info,
98
+ ctx,
99
+ page_fault_address,
100
+ )
101
+ }
102
+ {
103
+ return ;
104
+ }
105
+ }
106
+
38
107
unsafe {
39
108
abort_with_code_and_message (
40
109
& [ ErrorCode :: GuestError as u8 , exception as u8 ] ,
0 commit comments