2ee7efa68c15dfa470cffe802536200a9ba31174
1 #ifndef UST_PROCESSOR_H
2 #define UST_PROCESSOR_H
/*
 * Per-thread scratch stack used by the save_registers() assembly below to
 * spill CPU registers without disturbing the application stack.
 * Declared here; defined once in a .c file.  500 longs deep.
 */
extern __thread long ust_reg_stack[500];

/*
 * Current bottom (lowest used address) of ust_reg_stack for this thread.
 * The save_registers() asm tests it for zero and lazily initializes it to
 * the top of ust_reg_stack on first use.
 */
extern volatile __thread long *ust_reg_stack_ptr;
10 #ifndef CONFIG_UST_GDB_INTEGRATION
11 static inline save_ip(void)
/*
 * x86-32: GDB marker integration is not implemented, so fail the build if
 * it was requested.  In either case save_registers() is a no-op on this
 * architecture.
 *
 * NOTE(review): the config macro was previously spelled
 * "CONFIG_UST_GDB_ITEGRATION" here (missing 'N'), which the build system
 * never defines, so the intended #error could never fire.  Fixed to match
 * the correctly spelled CONFIG_UST_GDB_INTEGRATION used earlier in this
 * header.
 */
#ifdef CONFIG_UST_GDB_INTEGRATION

#error "GDB integration not supported for x86-32 yet."

#define save_registers(a)

#else /* CONFIG_UST_GDB_INTEGRATION */

#define save_registers(a)

#endif /* CONFIG_UST_GDB_INTEGRATION */
/* Macro to insert the address of a relative jump in an assembly stub, in a
 * relocatable way.  On x86-32 a plain label reference is already
 * position-usable, so the label expands unchanged. */
#define RELATIVE_ADDRESS(__rel_label__) __rel_label__

/* Assembler directive emitting a pointer-sized word: 4 bytes on x86-32. */
#define _ASM_PTR ".long "
56 #else /* below is code for x86-64 */
59 int padding
; /* 4 bytes */
81 #ifdef CONFIG_UST_GDB_ITEGRATION
83 asm (".section __marker_addr,\"aw\",@progbits\n\t" \
84 _ASM_PTR "%c[marker_struct], (1f)\n\t" \
87 :: [marker_struct] "i" (&__mark_##channel##_##name));\
89 #define save_registers(regsptr) \
91 /* save original rsp */ \
93 /* push original rflags */ \
95 /* rax will hold the ptr to the private stack bottom */ \
97 /* rbx will be used to temporarily hold the stack bottom addr */ \
99 /* rdi is the input to __tls_get_addr, and also a temp var */ \
101 /* Start TLS access of private reg stack pointer */ \
103 "leaq ust_reg_stack_ptr@tlsgd(%%rip), %%rdi\n\t" \
106 "call __tls_get_addr@plt\n\t" \
107 /* --- End TLS access */ \
108 /* check if ust_reg_stack_ptr has been initialized */ \
109 "movq (%%rax),%%rbx\n\t" \
110 "testq %%rbx,%%rbx\n\t" \
112 "movq %%rax,%%rbx\n\t" \
113 /* Start TLS access of private reg stack */ \
115 "leaq ust_reg_stack@tlsgd(%%rip), %%rdi\n\t" \
118 "call __tls_get_addr@plt\n\t" \
119 /* --- End TLS access */ \
120 "addq $500,%%rax\n\t" \
121 "movq %%rax,(%%rbx)\n\t" \
122 "movq %%rbx,%%rax\n\t" \
123 /* now the pointer to the private stack is in rax.
124 must add stack size so the ptr points to the stack bottom. */ \
126 /* Manually push rsp to private stack */ \
127 "addq $-8,(%%rax)\n\t" \
128 "movq 32(%%rsp), %%rdi\n\t" \
129 "movq (%%rax), %%rbx\n\t" \
130 "movq %%rdi, (%%rbx)\n\t" \
131 /* Manually push eflags to private stack */ \
132 "addq $-8,(%%rax)\n\t" \
133 "movq 24(%%rsp), %%rdi\n\t" \
134 "movq (%%rax), %%rbx\n\t" \
135 "movq %%rdi, (%%rbx)\n\t" \
136 /* Manually push rax to private stack */ \
137 "addq $-8,(%%rax)\n\t" \
138 "movq 16(%%rsp), %%rdi\n\t" \
139 "movq (%%rax), %%rbx\n\t" \
140 "movq %%rdi, (%%rbx)\n\t" \
141 /* Manually push rbx to private stack */ \
142 "addq $-8,(%%rax)\n\t" \
143 "movq 8(%%rsp), %%rdi\n\t" \
144 "movq (%%rax), %%rbx\n\t" \
145 "movq %%rdi, (%%rbx)\n\t" \
146 /* Manually push rdi to private stack */ \
147 "addq $-8,(%%rax)\n\t" \
148 "movq 0(%%rsp), %%rdi\n\t" \
149 "movq (%%rax), %%rbx\n\t" \
150 "movq %%rdi, (%%rbx)\n\t" \
151 /* now push regs to tls */ \
152 /* -- rsp already pushed -- */ \
153 /* -- rax already pushed -- */ \
154 /* -- rbx already pushed -- */ \
155 /* -- rdi already pushed -- */ \
156 "addq $-8,(%%rax)\n\t" \
157 "movq (%%rax), %%rbx\n\t" \
158 "movq %%rcx,(%%rbx)\n\t" \
159 "addq $-8,(%%rax)\n\t" \
160 "movq (%%rax), %%rbx\n\t" \
161 "movq %%rdx,(%%rbx)\n\t" \
162 "addq $-8,(%%rax)\n\t" \
163 "movq (%%rax), %%rbx\n\t" \
164 "movq %%rbp,(%%rbx)\n\t" \
165 "addq $-8,(%%rax)\n\t" \
166 "movq (%%rax), %%rbx\n\t" \
167 "movq %%rsi,(%%rbx)\n\t" \
168 "addq $-8,(%%rax)\n\t" \
169 "movq (%%rax), %%rbx\n\t" \
170 "movq %%r8,(%%rbx)\n\t" \
171 "addq $-8,(%%rax)\n\t" \
172 "movq (%%rax), %%rbx\n\t" \
173 "movq %%r9,(%%rbx)\n\t" \
174 "addq $-8,(%%rax)\n\t" \
175 "movq (%%rax), %%rbx\n\t" \
176 "movq %%r10,(%%rbx)\n\t" \
177 "addq $-8,(%%rax)\n\t" \
178 "movq (%%rax), %%rbx\n\t" \
179 "movq %%r11,(%%rbx)\n\t" \
180 "addq $-8,(%%rax)\n\t" \
181 "movq (%%rax), %%rbx\n\t" \
182 "movq %%r12,(%%rbx)\n\t" \
183 "addq $-8,(%%rax)\n\t" \
184 "movq (%%rax), %%rbx\n\t" \
185 "movq %%r13,(%%rbx)\n\t" \
186 "addq $-8,(%%rax)\n\t" \
187 "movq (%%rax), %%rbx\n\t" \
188 "movq %%r14,(%%rbx)\n\t" \
189 "addq $-8,(%%rax)\n\t" \
190 "movq (%%rax), %%rbx\n\t" \
191 "movq %%r15,(%%rbx)\n\t" \
193 "addq $-2,(%%rax)\n\t" \
194 "movq (%%rax), %%rbx\n\t" \
195 "movw %%cs, (%%rbx)\n\t" \
197 "addq $-2,(%%rax)\n\t" \
198 "movq (%%rax), %%rbx\n\t" \
199 "movw %%ss, (%%rbx)\n\t" \
200 /* add padding for struct registers */ \
201 "addq $-4,(%%rax)\n\t" \
202 /* restore original values of regs that were used internally */ \
206 /* cancel push of rsp */ \
207 "addq $8,%%rsp\n\t" \
208 /* cancel push of rflags */ \
209 "addq $8,%%rsp\n\t" \
211 memcpy(regsptr, (void *)ust_reg_stack_ptr, sizeof(struct registers)); \
212 ust_reg_stack_ptr = (void *)(((long)ust_reg_stack_ptr) + sizeof(struct registers));
214 #endif /* CONFIG_UST_GDB_ITEGRATION */
/* Macro to insert the address of a relative jump in an assembly stub, in a
 * relocatable way.  On x86-64 this uses the RIP-relative notation; the
 * doubled %% is required because the expansion is used inside extended-asm
 * template strings. */
#define RELATIVE_ADDRESS(__rel_label__) __rel_label__(%%rip)

/* Assembler directive emitting a pointer-sized word: 8 bytes on x86-64. */
#define _ASM_PTR ".quad "
224 #endif /* UST_PROCESSOR_H */
This page took 0.035864 seconds and 3 git commands to generate.