97 lines
		
	
	
		
			2.2 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
			
		
		
	
	
			97 lines
		
	
	
		
			2.2 KiB
		
	
	
	
		
			C
		
	
	
	
	
	
| /*
 | |
|  * Supervisor Mode Access Prevention support
 | |
|  *
 | |
|  * Copyright (C) 2012 Intel Corporation
 | |
|  * Author: H. Peter Anvin <hpa@linux.intel.com>
 | |
|  *
 | |
|  * This program is free software; you can redistribute it and/or
 | |
|  * modify it under the terms of the GNU General Public License
 | |
|  * as published by the Free Software Foundation; version 2
 | |
|  * of the License.
 | |
|  */
 | |
| 
 | |
| #ifndef _ASM_X86_SMAP_H
 | |
| #define _ASM_X86_SMAP_H
 | |
| 
 | |
| #include <linux/stringify.h>
 | |
| #include <asm/nops.h>
 | |
| #include <asm/cpufeatures.h>
 | |
| #include <asm/alternative.h>
 | |
| 
 | |
/*
 * "Raw" instruction opcodes, emitted as literal bytes:
 *   CLAC = 0f 01 ca, STAC = 0f 01 cb
 * (presumably spelled out as .byte sequences so they assemble even with
 * toolchains whose assembler lacks the mnemonics — TODO confirm).
 */
#define __ASM_CLAC	.byte 0x0f,0x01,0xca
#define __ASM_STAC	.byte 0x0f,0x01,0xcb
 | |
| 
 | |
| #ifdef __ASSEMBLY__
 | |
| 
 | |
#ifdef CONFIG_X86_SMAP

/*
 * Assembly-mode helpers: ALTERNATIVE leaves the site empty by default and
 * patches in the CLAC/STAC opcode bytes on CPUs that report
 * X86_FEATURE_SMAP.
 */
#define ASM_CLAC \
	ALTERNATIVE "", __stringify(__ASM_CLAC), X86_FEATURE_SMAP

#define ASM_STAC \
	ALTERNATIVE "", __stringify(__ASM_STAC), X86_FEATURE_SMAP

#else /* CONFIG_X86_SMAP */

/* SMAP support compiled out: the helpers expand to nothing. */
#define ASM_CLAC
#define ASM_STAC

#endif /* CONFIG_X86_SMAP */
 | |
| 
 | |
| #else /* __ASSEMBLY__ */
 | |
| 
 | |
| #ifdef CONFIG_X86_SMAP
 | |
| 
 | |
/*
 * clac - execute CLAC (clear EFLAGS.AC), closing a user-space access
 * window opened by stac().  Patched to nothing on CPUs without
 * X86_FEATURE_SMAP.
 */
static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP);
}
 | |
| 
 | |
/*
 * stac - execute STAC (set EFLAGS.AC), temporarily permitting kernel
 * accesses to user-space memory while SMAP is enforced.  Patched to
 * nothing on CPUs without X86_FEATURE_SMAP.
 */
static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __stringify(__ASM_STAC), X86_FEATURE_SMAP);
}
 | |
| 
 | |
/*
 * smap_save - snapshot EFLAGS (pushf; pop) and then execute CLAC, so the
 * caller can force SMAP protection on and later put the AC bit back via
 * smap_restore().
 *
 * NOTE(review): when X86_FEATURE_SMAP is absent the ALTERNATIVE expands
 * to nothing and 'flags' is returned uninitialized; this appears harmless
 * only because smap_restore() is likewise patched out in that case —
 * confirm callers never use the value for anything else.
 */
static __always_inline unsigned long smap_save(void)
{
	unsigned long flags;

	asm volatile (ALTERNATIVE("", "pushf; pop %0; " __stringify(__ASM_CLAC),
				  X86_FEATURE_SMAP)
		      : "=rm" (flags) : : "memory", "cc");

	return flags;
}
 | |
| 
 | |
/*
 * smap_restore - restore the EFLAGS value captured by smap_save()
 * (push; popf), returning the AC bit to its saved state.  Patched to
 * nothing on CPUs without X86_FEATURE_SMAP.
 */
static __always_inline void smap_restore(unsigned long flags)
{
	asm volatile (ALTERNATIVE("", "push %0; popf", X86_FEATURE_SMAP)
		      : : "g" (flags) : "memory", "cc");
}
 | |
| 
 | |
/*
 * These macros can be used in asm() statements: same runtime patching as
 * the assembly-mode versions above, but using the C-side, parenthesized
 * ALTERNATIVE() form.
 */
#define ASM_CLAC \
	ALTERNATIVE("", __stringify(__ASM_CLAC), X86_FEATURE_SMAP)
#define ASM_STAC \
	ALTERNATIVE("", __stringify(__ASM_STAC), X86_FEATURE_SMAP)
 | |
| 
 | |
| #else /* CONFIG_X86_SMAP */
 | |
| 
 | |
/* CONFIG_X86_SMAP is off: clac() is a no-op. */
static inline void clac(void)
{
}
 | |
/* CONFIG_X86_SMAP is off: stac() is a no-op. */
static inline void stac(void)
{
}
 | |
| 
 | |
/* CONFIG_X86_SMAP is off: nothing to save, report zero flags. */
static inline unsigned long smap_save(void)
{
	return 0UL;
}
 | |
/* CONFIG_X86_SMAP is off: the saved flags are ignored. */
static inline void smap_restore(unsigned long flags)
{
	(void)flags;
}
 | |
| 
 | |
/* No SMAP: the asm()-statement helper macros expand to nothing as well. */
#define ASM_CLAC
#define ASM_STAC
 | |
| 
 | |
| #endif /* CONFIG_X86_SMAP */
 | |
| 
 | |
| #endif /* __ASSEMBLY__ */
 | |
| 
 | |
| #endif /* _ASM_X86_SMAP_H */
 |