Diffstat (limited to 'include')
-rw-r--r--  include/asm-powerpc/mmu-hash64.h     |  1 +
-rw-r--r--  include/asm-powerpc/pgtable-ppc64.h  | 10 +++++-----
-rw-r--r--  include/asm-powerpc/uaccess.h        |  4 ++--
3 files changed, 8 insertions, 7 deletions
diff --git a/include/asm-powerpc/mmu-hash64.h b/include/asm-powerpc/mmu-hash64.h
index 0dff7677604..39c5c5f62bf 100644
--- a/include/asm-powerpc/mmu-hash64.h
+++ b/include/asm-powerpc/mmu-hash64.h
@@ -177,6 +177,7 @@ extern struct mmu_psize_def mmu_psize_defs[MMU_PAGE_COUNT];
 extern int mmu_linear_psize;
 extern int mmu_virtual_psize;
 extern int mmu_vmalloc_psize;
+extern int mmu_vmemmap_psize;
 extern int mmu_io_psize;
 extern int mmu_kernel_ssize;
 extern int mmu_highuser_ssize;
diff --git a/include/asm-powerpc/pgtable-ppc64.h b/include/asm-powerpc/pgtable-ppc64.h
index 27f18695f7d..cc6a43ba41d 100644
--- a/include/asm-powerpc/pgtable-ppc64.h
+++ b/include/asm-powerpc/pgtable-ppc64.h
@@ -65,15 +65,15 @@
 #define VMALLOC_REGION_ID (REGION_ID(VMALLOC_START))
 #define KERNEL_REGION_ID (REGION_ID(PAGE_OFFSET))
+#define VMEMMAP_REGION_ID (0xfUL)
 #define USER_REGION_ID (0UL)
 
 /*
- * Defines the address of the vmemap area, in the top 16th of the
- * kernel region.
+ * Defines the address of the vmemap area, in its own region
  */
-#define VMEMMAP_BASE (ASM_CONST(CONFIG_KERNEL_START) + \
-		      (0xfUL << (REGION_SHIFT - 4)))
-#define vmemmap ((struct page *)VMEMMAP_BASE)
+#define VMEMMAP_BASE (VMEMMAP_REGION_ID << REGION_SHIFT)
+#define vmemmap ((struct page *)VMEMMAP_BASE)
+
 /*
  * Common bits in a linux-style PTE. These match the bits in the
diff --git a/include/asm-powerpc/uaccess.h b/include/asm-powerpc/uaccess.h
index 8e798e3758b..1a0736f8803 100644
--- a/include/asm-powerpc/uaccess.h
+++ b/include/asm-powerpc/uaccess.h
@@ -380,7 +380,7 @@ static inline unsigned long __copy_from_user_inatomic(void *to,
 		const void __user *from, unsigned long n)
 {
 	if (__builtin_constant_p(n) && (n <= 8)) {
-		unsigned long ret;
+		unsigned long ret = 1;
 
 		switch (n) {
 		case 1:
@@ -406,7 +406,7 @@ static inline unsigned long __copy_to_user_inatomic(void __user *to,
 		const void *from, unsigned long n)
 {
 	if (__builtin_constant_p(n) && (n <= 8)) {
-		unsigned long ret;
+		unsigned long ret = 1;
 
 		switch (n) {
 		case 1:
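
Note on the pgtable-ppc64.h hunk above: the virtual memmap is moved out of the top 16th of the kernel region and given its own top-level region, identified by VMEMMAP_REGION_ID (0xf), so its base is simply the region ID shifted into the high bits of the effective address. The sketch below is a minimal, user-space illustration of that layout, assuming REGION_SHIFT is 60 and REGION_ID() is a plain right shift as in 64-bit powerpc headers of this era; it is not kernel code.

/*
 * Minimal sketch (user space, not kernel code) of the new vmemmap layout.
 * REGION_SHIFT == 60 and the REGION_ID() definition are assumptions
 * matching include/asm-powerpc/pgtable-ppc64.h of this era.
 */
#include <assert.h>
#include <stdio.h>

#define REGION_SHIFT      60UL
#define REGION_ID(ea)     (((unsigned long)(ea)) >> REGION_SHIFT)

#define VMEMMAP_REGION_ID (0xfUL)
#define VMEMMAP_BASE      (VMEMMAP_REGION_ID << REGION_SHIFT)

int main(void)
{
	/* Any address inside the vmemmap area decodes back to region 0xf. */
	unsigned long ea = VMEMMAP_BASE + 0x1234;

	assert(REGION_ID(ea) == VMEMMAP_REGION_ID);
	printf("VMEMMAP_BASE = 0x%lx, region = 0x%lx\n",
	       VMEMMAP_BASE, REGION_ID(ea));
	return 0;
}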
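Note on the uaccess.h hunks above: the switch in __copy_from_user_inatomic() / __copy_to_user_inatomic() only handles some of the constant sizes up to 8, so for the sizes it skips, ret was previously left uninitialized; pre-setting it to a non-zero value forces those cases onto the ordinary copy path instead of possibly reporting a bogus "0 bytes left" result. The model below is a simplified, self-contained sketch of that failure mode and fix; the fallback structure and the set of handled sizes are assumptions about the surrounding function body, which is not part of this diff, and memcpy() stands in for the real __get_user_size()/__copy_tofrom_user() helpers.

/*
 * Simplified model of why "ret = 1" matters.  Not kernel code: the
 * handled sizes (1, 2, 4, 8) and the "if (ret == 0) return 0;" check
 * are assumptions about the real function bodies.
 */
#include <stdio.h>
#include <string.h>

static unsigned long slow_copy(void *to, const void *from, unsigned long n)
{
	memcpy(to, from, n);	/* stand-in for the full copy routine */
	return 0;		/* 0 == nothing left uncopied */
}

static unsigned long copy_inatomic(void *to, const void *from, unsigned long n)
{
	if (n <= 8) {
		unsigned long ret = 1;	/* was uninitialized before the fix */

		switch (n) {		/* only some small sizes are handled */
		case 1: memcpy(to, from, 1); ret = 0; break;
		case 2: memcpy(to, from, 2); ret = 0; break;
		case 4: memcpy(to, from, 4); ret = 0; break;
		case 8: memcpy(to, from, 8); ret = 0; break;
		}
		if (ret == 0)
			return 0;
		/* n == 3, 5, 6 or 7: ret stayed 1, fall back below */
	}
	return slow_copy(to, from, n);
}

int main(void)
{
	char src[8] = "abcdefg", dst[8] = { 0 };

	/* With an uninitialized ret, stale stack contents equal to zero
	 * would have reported success here without copying anything. */
	printf("left uncopied: %lu, dst = %.3s\n",
	       copy_inatomic(dst, src, 3), dst);
	return 0;
}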