/*
 * Copyright IBM Corp. 2011
 * Author(s): Jan Glauber <jang@linux.vnet.ibm.com>
 */
#include <linux/hugetlb.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/cacheflush.h>
#include <asm/facility.h>
#include <asm/pgtable.h>
#include <asm/page.h>

#if PAGE_DEFAULT_KEY
/*
 * SSKE with multiple-block control: set the storage key for all 4K blocks
 * from addr up to the next 1MB boundary and return the address of the next
 * block to be processed.
 */
static inline unsigned long sske_frame(unsigned long addr, unsigned char skey)
{
	asm volatile(".insn rrf,0xb22b0000,%[skey],%[addr],9,0"
		     : [addr] "+a" (addr) : [skey] "d" (skey));
	return addr;
}

void __storage_key_init_range(unsigned long start, unsigned long end)
{
	unsigned long boundary, size;

	while (start < end) {
		if (MACHINE_HAS_EDAT1) {
			/* set storage keys for a 1MB frame */
			size = 1UL << 20;
			boundary = (start + size) & ~(size - 1);
			if (boundary <= end) {
				do {
					start = sske_frame(start, PAGE_DEFAULT_KEY);
				} while (start < boundary);
				continue;
			}
		}
		page_set_storage_key(start, PAGE_DEFAULT_KEY, 0);
		start += PAGE_SIZE;
	}
}
#endif
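
/*
 * Note: callers are expected to go through a wrapper (presumably
 * storage_key_init_range() in asm/page.h) that compiles away entirely when
 * PAGE_DEFAULT_KEY is zero, which is why the block above is conditional.
 */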

/*
 * Walk the kernel page table down to the pte level. Returns NULL if any
 * level is not populated or if the address is covered by a large page
 * entry, which change_page_attr() below does not handle.
 */
static pte_t *walk_page_table(unsigned long addr)
{
	pgd_t *pgdp;
	pud_t *pudp;
	pmd_t *pmdp;
	pte_t *ptep;

	pgdp = pgd_offset_k(addr);
	if (pgd_none(*pgdp))
		return NULL;
	pudp = pud_offset(pgdp, addr);
	if (pud_none(*pudp) || pud_large(*pudp))
		return NULL;
	pmdp = pmd_offset(pudp, addr);
	if (pmd_none(*pmdp) || pmd_large(*pmdp))
		return NULL;
	ptep = pte_offset_kernel(pmdp, addr);
	if (pte_none(*ptep))
		return NULL;
	return ptep;
}

static void change_page_attr(unsigned long addr, int numpages,
			     pte_t (*set)(pte_t))
{
	pte_t *ptep, pte;
	int i;

	for (i = 0; i < numpages; i++) {
		ptep = walk_page_table(addr);
		if (WARN_ON_ONCE(!ptep))
			break;
		pte = *ptep;
		pte = set(pte);
		/* Invalidate the old pte and flush the TLB before rewriting it. */
		__ptep_ipte(addr, ptep);
		*ptep = pte;
		addr += PAGE_SIZE;
	}
}

int set_memory_ro(unsigned long addr, int numpages)
{
	change_page_attr(addr, numpages, pte_wrprotect);
	return 0;
}

int set_memory_rw(unsigned long addr, int numpages)
{
	change_page_attr(addr, numpages, pte_mkwrite);
	return 0;
}

/* Not possible: the page tables provide no execute protection bit. */
int set_memory_nx(unsigned long addr, int numpages)
{
	return 0;
}

int set_memory_x(unsigned long addr, int numpages)
{
	return 0;
}
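
/*
 * Usage sketch (illustrative only, not taken from an in-tree caller):
 * the helpers above expect a page-aligned kernel virtual address and a
 * page count, e.g. to write-protect one page and later restore it:
 *
 *	set_memory_ro((unsigned long)page_address(pg), 1);
 *	...
 *	set_memory_rw((unsigned long)page_address(pg), 1);
 */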

#ifdef CONFIG_DEBUG_PAGEALLOC

static void ipte_range(pte_t *pte, unsigned long address, int nr)
{
	int i;

	if (test_facility(13)) {
		/* IPTE-range facility: invalidate all nr entries at once. */
		__ptep_ipte_range(address, nr - 1, pte);
		return;
	}
	for (i = 0; i < nr; i++) {
		__ptep_ipte(address, pte);
		address += PAGE_SIZE;
		pte++;
	}
}

void __kernel_map_pages(struct page *page, int numpages, int enable)
{
	unsigned long address;
	int nr, i, j;
	pgd_t *pgd;
	pud_t *pud;
	pmd_t *pmd;
	pte_t *pte;

	for (i = 0; i < numpages;) {
		address = page_to_phys(page + i);
		pgd = pgd_offset_k(address);
		pud = pud_offset(pgd, address);
		pmd = pmd_offset(pud, address);
		pte = pte_offset_kernel(pmd, address);
		/* Number of pte entries left in the current page table page. */
		nr = (unsigned long)pte >> ilog2(sizeof(long));
		nr = PTRS_PER_PTE - (nr & (PTRS_PER_PTE - 1));
		nr = min(numpages - i, nr);
		if (enable) {
			/* Re-validate the ptes by restoring the 1:1 kernel mapping. */
			for (j = 0; j < nr; j++) {
				pte_val(*pte) = __pa(address);
				address += PAGE_SIZE;
				pte++;
			}
		} else {
			ipte_range(pte, address, nr);
		}
		i += nr;
	}
}

#ifdef CONFIG_HIBERNATION
bool kernel_page_present(struct page *page)
{
	unsigned long addr;
	int cc;

	addr = page_to_phys(page);
	/* LRA sets condition code 0 only if the address can be translated. */
	asm volatile(
		"	lra	%1,0(%1)\n"
		"	ipm	%0\n"
		"	srl	%0,28"
		: "=d" (cc), "+a" (addr) : : "cc");
	return cc == 0;
}
#endif /* CONFIG_HIBERNATION */

#endif /* CONFIG_DEBUG_PAGEALLOC */