| tag | line | file | source code |
|-----|------|------|-------------|
| ptep | 274 | arch/sparc/kernel/sys_sunos.c | pte_t *ptep; |
| ptep | 296 | arch/sparc/kernel/sys_sunos.c | ptep = pte_offset(pmdp, addr); |
| ptep | 297 | arch/sparc/kernel/sys_sunos.c | if(pte_none(*ptep)) |
| ptep | 300 | arch/sparc/kernel/sys_sunos.c | array[pnum] = pte_present(*ptep) ? 1 : 0; |
| ptep | 59 | arch/sparc/mm/srmmu.c | int srmmu_pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved || mem_map[MAP_NR(ptep)].count != 1; } |
| ptep | 60 | arch/sparc/mm/srmmu.c | void srmmu_pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 61 | arch/sparc/mm/srmmu.c | void srmmu_pte_reuse(pte_t *ptep) |
| ptep | 63 | arch/sparc/mm/srmmu.c | if(!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 64 | arch/sparc/mm/srmmu.c | mem_map[MAP_NR(ptep)].count++; |
| ptep | 152 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmd_t * pmdp, pte_t * ptep) |
| ptep | 154 | arch/sparc/mm/srmmu.c | unsigned long page = (unsigned long) ptep; |
| ptep | 381 | arch/sparc/mm/srmmu.c | void srmmu_set_pte(pte_t *ptep, pte_t pteval) |
| ptep | 384 | arch/sparc/mm/srmmu.c | *ptep = pteval; |
| ptep | 412 | arch/sparc/mm/srmmu.c | pte_t *ptep; |
| ptep | 416 | arch/sparc/mm/srmmu.c | ptep = srmmu_pte_offset(pmdp, virt_addr); |
| ptep | 417 | arch/sparc/mm/srmmu.c | pte_val(*ptep) = (physaddr >> SRMMU_PTE_PPN_PADDR_SHIFT) & SRMMU_PTE_PPN_MASK; |
| ptep | 420 | arch/sparc/mm/srmmu.c | pte_val(*ptep) |= (SRMMU_ACC_S_RDWREXEC | SRMMU_ET_PTE); |
| ptep | 422 | arch/sparc/mm/srmmu.c | pte_val(*ptep) |= (SRMMU_ACC_S_RDEXEC | SRMMU_ET_PTE); |
| ptep | 424 | arch/sparc/mm/srmmu.c | pte_val(*ptep) |= (bus_type << 28); |
| ptep | 425 | arch/sparc/mm/srmmu.c | pte_val(*ptep) &= ~(SRMMU_PTE_C_MASK); /* Make sure cacheable bit is off. */ |
| ptep | 593 | arch/sparc/mm/srmmu.c | pte_t *ptep = 0; |
| ptep | 620 | arch/sparc/mm/srmmu.c | ptep = srmmu_init_alloc(&mempool, |
| ptep | 622 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmdp, ptep); |
| ptep | 625 | arch/sparc/mm/srmmu.c | ptep = srmmu_pte_offset(pmdp, vaddr); |
| ptep | 626 | arch/sparc/mm/srmmu.c | *ptep = srmmu_mk_pte(vaddr, SRMMU_PAGE_KERNEL); |
| ptep | 640 | arch/sparc/mm/srmmu.c | ptep = srmmu_init_alloc(&mempool, |
| ptep | 642 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmdp, ptep); |
| ptep | 657 | arch/sparc/mm/srmmu.c | ptep = srmmu_init_alloc(&mempool, |
| ptep | 659 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmdp, ptep); |
| ptep | 662 | arch/sparc/mm/srmmu.c | ptep = srmmu_pte_offset(pmdp, vaddr); |
| ptep | 663 | arch/sparc/mm/srmmu.c | *ptep = srmmu_mk_pte((unsigned int) srmmu_init_alloc(&mempool, PAGE_SIZE), SRMMU_PAGE_KERNEL); |
| ptep | 664 | arch/sparc/mm/srmmu.c | pte_val(*ptep) &= ~(SRMMU_PTE_C_MASK); |
| ptep | 684 | arch/sparc/mm/srmmu.c | ptep = srmmu_init_alloc(&mempool, |
| ptep | 686 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmdp, ptep); |
| ptep | 688 | arch/sparc/mm/srmmu.c | ptep = srmmu_pte_offset(pmdp, vaddr); |
| ptep | 690 | arch/sparc/mm/srmmu.c | *ptep++ = srmmu_mk_pte((unsigned int) start, SRMMU_PAGE_KERNEL); |
| ptep | 692 | arch/sparc/mm/srmmu.c | *ptep++ = srmmu_mk_pte((unsigned int) srmmu_init_alloc(&mempool, PAGE_SIZE), |
| ptep | 695 | arch/sparc/mm/srmmu.c | *ptep++ = srmmu_mk_pte((unsigned int) srmmu_init_alloc(&mempool, PAGE_SIZE), |
| ptep | 698 | arch/sparc/mm/srmmu.c | *ptep = srmmu_mk_pte((unsigned int) srmmu_init_alloc(&mempool, PAGE_SIZE), |
| ptep | 741 | arch/sparc/mm/srmmu.c | ptep = srmmu_init_alloc(&mempool, |
| ptep | 743 | arch/sparc/mm/srmmu.c | srmmu_pmd_set(pmdp, ptep); |
| ptep | 746 | arch/sparc/mm/srmmu.c | ptep = srmmu_pte_offset(pmdp, vaddr); |
| ptep | 747 | arch/sparc/mm/srmmu.c | pte_val(*ptep) = prom_pte; |
| ptep | 68 | arch/sparc/mm/sun4c.c | int sun4c_pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved || mem_map[MAP_NR(ptep)].count != 1; } |
| ptep | 69 | arch/sparc/mm/sun4c.c | void sun4c_pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 70 | arch/sparc/mm/sun4c.c | void sun4c_pte_reuse(pte_t *ptep) |
| ptep | 72 | arch/sparc/mm/sun4c.c | if(!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 73 | arch/sparc/mm/sun4c.c | mem_map[MAP_NR(ptep)].count++; |
| ptep | 364 | arch/sparc/mm/sun4c.c | void sun4c_set_pte(pte_t *ptep, pte_t pteval) |
| ptep | 366 | arch/sparc/mm/sun4c.c | *ptep = pteval; |
| ptep | 224 | include/asm-alpha/pgtable.h | extern inline void pmd_set(pmd_t * pmdp, pte_t * ptep) |
| ptep | 225 | include/asm-alpha/pgtable.h | { pmd_val(*pmdp) = _PAGE_TABLE | ((((unsigned long) ptep) - PAGE_OFFSET) << (32-PAGE_SHIFT)); } |
| ptep | 241 | include/asm-alpha/pgtable.h | extern inline int pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved || mem_map[MAP_NR(ptep)].count != 1; } |
| ptep | 242 | include/asm-alpha/pgtable.h | extern inline void pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 243 | include/asm-alpha/pgtable.h | extern inline void pte_reuse(pte_t * ptep) |
| ptep | 245 | include/asm-alpha/pgtable.h | if (!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 246 | include/asm-alpha/pgtable.h | mem_map[MAP_NR(ptep)].count++; |
| ptep | 291 | include/asm-i386/pgtable.h | extern inline int pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved || mem_map[MAP_NR(ptep)].count != 1; } |
| ptep | 292 | include/asm-i386/pgtable.h | extern inline void pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 293 | include/asm-i386/pgtable.h | extern inline void pte_reuse(pte_t * ptep) |
| ptep | 295 | include/asm-i386/pgtable.h | if (!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 296 | include/asm-i386/pgtable.h | mem_map[MAP_NR(ptep)].count++; |
| ptep | 221 | include/asm-mips/pgtable.h | extern inline void pmd_set(pmd_t * pmdp, pte_t * ptep) |
| ptep | 222 | include/asm-mips/pgtable.h | { pmd_val(*pmdp) = _PAGE_TABLE | ((unsigned long) ptep - PT_OFFSET); } |
| ptep | 226 | include/asm-mips/pgtable.h | extern inline int pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved || mem_map[MAP_NR(ptep)].count != 1; } |
| ptep | 227 | include/asm-mips/pgtable.h | extern inline void pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 228 | include/asm-mips/pgtable.h | extern inline void pte_reuse(pte_t * ptep) |
| ptep | 230 | include/asm-mips/pgtable.h | if (!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 231 | include/asm-mips/pgtable.h | mem_map[MAP_NR(ptep)].count++; |
| ptep | 201 | include/asm-ppc/pgtable.h | extern inline int pte_inuse(pte_t *ptep) { return mem_map[MAP_NR(ptep)].reserved; } |
| ptep | 203 | include/asm-ppc/pgtable.h | extern inline void pte_clear(pte_t *ptep) { pte_val(*ptep) = 0; } |
| ptep | 204 | include/asm-ppc/pgtable.h | extern inline void pte_reuse(pte_t * ptep) |
| ptep | 206 | include/asm-ppc/pgtable.h | if (!mem_map[MAP_NR(ptep)].reserved) |
| ptep | 207 | include/asm-ppc/pgtable.h | mem_map[MAP_NR(ptep)].count++; |
| ptep | 594 | mm/filemap.c | static inline int filemap_sync_pte(pte_t * ptep, struct vm_area_struct *vma, |
| ptep | 597 | mm/filemap.c | pte_t pte = *ptep; |
| ptep | 606 | mm/filemap.c | set_pte(ptep, pte_mkclean(pte)); |
| ptep | 613 | mm/filemap.c | pte_clear(ptep); |
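
The per-architecture rows above (asm-alpha, asm-i386, asm-mips, and the srmmu/sun4c variants) all repeat the same small set of pte helpers. The sketch below is a minimal consolidation of that recurring shape, assuming the 1.3-era `mem_map[]`, `MAP_NR()`, `pte_t`, and `pte_val()` definitions from the headers listed in the table; it is illustrative only, not a drop-in replacement for any one architecture's implementation (the ppc version, for instance, checks only the reserved bit in `pte_inuse`).

```c
/*
 * Generic pte helper pattern seen in the cross-reference above.
 * mem_map, MAP_NR, pte_t and pte_val() are taken as given from the
 * kernel headers cited in the table.
 */
extern inline int pte_inuse(pte_t *ptep)
{
	/* The page holding this pte is "in use" if it is reserved
	 * or has more than one reference. */
	return mem_map[MAP_NR(ptep)].reserved ||
	       mem_map[MAP_NR(ptep)].count != 1;
}

extern inline void pte_clear(pte_t *ptep)
{
	/* Zero the entry so the MMU treats it as not present. */
	pte_val(*ptep) = 0;
}

extern inline void pte_reuse(pte_t *ptep)
{
	/* Bump the reference count unless the page is reserved. */
	if (!mem_map[MAP_NR(ptep)].reserved)
		mem_map[MAP_NR(ptep)].count++;
}
```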