tag  line  file  source code
pmd  48  arch/i386/kernel/process.c  pmd_t * pmd;
pmd  54  arch/i386/kernel/process.c  pmd = pmd_offset(swapper_pg_dir, 0);
pmd  56  arch/i386/kernel/process.c  pmd_clear(pmd++);
pmd  73  arch/i386/kernel/vm86.c  pmd_t *pmd;
pmd  85  arch/i386/kernel/vm86.c  pmd = pmd_offset(pgd, 0xA0000);
pmd  86  arch/i386/kernel/vm86.c  if (pmd_none(*pmd))
pmd  88  arch/i386/kernel/vm86.c  if (pmd_bad(*pmd)) {
pmd  89  arch/i386/kernel/vm86.c  printk("vm86: bad pmd entry [%p]:%08lx\n", pmd, pmd_val(*pmd));
pmd  90  arch/i386/kernel/vm86.c  pmd_clear(pmd);
pmd  93  arch/i386/kernel/vm86.c  pte = pte_offset(pmd, 0xA0000);
pmd  77  arch/sparc/mm/srmmu.c  srmmu_pmd_page(pmd_t pmd)
pmd  81  arch/sparc/mm/srmmu.c  page = (pmd_val(pmd) & (SRMMU_PTD_PTP_MASK)) << SRMMU_PTD_PTP_PADDR_SHIFT;
pmd  114  arch/sparc/mm/srmmu.c  int srmmu_pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd  115  arch/sparc/mm/srmmu.c  int srmmu_pmd_bad(pmd_t pmd)
pmd  117  arch/sparc/mm/srmmu.c  return ((pmd_val(pmd)&SRMMU_ET_PTDBAD)==SRMMU_ET_PTDBAD) ||
pmd  118  arch/sparc/mm/srmmu.c  (srmmu_pmd_page(pmd) > high_memory);
pmd  121  arch/sparc/mm/srmmu.c  int srmmu_pmd_present(pmd_t pmd)  { return pmd_val(pmd) & SRMMU_ET_PTD; }
pmd  271  arch/sparc/mm/srmmu.c  srmmu_pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd  276  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd  278  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd  280  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, page);
pmd  284  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd  289  arch/sparc/mm/srmmu.c  if (srmmu_pmd_bad(*pmd)) {
pmd  290  arch/sparc/mm/srmmu.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd  291  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd  294  arch/sparc/mm/srmmu.c  return (pte_t *) srmmu_pmd_page(*pmd) + address;
pmd  299  arch/sparc/mm/srmmu.c  srmmu_pmd_free_kernel(pmd_t *pmd)
pmd  301  arch/sparc/mm/srmmu.c  mem_map[MAP_NR(pmd)] = 1;
pmd  302  arch/sparc/mm/srmmu.c  free_page((unsigned long) pmd);
pmd  339  arch/sparc/mm/srmmu.c  srmmu_pte_alloc(pmd_t * pmd, unsigned long address)
pmd  344  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd  346  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd  348  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, page);
pmd  352  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd  357  arch/sparc/mm/srmmu.c  if (srmmu_pmd_bad(*pmd)) {
pmd  358  arch/sparc/mm/srmmu.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd  359  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd  362  arch/sparc/mm/srmmu.c  return (pte_t *) srmmu_pmd_page(*pmd) + address;
pmd  370  arch/sparc/mm/srmmu.c  srmmu_pmd_free(pmd_t * pmd)
pmd  372  arch/sparc/mm/srmmu.c  free_page((unsigned long) pmd);
pmd  95  arch/sparc/mm/sun4c.c  sun4c_pmd_page(pmd_t pmd)
pmd  97  arch/sparc/mm/sun4c.c  return ((pmd_val(pmd) & _SUN4C_PGD_PFN_MASK) << (_SUN4C_PGD_PAGE_SHIFT));
pmd  127  arch/sparc/mm/sun4c.c  int sun4c_pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd  128  arch/sparc/mm/sun4c.c  int sun4c_pmd_bad(pmd_t pmd)
pmd  130  arch/sparc/mm/sun4c.c  return ((pmd_val(pmd) & _SUN4C_PGD_MMU_MASK) != _SUN4C_PAGE_TABLE);
pmd  133  arch/sparc/mm/sun4c.c  int sun4c_pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _SUN4C_PAGE_VALID; }
pmd  248  arch/sparc/mm/sun4c.c  sun4c_pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd  254  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd  257  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd  259  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, page);
pmd  263  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  268  arch/sparc/mm/sun4c.c  if (sun4c_pmd_bad(*pmd)) {
pmd  269  arch/sparc/mm/sun4c.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd  270  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  274  arch/sparc/mm/sun4c.c  return (pte_t *) sun4c_pmd_page(*pmd) + address;
pmd  282  arch/sparc/mm/sun4c.c  sun4c_pmd_free_kernel(pmd_t *pmd)
pmd  300  arch/sparc/mm/sun4c.c  sun4c_pte_alloc(pmd_t * pmd, unsigned long address)
pmd  305  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd  307  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd  309  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, page);
pmd  312  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  317  arch/sparc/mm/sun4c.c  if (sun4c_pmd_bad(*pmd)) {
pmd  318  arch/sparc/mm/sun4c.c  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd  319  arch/sparc/mm/sun4c.c  sun4c_pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  324  arch/sparc/mm/sun4c.c  return (pte_t *) sun4c_pmd_page(*pmd) + address;
pmd  332  arch/sparc/mm/sun4c.c  sun4c_pmd_free(pmd_t * pmd)
pmd  474  fs/proc/array.c  static inline void statm_pte_range(pmd_t * pmd, unsigned long address, unsigned long size,
pmd  480  fs/proc/array.c  if (pmd_none(*pmd))
pmd  482  fs/proc/array.c  if (pmd_bad(*pmd)) {
pmd  483  fs/proc/array.c  printk("statm_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd  484  fs/proc/array.c  pmd_clear(pmd);
pmd  487  fs/proc/array.c  pte = pte_offset(pmd, address);
pmd  515  fs/proc/array.c  pmd_t * pmd;
pmd  525  fs/proc/array.c  pmd = pmd_offset(pgd, address);
pmd  531  fs/proc/array.c  statm_pte_range(pmd, address, end - address, pages, shared, dirty, total);
pmd  533  fs/proc/array.c  pmd++;
pmd  18  include/asm-alpha/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd  23  include/asm-alpha/page.h  #define pmd_val(x)  ((x).pmd)
pmd  166  include/asm-alpha/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd  167  include/asm-alpha/pgtable.h  { return PAGE_OFFSET + ((pmd_val(pmd) & _PFN_MASK) >> (32-PAGE_SHIFT)); }
pmd  182  include/asm-alpha/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd  183  include/asm-alpha/pgtable.h  extern inline int pmd_bad(pmd_t pmd)    { return (pmd_val(pmd) & ~_PFN_MASK) != _PAGE_TABLE || pmd_page(pmd) > high_memory; }
pmd  184  include/asm-alpha/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_VALID; }
pmd  281  include/asm-alpha/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd  284  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd  286  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd  288  include/asm-alpha/pgtable.h  pmd_set(pmd, page);
pmd  292  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  297  include/asm-alpha/pgtable.h  if (pmd_bad(*pmd)) {
pmd  298  include/asm-alpha/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd  299  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  302  include/asm-alpha/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd  305  include/asm-alpha/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd  307  include/asm-alpha/pgtable.h  mem_map[MAP_NR(pmd)] = 1;
pmd  308  include/asm-alpha/pgtable.h  free_page((unsigned long) pmd);
pmd  340  include/asm-alpha/pgtable.h  extern inline pte_t * pte_alloc(pmd_t *pmd, unsigned long address)
pmd  343  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd  345  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd  347  include/asm-alpha/pgtable.h  pmd_set(pmd, page);
pmd  350  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  355  include/asm-alpha/pgtable.h  if (pmd_bad(*pmd)) {
pmd  356  include/asm-alpha/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd  357  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd  360  include/asm-alpha/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd  363  include/asm-alpha/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd  365  include/asm-alpha/pgtable.h  free_page((unsigned long) pmd);
pmd  18  include/asm-i386/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd  23  include/asm-i386/page.h  #define pmd_val(x)  ((x).pmd)
pmd  142  include/asm-i386/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd  143  include/asm-i386/pgtable.h  extern inline int pmd_bad(pmd_t pmd)    { return (pmd_val(pmd) & ~PAGE_MASK) != _PAGE_TABLE || pmd_val(pmd) > high_memory; }
pmd  144  include/asm-i386/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_PRESENT; }
pmd  202  include/asm-i386/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd  203  include/asm-i386/pgtable.h  { return pmd_val(pmd) & PAGE_MASK; }
pmd  234  include/asm-i386/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t * pmd, unsigned long address)
pmd  237  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd  239  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd  241  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd  245  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd  250  include/asm-i386/pgtable.h  if (pmd_bad(*pmd)) {
pmd  251  include/asm-i386/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd  252  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd  255  include/asm-i386/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd  262  include/asm-i386/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd  276  include/asm-i386/pgtable.h  extern inline pte_t * pte_alloc(pmd_t * pmd, unsigned long address)
pmd  279  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd  281  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd  283  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd  286  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd  291  include/asm-i386/pgtable.h  if (pmd_bad(*pmd)) {
pmd  292  include/asm-i386/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd  293  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd  296  include/asm-i386/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd  303  include/asm-i386/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd  48  include/asm-sparc/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd  53  include/asm-sparc/page.h  #define pmd_val(x)      ((x).pmd)
pmd  149  mm/filemap.c  static inline void filemap_sync_pte_range(pmd_t * pmd,
pmd  156  mm/filemap.c  if (pmd_none(*pmd))
pmd  158  mm/filemap.c  if (pmd_bad(*pmd)) {
pmd  159  mm/filemap.c  printk("filemap_sync_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd  160  mm/filemap.c  pmd_clear(pmd);
pmd  163  mm/filemap.c  pte = pte_offset(pmd, address);
pmd  180  mm/filemap.c  pmd_t * pmd;
pmd  190  mm/filemap.c  pmd = pmd_offset(pgd, address);
pmd  197  mm/filemap.c  filemap_sync_pte_range(pmd, address, end - address, vma, offset, flags);
pmd  199  mm/filemap.c  pmd++;
pmd  122  mm/memory.c  pmd_t * pmd;
pmd  131  mm/memory.c  pmd = pmd_offset(dir, 0);
pmd  133  mm/memory.c  if (pmd_inuse(pmd)) {
pmd  134  mm/memory.c  pmd_free(pmd);
pmd  138  mm/memory.c  free_one_pmd(pmd+j);
pmd  139  mm/memory.c  pmd_free(pmd);
pmd  359  mm/memory.c  static inline void unmap_pte_range(pmd_t * pmd, unsigned long address, unsigned long size)
pmd  364  mm/memory.c  if (pmd_none(*pmd))
pmd  366  mm/memory.c  if (pmd_bad(*pmd)) {
pmd  367  mm/memory.c  printk("unmap_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd  368  mm/memory.c  pmd_clear(pmd);
pmd  371  mm/memory.c  pte = pte_offset(pmd, address);
pmd  387  mm/memory.c  pmd_t * pmd;
pmd  397  mm/memory.c  pmd = pmd_offset(dir, address);
pmd  403  mm/memory.c  unmap_pte_range(pmd, address, end - address);
pmd  405  mm/memory.c  pmd++;
pmd  445  mm/memory.c  static inline int zeromap_pmd_range(pmd_t * pmd, unsigned long address, unsigned long size, pte_t zero_pte)
pmd  454  mm/memory.c  pte_t * pte = pte_alloc(pmd, address);
pmd  459  mm/memory.c  pmd++;
pmd  474  mm/memory.c  pmd_t *pmd = pmd_alloc(dir, address);
pmd  476  mm/memory.c  if (!pmd)
pmd  478  mm/memory.c  error = zeromap_pmd_range(pmd, address, end - address, zero_pte);
pmd  514  mm/memory.c  static inline int remap_pmd_range(pmd_t * pmd, unsigned long address, unsigned long size,
pmd  525  mm/memory.c  pte_t * pte = pte_alloc(pmd, address);
pmd  530  mm/memory.c  pmd++;
pmd  544  mm/memory.c  pmd_t *pmd = pmd_alloc(dir, from);
pmd  546  mm/memory.c  if (!pmd)
pmd  548  mm/memory.c  error = remap_pmd_range(pmd, from, end - from, offset + from, prot);
pmd  579  mm/memory.c  pmd_t * pmd;
pmd  587  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd  588  mm/memory.c  if (!pmd) {
pmd  593  mm/memory.c  pte = pte_alloc(pmd, address);
pmd  960  mm/memory.c  pmd_t *pmd;
pmd  964  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd  965  mm/memory.c  if (!pmd) {
pmd  969  mm/memory.c  pte = pte_alloc(pmd, address);
pmd  1105  mm/memory.c  pmd_t *pmd;
pmd  1109  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd  1110  mm/memory.c  if (!pmd)
pmd  1112  mm/memory.c  pte = pte_alloc(pmd, address);
pmd  20  mm/mprotect.c  static inline void change_pte_range(pmd_t * pmd, unsigned long address,
pmd  26  mm/mprotect.c  if (pmd_none(*pmd))
pmd  28  mm/mprotect.c  if (pmd_bad(*pmd)) {
pmd  29  mm/mprotect.c  printk("change_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd  30  mm/mprotect.c  pmd_clear(pmd);
pmd  33  mm/mprotect.c  pte = pte_offset(pmd, address);
pmd  50  mm/mprotect.c  pmd_t * pmd;
pmd  60  mm/mprotect.c  pmd = pmd_offset(pgd, address);
pmd  66  mm/mprotect.c  change_pte_range(pmd, address, end - address, newprot);
pmd  68  mm/mprotect.c  pmd++;
pmd  467  mm/swap.c  pmd_t * pmd;
pmd  478  mm/swap.c  pmd = pmd_offset(dir, address);
pmd  485  mm/swap.c  int result = swap_out_pmd(vma, pmd, address, end, limit);
pmd  489  mm/swap.c  pmd++;
pmd  903  mm/swap.c  pmd_t * pmd;
pmd  913  mm/swap.c  pmd = pmd_offset(dir, address);
pmd  920  mm/swap.c  if (unuse_pmd(vma, pmd, address, end - address, offset, type, page))
pmd  923  mm/swap.c  pmd++;
pmd  38  mm/vmalloc.c  static inline void free_area_pte(pmd_t * pmd, unsigned long address, unsigned long size)
pmd  43  mm/vmalloc.c  if (pmd_none(*pmd))
pmd  45  mm/vmalloc.c  if (pmd_bad(*pmd)) {
pmd  46  mm/vmalloc.c  printk("free_area_pte: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd  47  mm/vmalloc.c  pmd_clear(pmd);
pmd  50  mm/vmalloc.c  pte = pte_offset(pmd, address);
pmd  72  mm/vmalloc.c  pmd_t * pmd;
pmd  82  mm/vmalloc.c  pmd = pmd_offset(dir, address);
pmd  88  mm/vmalloc.c  free_area_pte(pmd, address, end - address);
pmd  90  mm/vmalloc.c  pmd++;
pmd  130  mm/vmalloc.c  static inline int alloc_area_pmd(pmd_t * pmd, unsigned long address, unsigned long size)
pmd  139  mm/vmalloc.c  pte_t * pte = pte_alloc_kernel(pmd, address);
pmd  145  mm/vmalloc.c  pmd++;
pmd  157  mm/vmalloc.c  pmd_t *pmd = pmd_alloc_kernel(dir, address);
pmd  158  mm/vmalloc.c  if (!pmd)
pmd  160  mm/vmalloc.c  if (alloc_area_pmd(pmd, address, end - address))
pmd  189  mm/vmalloc.c  static inline int remap_area_pmd(pmd_t * pmd, unsigned long address, unsigned long size,
pmd  200  mm/vmalloc.c  pte_t * pte = pte_alloc_kernel(pmd, address);
pmd  205  mm/vmalloc.c  pmd++;
pmd  218  mm/vmalloc.c  pmd_t *pmd = pmd_alloc_kernel(dir, address);
pmd  219  mm/vmalloc.c  if (!pmd)
pmd  221  mm/vmalloc.c  if (remap_area_pmd(pmd, address, end - address, offset + address))
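
A note on the page.h entries above (include/asm-alpha/page.h:18,23, include/asm-i386/page.h:18,23, include/asm-sparc/page.h:48,53): every port wraps the middle-directory entry in a one-member struct and exposes the raw word only through pmd_val(), so that pgd_t, pmd_t and pte_t remain distinct types that cannot be mixed up silently. The following is a minimal, self-contained sketch of that idiom; _X_PAGE_PRESENT and x_pmd_present() are illustrative names, not taken from the kernel.

#include <stdio.h>

/* One-member struct wrapper, as in include/asm-i386/page.h:18,23. */
typedef struct { unsigned long pmd; } pmd_t;
#define pmd_val(x)  ((x).pmd)

/* Hypothetical "present" bit, for illustration only. */
#define _X_PAGE_PRESENT 0x001UL

/* Mirrors the shape of pmd_present() in include/asm-i386/pgtable.h:144. */
static int x_pmd_present(pmd_t pmd)
{
	return pmd_val(pmd) & _X_PAGE_PRESENT;
}

int main(void)
{
	pmd_t entry = { 0x1000UL | _X_PAGE_PRESENT };

	/* pmd_val() is the only way to reach the raw bits of the entry. */
	printf("pmd = %08lx, present = %d\n", pmd_val(entry), x_pmd_present(entry));
	return 0;
}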
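
The pte-level walkers listed above (statm_pte_range in fs/proc/array.c, filemap_sync_pte_range in mm/filemap.c, unmap_pte_range in mm/memory.c, change_pte_range in mm/mprotect.c, free_area_pte in mm/vmalloc.c) all open with the same guard: return on pmd_none(), report and pmd_clear() on pmd_bad(), then take pte_offset() and walk the page table, while the caller obtains the pmd with pmd_offset() and advances it with pmd++ one PMD_SIZE span at a time. Below is a hedged sketch of that shape against the old interfaces shown in the listing; it is not compilable outside a kernel tree of that vintage, and example_pte() plus the example_* names are placeholders for whatever per-PTE work the real callers do.

#include <linux/kernel.h>	/* printk */
#include <linux/mm.h>		/* pgd_t, pmd_t, pte_t and the walker macros of that era */

/* Placeholder for the per-PTE work (count, sync, unmap, ...). */
static void example_pte(pte_t *pte, unsigned long address)
{
	/* the real callers inspect or modify *pte here */
}

static inline void example_pte_range(pmd_t *pmd, unsigned long address, unsigned long size)
{
	pte_t *pte;
	unsigned long end;

	if (pmd_none(*pmd))
		return;
	if (pmd_bad(*pmd)) {			/* cf. mm/vmalloc.c:45-47 */
		printk("example_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
		pmd_clear(pmd);
		return;
	}
	pte = pte_offset(pmd, address);		/* cf. mm/vmalloc.c:50 */
	address &= ~PMD_MASK;
	end = address + size;
	if (end > PMD_SIZE)
		end = PMD_SIZE;
	while (address < end) {
		example_pte(pte, address);
		address += PAGE_SIZE;
		pte++;
	}
}

/* The real callers usually validate the pgd entry the same way before this. */
static inline void example_pmd_range(pgd_t *dir, unsigned long address, unsigned long size)
{
	pmd_t *pmd;
	unsigned long end;

	pmd = pmd_offset(dir, address);		/* cf. mm/vmalloc.c:82 */
	address &= ~PGDIR_MASK;
	end = address + size;
	if (end > PGDIR_SIZE)
		end = PGDIR_SIZE;
	while (address < end) {
		example_pte_range(pmd, address, end - address);
		address = (address + PMD_SIZE) & PMD_MASK;
		pmd++;				/* cf. mm/vmalloc.c:90 */
	}
}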
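
The include/asm-i386/pgtable.h rows at lines 276-296 outline the pte_alloc() idiom that zeromap_pmd_range(), remap_pmd_range() and the fault paths in mm/memory.c rely on: if the pmd is empty, allocate a page table, re-check pmd_none() so a table installed while the allocation slept is not overwritten, and fall back to BAD_PAGETABLE when the allocation fails or the pmd turns out to be bad. The reconstruction below fills in the lines the listing omits (the index calculation, the get_free_page() call and the free_page() of the losing page); those filled-in lines follow the kernels of that period but may differ slightly from the exact revision being indexed.

extern inline pte_t * pte_alloc(pmd_t * pmd, unsigned long address)
{
	/* index of the entry within the page table */
	address = (address >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);
	if (pmd_none(*pmd)) {					/* line 279 */
		pte_t * page = (pte_t *) get_free_page(GFP_KERNEL);
		if (pmd_none(*pmd)) {				/* line 281: re-check after possibly sleeping */
			if (page) {
				pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;	/* line 283 */
				return page + address;
			}
			pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;	/* line 286 */
			return NULL;
		}
		free_page((unsigned long) page);		/* someone else installed a table meanwhile */
	}
	if (pmd_bad(*pmd)) {					/* line 291 */
		printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
		pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
		return NULL;
	}
	return (pte_t *) pmd_page(*pmd) + address;		/* line 296 */
}

The pte_alloc_kernel() rows and the sparc srmmu/sun4c entries above follow the same pattern, substituting pmd_set(), srmmu_pmd_set() or sun4c_pmd_set() for the direct pmd_val() assignment and SRMMU_ET_PTDBAD or BAD_PAGETABLE as the failure marker.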