tag  line  file  source code
pmd    73  arch/i386/kernel/vm86.c  pmd_t *pmd;
pmd    85  arch/i386/kernel/vm86.c  pmd = pmd_offset(pgd, 0xA0000);
pmd    86  arch/i386/kernel/vm86.c  if (pmd_none(*pmd))
pmd    88  arch/i386/kernel/vm86.c  if (pmd_bad(*pmd)) {
pmd    89  arch/i386/kernel/vm86.c  printk("vm86: bad pmd entry [%p]:%08lx\n", pmd, pmd_val(*pmd));
pmd    90  arch/i386/kernel/vm86.c  pmd_clear(pmd);
pmd    93  arch/i386/kernel/vm86.c  pte = pte_offset(pmd, 0xA0000);
pmd    34  arch/ppc/mm/fault.c  pmd_t *pmd;
pmd    58  arch/ppc/mm/fault.c  pmd = pmd_offset(dir, regs->dar & PAGE_MASK);
pmd    59  arch/ppc/mm/fault.c  if (pmd && pmd_present(*pmd))
pmd    61  arch/ppc/mm/fault.c  pte = pte_offset(pmd, regs->dar & PAGE_MASK);
pmd    83  arch/ppc/mm/fault.c  pmd_t *pmd;
pmd   102  arch/ppc/mm/fault.c  pmd = pmd_offset(dir, regs->dar & PAGE_MASK);
pmd   103  arch/ppc/mm/fault.c  if (pmd && pmd_present(*pmd))
pmd   105  arch/ppc/mm/fault.c  pte = pte_offset(pmd, regs->dar & PAGE_MASK);
pmd   254  arch/ppc/mm/fault.c  pmd_t *pmd;
pmd   259  arch/ppc/mm/fault.c  pmd = pmd_offset(dir, address & PAGE_MASK);
pmd   260  arch/ppc/mm/fault.c  if (pmd && pmd_present(*pmd))
pmd   262  arch/ppc/mm/fault.c  pte = pte_offset(pmd, address & PAGE_MASK);
pmd    31  arch/sparc/mm/srmmu.c  srmmu_pmd_page(pmd_t pmd)
pmd    35  arch/sparc/mm/srmmu.c  page = (pmd_val(pmd) & (SRMMU_PTD_PTP_MASK)) << SRMMU_PTD_PTP_PADDR_SHIFT;
pmd    67  arch/sparc/mm/srmmu.c  int srmmu_pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd    68  arch/sparc/mm/srmmu.c  int srmmu_pmd_bad(pmd_t pmd)
pmd    70  arch/sparc/mm/srmmu.c  return ((pmd_val(pmd)&SRMMU_ET_PTDBAD)==SRMMU_ET_PTDBAD) ||
pmd    71  arch/sparc/mm/srmmu.c  (srmmu_pmd_page(pmd) > high_memory);
pmd    74  arch/sparc/mm/srmmu.c  int srmmu_pmd_present(pmd_t pmd)  { return pmd_val(pmd) & SRMMU_ET_PTD; }
pmd   223  arch/sparc/mm/srmmu.c  srmmu_pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd   228  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd   230  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd   232  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, page);
pmd   236  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd   241  arch/sparc/mm/srmmu.c  if (srmmu_pmd_bad(*pmd)) {
pmd   242  arch/sparc/mm/srmmu.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd   243  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd   246  arch/sparc/mm/srmmu.c  return (pte_t *) srmmu_pmd_page(*pmd) + address;
pmd   251  arch/sparc/mm/srmmu.c  srmmu_pmd_free_kernel(pmd_t *pmd)
pmd   253  arch/sparc/mm/srmmu.c  mem_map[MAP_NR(pmd)].reserved = 0;
pmd   254  arch/sparc/mm/srmmu.c  free_page((unsigned long) pmd);
pmd   291  arch/sparc/mm/srmmu.c  srmmu_pte_alloc(pmd_t * pmd, unsigned long address)
pmd   296  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd   298  arch/sparc/mm/srmmu.c  if (srmmu_pmd_none(*pmd)) {
pmd   300  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, page);
pmd   303  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd   308  arch/sparc/mm/srmmu.c  if (srmmu_pmd_bad(*pmd)) {
pmd   309  arch/sparc/mm/srmmu.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd   310  arch/sparc/mm/srmmu.c  srmmu_pmd_set(pmd, (pte_t *) SRMMU_ET_PTDBAD);
pmd   313  arch/sparc/mm/srmmu.c  return (pte_t *) srmmu_pmd_page(*pmd) + address;
pmd   321  arch/sparc/mm/srmmu.c  srmmu_pmd_free(pmd_t * pmd)
pmd   323  arch/sparc/mm/srmmu.c  free_page((unsigned long) pmd);
pmd    76  arch/sparc/mm/sun4c.c  int sun4c_pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd    77  arch/sparc/mm/sun4c.c  int sun4c_pmd_bad(pmd_t pmd)
pmd    79  arch/sparc/mm/sun4c.c  return (pmd_val(pmd) & ~PAGE_MASK) != PGD_TABLE || pmd_val(pmd) > high_memory;
pmd    82  arch/sparc/mm/sun4c.c  int sun4c_pmd_present(pmd_t pmd)  { return pmd_val(pmd) & PGD_PRESENT; }
pmd   136  arch/sparc/mm/sun4c.c  unsigned long sun4c_pmd_page(pmd_t pmd)
pmd   138  arch/sparc/mm/sun4c.c  return (pmd_val(pmd) & PAGE_MASK);
pmd   381  arch/sparc/mm/sun4c.c  pte_t *sun4c_pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd   384  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd   386  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd   388  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) page;
pmd   392  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   397  arch/sparc/mm/sun4c.c  if (sun4c_pmd_bad(*pmd)) {
pmd   398  arch/sparc/mm/sun4c.c  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd   399  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   402  arch/sparc/mm/sun4c.c  return (pte_t *) sun4c_pmd_page(*pmd) + address;
pmd   409  arch/sparc/mm/sun4c.c  void sun4c_pmd_free_kernel(pmd_t *pmd)
pmd   411  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = 0;
pmd   424  arch/sparc/mm/sun4c.c  pte_t *sun4c_pte_alloc(pmd_t * pmd, unsigned long address)
pmd   427  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd   429  arch/sparc/mm/sun4c.c  if (sun4c_pmd_none(*pmd)) {
pmd   431  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) page;
pmd   434  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   439  arch/sparc/mm/sun4c.c  if (sun4c_pmd_bad(*pmd)) {
pmd   440  arch/sparc/mm/sun4c.c  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   441  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = PGD_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   444  arch/sparc/mm/sun4c.c  return (pte_t *) sun4c_pmd_page(*pmd) + address;
pmd   451  arch/sparc/mm/sun4c.c  void sun4c_pmd_free(pmd_t * pmd)
pmd   453  arch/sparc/mm/sun4c.c  pmd_val(*pmd) = 0;
pmd   544  fs/proc/array.c  static inline void statm_pte_range(pmd_t * pmd, unsigned long address, unsigned long size,
pmd   550  fs/proc/array.c  if (pmd_none(*pmd))
pmd   552  fs/proc/array.c  if (pmd_bad(*pmd)) {
pmd   553  fs/proc/array.c  printk("statm_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd   554  fs/proc/array.c  pmd_clear(pmd);
pmd   557  fs/proc/array.c  pte = pte_offset(pmd, address);
pmd   585  fs/proc/array.c  pmd_t * pmd;
pmd   595  fs/proc/array.c  pmd = pmd_offset(pgd, address);
pmd   601  fs/proc/array.c  statm_pte_range(pmd, address, end - address, pages, shared, dirty, total);
pmd   603  fs/proc/array.c  pmd++;
pmd    18  include/asm-alpha/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd    23  include/asm-alpha/page.h  #define pmd_val(x)  ((x).pmd)
pmd   229  include/asm-alpha/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd   230  include/asm-alpha/pgtable.h  { return PAGE_OFFSET + ((pmd_val(pmd) & _PFN_MASK) >> (32-PAGE_SHIFT)); }
pmd   245  include/asm-alpha/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd   246  include/asm-alpha/pgtable.h  extern inline int pmd_bad(pmd_t pmd)    { return (pmd_val(pmd) & ~_PFN_MASK) != _PAGE_TABLE || pmd_page(pmd) > high_memory; }
pmd   247  include/asm-alpha/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_VALID; }
pmd   338  include/asm-alpha/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd   341  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd   343  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd   345  include/asm-alpha/pgtable.h  pmd_set(pmd, page);
pmd   349  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   354  include/asm-alpha/pgtable.h  if (pmd_bad(*pmd)) {
pmd   355  include/asm-alpha/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   356  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   359  include/asm-alpha/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   362  include/asm-alpha/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd   364  include/asm-alpha/pgtable.h  mem_map[MAP_NR(pmd)].reserved = 0;
pmd   365  include/asm-alpha/pgtable.h  free_page((unsigned long) pmd);
pmd   397  include/asm-alpha/pgtable.h  extern inline pte_t * pte_alloc(pmd_t *pmd, unsigned long address)
pmd   400  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd   402  include/asm-alpha/pgtable.h  if (pmd_none(*pmd)) {
pmd   404  include/asm-alpha/pgtable.h  pmd_set(pmd, page);
pmd   407  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   412  include/asm-alpha/pgtable.h  if (pmd_bad(*pmd)) {
pmd   413  include/asm-alpha/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   414  include/asm-alpha/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   417  include/asm-alpha/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   420  include/asm-alpha/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd   422  include/asm-alpha/pgtable.h  free_page((unsigned long) pmd);
pmd    18  include/asm-i386/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd    23  include/asm-i386/page.h  #define pmd_val(x)  ((x).pmd)
pmd   247  include/asm-i386/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd   248  include/asm-i386/pgtable.h  extern inline int pmd_bad(pmd_t pmd)    { return (pmd_val(pmd) & ~PAGE_MASK) != _PAGE_TABLE || pmd_val(pmd) > high_memory; }
pmd   249  include/asm-i386/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_PRESENT; }
pmd   306  include/asm-i386/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd   307  include/asm-i386/pgtable.h  { return pmd_val(pmd) & PAGE_MASK; }
pmd   338  include/asm-i386/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t * pmd, unsigned long address)
pmd   341  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd   343  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd   345  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd   349  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   354  include/asm-i386/pgtable.h  if (pmd_bad(*pmd)) {
pmd   355  include/asm-i386/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   356  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   359  include/asm-i386/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   366  include/asm-i386/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd   368  include/asm-i386/pgtable.h  pmd_val(*pmd) = 0;
pmd   381  include/asm-i386/pgtable.h  extern inline pte_t * pte_alloc(pmd_t * pmd, unsigned long address)
pmd   384  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd   386  include/asm-i386/pgtable.h  if (pmd_none(*pmd)) {
pmd   388  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd   391  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   396  include/asm-i386/pgtable.h  if (pmd_bad(*pmd)) {
pmd   397  include/asm-i386/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   398  include/asm-i386/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   401  include/asm-i386/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   408  include/asm-i386/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd   410  include/asm-i386/pgtable.h  pmd_val(*pmd) = 0;
pmd    20  include/asm-mips/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd    25  include/asm-mips/page.h  #define pmd_val(x)  ((x).pmd)
pmd   218  include/asm-mips/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd   219  include/asm-mips/pgtable.h  { return PAGE_OFFSET + (pmd_val(pmd) & PAGE_MASK); }
pmd   237  include/asm-mips/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return (pmd_val(pmd) & PAGE_MASK) == ((unsigned long) invalid_pte_table - PAGE_OFFSET); }
pmd   239  include/asm-mips/pgtable.h  extern inline int pmd_bad(pmd_t pmd)
pmd   241  include/asm-mips/pgtable.h  return (pmd_val(pmd) & ~PAGE_MASK) != _PAGE_TABLE ||
pmd   242  include/asm-mips/pgtable.h  pmd_page(pmd) > high_memory ||
pmd   243  include/asm-mips/pgtable.h  pmd_page(pmd) < PAGE_OFFSET;
pmd   245  include/asm-mips/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_PRESENT; }
pmd   378  include/asm-mips/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t *pmd, unsigned long address)
pmd   381  include/asm-mips/pgtable.h  if (pmd_none(*pmd)) {
pmd   383  include/asm-mips/pgtable.h  if (pmd_none(*pmd)) {
pmd   390  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *)page);
pmd   393  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   398  include/asm-mips/pgtable.h  if (pmd_bad(*pmd)) {
pmd   399  include/asm-mips/pgtable.h  printk("Bad pmd in pte_alloc_kernel: %08lx\n", pmd_val(*pmd));
pmd   400  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   403  include/asm-mips/pgtable.h  return (pte_t *) (pmd_page(*pmd) + (PT_OFFSET - PAGE_OFFSET)) + address;
pmd   410  include/asm-mips/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd   429  include/asm-mips/pgtable.h  extern inline pte_t * pte_alloc(pmd_t * pmd, unsigned long address)
pmd   432  include/asm-mips/pgtable.h  if (pmd_none(*pmd)) {
pmd   434  include/asm-mips/pgtable.h  if (pmd_none(*pmd)) {
pmd   440  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *)page);
pmd   443  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   448  include/asm-mips/pgtable.h  if (pmd_bad(*pmd)) {
pmd   449  include/asm-mips/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   450  include/asm-mips/pgtable.h  pmd_set(pmd, (pte_t *) BAD_PAGETABLE);
pmd   453  include/asm-mips/pgtable.h  return (pte_t *) (pmd_page(*pmd) + (PT_OFFSET - PAGE_OFFSET)) + address;
pmd   460  include/asm-mips/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd    20  include/asm-ppc/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd    25  include/asm-ppc/page.h  #define pmd_val(x)  ((x).pmd)
pmd   216  include/asm-ppc/pgtable.h  extern inline int pmd_none(pmd_t pmd)    { return !pmd_val(pmd); }
pmd   217  include/asm-ppc/pgtable.h  extern inline int pmd_bad(pmd_t pmd)    { return (pmd_val(pmd) & ~PAGE_MASK) != _PAGE_TABLE; }
pmd   218  include/asm-ppc/pgtable.h  extern inline int pmd_present(pmd_t pmd)  { return pmd_val(pmd) & _PAGE_PRESENT; }
pmd   283  include/asm-ppc/pgtable.h  extern inline unsigned long pmd_page(pmd_t pmd)
pmd   284  include/asm-ppc/pgtable.h  { return pmd_val(pmd) & PAGE_MASK; }
pmd   385  include/asm-ppc/pgtable.h  extern inline pte_t * pte_alloc_kernel(pmd_t * pmd, unsigned long address)
pmd   388  include/asm-ppc/pgtable.h  if (pmd_none(*pmd)) {
pmd   390  include/asm-ppc/pgtable.h  if (pmd_none(*pmd)) {
pmd   393  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd   398  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   403  include/asm-ppc/pgtable.h  if (pmd_bad(*pmd)) {
pmd   404  include/asm-ppc/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   406  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   409  include/asm-ppc/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   416  include/asm-ppc/pgtable.h  extern inline void pmd_free_kernel(pmd_t * pmd)
pmd   430  include/asm-ppc/pgtable.h  extern inline pte_t * pte_alloc(pmd_t * pmd, unsigned long address)
pmd   433  include/asm-ppc/pgtable.h  if (pmd_none(*pmd)) {
pmd   435  include/asm-ppc/pgtable.h  if (pmd_none(*pmd)) {
pmd   437  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) page;
pmd   440  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   445  include/asm-ppc/pgtable.h  if (pmd_bad(*pmd)) {
pmd   446  include/asm-ppc/pgtable.h  printk("Bad pmd in pte_alloc: %08lx\n", pmd_val(*pmd));
pmd   447  include/asm-ppc/pgtable.h  pmd_val(*pmd) = _PAGE_TABLE | (unsigned long) BAD_PAGETABLE;
pmd   450  include/asm-ppc/pgtable.h  return (pte_t *) pmd_page(*pmd) + address;
pmd   457  include/asm-ppc/pgtable.h  extern inline void pmd_free(pmd_t * pmd)
pmd    51  include/asm-sparc/page.h  typedef struct { unsigned long pmd; } pmd_t;
pmd    56  include/asm-sparc/page.h  #define pmd_val(x)      ((x).pmd)
pmd   206  mm/filemap.c  static inline int filemap_sync_pte_range(pmd_t * pmd,
pmd   214  mm/filemap.c  if (pmd_none(*pmd))
pmd   216  mm/filemap.c  if (pmd_bad(*pmd)) {
pmd   217  mm/filemap.c  printk("filemap_sync_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd   218  mm/filemap.c  pmd_clear(pmd);
pmd   221  mm/filemap.c  pte = pte_offset(pmd, address);
pmd   240  mm/filemap.c  pmd_t * pmd;
pmd   251  mm/filemap.c  pmd = pmd_offset(pgd, address);
pmd   259  mm/filemap.c  error |= filemap_sync_pte_range(pmd, address, end - address, vma, offset, flags);
pmd   261  mm/filemap.c  pmd++;
pmd   114  mm/memory.c  pmd_t * pmd;
pmd   123  mm/memory.c  pmd = pmd_offset(dir, 0);
pmd   125  mm/memory.c  if (!pmd_inuse(pmd)) {
pmd   128  mm/memory.c  free_one_pmd(pmd+j);
pmd   130  mm/memory.c  pmd_free(pmd);
pmd   324  mm/memory.c  static inline void zap_pte_range(pmd_t * pmd, unsigned long address, unsigned long size)
pmd   329  mm/memory.c  if (pmd_none(*pmd))
pmd   331  mm/memory.c  if (pmd_bad(*pmd)) {
pmd   332  mm/memory.c  printk("zap_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd   333  mm/memory.c  pmd_clear(pmd);
pmd   336  mm/memory.c  pte = pte_offset(pmd, address);
pmd   352  mm/memory.c  pmd_t * pmd;
pmd   362  mm/memory.c  pmd = pmd_offset(dir, address);
pmd   368  mm/memory.c  zap_pte_range(pmd, address, end - address);
pmd   370  mm/memory.c  pmd++;
pmd   409  mm/memory.c  static inline int zeromap_pmd_range(pmd_t * pmd, unsigned long address, unsigned long size, pte_t zero_pte)
pmd   418  mm/memory.c  pte_t * pte = pte_alloc(pmd, address);
pmd   423  mm/memory.c  pmd++;
pmd   438  mm/memory.c  pmd_t *pmd = pmd_alloc(dir, address);
pmd   440  mm/memory.c  if (!pmd)
pmd   442  mm/memory.c  error = zeromap_pmd_range(pmd, address, end - address, zero_pte);
pmd   478  mm/memory.c  static inline int remap_pmd_range(pmd_t * pmd, unsigned long address, unsigned long size,
pmd   489  mm/memory.c  pte_t * pte = pte_alloc(pmd, address);
pmd   494  mm/memory.c  pmd++;
pmd   508  mm/memory.c  pmd_t *pmd = pmd_alloc(dir, from);
pmd   510  mm/memory.c  if (!pmd)
pmd   512  mm/memory.c  error = remap_pmd_range(pmd, from, end - from, offset + from, prot);
pmd   543  mm/memory.c  pmd_t * pmd;
pmd   551  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd   552  mm/memory.c  if (!pmd) {
pmd   557  mm/memory.c  pte = pte_alloc(pmd, address);
pmd  1032  mm/memory.c  pmd_t *pmd;
pmd  1036  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd  1037  mm/memory.c  if (!pmd) {
pmd  1041  mm/memory.c  pte = pte_alloc(pmd, address);
pmd  1178  mm/memory.c  pmd_t *pmd;
pmd  1182  mm/memory.c  pmd = pmd_alloc(pgd, address);
pmd  1183  mm/memory.c  if (!pmd)
pmd  1185  mm/memory.c  pte = pte_alloc(pmd, address);
pmd    20  mm/mprotect.c  static inline void change_pte_range(pmd_t * pmd, unsigned long address,
pmd    26  mm/mprotect.c  if (pmd_none(*pmd))
pmd    28  mm/mprotect.c  if (pmd_bad(*pmd)) {
pmd    29  mm/mprotect.c  printk("change_pte_range: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd    30  mm/mprotect.c  pmd_clear(pmd);
pmd    33  mm/mprotect.c  pte = pte_offset(pmd, address);
pmd    50  mm/mprotect.c  pmd_t * pmd;
pmd    60  mm/mprotect.c  pmd = pmd_offset(pgd, address);
pmd    66  mm/mprotect.c  change_pte_range(pmd, address, end - address, newprot);
pmd    68  mm/mprotect.c  pmd++;
pmd   536  mm/swap.c  pmd_t * pmd;
pmd   547  mm/swap.c  pmd = pmd_offset(dir, address);
pmd   554  mm/swap.c  int result = swap_out_pmd(tsk, vma, pmd, address, end, limit);
pmd   558  mm/swap.c  pmd++;
pmd   965  mm/swap.c  pmd_t * pmd;
pmd   975  mm/swap.c  pmd = pmd_offset(dir, address);
pmd   982  mm/swap.c  if (unuse_pmd(vma, pmd, address, end - address, offset, type, page))
pmd   985  mm/swap.c  pmd++;
pmd    41  mm/vmalloc.c  static inline void free_area_pte(pmd_t * pmd, unsigned long address, unsigned long size)
pmd    46  mm/vmalloc.c  if (pmd_none(*pmd))
pmd    48  mm/vmalloc.c  if (pmd_bad(*pmd)) {
pmd    49  mm/vmalloc.c  printk("free_area_pte: bad pmd (%08lx)\n", pmd_val(*pmd));
pmd    50  mm/vmalloc.c  pmd_clear(pmd);
pmd    53  mm/vmalloc.c  pte = pte_offset(pmd, address);
pmd    75  mm/vmalloc.c  pmd_t * pmd;
pmd    85  mm/vmalloc.c  pmd = pmd_offset(dir, address);
pmd    91  mm/vmalloc.c  free_area_pte(pmd, address, end - address);
pmd    93  mm/vmalloc.c  pmd++;
pmd   133  mm/vmalloc.c  static inline int alloc_area_pmd(pmd_t * pmd, unsigned long address, unsigned long size)
pmd   142  mm/vmalloc.c  pte_t * pte = pte_alloc_kernel(pmd, address);
pmd   148  mm/vmalloc.c  pmd++;
pmd   160  mm/vmalloc.c  pmd_t *pmd = pmd_alloc_kernel(dir, address);
pmd   161  mm/vmalloc.c  if (!pmd)
pmd   163  mm/vmalloc.c  if (alloc_area_pmd(pmd, address, end - address))
pmd   192  mm/vmalloc.c  static inline int remap_area_pmd(pmd_t * pmd, unsigned long address, unsigned long size,
pmd   203  mm/vmalloc.c  pte_t * pte = pte_alloc_kernel(pmd, address);
pmd   208  mm/vmalloc.c  pmd++;
pmd   221  mm/vmalloc.c  pmd_t *pmd = pmd_alloc_kernel(dir, address);
pmd   222  mm/vmalloc.c  if (!pmd)
pmd   224  mm/vmalloc.c  if (remap_area_pmd(pmd, address, end - address, offset + address))
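
Taken together, the entries above cluster into two recurring idioms. The *_pte_range helpers (zap_pte_range, free_area_pte, statm_pte_range, filemap_sync_pte_range, change_pte_range) walk and validate: pmd_offset, then pmd_none and pmd_bad with pmd_clear on a corrupt entry, then pte_offset. The pte_alloc and pte_alloc_kernel inlines allocate and recheck: test pmd_none, get a page, test pmd_none again to close the race, install the table with pmd_set or a direct pmd_val assignment, and fall back to BAD_PAGETABLE on failure. The sketch below condenses the first idiom using only the 1.3-era interfaces shown in the listing; the function name walk_to_pte is made up for illustration, and the code only builds inside that kernel tree.

/*
 * Illustrative sketch, not kernel source: condenses the walk-and-validate
 * idiom shared by the *_pte_range helpers listed above. The function name
 * is hypothetical; the calls (pmd_offset, pmd_none, pmd_bad, pmd_clear,
 * pte_offset, pmd_val, printk) are the ones that appear in the listing.
 */
#include <linux/kernel.h>       /* printk() */
#include <asm/pgtable.h>        /* pgd_t, pmd_t, pte_t and the pmd_*()/pte_*() helpers */

static pte_t *walk_to_pte(pgd_t *dir, unsigned long address)
{
        pmd_t *pmd;

        pmd = pmd_offset(dir, address);         /* slot in the page middle directory */
        if (pmd_none(*pmd))                     /* nothing mapped at this slot */
                return NULL;
        if (pmd_bad(*pmd)) {                    /* corrupt entry: report it and reset it */
                printk("walk_to_pte: bad pmd (%08lx)\n", pmd_val(*pmd));
                pmd_clear(pmd);
                return NULL;
        }
        return pte_offset(pmd, address);        /* page table entry covering address */
}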