Lines Matching refs:vstart
1678 static unsigned long __ref kernel_map_hugepud(unsigned long vstart, in kernel_map_hugepud() argument
1683 u64 pte_val = vstart; in kernel_map_hugepud()
1686 if ((vstart & mask16gb) || in kernel_map_hugepud()
1687 (vend - vstart <= mask16gb)) { in kernel_map_hugepud()
1691 return vstart + PUD_SIZE; in kernel_map_hugepud()
1697 vend = vstart + mask16gb + 1UL; in kernel_map_hugepud()
1698 while (vstart < vend) { in kernel_map_hugepud()
1702 vstart += PUD_SIZE; in kernel_map_hugepud()
1705 return vstart; in kernel_map_hugepud()
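
These fragments appear to come from the sparc64 kernel's linear-mapping code. Pieced together, kernel_map_hugepud() advances vstart by a single PUD when the start is not 16 GB aligned or less than 16 GB remains, and otherwise walks a full 16 GB region in PUD-sized steps. The following user-space sketch reproduces only that stepping arithmetic; the helper name and the *_GUESS constants are made up for the demo, and the PUD-entry writes done by the real function are omitted.

/* Hedged reconstruction of the vstart-advancing logic only. */
#include <stdio.h>

#define PUD_SIZE_GUESS   (1UL << 33)          /* assumed PUD span for the demo */
#define MASK16GB_GUESS   ((1UL << 34) - 1UL)  /* assumed 16 GB - 1 mask */

static unsigned long map_hugepud_sketch(unsigned long vstart, unsigned long vend)
{
	/* Misaligned start or less than 16 GB left: cover a single PUD. */
	if ((vstart & MASK16GB_GUESS) || (vend - vstart <= MASK16GB_GUESS))
		return vstart + PUD_SIZE_GUESS;

	/* Aligned 16 GB region: step through it one PUD at a time. */
	vend = vstart + MASK16GB_GUESS + 1UL;
	while (vstart < vend)
		vstart += PUD_SIZE_GUESS;

	return vstart;
}

int main(void)
{
	/* A 16 GB-aligned, 32 GB range advances by a full 16 GB per call. */
	printf("advanced to %#lx\n", map_hugepud_sketch(0, 1UL << 35));
	return 0;
}
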
1708 static bool kernel_can_map_hugepud(unsigned long vstart, unsigned long vend, in kernel_can_map_hugepud() argument
1711 if (guard && !(vstart & ~PUD_MASK) && (vend - vstart) >= PUD_SIZE) in kernel_can_map_hugepud()
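
The eligibility test is simpler: a huge PUD is attempted only when huge mappings are enabled (guard), vstart is PUD-aligned, and at least one full PUD still fits before vend. kernel_can_map_hugepmd() further below applies the same test with PMD_MASK and PMD_SIZE. A minimal sketch, with illustrative mask/size values in place of the kernel constants:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative values only; the real PUD_MASK/PUD_SIZE come from kernel headers. */
#define PUD_SIZE_GUESS  (1UL << 33)
#define PUD_MASK_GUESS  (~(PUD_SIZE_GUESS - 1UL))

static bool can_map_huge_sketch(unsigned long vstart, unsigned long vend, bool guard)
{
	/* Allowed, start aligned to the huge size, and a whole huge entry fits. */
	return guard && !(vstart & ~PUD_MASK_GUESS) && (vend - vstart) >= PUD_SIZE_GUESS;
}

int main(void)
{
	printf("%d %d\n",
	       can_map_huge_sketch(0, PUD_SIZE_GUESS, true),         /* 1: aligned, fits */
	       can_map_huge_sketch(4096, 2 * PUD_SIZE_GUESS, true)); /* 0: misaligned start */
	return 0;
}
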
1717 static unsigned long __ref kernel_map_hugepmd(unsigned long vstart, in kernel_map_hugepmd() argument
1723 u64 pte_val = vstart; in kernel_map_hugepmd()
1726 if ((vstart & mask256mb) || in kernel_map_hugepmd()
1727 (vend - vstart <= mask256mb)) { in kernel_map_hugepmd()
1731 return vstart + PMD_SIZE; in kernel_map_hugepmd()
1734 if ((vstart & mask2gb) || in kernel_map_hugepmd()
1735 (vend - vstart <= mask2gb)) { in kernel_map_hugepmd()
1738 vend = vstart + mask256mb + 1UL; in kernel_map_hugepmd()
1742 vend = vstart + mask2gb + 1UL; in kernel_map_hugepmd()
1745 while (vstart < vend) { in kernel_map_hugepmd()
1749 vstart += PMD_SIZE; in kernel_map_hugepmd()
1753 return vstart; in kernel_map_hugepmd()
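
kernel_map_hugepmd() repeats the pattern one level down, but with two candidate region sizes: a lone PMD when the start is not 256 MB aligned or too little range remains, otherwise a 256 MB region or, when alignment and size allow, a 2 GB one, filled in PMD-sized steps. A sketch of just that sizing decision; the constants are assumed demo values, not the sparc64 header definitions.

#include <stdio.h>

#define PMD_SIZE_GUESS   (1UL << 23)          /* assumed PMD span */
#define MASK256MB_GUESS  ((1UL << 28) - 1UL)  /* assumed 256 MB - 1 */
#define MASK2GB_GUESS    ((1UL << 31) - 1UL)  /* assumed 2 GB - 1 */

static unsigned long map_hugepmd_sketch(unsigned long vstart, unsigned long vend)
{
	/* Not 256 MB aligned, or less than 256 MB left: one PMD only. */
	if ((vstart & MASK256MB_GUESS) || (vend - vstart <= MASK256MB_GUESS))
		return vstart + PMD_SIZE_GUESS;

	/* Prefer a 2 GB region when aligned and large enough, else 256 MB. */
	if ((vstart & MASK2GB_GUESS) || (vend - vstart <= MASK2GB_GUESS))
		vend = vstart + MASK256MB_GUESS + 1UL;
	else
		vend = vstart + MASK2GB_GUESS + 1UL;

	/* Fill the chosen region one PMD at a time. */
	while (vstart < vend)
		vstart += PMD_SIZE_GUESS;

	return vstart;
}

int main(void)
{
	printf("%#lx\n", map_hugepmd_sketch(0, 1UL << 32)); /* covers a full 2 GB region */
	return 0;
}
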
1756 static bool kernel_can_map_hugepmd(unsigned long vstart, unsigned long vend, in kernel_can_map_hugepmd() argument
1759 if (guard && !(vstart & ~PMD_MASK) && (vend - vstart) >= PMD_SIZE) in kernel_can_map_hugepmd()
1769 unsigned long vstart = PAGE_OFFSET + pstart; in kernel_map_range() local
1773 if ((vstart & ~PAGE_MASK) || (vend & ~PAGE_MASK)) { in kernel_map_range()
1775 vstart, vend); in kernel_map_range()
1779 while (vstart < vend) { in kernel_map_range()
1780 unsigned long this_end, paddr = __pa(vstart); in kernel_map_range()
1781 pgd_t *pgd = pgd_offset_k(vstart); in kernel_map_range()
1798 p4d = p4d_offset(pgd, vstart); in kernel_map_range()
1810 pud = pud_offset(p4d, vstart); in kernel_map_range()
1814 if (kernel_can_map_hugepud(vstart, vend, use_huge)) { in kernel_map_range()
1815 vstart = kernel_map_hugepud(vstart, vend, pud); in kernel_map_range()
1826 pmd = pmd_offset(pud, vstart); in kernel_map_range()
1830 if (kernel_can_map_hugepmd(vstart, vend, use_huge)) { in kernel_map_range()
1831 vstart = kernel_map_hugepmd(vstart, vend, pmd); in kernel_map_range()
1842 pte = pte_offset_kernel(pmd, vstart); in kernel_map_range()
1843 this_end = (vstart + PMD_SIZE) & PMD_MASK; in kernel_map_range()
1847 while (vstart < this_end) { in kernel_map_range()
1850 vstart += PAGE_SIZE; in kernel_map_range()
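
kernel_map_range() ties the helpers together: it maps a physical range at PAGE_OFFSET + pstart, rejects page-unaligned input, and walks pgd/p4d/pud/pmd/pte, letting the huge-mapping helpers short-circuit the walk where the predicates allow; otherwise it fills PTEs up to the next PMD boundary. The page-table calls cannot run outside the kernel, so the sketch below only models how vstart advances per outer iteration; the huge steps are simplified to a single PUD/PMD, and the clamp of this_end to vend is an assumption not visible in the fragments above.

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SIZE_GUESS  (1UL << 13)
#define PMD_SIZE_GUESS   (1UL << 23)
#define PMD_MASK_GUESS   (~(PMD_SIZE_GUESS - 1UL))
#define PUD_SIZE_GUESS   (1UL << 33)
#define PUD_MASK_GUESS   (~(PUD_SIZE_GUESS - 1UL))

/* Model of one pass over [vstart, vend): huge steps when possible, else PTEs. */
static void map_range_sketch(unsigned long vstart, unsigned long vend, bool use_huge)
{
	unsigned long iterations = 0;

	while (vstart < vend) {
		unsigned long this_end;

		iterations++;

		if (use_huge && !(vstart & ~PUD_MASK_GUESS) &&
		    vend - vstart >= PUD_SIZE_GUESS) {
			vstart += PUD_SIZE_GUESS;  /* stand-in for kernel_map_hugepud() */
			continue;
		}
		if (use_huge && !(vstart & ~PMD_MASK_GUESS) &&
		    vend - vstart >= PMD_SIZE_GUESS) {
			vstart += PMD_SIZE_GUESS;  /* stand-in for kernel_map_hugepmd() */
			continue;
		}

		/* Fill small pages up to the next PMD boundary (clamped to vend). */
		this_end = (vstart + PMD_SIZE_GUESS) & PMD_MASK_GUESS;
		if (this_end > vend)
			this_end = vend;
		while (vstart < this_end)
			vstart += PAGE_SIZE_GUESS;
	}

	printf("outer iterations: %lu\n", iterations);
}

int main(void)
{
	map_range_sketch(0, 1UL << 34, true);   /* 16 GB, huge mappings allowed */
	map_range_sketch(0, 1UL << 24, false);  /* 16 MB, small pages only */
	return 0;
}
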
2588 int __meminit vmemmap_populate(unsigned long vstart, unsigned long vend, in vmemmap_populate() argument
2602 vstart = vstart & PMD_MASK; in vmemmap_populate()
2604 for (; vstart < vend; vstart += PMD_SIZE) { in vmemmap_populate()
2605 pgd_t *pgd = vmemmap_pgd_populate(vstart, node); in vmemmap_populate()
2614 p4d = vmemmap_p4d_populate(pgd, vstart, node); in vmemmap_populate()
2618 pud = vmemmap_pud_populate(p4d, vstart, node); in vmemmap_populate()
2622 pmd = pmd_offset(pud, vstart); in vmemmap_populate()
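
Finally, vmemmap_populate() rounds vstart down to a PMD boundary and populates the struct-page array one PMD-sized block per iteration, allocating the pgd/p4d/pud levels on demand. A sketch of the coverage arithmetic only, i.e. how many PMD-sized blocks a given range touches; the PMD size is an assumed demo value.

#include <stdio.h>

#define PMD_SIZE_GUESS  (1UL << 23)
#define PMD_MASK_GUESS  (~(PMD_SIZE_GUESS - 1UL))

/* Count the PMD-sized blocks the loop in vmemmap_populate() would visit. */
static unsigned long vmemmap_blocks_sketch(unsigned long vstart, unsigned long vend)
{
	unsigned long n = 0;

	vstart &= PMD_MASK_GUESS;                  /* round the start down */
	for (; vstart < vend; vstart += PMD_SIZE_GUESS)
		n++;                               /* one pgd/p4d/pud/pmd populate per block */

	return n;
}

int main(void)
{
	/* A range that starts mid-block still gets its first block counted. */
	printf("%lu\n", vmemmap_blocks_sketch(3 * PMD_SIZE_GUESS / 2, 4 * PMD_SIZE_GUESS));
	return 0;
}
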