| tag | line | file | source code |
|-----|------|------|-------------|
| SECTORS_PER_PAGE | 68 | drivers/scsi/scsi.c | `#if SECTORS_PER_PAGE <= 8` |
| SECTORS_PER_PAGE | 70 | drivers/scsi/scsi.c | `#elif SECTORS_PER_PAGE <= 32` |
| SECTORS_PER_PAGE | 2181 | drivers/scsi/scsi.c | `for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)` |
| SECTORS_PER_PAGE | 2182 | drivers/scsi/scsi.c | `for(j=0; j<=SECTORS_PER_PAGE - nbits; j++){` |
| SECTORS_PER_PAGE | 2206 | drivers/scsi/scsi.c | `for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {` |
| SECTORS_PER_PAGE | 2216 | drivers/scsi/scsi.c | `if ((mask << sector) >= (1 << SECTORS_PER_PAGE))` |
| SECTORS_PER_PAGE | 2550 | drivers/scsi/scsi.c | `for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)` |
| SECTORS_PER_PAGE | 2554 | drivers/scsi/scsi.c | `(dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));` |
| SECTORS_PER_PAGE | 2558 | drivers/scsi/scsi.c | `(dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));` |
| SECTORS_PER_PAGE | 2566 | drivers/scsi/scsi.c | `new_dma_sectors = 2*SECTORS_PER_PAGE;    /* Base value we use */` |
| SECTORS_PER_PAGE | 2575 | drivers/scsi/scsi.c | `new_dma_sectors += SECTORS_PER_PAGE;  /* Increment for each host */` |
| SECTORS_PER_PAGE | 2607 | drivers/scsi/scsi.c | `size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);` |
| SECTORS_PER_PAGE | 2611 | drivers/scsi/scsi.c | `size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(*new_dma_malloc_pages);` |
| SECTORS_PER_PAGE | 2620 | drivers/scsi/scsi.c | `for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)` |
| SECTORS_PER_PAGE | 2632 | drivers/scsi/scsi.c | `size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);` |
| SECTORS_PER_PAGE | 2640 | drivers/scsi/scsi.c | `size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);` |
| SECTORS_PER_PAGE | 3149 | drivers/scsi/scsi.c | `size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);` |
| SECTORS_PER_PAGE | 3155 | drivers/scsi/scsi.c | `scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);` |
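
The uses listed above all belong to the DMA sector pool in drivers/scsi/scsi.c: one `FreeSectorBitmap` word per page tracks which of that page's sectors are in use, and the nested loops at lines 2181-2182 scan those bitmaps for a run of `nbits` free sectors. The following is a minimal user-space sketch of that bitmap search, not the kernel's actual code. The identifiers `SECTORS_PER_PAGE`, `FreeSectorBitmap`, `dma_sectors` and `dma_malloc_freelist` come from the listing; the `PAGE_SIZE`/`SECTOR_SIZE` values, the `alloc_sectors()` helper and the demo `main()` are assumptions added for illustration.

```c
/* Sketch of a scsi_malloc()-style free-sector bitmap search (not kernel code). */
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE        4096          /* assumed page size            */
#define SECTOR_SIZE      512           /* assumed 512-byte DMA sectors */
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)

/* Pick the narrowest bitmap word that holds one bit per sector,
 * in the same spirit as lines 68-70 of the listing. */
#if SECTORS_PER_PAGE <= 8
typedef unsigned char FreeSectorBitmap;
#elif SECTORS_PER_PAGE <= 32
typedef unsigned int FreeSectorBitmap;
#else
#error SECTORS_PER_PAGE too large for a single bitmap word
#endif

static unsigned int dma_sectors = 2 * SECTORS_PER_PAGE; /* base value, cf. line 2566 */
static FreeSectorBitmap *dma_malloc_freelist;            /* one bitmap word per page  */

/*
 * Find nbits contiguous free sectors (nbits <= SECTORS_PER_PAGE), mark them
 * used, and report the page and starting sector.  The loop bounds mirror the
 * ones visible at lines 2181-2182 of the listing.  Returns 0 on success.
 */
static int alloc_sectors(unsigned int nbits,
                         unsigned int *page_out, unsigned int *sector_out)
{
    FreeSectorBitmap mask = (FreeSectorBitmap)((1u << nbits) - 1); /* nbits ones */
    unsigned int i, j;

    for (i = 0; i < dma_sectors / SECTORS_PER_PAGE; i++)
        for (j = 0; j <= SECTORS_PER_PAGE - nbits; j++) {
            if ((dma_malloc_freelist[i] & (mask << j)) == 0) {
                dma_malloc_freelist[i] |= (mask << j);  /* claim the run */
                *page_out = i;
                *sector_out = j;
                return 0;
            }
        }
    return -1;                                          /* nothing fits */
}

int main(void)
{
    unsigned int page, sector;

    dma_malloc_freelist = calloc(dma_sectors / SECTORS_PER_PAGE,
                                 sizeof(*dma_malloc_freelist));
    if (!dma_malloc_freelist)
        return 1;

    /* Grab two 3-sector runs and show where they land. */
    if (alloc_sectors(3, &page, &sector) == 0)
        printf("first run:  page %u, sector %u\n", page, sector);
    if (alloc_sectors(3, &page, &sector) == 0)
        printf("second run: page %u, sector %u\n", page, sector);

    free(dma_malloc_freelist);
    return 0;
}
```

Keeping a whole page's occupancy in a single bitmap word is what makes the shift-and-mask test cheap; it is also why the check at line 2216, `(mask << sector) >= (1 << SECTORS_PER_PAGE)`, can reject a request whose mask would spill past the page boundary with one comparison.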