tag               line  file                 source code
SECTORS_PER_PAGE    68  drivers/scsi/scsi.c  #if SECTORS_PER_PAGE <= 8
SECTORS_PER_PAGE    70  drivers/scsi/scsi.c  #elif SECTORS_PER_PAGE <= 32
SECTORS_PER_PAGE  2187  drivers/scsi/scsi.c  for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2188  drivers/scsi/scsi.c  for(j=0; j<=SECTORS_PER_PAGE - nbits; j++){
SECTORS_PER_PAGE  2212  drivers/scsi/scsi.c  for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
SECTORS_PER_PAGE  2222  drivers/scsi/scsi.c  if ((mask << sector) >= (1 << SECTORS_PER_PAGE))
SECTORS_PER_PAGE  2562  drivers/scsi/scsi.c  for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2566  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
SECTORS_PER_PAGE  2570  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
SECTORS_PER_PAGE  2578  drivers/scsi/scsi.c  new_dma_sectors = 2*SECTORS_PER_PAGE;    /* Base value we use */
SECTORS_PER_PAGE  2587  drivers/scsi/scsi.c  new_dma_sectors += SECTORS_PER_PAGE;  /* Increment for each host */
SECTORS_PER_PAGE  2619  drivers/scsi/scsi.c  size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  2623  drivers/scsi/scsi.c  size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(*new_dma_malloc_pages);
SECTORS_PER_PAGE  2632  drivers/scsi/scsi.c  for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2644  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  2652  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
SECTORS_PER_PAGE  3159  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  3165  drivers/scsi/scsi.c  scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
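Taken together, these references suggest that SECTORS_PER_PAGE expresses how many DMA sectors fit in one page, that the driver keeps one FreeSectorBitmap per pool page (dma_malloc_freelist has dma_sectors / SECTORS_PER_PAGE entries), and that the bitmap's integer width is picked by the #if/#elif at lines 68 and 70. The following is a minimal, self-contained sketch of that pattern, not the driver's actual code: PAGE_SIZE, SECTOR_SIZE, and the alloc_sectors() helper are assumptions chosen here for illustration, while SECTORS_PER_PAGE, FreeSectorBitmap, dma_sectors, and dma_malloc_freelist are names taken from the listing above.

/*
 * Sketch of the per-page free-sector bitmap pattern visible in the
 * cross-reference listing.  PAGE_SIZE and SECTOR_SIZE values below are
 * assumptions for illustration only.
 */
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE    4096u               /* assumed page size               */
#define SECTOR_SIZE  512u                /* assumed DMA sector granularity  */
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)   /* 8 with these values */

/* Bitmap width selection, mirroring the #if/#elif at lines 68-70. */
#if SECTORS_PER_PAGE <= 8
typedef unsigned char FreeSectorBitmap;
#elif SECTORS_PER_PAGE <= 32
typedef unsigned int FreeSectorBitmap;
#else
#error "SECTORS_PER_PAGE too large for this sketch"
#endif

static unsigned int dma_sectors = 4 * SECTORS_PER_PAGE; /* pool size in sectors */
static FreeSectorBitmap *dma_malloc_freelist;           /* one bitmap per page  */

/*
 * First-fit search for 'nbits' contiguous free sectors within a single page,
 * following the nested-loop shape at lines 2187-2188.  Returns the absolute
 * sector index on success, -1 when no page has a large enough free run.
 * (Hypothetical helper; this name does not appear in the listing.)
 */
static int alloc_sectors(unsigned int nbits)
{
    unsigned int i, j;
    unsigned int mask = (1u << nbits) - 1;   /* nbits consecutive ones */

    for (i = 0; i < dma_sectors / SECTORS_PER_PAGE; i++)
        for (j = 0; j <= SECTORS_PER_PAGE - nbits; j++) {
            if ((dma_malloc_freelist[i] & (mask << j)) == 0) {
                dma_malloc_freelist[i] |= (FreeSectorBitmap)(mask << j);
                return (int)(i * SECTORS_PER_PAGE + j);
            }
        }
    return -1;
}

int main(void)
{
    /* Bitmap array sized exactly as in the listed allocations:
     * (dma_sectors / SECTORS_PER_PAGE) * sizeof(FreeSectorBitmap). */
    dma_malloc_freelist = calloc(dma_sectors / SECTORS_PER_PAGE,
                                 sizeof(*dma_malloc_freelist));
    if (!dma_malloc_freelist)
        return 1;

    printf("SECTORS_PER_PAGE = %u\n", (unsigned)SECTORS_PER_PAGE);
    printf("first 3-sector run starts at sector %d\n", alloc_sectors(3));
    printf("next  3-sector run starts at sector %d\n", alloc_sectors(3));

    free(dma_malloc_freelist);
    return 0;
}

With the assumed 4096-byte page and 512-byte sectors, the two calls print sector indices 0 and 3: both runs land in page 0 because its 8-bit bitmap still has room, which is the same packing behaviour the per-page scan at lines 2187-2188 is there to achieve.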