tag               line  file                 source code
SECTORS_PER_PAGE    68  drivers/scsi/scsi.c  #if SECTORS_PER_PAGE <= 8
SECTORS_PER_PAGE    70  drivers/scsi/scsi.c  #elif SECTORS_PER_PAGE <= 32
SECTORS_PER_PAGE  2190  drivers/scsi/scsi.c  for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2191  drivers/scsi/scsi.c  for(j=0; j<=SECTORS_PER_PAGE - nbits; j++){
SECTORS_PER_PAGE  2215  drivers/scsi/scsi.c  for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
SECTORS_PER_PAGE  2225  drivers/scsi/scsi.c  if ((mask << sector) >= (1 << SECTORS_PER_PAGE))
SECTORS_PER_PAGE  2565  drivers/scsi/scsi.c  for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2569  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
SECTORS_PER_PAGE  2573  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
SECTORS_PER_PAGE  2581  drivers/scsi/scsi.c  new_dma_sectors = 2*SECTORS_PER_PAGE;    /* Base value we use */
SECTORS_PER_PAGE  2590  drivers/scsi/scsi.c  new_dma_sectors += SECTORS_PER_PAGE;  /* Increment for each host */
SECTORS_PER_PAGE  2622  drivers/scsi/scsi.c  size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  2626  drivers/scsi/scsi.c  size = (new_dma_sectors / SECTORS_PER_PAGE)*sizeof(*new_dma_malloc_pages);
SECTORS_PER_PAGE  2635  drivers/scsi/scsi.c  for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
SECTORS_PER_PAGE  2647  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  2655  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
SECTORS_PER_PAGE  3162  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
SECTORS_PER_PAGE  3168  drivers/scsi/scsi.c  scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
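The listing suggests the pattern: the SCSI layer's DMA pool of dma_sectors sectors is split into pages of SECTORS_PER_PAGE sectors, each page tracked by one FreeSectorBitmap word in dma_malloc_freelist (the #if/#elif at lines 68/70 apparently just pick a word type wide enough for one bit per sector), and allocation scans each page's word for a run of nbits clear bits (lines 2190-2191). Below is a minimal user-space sketch of that first-fit scan, not the kernel code itself: the names SECTORS_PER_PAGE, dma_sectors, dma_malloc_freelist, and FreeSectorBitmap mirror the listing, while alloc_sectors(), the pool size, and the "set bit means in use" convention are assumptions made for illustration.

/*
 * Sketch of the first-fit bitmap scan implied by scsi.c:2190-2225.
 * Assumed convention: a set bit marks a sector as allocated.
 */
#include <stdio.h>

#define SECTORS_PER_PAGE 8              /* assumed: 4096-byte page / 512-byte sector */
typedef unsigned char FreeSectorBitmap; /* one bit per sector when SECTORS_PER_PAGE <= 8 */

static unsigned int dma_sectors = 4 * SECTORS_PER_PAGE;   /* 4 pages for the demo */
static FreeSectorBitmap dma_malloc_freelist[4];           /* one bitmap word per page */

/* Find nbits contiguous free sectors within one page; return sector index or -1. */
static int alloc_sectors(unsigned int nbits)
{
    unsigned int i, j;
    FreeSectorBitmap mask = (1 << nbits) - 1;   /* nbits low bits set */

    for (i = 0; i < dma_sectors / SECTORS_PER_PAGE; i++)
        for (j = 0; j <= SECTORS_PER_PAGE - nbits; j++)
            if ((dma_malloc_freelist[i] & (mask << j)) == 0) {
                dma_malloc_freelist[i] |= (mask << j);   /* mark the run as in use */
                return i * SECTORS_PER_PAGE + j;
            }
    return -1;                                   /* no run of nbits free sectors */
}

int main(void)
{
    printf("allocated at sector %d\n", alloc_sectors(3));  /* expected: 0 */
    printf("allocated at sector %d\n", alloc_sectors(3));  /* expected: 3 */
    return 0;
}

The remaining rows (2565-2655, 3162-3168) all size the dma_malloc_freelist and dma_malloc_pages arrays at dma_sectors / SECTORS_PER_PAGE entries, i.e. one bitmap word and one page pointer per DMA page, which is consistent with the layout sketched above.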