tag          line  file                 source code
dma_sectors    91  drivers/scsi/scsi.c  static unsigned int dma_sectors = 0;
dma_sectors  2209  drivers/scsi/scsi.c  for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2234  drivers/scsi/scsi.c  for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
dma_sectors  2581  drivers/scsi/scsi.c  if( dma_free_sectors != dma_sectors )
dma_sectors  2582  drivers/scsi/scsi.c  panic("SCSI DMA pool memory leak %d %d\n",dma_free_sectors,dma_sectors);
dma_sectors  2584  drivers/scsi/scsi.c  for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2588  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
dma_sectors  2592  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
dma_sectors  2594  drivers/scsi/scsi.c  dma_sectors = 0;
dma_sectors  2636  drivers/scsi/scsi.c  if( new_dma_sectors < dma_sectors )
dma_sectors  2637  drivers/scsi/scsi.c  new_dma_sectors = dma_sectors;
dma_sectors  2653  drivers/scsi/scsi.c  if( new_dma_sectors > dma_sectors ) {
dma_sectors  2654  drivers/scsi/scsi.c  for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2666  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  2674  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
dma_sectors  2679  drivers/scsi/scsi.c  dma_free_sectors += new_dma_sectors - dma_sectors;
dma_sectors  2681  drivers/scsi/scsi.c  dma_sectors = new_dma_sectors;
dma_sectors  3173  drivers/scsi/scsi.c  dma_sectors = PAGE_SIZE / SECTOR_SIZE;
dma_sectors  3174  drivers/scsi/scsi.c  dma_free_sectors= dma_sectors;
dma_sectors  3181  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  3187  drivers/scsi/scsi.c  scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
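
For orientation, the hits above trace the SCSI DMA pool bookkeeping: dma_sectors holds the pool size in sectors, the pool is backed by dma_sectors / SECTORS_PER_PAGE pages, it starts at one page (line 3173), only ever grows (lines 2636-2681), and a leak check fires at teardown if dma_free_sectors drifted from dma_sectors (lines 2581-2582). The following is a minimal user-space sketch of that arithmetic, not kernel code: it assumes a 512-byte SECTOR_SIZE and 4 KB PAGE_SIZE, resize_dma_pool is a hypothetical stand-in for the kernel's resize path, and the FreeSectorBitmap freelist is omitted.

#include <stdio.h>
#include <stdlib.h>

#define SECTOR_SIZE      512               /* assumed sector size */
#define PAGE_SIZE        4096              /* assumed page size (i386-style) */
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)

static unsigned int dma_sectors = 0;       /* total sectors in the pool */
static unsigned int dma_free_sectors = 0;  /* sectors not currently handed out */
static unsigned char **dma_malloc_pages = NULL; /* one backing page per SECTORS_PER_PAGE sectors */

/* Grow the pool to new_dma_sectors, never shrinking it (cf. lines 2636-2681). */
static void resize_dma_pool(unsigned int new_dma_sectors)
{
    unsigned int i;

    if (new_dma_sectors < dma_sectors)
        new_dma_sectors = dma_sectors;

    dma_malloc_pages = realloc(dma_malloc_pages,
                               (new_dma_sectors / SECTORS_PER_PAGE) *
                               sizeof(*dma_malloc_pages));
    for (i = dma_sectors / SECTORS_PER_PAGE;
         i < new_dma_sectors / SECTORS_PER_PAGE; i++)
        dma_malloc_pages[i] = malloc(PAGE_SIZE);

    dma_free_sectors += new_dma_sectors - dma_sectors;
    dma_sectors = new_dma_sectors;
}

int main(void)
{
    unsigned int i;

    /* Initial pool is one page's worth of sectors (cf. lines 3173-3174). */
    resize_dma_pool(PAGE_SIZE / SECTOR_SIZE);
    printf("pool: %u sectors in %u page(s), %u free\n",
           dma_sectors, dma_sectors / SECTORS_PER_PAGE, dma_free_sectors);

    /* Teardown: leak check, then page-by-page release (cf. lines 2581-2594). */
    if (dma_free_sectors != dma_sectors) {
        fprintf(stderr, "SCSI DMA pool memory leak %u %u\n",
                dma_free_sectors, dma_sectors);
        return 1;
    }
    for (i = 0; i < dma_sectors / SECTORS_PER_PAGE; i++)
        free(dma_malloc_pages[i]);
    free(dma_malloc_pages);
    dma_sectors = 0;
    return 0;
}

With the assumed sizes this prints "pool: 8 sectors in 1 page(s), 8 free", which matches the page-granular sizing visible in the (dma_sectors / SECTORS_PER_PAGE) expressions throughout the listing.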