tag          line  file                 source code
dma_sectors    91  drivers/scsi/scsi.c  static unsigned int dma_sectors = 0;
dma_sectors  2184  drivers/scsi/scsi.c  for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2209  drivers/scsi/scsi.c  for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
dma_sectors  2553  drivers/scsi/scsi.c  if( dma_free_sectors != dma_sectors )
dma_sectors  2554  drivers/scsi/scsi.c  panic("SCSI DMA pool memory leak %d %d\n",dma_free_sectors,dma_sectors);
dma_sectors  2556  drivers/scsi/scsi.c  for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2560  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
dma_sectors  2564  drivers/scsi/scsi.c  (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
dma_sectors  2566  drivers/scsi/scsi.c  dma_sectors = 0;
dma_sectors  2608  drivers/scsi/scsi.c  if( new_dma_sectors < dma_sectors )
dma_sectors  2609  drivers/scsi/scsi.c  new_dma_sectors = dma_sectors;
dma_sectors  2625  drivers/scsi/scsi.c  if( new_dma_sectors > dma_sectors ) {
dma_sectors  2626  drivers/scsi/scsi.c  for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2638  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  2646  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
dma_sectors  2651  drivers/scsi/scsi.c  dma_free_sectors += new_dma_sectors - dma_sectors;
dma_sectors  2653  drivers/scsi/scsi.c  dma_sectors = new_dma_sectors;
dma_sectors  3145  drivers/scsi/scsi.c  dma_sectors = PAGE_SIZE / SECTOR_SIZE;
dma_sectors  3146  drivers/scsi/scsi.c  dma_free_sectors= dma_sectors;
dma_sectors  3153  drivers/scsi/scsi.c  size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  3159  drivers/scsi/scsi.c  scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
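
Taken together, these hits outline the SCSI mid-layer's DMA buffer pool bookkeeping: dma_sectors records the pool size in sectors, dma_free_sectors tracks how many are still unallocated, and the per-page arrays (dma_malloc_pages, dma_malloc_freelist / FreeSectorBitmap) are sized as dma_sectors / SECTORS_PER_PAGE. Below is a minimal user-space sketch of that sizing arithmetic, assuming 4 KiB pages, 512-byte sectors, a stand-in FreeSectorBitmap typedef, and a hypothetical new_dma_sectors target; it only illustrates the relationships visible in the listing above and is not the kernel code itself.

/*
 * Standalone sketch of the pool-sizing arithmetic seen in the listing.
 * PAGE_SIZE, SECTOR_SIZE and FreeSectorBitmap are stand-ins here; in the
 * kernel they come from the architecture headers and the SCSI mid-layer.
 */
#include <stdio.h>

#define PAGE_SIZE        4096u                      /* assumption: 4 KiB pages */
#define SECTOR_SIZE      512u                       /* one DMA sector = 512 bytes */
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)  /* sectors carved out of each page */

typedef unsigned char FreeSectorBitmap;             /* stand-in for the kernel's bitmap type */

static unsigned int dma_sectors = 0;                /* pool size, in sectors (cf. line 91) */
static unsigned int dma_free_sectors = 0;           /* sectors currently unallocated */

int main(void)
{
    /* Initial pool: exactly one page worth of sectors (cf. lines 3145-3146). */
    dma_sectors = PAGE_SIZE / SECTOR_SIZE;
    dma_free_sectors = dma_sectors;

    /* Bookkeeping arrays are sized per page, not per sector (cf. lines 3153, 3159). */
    size_t bitmap_bytes = (dma_sectors / SECTORS_PER_PAGE) * sizeof(FreeSectorBitmap);
    printf("pages tracked: %u, bitmap bytes: %zu\n",
           dma_sectors / SECTORS_PER_PAGE, bitmap_bytes);

    /* Growing the pool (cf. lines 2608-2653): never shrink below the current
       size, and credit the newly added sectors to the free count. */
    unsigned int new_dma_sectors = 4 * SECTORS_PER_PAGE;  /* hypothetical new size */
    if (new_dma_sectors < dma_sectors)
        new_dma_sectors = dma_sectors;
    dma_free_sectors += new_dma_sectors - dma_sectors;
    dma_sectors = new_dma_sectors;

    printf("pool now %u sectors, %u free\n", dma_sectors, dma_free_sectors);
    return 0;
}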