tag            line   file                  source code
dma_sectors      88   drivers/scsi/scsi.c   static unsigned int dma_sectors = 0;
dma_sectors    2181   drivers/scsi/scsi.c   for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors    2206   drivers/scsi/scsi.c   for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
dma_sectors    2547   drivers/scsi/scsi.c   if( dma_free_sectors != dma_sectors )
dma_sectors    2548   drivers/scsi/scsi.c   panic("SCSI DMA pool memory leak %d %d\n",dma_free_sectors,dma_sectors);
dma_sectors    2550   drivers/scsi/scsi.c   for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors    2554   drivers/scsi/scsi.c   (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
dma_sectors    2558   drivers/scsi/scsi.c   (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
dma_sectors    2560   drivers/scsi/scsi.c   dma_sectors = 0;
dma_sectors    2602   drivers/scsi/scsi.c   if( new_dma_sectors < dma_sectors )
dma_sectors    2603   drivers/scsi/scsi.c   new_dma_sectors = dma_sectors;
dma_sectors    2619   drivers/scsi/scsi.c   if( new_dma_sectors > dma_sectors ) {
dma_sectors    2620   drivers/scsi/scsi.c   for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors    2632   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors    2640   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
dma_sectors    2645   drivers/scsi/scsi.c   dma_free_sectors += new_dma_sectors - dma_sectors;
dma_sectors    2647   drivers/scsi/scsi.c   dma_sectors = new_dma_sectors;
dma_sectors    3141   drivers/scsi/scsi.c   dma_sectors = PAGE_SIZE / SECTOR_SIZE;
dma_sectors    3142   drivers/scsi/scsi.c   dma_free_sectors= dma_sectors;
dma_sectors    3149   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors    3155   drivers/scsi/scsi.c   scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
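Taken together, these occurrences describe the bookkeeping of the SCSI DMA scratch pool: dma_sectors is the pool size in sectors, dma_free_sectors the number of sectors still unused, dma_malloc_pages[] the backing pages, and dma_malloc_freelist[] one FreeSectorBitmap per page, with the leak check at lines 2547-2548 requiring the two counters to agree when the pool is torn down. The userspace program below is a minimal sketch of that style of bitmap-backed, sector-granular pool; the page count and the pool_init/pool_alloc/pool_free helpers are invented for illustration, and the page and sector sizes are assumptions, so this is not the kernel's actual allocator code.

/* Sketch of a bitmap-backed, sector-granular pool.  Global names mirror the
 * listing above; pool_init/pool_alloc/pool_free are illustrative only. */
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE        4096u
#define SECTOR_SIZE      512u
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)        /* 8 sectors per page */

typedef unsigned short FreeSectorBitmap;                  /* one bit per sector in a page */

static unsigned int dma_sectors;                          /* pool size, in sectors */
static unsigned int dma_free_sectors;                     /* sectors currently free */
static FreeSectorBitmap *dma_malloc_freelist;             /* per-page in-use bitmaps */
static unsigned char **dma_malloc_pages;                  /* backing pages */

/* Build a pool of 'pages' pages; every sector starts out free. */
static int pool_init(unsigned int pages)
{
    unsigned int i;

    dma_sectors = pages * SECTORS_PER_PAGE;
    dma_free_sectors = dma_sectors;
    dma_malloc_freelist = calloc(pages, sizeof(*dma_malloc_freelist));
    dma_malloc_pages = calloc(pages, sizeof(*dma_malloc_pages));
    if (!dma_malloc_freelist || !dma_malloc_pages)
        return -1;
    for (i = 0; i < pages; i++)
        if (!(dma_malloc_pages[i] = malloc(PAGE_SIZE)))
            return -1;
    return 0;
}

/* Grab 'len' bytes, rounded up to whole sectors, from the first page that has
 * a long enough run of clear bits; return NULL when the pool is exhausted. */
static void *pool_alloc(unsigned int len)
{
    unsigned int nbits = (len + SECTOR_SIZE - 1) / SECTOR_SIZE;
    FreeSectorBitmap mask = (FreeSectorBitmap)((1u << nbits) - 1);
    unsigned int page, bit;

    if (nbits == 0 || nbits > SECTORS_PER_PAGE)
        return NULL;
    for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++)
        for (bit = 0; bit + nbits <= SECTORS_PER_PAGE; bit++)
            if (!(dma_malloc_freelist[page] & (mask << bit))) {
                dma_malloc_freelist[page] |= (FreeSectorBitmap)(mask << bit);
                dma_free_sectors -= nbits;
                return dma_malloc_pages[page] + bit * SECTOR_SIZE;
            }
    return NULL;
}

/* Return a region to the pool by clearing the same bits again. */
static void pool_free(void *ptr, unsigned int len)
{
    unsigned int nbits = (len + SECTOR_SIZE - 1) / SECTOR_SIZE;
    unsigned int page, bit;

    for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
        unsigned char *base = dma_malloc_pages[page];
        if ((unsigned char *)ptr >= base && (unsigned char *)ptr < base + PAGE_SIZE) {
            bit = (unsigned int)((unsigned char *)ptr - base) / SECTOR_SIZE;
            dma_malloc_freelist[page] &= (FreeSectorBitmap)~(((1u << nbits) - 1) << bit);
            dma_free_sectors += nbits;
            return;
        }
    }
}

int main(void)
{
    if (pool_init(2))
        return 1;
    void *buf = pool_alloc(1024);                         /* consumes two sectors */
    printf("free: %u of %u sectors\n", dma_free_sectors, dma_sectors);
    pool_free(buf, 1024);
    /* same invariant the kernel checks before the panic at line 2548 above */
    if (dma_free_sectors != dma_sectors)
        printf("pool leak: %u != %u\n", dma_free_sectors, dma_sectors);
    return 0;
}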