tag          line   file                  source code
dma_sectors  91     drivers/scsi/scsi.c   static unsigned int dma_sectors = 0;
dma_sectors  2187   drivers/scsi/scsi.c   for(i=0;i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2212   drivers/scsi/scsi.c   for (page = 0; page < dma_sectors / SECTORS_PER_PAGE; page++) {
dma_sectors  2556   drivers/scsi/scsi.c   if( dma_free_sectors != dma_sectors )
dma_sectors  2557   drivers/scsi/scsi.c   panic("SCSI DMA pool memory leak %d %d\n",dma_free_sectors,dma_sectors);
dma_sectors  2559   drivers/scsi/scsi.c   for(i=0; i < dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2563   drivers/scsi/scsi.c   (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages));
dma_sectors  2567   drivers/scsi/scsi.c   (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_freelist));
dma_sectors  2569   drivers/scsi/scsi.c   dma_sectors = 0;
dma_sectors  2611   drivers/scsi/scsi.c   if( new_dma_sectors < dma_sectors )
dma_sectors  2612   drivers/scsi/scsi.c   new_dma_sectors = dma_sectors;
dma_sectors  2628   drivers/scsi/scsi.c   if( new_dma_sectors > dma_sectors ) {
dma_sectors  2629   drivers/scsi/scsi.c   for(i=dma_sectors / SECTORS_PER_PAGE; i< new_dma_sectors / SECTORS_PER_PAGE; i++)
dma_sectors  2641   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  2649   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages);
dma_sectors  2654   drivers/scsi/scsi.c   dma_free_sectors += new_dma_sectors - dma_sectors;
dma_sectors  2656   drivers/scsi/scsi.c   dma_sectors = new_dma_sectors;
dma_sectors  3148   drivers/scsi/scsi.c   dma_sectors = PAGE_SIZE / SECTOR_SIZE;
dma_sectors  3149   drivers/scsi/scsi.c   dma_free_sectors= dma_sectors;
dma_sectors  3156   drivers/scsi/scsi.c   size = (dma_sectors / SECTORS_PER_PAGE)*sizeof(FreeSectorBitmap);
dma_sectors  3162   drivers/scsi/scsi.c   scsi_init_malloc((dma_sectors / SECTORS_PER_PAGE)*sizeof(*dma_malloc_pages), GFP_ATOMIC);
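
Taken together, these excerpts suggest how scsi.c sizes its DMA pool: dma_sectors always holds a whole number of pages' worth of sectors (dma_sectors / SECTORS_PER_PAGE pages), the pool is seeded with one page (PAGE_SIZE / SECTOR_SIZE sectors), a resize can only grow it (a smaller request is clamped to the current size), and at teardown dma_free_sectors must equal dma_sectors or the kernel panics with "SCSI DMA pool memory leak". The following user-space sketch models only that bookkeeping; the PAGE_SIZE and SECTOR_SIZE values and the pool_* helper names are illustrative assumptions, not the kernel's actual allocation code.

/*
 * Minimal user-space sketch of the dma_sectors bookkeeping visible in
 * the listing above. Assumed (not taken from the listing): the
 * PAGE_SIZE/SECTOR_SIZE values and the pool_* helper names.
 */
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE        4096u                    /* assumed for illustration */
#define SECTOR_SIZE      512u                     /* assumed for illustration */
#define SECTORS_PER_PAGE (PAGE_SIZE / SECTOR_SIZE)

static unsigned int dma_sectors = 0;       /* total sectors in the pool */
static unsigned int dma_free_sectors = 0;  /* sectors currently unused  */

/* Cf. scsi.c lines 3148-3149: the pool starts out one page large. */
static void pool_init(void)
{
	dma_sectors = PAGE_SIZE / SECTOR_SIZE;
	dma_free_sectors = dma_sectors;
}

/* Cf. scsi.c lines 2611-2656: the pool is only ever grown, never shrunk. */
static void pool_resize(unsigned int new_dma_sectors)
{
	if (new_dma_sectors < dma_sectors)
		new_dma_sectors = dma_sectors;

	if (new_dma_sectors > dma_sectors) {
		/* In the kernel, one page and one freelist entry would be
		 * allocated per SECTORS_PER_PAGE sectors added here. */
		dma_free_sectors += new_dma_sectors - dma_sectors;
		dma_sectors = new_dma_sectors;
	}
}

/* Cf. scsi.c lines 2556-2569: every sector must be free at teardown. */
static void pool_teardown(void)
{
	if (dma_free_sectors != dma_sectors) {
		fprintf(stderr, "SCSI DMA pool memory leak %d %d\n",
			(int)dma_free_sectors, (int)dma_sectors);
		abort();
	}
	dma_sectors = 0;
}

int main(void)
{
	pool_init();
	pool_resize(4 * SECTORS_PER_PAGE);   /* grow to four pages        */
	pool_resize(SECTORS_PER_PAGE);       /* smaller request: no-op    */
	printf("pages in pool: %u\n", dma_sectors / SECTORS_PER_PAGE);
	pool_teardown();                     /* nothing in use, so clean  */
	return 0;
}

As a usage note, the division dma_sectors / SECTORS_PER_PAGE that recurs throughout the listing is how the code converts the sector count into the number of backing pages, which is why the dma_malloc_pages and dma_malloc_freelist arrays are sized by that quotient.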