@TechReport{patterson:informed-tr,
  author      = {R. Hugo Patterson and Garth A. Gibson and Eka Ginting and Daniel Stodolsky and Jim Zelenka},
  title       = {Informed Prefetching and Caching},
  year        = {1995},
  number      = {CMU-CS-95-134},
  institution = {School of Computer Science, Carnegie Mellon University},
  later       = {patterson:informed},
  keywords    = {caching, prefetching, file system, hints, I/O, resource management, parallel I/O, pario-bib},
  abstract    = {The underutilization of disk parallelism and file cache buffers by traditional file systems induces I/O stall time that degrades the performance of modern microprocessor-based systems. In this paper, we present aggressive mechanisms that tailor file system resource management to the needs of I/O-intensive applications. In particular, we show how to use application-disclosed access patterns (hints) to expose and exploit I/O parallelism, and to dynamically allocate file buffers among three competing demands: prefetching hinted blocks, caching hinted blocks for reuse, and caching recently used data for unhinted accesses. Our approach estimates the impact of alternative buffer allocations on application execution time and applies a cost-benefit analysis to allocate buffers where they will have the greatest impact. We implemented informed prefetching and caching in DEC's OSF/1 operating system and measured its performance on a 150 MHz Alpha equipped with 15 disks. When running a range of applications including text search, 3D scientific visualization, relational database queries, speech recognition, and computational chemistry, informed prefetching reduces the execution time of four of these applications by 20% to 87%. Informed caching reduces the execution time of the fifth application by up to 30%.}
}