Add AsyncCache (#12)
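
`AsyncCache<T>` runs an asynchronous callback and caches its result for a
configurable period, so repeated calls within that window reuse the pending or
completed future instead of re-running the work; `fetchStream` does the same
for streams, replaying the cached events to later callers. A rough usage
sketch (the `fetchConfigFile` function below is hypothetical, for illustration
only):

```dart
final _configCache = new AsyncCache<String>(const Duration(minutes: 5));

Future<String> get config => _configCache.fetch(() => fetchConfigFile());
```

`AsyncCache.ephemeral()` covers the narrower case of deduplicating concurrent
calls without keeping the result once the in-flight request completes.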

diff --git a/CHANGELOG.md b/CHANGELOG.md
index eafa7b0..378a162 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 1.12.0
+
+* Add an `AsyncCache` class that caches asynchronous operations for a period of
+  time.
+
 ## 1.11.3
 
 * Fix strong-mode warning against the signature of Future.then
diff --git a/lib/async.dart b/lib/async.dart
index 8218986..2526ae5 100644
--- a/lib/async.dart
+++ b/lib/async.dart
@@ -2,6 +2,7 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+export "src/async_cache.dart";
 export "src/async_memoizer.dart";
 export "src/cancelable_operation.dart";
 export "src/delegate/event_sink.dart";
diff --git a/lib/src/async_cache.dart b/lib/src/async_cache.dart
new file mode 100644
index 0000000..96e1a50
--- /dev/null
+++ b/lib/src/async_cache.dart
@@ -0,0 +1,121 @@
+// Copyright (c) 2017, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:async/async.dart';
+
+/// Runs asynchronous functions and caches the result for a period of time.
+///
+/// This class exists so that potentially expensive operations, such as file
+/// I/O, network access, or isolate computation, run less often when their
+/// results are unlikely to change quickly. For example:
+///
+/// ```dart
+/// final _usersCache = new AsyncCache<List<String>>(const Duration(hours: 1));
+///
+/// /// Uses the cache if it exists, otherwise calls the closure:
+/// Future<List<String>> get onlineUsers => _usersCache.fetch(() {
+///   // Actually fetch online users here.
+/// });
+/// ```
+///
+/// This class's timing can be mocked using [`fake_async`][fake_async].
+///
+/// [fake_async]: https://pub.dartlang.org/packages/fake_async
+class AsyncCache<T> {
+  /// How long cached values stay fresh.
+  final Duration _duration;
+
+  /// Cached results of a previous [fetchStream] call.
+  StreamSplitter<T> _cachedStreamSplitter;
+
+  /// Cached results of a previous [fetch] call.
+  Future<T> _cachedValueFuture;
+
+  /// Fires when the cache should be considered stale.
+  Timer _stale;
+
+  /// Creates a cache that invalidates once the in-flight request completes.
+  ///
+  /// An ephemeral cache guarantees that a callback function is executed at
+  /// most once at a time. This is useful for requests whose data is updated
+  /// frequently but for which briefly stale data is acceptable.
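+  ///
+  /// For example, a minimal sketch of deduplicating concurrent requests
+  /// (`queryServer` is a hypothetical function, used only for illustration):
+  ///
+  /// ```dart
+  /// final _fooCache = new AsyncCache<String>.ephemeral();
+  ///
+  /// // Concurrent callers share a single in-flight `queryServer` call.
+  /// Future<String> get foo => _fooCache.fetch(() => queryServer('foo'));
+  /// ```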
+  factory AsyncCache.ephemeral() =>
+      new AsyncCache(Duration.ZERO);
+
+  /// Creates a cache that invalidates its contents after [duration] has passed.
+  ///
+  /// The [duration] starts counting after the Future returned by [fetch]
+  /// completes, or after the Stream returned by [fetchStream] emits a done
+  /// event.
+  AsyncCache(this._duration);
+
+  /// Returns a cached value from a previous call to [fetch], or runs [callback]
+  /// to compute a new one.
+  ///
+  /// If [fetch] has been called recently enough, returns its previous return
+  /// value. Otherwise, runs [callback] and returns its new return value.
+  Future<T> fetch(Future<T> callback()) async {
+    if (_cachedStreamSplitter != null) {
+      throw new StateError('Previously used to cache via `fetchStream`');
+    }
+    if (_cachedValueFuture == null) {
+      _cachedValueFuture = callback();
+      await _cachedValueFuture;
+      _startStaleTimer();
+    }
+    return _cachedValueFuture;
+  }
+
+  /// Returns a cached stream from a previous call to [fetchStream], or runs
+  /// [callback] to compute a new stream.
+  ///
+  /// If [fetchStream] has been called recently enough, returns a copy of its
+  /// previous return value. Otherwise, runs [callback] and returns its new
+  /// return value.
+  Stream<T> fetchStream(Stream<T> callback()) {
+    if (_cachedValueFuture != null) {
+      throw new StateError('Previously used to cache via `fetch`');
+    }
+    if (_cachedStreamSplitter == null) {
+      _cachedStreamSplitter = new StreamSplitter(callback()
+          .transform(new StreamTransformer.fromHandlers(handleDone: (sink) {
+        _startStaleTimer();
+        sink.close();
+      })));
+    }
+    return _cachedStreamSplitter.split();
+  }
+
+  /// Removes any cached value.
+  void invalidate() {
+    _cachedValueFuture = null;
+    _cachedStreamSplitter?.close();
+    _cachedStreamSplitter = null;
+    _stale?.cancel();
+    _stale = null;
+  }
+
+  void _startStaleTimer() {
+    _stale = new Timer(_duration, invalidate);
+  }
+}
diff --git a/pubspec.yaml b/pubspec.yaml
index 2741747..20ec733 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,5 +1,5 @@
 name: async
-version: 1.11.3
+version: 1.12.0-dev
 author: Dart Team <misc@dartlang.org>
 description: Utility functions and classes related to the 'dart:async' library.
 homepage: https://www.github.com/dart-lang/async
diff --git a/test/async_cache_test.dart b/test/async_cache_test.dart
new file mode 100644
index 0000000..b81d9f9
--- /dev/null
+++ b/test/async_cache_test.dart
@@ -0,0 +1,170 @@
+// Copyright (c) 2017, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+
+import 'package:async/async.dart';
+import 'package:fake_async/fake_async.dart';
+import 'package:test/test.dart';
+
+void main() {
+  AsyncCache<String> cache;
+
+  setUp(() {
+    // Create a cache that is fresh for an hour.
+    cache = new AsyncCache(const Duration(hours: 1));
+  });
+
+  test('should fetch via a callback when no cache exists', () async {
+    expect(await cache.fetch(() async => 'Expensive'), 'Expensive');
+  });
+
+  test('should not fetch via callback when a cache exists', () async {
+    await cache.fetch(() async => 'Expensive');
+    expect(await cache.fetch(expectAsync0(() {}, count: 0)), 'Expensive');
+  });
+
+  test('should not fetch via callback when a future is in-flight', () async {
+    // No actual caching is done, just avoid duplicate requests.
+    cache = new AsyncCache.ephemeral();
+
+    var completer = new Completer<String>();
+    expect(cache.fetch(() => completer.future), completion('Expensive'));
+    expect(cache.fetch(expectAsync0(() {}, count: 0)), completion('Expensive'));
+    completer.complete('Expensive');
+  });
+
+  test('should fetch via a callback again when cache expires', () {
+    new FakeAsync().run((fakeAsync) {
+      var timesCalled = 0;
+      call() async => 'Called ${++timesCalled}';
+
+      // FakeAsync only runs microtasks when asked, so instead of awaiting,
+      // resolve the future by flushing the fake microtask queue.
+      String fetch() {
+        String result;
+        cache.fetch(call).then((value) => result = value);
+        fakeAsync.flushMicrotasks();
+        return result;
+      }
+
+      expect(fetch(), 'Called 1');
+      expect(fetch(), 'Called 1', reason: 'Cache still fresh');
+
+      fakeAsync.elapse(const Duration(hours: 1) - const Duration(seconds: 1));
+      expect(fetch(), 'Called 1', reason: 'Cache still fresh');
+
+      fakeAsync.elapse(const Duration(seconds: 1));
+      expect(fetch(), 'Called 2');
+      expect(fetch(), 'Called 2', reason: 'Cache fresh again');
+
+      fakeAsync.elapse(const Duration(hours: 1));
+      expect(fetch(), 'Called 3');
+    });
+  });
+
+  test('should fetch via a callback when manually invalidated', () async {
+    var timesCalled = 0;
+    call() async => 'Called ${++timesCalled}';
+    expect(await cache.fetch(call), 'Called 1');
+    cache.invalidate();
+    expect(await cache.fetch(call), 'Called 2');
+    cache.invalidate();
+    expect(await cache.fetch(call), 'Called 3');
+  });
+
+  test('should fetch a stream via a callback', () async {
+    expect(await cache.fetchStream(expectAsync0(() {
+      return new Stream.fromIterable(['1', '2', '3']);
+    })).toList(), ['1', '2', '3']);
+  });
+
+  test('should not fetch stream via callback when a cache exists', () async {
+    await cache.fetchStream(() async* {
+      yield '1';
+      yield '2';
+      yield '3';
+    }).toList();
+    expect(await cache.fetchStream(expectAsync0(() {}, count: 0)).toList(),
+        ['1', '2', '3']);
+  });
+
+  test('should not fetch stream via callback when request in flight', () async {
+    // Unlike the above test, we want to verify that we don't make multiple
+    // calls if a cache is being filled currently, and instead wait for that
+    // cache to be completed.
+    var controller = new StreamController<String>();
+    Stream<String> call() => controller.stream;
+    expect(cache.fetchStream(call).toList(), completion(['1', '2', '3']));
+    controller.add('1');
+    controller.add('2');
+    await new Future.value();
+    expect(cache.fetchStream(call).toList(), completion(['1', '2', '3']));
+    controller.add('3');
+    await controller.close();
+  });
+
+  test('should fetch stream via a callback again when cache expires', () {
+    new FakeAsync().run((fakeAsync) {
+      var timesCalled = 0;
+      Stream<String> call() {
+        return new Stream.fromIterable(['Called ${++timesCalled}']);
+      }
+
+      // As above, drain the cached stream by flushing the fake microtask
+      // queue rather than awaiting inside the FakeAsync zone.
+      List<String> fetchAll() {
+        List<String> results;
+        cache.fetchStream(call).toList().then((list) => results = list);
+        fakeAsync.flushMicrotasks();
+        return results;
+      }
+
+      expect(fetchAll(), ['Called 1']);
+      expect(fetchAll(), ['Called 1'], reason: 'Cache still fresh');
+
+      fakeAsync.elapse(const Duration(hours: 1) - const Duration(seconds: 1));
+      expect(fetchAll(), ['Called 1'], reason: 'Cache still fresh');
+
+      fakeAsync.elapse(const Duration(seconds: 1));
+      expect(fetchAll(), ['Called 2']);
+      expect(fetchAll(), ['Called 2'], reason: 'Cache fresh again');
+
+      fakeAsync.elapse(const Duration(hours: 1));
+      expect(fetchAll(), ['Called 3']);
+    });
+  });
+
+  test('should fetch a stream again when manually invalidated', () async {
+    var timesCalled = 0;
+    Stream<String> call() {
+      return new Stream.fromIterable(['Called ${++timesCalled}']);
+    }
+
+    expect(await cache.fetchStream(call).toList(), ['Called 1']);
+    cache.invalidate();
+    expect(await cache.fetchStream(call).toList(), ['Called 2']);
+    cache.invalidate();
+    expect(await cache.fetchStream(call).toList(), ['Called 3']);
+  });
+
+  test('should cancel a cached stream without affecting others', () async {
+    Stream<String> call() => new Stream.fromIterable(['1', '2', '3']);
+
+    expect(cache.fetchStream(call).toList(), completion(['1', '2', '3']));
+
+    // Listens to the stream for the initial value, then cancels subscription.
+    expect(await cache.fetchStream(call).first, '1');
+  });
+
+  test('should pause a cached stream without affecting others', () async {
+    Stream<String> call() => new Stream.fromIterable(['1', '2', '3']);
+
+    StreamSubscription sub;
+    sub = cache.fetchStream(call).listen(expectAsync1((event) {
+      if (event == '1') sub.pause();
+    }));
+    expect(cache.fetchStream(call).toList(), completion(['1', '2', '3']));
+  });
+}