Merge pull request #7210 from nebyan:CacheKeyFactoryNotUsed

PiperOrigin-RevId: 307045655
This commit is contained in:
Oliver Woodman 2020-04-20 13:28:27 +01:00
commit 704993ce7c
3 changed files with 12 additions and 26 deletions

View file

@@ -109,7 +109,6 @@ public final class CacheUtil {
*
* @param dataSpec Defines the data to be cached.
* @param cache A {@link Cache} to store the data.
* @param cacheKeyFactory An optional factory for cache keys.
* @param upstream A {@link DataSource} for reading data not in the cache.
* @param progressListener A listener to receive progress updates, or {@code null}.
* @param isCanceled An optional flag that will interrupt caching if set to true.
@@ -120,7 +119,6 @@ public final class CacheUtil {
public static void cache(
DataSpec dataSpec,
Cache cache,
@Nullable CacheKeyFactory cacheKeyFactory,
DataSource upstream,
@Nullable ProgressListener progressListener,
@Nullable AtomicBoolean isCanceled)
@@ -128,7 +126,7 @@ public final class CacheUtil {
cache(
dataSpec,
cache,
cacheKeyFactory,
/* cacheKeyFactory= */ null,
new CacheDataSource(cache, upstream),
new byte[DEFAULT_BUFFER_SIZE_BYTES],
/* priorityTaskManager= */ null,
@@ -139,14 +137,14 @@ public final class CacheUtil {
}
/**
* Caches the data defined by {@code dataSpec} while skipping already cached data. Caching stops
* early if end of input is reached and {@code enableEOFException} is false.
* Caches the data defined by {@code dataSpec}, skipping already cached data. Caching stops early
* if end of input is reached and {@code enableEOFException} is false.
*
* <p>If a {@link PriorityTaskManager} is given, it's used to pause and resume caching depending
* on {@code priority} and the priority of other tasks registered to the PriorityTaskManager.
* Please note that it's the responsibility of the calling code to call {@link
* PriorityTaskManager#add} to register with the manager before calling this method, and to call
* {@link PriorityTaskManager#remove} afterwards to unregister.
* <p>If a {@link PriorityTaskManager} is provided, it's used to pause and resume caching
* depending on {@code priority} and the priority of other tasks registered to the
* PriorityTaskManager. Please note that it's the responsibility of the calling code to call
* {@link PriorityTaskManager#add} to register with the manager before calling this method, and to
* call {@link PriorityTaskManager#remove} afterwards to unregister.
*
* <p>This method may be slow and shouldn't normally be called on the main thread.
*

View file

@@ -360,7 +360,6 @@ public final class CacheDataSourceTest {
CacheUtil.cache(
unboundedDataSpec,
cache,
/* cacheKeyFactory= */ null,
upstream2,
/* progressListener= */ null,
/* isCanceled= */ null);
@@ -409,7 +408,6 @@ public final class CacheDataSourceTest {
CacheUtil.cache(
unboundedDataSpec,
cache,
/* cacheKeyFactory= */ null,
upstream2,
/* progressListener= */ null,
/* isCanceled= */ null);
@@ -433,7 +431,6 @@ public final class CacheDataSourceTest {
CacheUtil.cache(
dataSpec,
cache,
/* cacheKeyFactory= */ null,
upstream,
/* progressListener= */ null,
/* isCanceled= */ null);
@@ -469,7 +466,6 @@ public final class CacheDataSourceTest {
CacheUtil.cache(
dataSpec,
cache,
/* cacheKeyFactory= */ null,
upstream,
/* progressListener= */ null,
/* isCanceled= */ null);

View file

@@ -203,7 +203,6 @@ public final class CacheUtilTest {
CacheUtil.cache(
new DataSpec(Uri.parse("test_data")),
cache,
/* cacheKeyFactory= */ null,
dataSource,
counters,
/* isCanceled= */ null);
@@ -220,8 +219,7 @@ public final class CacheUtilTest {
Uri testUri = Uri.parse("test_data");
DataSpec dataSpec = new DataSpec(testUri, /* position= */ 10, /* length= */ 20);
CachingCounters counters = new CachingCounters();
CacheUtil.cache(
dataSpec, cache, /* cacheKeyFactory= */ null, dataSource, counters, /* isCanceled= */ null);
CacheUtil.cache(dataSpec, cache, dataSource, counters, /* isCanceled= */ null);
counters.assertValues(0, 20, 20);
counters.reset();
@@ -229,7 +227,6 @@ public final class CacheUtilTest {
CacheUtil.cache(
new DataSpec(testUri),
cache,
/* cacheKeyFactory= */ null,
dataSource,
counters,
/* isCanceled= */ null);
@@ -247,8 +244,7 @@ public final class CacheUtilTest {
DataSpec dataSpec = new DataSpec(Uri.parse("test_data"));
CachingCounters counters = new CachingCounters();
CacheUtil.cache(
dataSpec, cache, /* cacheKeyFactory= */ null, dataSource, counters, /* isCanceled= */ null);
CacheUtil.cache(dataSpec, cache, dataSource, counters, /* isCanceled= */ null);
counters.assertValues(0, 100, 100);
assertCachedData(cache, fakeDataSet);
@@ -264,8 +260,7 @@ public final class CacheUtilTest {
Uri testUri = Uri.parse("test_data");
DataSpec dataSpec = new DataSpec(testUri, /* position= */ 10, /* length= */ 20);
CachingCounters counters = new CachingCounters();
CacheUtil.cache(
dataSpec, cache, /* cacheKeyFactory= */ null, dataSource, counters, /* isCanceled= */ null);
CacheUtil.cache(dataSpec, cache, dataSource, counters, /* isCanceled= */ null);
counters.assertValues(0, 20, 20);
counters.reset();
@@ -273,7 +268,6 @@ public final class CacheUtilTest {
CacheUtil.cache(
new DataSpec(testUri),
cache,
/* cacheKeyFactory= */ null,
dataSource,
counters,
/* isCanceled= */ null);
@@ -290,8 +284,7 @@ public final class CacheUtilTest {
Uri testUri = Uri.parse("test_data");
DataSpec dataSpec = new DataSpec(testUri, /* position= */ 0, /* length= */ 1000);
CachingCounters counters = new CachingCounters();
CacheUtil.cache(
dataSpec, cache, /* cacheKeyFactory= */ null, dataSource, counters, /* isCanceled= */ null);
CacheUtil.cache(dataSpec, cache, dataSource, counters, /* isCanceled= */ null);
counters.assertValues(0, 100, 1000);
assertCachedData(cache, fakeDataSet);
@@ -340,7 +333,6 @@ public final class CacheUtilTest {
CacheUtil.cache(
new DataSpec(Uri.parse("test_data")),
cache,
/* cacheKeyFactory= */ null,
dataSource,
counters,
/* isCanceled= */ null);