/*
 * BasicArrayCache
 *
 * Author: Lasse Collin <lasse.collin@tukaani.org>
 *
 * This file has been put into the public domain.
 * You can do whatever you want with this file.
 */

package org.tukaani.xz;

import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * A basic {@link ArrayCache} implementation.
 * <p>
 * This caches exact array sizes, that is, {@code getByteArray} will return
 * an array whose size is exactly the requested size. A limited number
 * of different array sizes are cached at the same time; least recently used
 * sizes will be dropped from the cache if needed (can happen if several
 * different (de)compression options are used with the same cache).
 * <p>
 * The current implementation uses
 * {@link java.util.LinkedHashMap LinkedHashMap} to map different array sizes
 * to separate array-based data structures which hold
 * {@link java.lang.ref.SoftReference SoftReferences} to the cached arrays.
 * In the common case this should give good performance and fairly low
 * memory usage overhead.
 * <p>
 * A statically allocated global {@code BasicArrayCache} instance is
 * available via {@link #getInstance()} which is a good choice in most
 * situations where caching is wanted.
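 * <p>
 * A minimal usage sketch (illustrative only; the buffer size and the
 * {@code cache} and {@code buf} variable names are arbitrary):
 * <pre>{@code
 * BasicArrayCache cache = BasicArrayCache.getInstance();
 * byte[] buf = cache.getByteArray(64 << 10, false);
 * try {
 *     // ... use buf ...
 * } finally {
 *     // Return the array so that a later getByteArray call can reuse it.
 *     cache.putArray(buf);
 * }
 * }</pre>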
 *
 * @since 1.7
 */
public class BasicArrayCache extends ArrayCache {
    /**
     * Arrays smaller than this many elements will not be cached.
     */
    private static final int CACHEABLE_SIZE_MIN = 32 << 10;

    /**
     * Number of stacks, i.e. how many different array sizes to cache.
     */
    private static final int STACKS_MAX = 32;

    /**
     * Number of arrays of the same type and size to keep in the cache.
     * (ELEMENTS_PER_STACK - 1) is used as a bit mask so ELEMENTS_PER_STACK
     * must be a power of two!
     */
    private static final int ELEMENTS_PER_STACK = 512;

    /**
     * A thread-safe stack-like data structure whose {@code push} method
     * overwrites the oldest element in the stack if the stack is full.
     */
    private static class CyclicStack<T> {
        /**
         * Array that holds the elements in the cyclic stack.
         */
        @SuppressWarnings("unchecked")
        private final T[] elements = (T[])new Object[ELEMENTS_PER_STACK];

        /**
         * Read-write position in the {@code elements} array.
         * The most recently added element is in {@code elements[pos]}.
         * If it is {@code null}, then the stack is empty and all
         * elements in {@code elements} are {@code null}.
         * <p>
         * Note that {@code pop()} always modifies {@code pos}, even if
         * the stack is empty. This means that when the first element is
         * added by {@code push(T)}, it can get added in any position in
         * {@code elements} and the stack will start growing from there.
         */
        private int pos = 0;

        /**
         * Gets the most recently added element from the stack.
         * If the stack is empty, {@code null} is returned.
         */
        public synchronized T pop() {
            T e = elements[pos];
            elements[pos] = null;
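            // ELEMENTS_PER_STACK is a power of two, so masking with
            // (ELEMENTS_PER_STACK - 1) makes pos wrap around the array.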
            pos = (pos - 1) & (ELEMENTS_PER_STACK - 1);
            return e;
        }

        /**
         * Adds a new element to the stack. If the stack is full, the oldest
         * element is overwritten.
         */
        public synchronized void push(T e) {
            pos = (pos + 1) & (ELEMENTS_PER_STACK - 1);
            elements[pos] = e;
        }
    }

    /**
     * Maps Integer (array size) to stacks of references to arrays. At most
     * STACKS_MAX number of stacks are kept in the map (LRU cache).
     */
    private static class CacheMap<T>
            extends LinkedHashMap<Integer, CyclicStack<Reference<T>>> {
        /**
         * This class won't be serialized but this is needed
         * to silence a compiler warning.
         */
        private static final long serialVersionUID = 1L;

        /**
         * Creates a new CacheMap.
         */
        public CacheMap() {
            // The map may momentarily have at most STACKS_MAX + 1 entries
            // when put(K,V) has inserted a new entry but hasn't called
            // removeEldestEntry yet. Using 2 * STACKS_MAX as the initial
            // (and the final) capacity should give good performance. 0.75 is
            // the default load factor and in this case it guarantees that
            // the map will never need to be rehashed because
            // (STACKS_MAX + 1) / 0.75 < 2 * STACKS_MAX.
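            // (With STACKS_MAX = 32: (32 + 1) / 0.75 = 44, which is
            // below 2 * 32 = 64.)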
            //
            // That last argument is true to get LRU cache behavior together
            // with the overridden removeEldestEntry method.
            super(2 * STACKS_MAX, 0.75f, true);
        }

        /**
         * Returns true if the map is full and the least recently used stack
         * should be removed.
         */
        protected boolean removeEldestEntry(
                Map.Entry<Integer, CyclicStack<Reference<T>>> eldest) {
            return size() > STACKS_MAX;
        }
    }

    /**
     * Helper class for the singleton instance.
     * This is allocated only if {@code getInstance()} is called.
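     * <p>
     * (The JVM initializes this nested class lazily and thread-safely on
     * first use, which is what makes this initialization-on-demand holder
     * idiom work without explicit synchronization.)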
     */
    private static final class LazyHolder {
        static final BasicArrayCache INSTANCE = new BasicArrayCache();
    }

    /**
     * Returns a statically-allocated {@code BasicArrayCache} instance.
     * This is often a good choice when a cache is needed.
     */
    public static BasicArrayCache getInstance() {
        return LazyHolder.INSTANCE;
    }

    /**
     * Stacks for cached byte arrays.
     */
    private final CacheMap<byte[]> byteArrayCache = new CacheMap<byte[]>();

    /**
     * Stacks for cached int arrays.
     */
    private final CacheMap<int[]> intArrayCache = new CacheMap<int[]>();

    /**
     * Gets {@code T[size]} from the given {@code cache}.
     * If no such array is found, {@code null} is returned.
     */
    private static <T> T getArray(CacheMap<T> cache, int size) {
        // putArray doesn't add small arrays to the cache and so it's
        // pointless to look for small arrays here.
        if (size < CACHEABLE_SIZE_MIN)
            return null;

        // Try to find a stack that holds arrays of T[size].
        CyclicStack<Reference<T>> stack;
        synchronized(cache) {
            stack = cache.get(size);
        }

        if (stack == null)
            return null;

        // Try to find a non-cleared Reference from the stack.
        T array;
        do {
            Reference<T> r = stack.pop();
            if (r == null)
                return null;

            array = r.get();
        } while (array == null);

        return array;
    }

    /**
     * Puts the {@code array} of {@code size} elements into
     * the {@code cache}.
     */
    private static <T> void putArray(CacheMap<T> cache, T array, int size) {
        // Small arrays aren't cached.
        if (size < CACHEABLE_SIZE_MIN)
            return;

        CyclicStack<Reference<T>> stack;

        synchronized(cache) {
            // Get a stack that holds arrays of T[size]. If no such stack
            // exists, allocate a new one. If the cache already had STACKS_MAX
            // number of stacks, the least recently used stack is removed by
            // cache.put (it calls removeEldestEntry).
            stack = cache.get(size);
            if (stack == null) {
                stack = new CyclicStack<Reference<T>>();
                cache.put(size, stack);
            }
        }

        stack.push(new SoftReference<T>(array));
    }

    /**
     * Allocates a new byte array, hopefully reusing an existing
     * array from the cache.
     *
     * @param size size of the array to allocate
     *
     * @param fillWithZeros
     *          if true, all the elements of the returned
     *          array will be zero; if false, the contents
     *          of the returned array are undefined
     */
    public byte[] getByteArray(int size, boolean fillWithZeros) {
        byte[] array = getArray(byteArrayCache, size);

        if (array == null)
            array = new byte[size];
        else if (fillWithZeros)
            Arrays.fill(array, (byte)0x00);

        return array;
    }

    /**
     * Puts the given byte array into the cache. The caller must no longer
     * use the array.
     * <p>
     * Small arrays aren't cached and will be ignored by this method.
     */
    public void putArray(byte[] array) {
        putArray(byteArrayCache, array, array.length);
    }

    /**
     * This is like {@code getByteArray} but for int arrays.
     */
    public int[] getIntArray(int size, boolean fillWithZeros) {
        int[] array = getArray(intArrayCache, size);

        if (array == null)
            array = new int[size];
        else if (fillWithZeros)
            Arrays.fill(array, 0);

        return array;
    }

    /**
     * Puts the given int array into the cache. The caller must no longer
     * use the array.
     * <p>
     * Small arrays aren't cached and will be ignored by this method.
     */
    public void putArray(int[] array) {
        putArray(intArrayCache, array, array.length);
    }
}