source: osm/applications/editors/josm/plugins/imagerycache/src/org/mapdb/HTreeMap.java@ 29363

Last change on this file since 29363 was 29363, checked in by akks, 11 years ago

JOSM/ImageryCache: Initial commit

File size: 39.0 KB
Line 
1/*
2 * Copyright (c) 2012 Jan Kotek
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16package org.mapdb;
17
18import java.io.DataInput;
19import java.io.DataOutput;
20import java.io.IOException;
21import java.util.*;
22import java.util.concurrent.ConcurrentMap;
23import java.util.concurrent.locks.ReentrantReadWriteLock;
24
25/**
26 * Thread safe concurrent HashMap
27 * <p/>
28 * This map uses full 32bit hash from beginning, There is no initial load factor and rehash.
29 * Technically it is not hash table, but hash tree with nodes expanding when they become full.
30 * <p/>
31 * This map is suitable for large numbers of records (1e9 and over).
32 * Larger numbers of records will increase hash collisions, and performance
33 * will degrade linearly with the number of records (separate chaining).
34 * <p/>
35 * Concurrent scalability is achieved by splitting HashMap into 16 segments, each with separate lock.
36 * Very similar to ConcurrentHashMap
37 *
38 * @author Jan Kotek
39 */
40@SuppressWarnings({ "unchecked", "rawtypes" })
41public class HTreeMap<K,V> extends AbstractMap<K,V> implements ConcurrentMap<K, V>, Bind.MapWithModificationListener<K,V> {
42
43
/** max LinkedNodes chained from one dir slot before the bucket is expanded into a new dir level (see put()) */
protected static final int BUCKET_OVERFLOW = 4;

/** is this a Map or Set? if false, entries do not have values, only keys are allowed*/
protected final boolean hasValues;

/**
 * Salt added to hash before rehashing, so it is harder to trigger hash collision attack.
 */
protected final int hashSalt;

// serializers for keys and values; fixed at construction and persisted inside the HashRoot record
protected final Serializer<K> keySerializer;
protected final Serializer<V> valueSerializer;

// retained only so snapshot() can reopen this map with the same default serializer
// (field name carries a historical typo; kept as-is since it is protected API)
protected final Serializer defaultSerialzierForSnapshots;
59
60
61
62
/** node which holds key-value pair; nodes form a singly linked list per bucket */
protected static class LinkedNode<K,V>{
    final K key;
    final V value;
    /** recid of the next node in the bucket's linked list; 0 marks the end */
    final long next;

    LinkedNode(final long next, final K key, final V value ){
        this.key = key;
        this.value = value;
        this.next = next;
    }
}
75
/** (de)serializes the HashRoot configuration record persisted under rootRecid */
static class HashRootSerializer implements Serializer<HashRoot>{

    /** used to serialize the nested key/value serializers themselves */
    private Serializer defaultSerializer;

    public HashRootSerializer(Serializer defaultSerializer) {
        this.defaultSerializer = defaultSerializer;
    }

    @Override
    public void serialize(DataOutput out, HashRoot value) throws IOException {
        out.writeBoolean(value.hasValues);
        out.writeInt(value.hashSalt);
        // 16 segment recids, written as variable-length packed longs
        for(int i=0;i<16;i++){
            Utils.packLong(out, value.segmentRecids[i]);
        }
        defaultSerializer.serialize(out,value.keySerializer);
        defaultSerializer.serialize(out,value.valueSerializer);
    }

    @Override
    public HashRoot deserialize(DataInput in, int available) throws IOException {
        // zero available bytes means the record does not exist yet
        if(available==0) return null;
        HashRoot r = new HashRoot();
        r.hasValues = in.readBoolean();
        r.hashSalt = in.readInt();
        r.segmentRecids = new long[16];
        for(int i=0;i<16;i++){
            r.segmentRecids[i] = Utils.unpackLong(in);
        }
        r.keySerializer = (Serializer) defaultSerializer.deserialize(in, -1);
        r.valueSerializer = (Serializer) defaultSerializer.deserialize(in, -1);
        return r;
    }

}
112
113
/** persisted configuration record of an HTreeMap; stored under rootRecid */
static class HashRoot{
    long[] segmentRecids;       // recids of the 16 segment root directories
    boolean hasValues;          // false when the map is used as a Set
    int hashSalt;               // salt mixed into key hashes
    Serializer keySerializer;
    Serializer valueSerializer;
}
121
122
/** serializer for bucket nodes; the value is omitted on disk when this map is used as a Set */
final Serializer<LinkedNode<K,V>> LN_SERIALIZER = new Serializer<LinkedNode<K,V>>() {
    @Override
    public void serialize(DataOutput out, LinkedNode<K,V> value) throws IOException {
        Utils.packLong(out, value.next);
        keySerializer.serialize(out,value.key);
        if(hasValues)
            valueSerializer.serialize(out,value.value);
    }

    @Override
    public LinkedNode<K,V> deserialize(DataInput in, int available) throws IOException {
        // Sets get a shared placeholder value instead of a stored one
        return new LinkedNode<K, V>(
                Utils.unpackLong(in),
                (K) keySerializer.deserialize(in,-1),
                hasValues? (V) valueSerializer.deserialize(in,-1) : (V) Utils.EMPTY_STRING
        );
    }
};
141
142
/**
 * Serializer for a directory node: 16 sub-arrays of 8 slots each (128 slots total).
 * A null sub-array means all 8 of its slots are empty; nullability is encoded in a
 * 16-bit mask written first, so empty sub-arrays cost nothing on disk.
 */
static final Serializer<long[][]>DIR_SERIALIZER = new Serializer<long[][]>() {
    @Override
    public void serialize(DataOutput out, long[][] value) throws IOException {
        if(value.length!=16) throw new InternalError();

        //first write mask which indicates subarray nullability
        int nulls = 0;
        for(int i = 0;i<16;i++){
            if(value[i]!=null)
                nulls |= 1<<i;
        }
        out.writeShort(nulls);

        //write non null subarrays
        for(int i = 0;i<16;i++){
            if(value[i]!=null){
                if(value[i].length!=8) throw new InternalError();
                for(long l:value[i]){
                    Utils.packLong(out, l);
                }
            }
        }
    }


    @Override
    public long[][] deserialize(DataInput in, int available) throws IOException {

        final long[][] ret = new long[16][];

        //there are 16 subarrays, each bit of the mask indicates if the subarray is non-null
        int nulls = in.readUnsignedShort();
        for(int i=0;i<16;i++){
            if((nulls & 1)!=0){
                final long[] subarray = new long[8];
                for(int j=0;j<8;j++){
                    subarray[j] = Utils.unpackLong(in);
                }
                ret[i] = subarray;
            }
            nulls = nulls>>>1;
        }

        return ret;
    }
};
189
190
/** list of segments, this is immutable*/
protected final long[] segmentRecids;

/** one read-write lock per segment, mirroring ConcurrentHashMap-style striping */
protected final ReentrantReadWriteLock[] segmentLocks = new ReentrantReadWriteLock[16];
{
    for(int i=0;i< 16;i++) segmentLocks[i]=new ReentrantReadWriteLock();
}

protected final Engine engine;
/** recid of the persisted HashRoot record describing this map */
public final long rootRecid;
201
202
/**
 * Constructor used to create new HTreeMap without existing record (recid) in Engine.
 * This constructor creates new record and saves all configuration parameters there.
 * Constructor args define the HTreeMap format, are stored in db and can not be changed later.
 *
 * @param engine used for persistence
 * @param hasValues is this a Map or a Set? If false only keys will be stored, no values
 * @param hashSalt salt mixed into key hashes to make collision attacks harder
 * @param defaultSerializer serializer used to serialize/deserialize other serializers. May be null for default value.
 * @param keySerializer Serializer used for keys. May be null for default value.
 * @param valueSerializer Serializer used for values. May be null for default value
 */
public HTreeMap(Engine engine, boolean hasValues, int hashSalt, Serializer defaultSerializer, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
    this.engine = engine;
    this.hasValues = hasValues;
    this.hashSalt = hashSalt;
    // serializers are persisted inside the HashRoot, so they must be serializable themselves
    SerializerBase.assertSerializable(keySerializer);
    SerializerBase.assertSerializable(valueSerializer);

    if(defaultSerializer == null) defaultSerializer = Serializer.BASIC_SERIALIZER;
    this.defaultSerialzierForSnapshots = defaultSerializer;
    this.keySerializer = keySerializer==null ? (Serializer<K>) defaultSerializer : keySerializer;
    this.valueSerializer = valueSerializer==null ? (Serializer<V>) defaultSerializer : valueSerializer;


    //preallocate segmentRecids, so we do not have to lock on those later
    segmentRecids = new long[16];
    for(int i=0;i<16;i++)
        segmentRecids[i] = engine.put(new long[16][], DIR_SERIALIZER);
    HashRoot r = new HashRoot();
    r.hasValues = hasValues;
    r.hashSalt = hashSalt;
    r.segmentRecids = segmentRecids;
    r.keySerializer = this.keySerializer;
    r.valueSerializer = this.valueSerializer;
    this.rootRecid = engine.put(r, new HashRootSerializer(defaultSerializer));
}
239
/**
 * Constructor used to load existing HTreeMap (with assigned recid).
 * Map was already created and saved to Engine, this constructor just loads it.
 *
 * @param engine used for persistence
 * @param rootRecid recid under which the map's HashRoot configuration record was stored
 * @param defaultSerializer used to deserialize other serializers and comparator
 */
public HTreeMap(Engine engine, long rootRecid, Serializer defaultSerializer) {
    if(rootRecid == 0) throw new IllegalArgumentException("recid is 0");
    this.engine = engine;
    this.rootRecid = rootRecid;
    //load all fields from store
    if(defaultSerializer==null) defaultSerializer = Serializer.BASIC_SERIALIZER;
    this.defaultSerialzierForSnapshots = defaultSerializer;
    HashRoot r = engine.get(rootRecid, new HashRootSerializer(defaultSerializer));
    this.segmentRecids = r.segmentRecids;
    this.hasValues = r.hasValues;
    this.hashSalt = r.hashSalt;
    this.keySerializer = r.keySerializer;
    this.valueSerializer = r.valueSerializer;
}
262
/** hack used for Dir Name: returns the named-directory map stored under the fixed NAME_DIR_RECID,
 *  creating it first if the store is writable and it does not exist yet */
static final Map<String, Long> preinitNamedDir(Engine engine){
    HashRootSerializer serializer = new HashRootSerializer(Serializer.BASIC_SERIALIZER);
    //check if record already exists
    HashRoot r = engine.get(Engine.NAME_DIR_RECID, serializer);
    if(r!=null)
        return new HTreeMap<String, Long>(engine, Engine.NAME_DIR_RECID, Serializer.BASIC_SERIALIZER);

    // read-only store without a name dir: expose an empty immutable map instead of creating one
    if(engine.isReadOnly())
        return Collections.unmodifiableMap(new HashMap<String, Long>());

    //preallocate segmentRecids
    long[] segmentRecids = new long[16];
    for(int i=0;i<16;i++)
        segmentRecids[i] = engine.put(new long[16][], DIR_SERIALIZER);
    r = new HashRoot();
    r.hasValues = true;
    r.segmentRecids = segmentRecids;
    r.keySerializer = Serializer.BASIC_SERIALIZER;
    r.valueSerializer = Serializer.BASIC_SERIALIZER;
    engine.update(Engine.NAME_DIR_RECID, r, serializer);
    //and now load it
    return new HTreeMap<String, Long>(engine, Engine.NAME_DIR_RECID, Serializer.BASIC_SERIALIZER);

}
288
289 @Override
290 public boolean containsKey(final Object o){
291 return get(o)!=null;
292 }
293
294 @Override
295 public int size() {
296 long counter = 0;
297
298 //search tree, until we find first non null
299 for(int i=0;i<16;i++){
300 try{
301 segmentLocks[i].readLock().lock();
302
303 final long dirRecid = segmentRecids[i];
304 counter+=recursiveDirCount(dirRecid);
305 }finally {
306 segmentLocks[i].readLock().unlock();
307 }
308 }
309
310 if(counter>Integer.MAX_VALUE)
311 return Integer.MAX_VALUE;
312
313 return (int) counter;
314 }
315
/**
 * Counts the entries reachable from the directory at {@code dirRecid}.
 * Slot encoding: lowest bit 1 = recid of a linked-node chain, lowest bit 0 = recid of a
 * nested directory; the real recid is {@code slot>>>1}. Caller must hold the segment lock.
 */
private long recursiveDirCount(final long dirRecid) {
    long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
    long counter = 0;
    for(long[] subdir:dir){
        if(subdir == null) continue;
        for(long recid:subdir){
            if(recid == 0) continue;
            if((recid&1)==0){
                //reference to another subdir
                recid = recid>>>1;
                counter += recursiveDirCount(recid);
            }else{
                //reference to linked list, count it
                recid = recid>>>1;
                while(recid!=0){
                    LinkedNode n = engine.get(recid, LN_SERIALIZER);
                    if(n!=null){
                        counter++;
                        recid = n.next;
                    }else{
                        // node missing; stop walking this chain
                        recid = 0;
                    }
                }
            }
        }
    }
    return counter;
}
344
345 @Override
346 public boolean isEmpty() {
347 //search tree, until we find first non null
348 for(int i=0;i<16;i++){
349 try{
350 segmentLocks[i].readLock().lock();
351
352 long dirRecid = segmentRecids[i];
353 long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
354 for(long[] d:dir){
355 if(d!=null) return false;
356 }
357 return true;
358 }finally {
359 segmentLocks[i].readLock().unlock();
360 }
361 }
362
363 return true;
364 }
365
366
367
368 @Override
369 public V get(final Object o){
370 if(o==null) return null;
371 final int h = hash(o);
372 final int segment = h >>>28;
373 try{
374 segmentLocks[segment].readLock().lock();
375 long recid = segmentRecids[segment];
376 for(int level=3;level>=0;level--){
377 long[][] dir = engine.get(recid, DIR_SERIALIZER);
378 if(dir == null) return null;
379 int slot = (h>>>(level*7 )) & 0x7F;
380 if(slot>=128) throw new InternalError();
381 if(dir[slot/8]==null) return null;
382 recid = dir[slot/8][slot%8];
383 if(recid == 0) return null;
384 if((recid&1)!=0){ //last bite indicates if referenced record is LinkedNode
385 recid = recid>>>1;
386 while(true){
387 LinkedNode<K,V> ln = engine.get(recid, LN_SERIALIZER);
388 if(ln == null) return null;
389 if(ln.key.equals(o)) return ln.value;
390 if(ln.next==0) return null;
391 recid = ln.next;
392 }
393 }
394
395 recid = recid>>>1;
396 }
397
398 return null;
399 }finally {
400 segmentLocks[segment].readLock().unlock();
401 }
402 }
403
/**
 * Inserts or replaces a mapping. Descends the directory tree under the segment
 * write lock; when a bucket's chain exceeds BUCKET_OVERFLOW and a deeper level
 * is available, the bucket is expanded into a new directory level and its nodes
 * are redistributed.
 * NOTE(review): the lock is acquired inside try; if lock() ever threw, the
 * finally would unlock a lock that was never taken. Lock-before-try is the
 * conventional idiom.
 *
 * @param key   non-null key
 * @param value non-null value
 * @return previous value for the key, or null if there was none
 * @throws IllegalArgumentException when key or value is null
 */
@Override
public V put(final K key, final V value){
    if (key == null)
        throw new IllegalArgumentException("null key");

    if (value == null)
        throw new IllegalArgumentException("null value");

    Utils.checkMapValueIsNotCollecion(value);

    final int h = hash(key);
    final int segment = h >>>28;
    try{
        segmentLocks[segment].writeLock().lock();
        long dirRecid = segmentRecids[segment];

        int level = 3;
        while(true){
            long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
            final int slot = (h>>>(7*level )) & 0x7F;
            if(slot>127) throw new InternalError();

            if(dir == null ){
                //create new dir
                dir = new long[16][];
            }

            if(dir[slot/8] == null){
                dir[slot/8] = new long[8];
            }

            // counts nodes walked in the bucket; drives the overflow expansion below
            int counter = 0;
            long recid = dir[slot/8][slot%8];

            if(recid!=0){
                if((recid&1) == 0){
                    // slot points to a deeper directory, descend one level
                    dirRecid = recid>>>1;
                    level--;
                    continue;
                }
                recid = recid>>>1;

                //traverse linked list, try to replace previous value
                LinkedNode<K,V> ln = engine.get(recid, LN_SERIALIZER);

                while(ln!=null){
                    if(ln.key.equals(key)){
                        //found, replace value at this node
                        V oldVal = ln.value;
                        ln = new LinkedNode<K, V>(ln.next, ln.key, value);
                        engine.update(recid, ln, LN_SERIALIZER);
                        notify(key, oldVal, value);
                        return oldVal;
                    }
                    recid = ln.next;
                    ln = recid==0? null : engine.get(recid, LN_SERIALIZER);
                    counter++;
                }
                //key was not found at linked list, so just append it to beginning
            }


            //check if linked list has overflow and needs to be expanded to new dir level
            if(counter>=BUCKET_OVERFLOW && level>=1){
                long[][] nextDir = new long[16][];

                {
                    //add newly inserted record
                    int pos =(h >>>(7*(level-1) )) & 0x7F;
                    nextDir[pos/8] = new long[8];
                    nextDir[pos/8][pos%8] = (engine.put(new LinkedNode<K, V>(0, key, value), LN_SERIALIZER) <<1) | 1;
                }


                //redistribute linked bucket into new dir
                long nodeRecid = dir[slot/8][slot%8]>>>1;
                while(nodeRecid!=0){
                    LinkedNode<K,V> n = engine.get(nodeRecid, LN_SERIALIZER);
                    final long nextRecid = n.next;
                    // slot in the new (deeper) level chosen by the NEXT 7 hash bits of this node's key
                    int pos = (hash(n.key) >>>(7*(level -1) )) & 0x7F;
                    if(nextDir[pos/8]==null) nextDir[pos/8] = new long[8];
                    // prepend node to whatever chain already occupies its new slot
                    n = new LinkedNode<K, V>(nextDir[pos/8][pos%8]>>>1, n.key, n.value);
                    nextDir[pos/8][pos%8] = (nodeRecid<<1) | 1;
                    engine.update(nodeRecid, n, LN_SERIALIZER);
                    nodeRecid = nextRecid;
                }

                //insert nextDir and update parent dir
                long nextDirRecid = engine.put(nextDir, DIR_SERIALIZER);
                int parentPos = (h>>>(7*level )) & 0x7F;
                // low bit 0 marks the slot as a directory reference
                dir[parentPos/8][parentPos%8] = (nextDirRecid<<1) | 0;
                engine.update(dirRecid, dir, DIR_SERIALIZER);
                notify(key, null, value);
                return null;
            }else{
                // record does not exist in linked list, so create new one and prepend it
                recid = dir[slot/8][slot%8]>>>1;
                long newRecid = engine.put(new LinkedNode<K, V>(recid, key, value), LN_SERIALIZER);
                dir[slot/8][slot%8] = (newRecid<<1) | 1;
                engine.update(dirRecid, dir, DIR_SERIALIZER);
                notify(key, null, value);
                return null;
            }
        }

    }finally {
        segmentLocks[segment].writeLock().unlock();
    }
}
513
514
/**
 * Removes a key. Descends the directory tree under the segment write lock,
 * recording each visited dir recid so that directories left empty by the
 * removal can be collapsed upwards (see recursiveDirDelete).
 * NOTE(review): the lock is acquired inside try; lock-before-try is the
 * conventional idiom.
 *
 * @param key key to remove
 * @return the removed value, or null when the key was not present
 */
@Override
public V remove(Object key){

    final int h = hash(key);
    final int segment = h >>>28;
    try{
        segmentLocks[segment].writeLock().lock();

        // recid of the dir visited at each level, indexed by level (3 = segment root)
        final long[] dirRecids = new long[4];
        int level = 3;
        dirRecids[level] = segmentRecids[segment];

        while(true){
            long[][] dir = engine.get(dirRecids[level], DIR_SERIALIZER);
            final int slot = (h>>>(7*level )) & 0x7F;
            if(slot>127) throw new InternalError();

            if(dir == null ){
                //create new dir
                dir = new long[16][];
            }

            if(dir[slot/8] == null){
                dir[slot/8] = new long[8];
            }

            long recid = dir[slot/8][slot%8];

            if(recid!=0){
                if((recid&1) == 0){
                    // slot points to a deeper directory, descend one level
                    level--;
                    dirRecids[level] = recid>>>1;
                    continue;
                }
                recid = recid>>>1;

                //traverse linked list, try to remove node
                LinkedNode<K,V> ln = engine.get(recid, LN_SERIALIZER);
                LinkedNode<K,V> prevLn = null;
                long prevRecid = 0;
                while(ln!=null){
                    if(ln.key.equals(key)){
                        //remove from linkedList
                        if(prevLn == null ){
                            //referenced directly from dir
                            if(ln.next==0){
                                // last node in the bucket: zero the slot and collapse empty dirs
                                recursiveDirDelete(h, level, dirRecids, dir, slot);
                            }else{
                                dir[slot/8][slot%8] = (ln.next<<1)|1;
                                engine.update(dirRecids[level], dir, DIR_SERIALIZER);
                            }

                        }else{
                            //referenced from previous LinkedNode; relink it past the removed node
                            prevLn = new LinkedNode<K, V>(ln.next, prevLn.key, prevLn.value);
                            engine.update(prevRecid, prevLn, LN_SERIALIZER);
                        }
                        //found, remove this node
                        engine.delete(recid, LN_SERIALIZER);
                        notify((K) key, ln.value, null);
                        return ln.value;
                    }
                    prevRecid = recid;
                    prevLn = ln;
                    recid = ln.next;
                    ln = recid==0? null : engine.get(recid, LN_SERIALIZER);
                }
                //key was not found at linked list, so it does not exist
                return null;
            }
            //recid is 0, so entry does not exist
            return null;

        }
    }finally {
        segmentLocks[segment].writeLock().unlock();
    }
}
597
598
/**
 * Zeroes a slot and collapses the directory chain upwards while levels become
 * empty. A segment-root dir (level 3) has an immutable recid, so it is reset
 * to an empty dir instead of being deleted.
 *
 * @param h         full hash of the removed key, used to find parent slots
 * @param level     level of {@code dir} (3 = segment root, 0 = deepest)
 * @param dirRecids recid of the dir visited at each level (filled by remove())
 * @param dir       directory whose {@code slot} is being cleared
 * @param slot      0..127 slot index to zero out
 */
private void recursiveDirDelete(int h, int level, long[] dirRecids, long[][] dir, int slot) {
    //was only item in linked list, so try to collapse the dir
    dir[slot/8][slot%8] = 0;
    //one record was zeroed out, check if subarray can be collapsed to null
    boolean allZero = true;
    for(long l:dir[slot/8]){
        if(l!=0){
            allZero = false;
            break;
        }
    }
    if(allZero)
        dir[slot/8] = null;
    // now check whether the whole directory is empty
    allZero = true;
    for(long[] l:dir){
        if(l!=null){
            allZero = false;
            break;
        }
    }

    if(allZero){
        //delete from parent dir
        if(level==3){
            //parent is segment, recid of this dir can not be modified, so just update to empty
            engine.update(dirRecids[level], new long[16][], DIR_SERIALIZER);
        }else{
            engine.delete(dirRecids[level], DIR_SERIALIZER);

            // recurse into the parent to clear the slot that referenced this dir
            final long[][] parentDir = engine.get(dirRecids[level + 1], DIR_SERIALIZER);
            final int parentPos = (h >>> (7 * (level + 1))) & 0x7F;
            recursiveDirDelete(h,level+1,dirRecids, parentDir, parentPos);
        }
    }else{
        engine.update(dirRecids[level], dir, DIR_SERIALIZER);
    }
}
639
640 @Override
641 public void clear() {
642 for(int i = 0; i<16;i++) try{
643 segmentLocks[i].writeLock().lock();
644
645 final long dirRecid = segmentRecids[i];
646 recursiveDirClear(dirRecid);
647
648 //set dir to null, as segment recid is immutable
649 engine.update(dirRecid, new long[16][], DIR_SERIALIZER);
650
651 }finally {
652 segmentLocks[i].writeLock().unlock();
653 }
654 }
655
/**
 * Recursively deletes everything reachable from the directory at {@code dirRecid}:
 * nested directories and all linked-list nodes, firing a removal notification per
 * entry. The directory record itself is handled by the caller, which also holds
 * the segment write lock (so nodes cannot vanish mid-chain).
 */
private void recursiveDirClear(final long dirRecid) {
    final long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
    if(dir == null) return;
    for(long[] subdir:dir){
        if(subdir==null) continue;
        for(long recid:subdir){
            if(recid == 0) continue;
            if((recid&1)==0){
                //another dir
                recid = recid>>>1;
                //recursively remove dir
                recursiveDirClear(recid);
                engine.delete(recid, DIR_SERIALIZER);
            }else{
                //linked list to delete
                recid = recid>>>1;
                while(recid!=0){
                    LinkedNode n = engine.get(recid, LN_SERIALIZER);
                    engine.delete(recid,LN_SERIALIZER);
                    notify((K)n.key, (V)n.value , null);
                    recid = n.next;
                }
            }

        }
    }
}
683
684
685 @Override
686 public boolean containsValue(Object value) {
687 for (V v : values()) {
688 if (v.equals(value)) return true;
689 }
690 return false;
691 }
692
693 @Override
694 public void putAll(Map<? extends K, ? extends V> m) {
695 for(Entry<? extends K, ? extends V> e:m.entrySet()){
696 put(e.getKey(),e.getValue());
697 }
698 }
699
700
/** live key-set view backed by this map; add() is only supported in Set mode (hasValues == false) */
private final Set<K> _keySet = new AbstractSet<K>() {

    @Override
    public int size() {
        return HTreeMap.this.size();
    }

    @Override
    public boolean isEmpty() {
        return HTreeMap.this.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return HTreeMap.this.containsKey(o);
    }

    @Override
    public Iterator<K> iterator() {
        return new KeyIterator();
    }

    @Override
    public boolean add(K k) {
        // adding a bare key only makes sense when this map acts as a Set;
        // a placeholder value is stored in place of a real one
        if(HTreeMap.this.hasValues)
            throw new UnsupportedOperationException();
        else
            return HTreeMap.this.put(k, (V) Utils.EMPTY_STRING) == null;
    }

    @Override
    public boolean remove(Object o) {
        return HTreeMap.this.remove(o)!=null;
    }


    @Override
    public void clear() {
        HTreeMap.this.clear();
    }
};
747
/** @return live key-set view; changes to the map are visible through it */
@Override
public Set<K> keySet() {
    return _keySet;
}
752
/** live, read-mostly collection view over the map's values */
private final Collection<V> _values = new AbstractCollection<V>(){

    @Override
    public int size() {
        return HTreeMap.this.size();
    }

    @Override
    public boolean isEmpty() {
        return HTreeMap.this.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return HTreeMap.this.containsValue(o);
    }



    @Override
    public Iterator<V> iterator() {
        return new ValueIterator();
    }

};
778
/** @return live collection view of values; changes to the map are visible through it */
@Override
public Collection<V> values() {
    return _values;
}
783
/** live entry-set view; entries returned by its iterator read their value through the map */
private Set<Entry<K,V>> _entrySet = new AbstractSet<Entry<K,V>>(){

    @Override
    public int size() {
        return HTreeMap.this.size();
    }

    @Override
    public boolean isEmpty() {
        return HTreeMap.this.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        // an entry is contained only when both its key and its value match
        if(o instanceof Entry){
            Entry e = (Entry) o;
            Object val = HTreeMap.this.get(e.getKey());
            return val!=null && val.equals(e.getValue());
        }else
            return false;
    }

    @Override
    public Iterator<Entry<K, V>> iterator() {
        return new EntryIterator();
    }


    @Override
    public boolean add(Entry<K, V> kvEntry) {
        K key = kvEntry.getKey();
        V value = kvEntry.getValue();
        if(key==null || value == null) throw new NullPointerException();
        HTreeMap.this.put(key, value);
        return true;
    }

    @Override
    public boolean remove(Object o) {
        // delegates to the atomic two-arg remove so the value is checked too
        if(o instanceof Entry){
            Entry e = (Entry) o;
            Object key = e.getKey();
            if(key == null) return false;
            return HTreeMap.this.remove(key, e.getValue());
        }
        return false;
    }


    @Override
    public void clear() {
        HTreeMap.this.clear();
    }
};
838
/** @return live entry-set view; changes to the map are visible through it */
@Override
public Set<Entry<K, V>> entrySet() {
    return _entrySet;
}
843
844
845 protected int hash(final Object key) {
846 int h = key.hashCode() ^ hashSalt;
847 h ^= (h >>> 20) ^ (h >>> 12);
848 return h ^ (h >>> 7) ^ (h >>> 4);
849 }
850
851
/**
 * Base class for key/value/entry iterators. One bucket (linked list) is loaded
 * at a time into {@code currentLinkedList} as flattened {key, value, key, value, ...}
 * pairs; when it is exhausted, the tree is re-walked from the last returned key's
 * hash to find the next bucket. Weakly consistent: concurrent modification is
 * tolerated, not detected.
 */
abstract class HashIterator{

    /** current bucket flattened into {key, value, ...} pairs; null when iteration is finished */
    protected Object[] currentLinkedList;
    protected int currentLinkedListPos = 0;

    private K lastReturnedKey = null;

    /** highest segment visited so far, so earlier segments are never revisited */
    private int lastSegment = 0;

    HashIterator(){
        currentLinkedList = findNextLinkedNode(0);
    }

    public void remove() {
        final K keyToRemove = lastReturnedKey;
        if (lastReturnedKey == null)
            throw new IllegalStateException();

        lastReturnedKey = null;
        HTreeMap.this.remove(keyToRemove);
    }

    public boolean hasNext(){
        return currentLinkedList!=null && currentLinkedListPos<currentLinkedList.length;
    }


    protected void moveToNext(){
        lastReturnedKey = (K) currentLinkedList[currentLinkedListPos];
        currentLinkedListPos+=2;
        if(currentLinkedListPos==currentLinkedList.length){
            // bucket exhausted: locate the next one starting from this key's hash
            final int lastHash = hash(lastReturnedKey);
            currentLinkedList = advance(lastHash);
            currentLinkedListPos = 0;
        }
    }

    private Object[] advance(int lastHash){

        int segment = lastHash >>>28;

        //two phases, first find old item and increase hash
        // NOTE(review): lock acquired inside try; lock-before-try is the conventional idiom
        try{
            segmentLocks[segment].readLock().lock();

            long dirRecid = segmentRecids[segment];
            int level = 3;
            //dive into tree, finding last hash position
            while(true){
                long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
                int pos = (lastHash>>>(7 * level)) & 0x7F;

                //check if we need to expand deeper
                if(dir[pos/8]==null || dir[pos/8][pos%8]==0 || (dir[pos/8][pos%8]&1)==1) {
                    //increase hash by 1 at the current level's granularity
                    if(level!=0){
                        lastHash = ((lastHash>>>(7 * level)) + 1) << (7*level); //should use mask and XOR
                    }else
                        lastHash +=1;
                    // wrapped around to 0: the whole hash space is exhausted
                    if(lastHash==0){
                        return null;
                    }
                    break;
                }

                //reference is dir, move to next level
                dirRecid = dir[pos/8][pos%8]>>>1;
                level--;
            }

        }finally {
            segmentLocks[segment].readLock().unlock();
        }
        return findNextLinkedNode(lastHash);


    }

    private Object[] findNextLinkedNode(int hash) {
        //second phase, start search from increased hash to find next items
        for(int segment = Math.max(hash >>>28, lastSegment); segment<16;segment++)try{

            lastSegment = Math.max(segment,lastSegment);
            segmentLocks[segment].readLock().lock();

            long dirRecid = segmentRecids[segment];
            Object ret[] = findNextLinkedNodeRecur(dirRecid, hash, 3);
            if(ret !=null) return ret;
            // nothing left in this segment; scan the next one from the beginning
            hash = 0;
        }finally {
            segmentLocks[segment].readLock().unlock();
        }

        return null;
    }

    /**
     * Depth-first scan for the first bucket at or after {@code newHash} under the
     * given directory. Returns the bucket flattened into {key, value, ...} pairs,
     * or null when this subtree holds nothing at or after the hash.
     */
    private Object[] findNextLinkedNodeRecur(long dirRecid, int newHash, int level){
        long[][] dir = engine.get(dirRecid, DIR_SERIALIZER);
        if(dir == null) return null;
        int pos = (newHash>>>(level*7)) & 0x7F;
        // only the first descended slot respects newHash; later slots scan from 0
        boolean first = true;
        while(pos<128){
            if(dir[pos/8]!=null){
                long recid = dir[pos/8][pos%8];
                if(recid!=0){
                    if((recid&1) == 1){
                        recid = recid>>1;
                        //found linked list, load it into array and return
                        Object[] array = new Object[2];
                        int arrayPos = 0;
                        while(recid!=0){
                            LinkedNode ln = engine.get(recid, LN_SERIALIZER);
                            if(ln==null){
                                recid = 0;
                                continue;
                            }
                            //increase array size if needed
                            if(arrayPos == array.length)
                                array = Arrays.copyOf(array, array.length+2);
                            array[arrayPos++] = ln.key;
                            array[arrayPos++] = ln.value;
                            recid = ln.next;
                        }
                        return array;
                    }else{
                        //found another dir, continue dive
                        recid = recid>>1;
                        Object[] ret = findNextLinkedNodeRecur(recid, first ? newHash : 0, level - 1);
                        if(ret != null) return ret;
                    }
                }
            }
            first = false;
            pos++;
        }
        return null;
    }
}
991
/** iterator over the map's keys; weakly consistent (see HashIterator) */
class KeyIterator extends HashIterator implements Iterator<K>{

    @Override
    public K next() {
        if(currentLinkedList == null)
            throw new NoSuchElementException();
        K key = (K) currentLinkedList[currentLinkedListPos];
        moveToNext();
        return key;
    }
}
1003
/** iterator over the map's values; weakly consistent (see HashIterator) */
class ValueIterator extends HashIterator implements Iterator<V>{

    @Override
    public V next() {
        if(currentLinkedList == null)
            throw new NoSuchElementException();
        // values sit at odd offsets in the flattened {key, value, ...} array
        V value = (V) currentLinkedList[currentLinkedListPos+1];
        moveToNext();
        return value;
    }
}
1015
/** iterator over the map's entries; each Entry2 reads its value lazily through the map */
class EntryIterator extends HashIterator implements Iterator<Entry<K,V>>{

    @Override
    public Entry<K, V> next() {
        if(currentLinkedList == null)
            throw new NoSuchElementException();
        K key = (K) currentLinkedList[currentLinkedListPos];
        moveToNext();
        return new Entry2(key);
    }
}
1027
1028 class Entry2 implements Entry<K,V>{
1029
1030 private final K key;
1031
1032 Entry2(K key) {
1033 this.key = key;
1034 }
1035
1036 @Override
1037 public K getKey() {
1038 return key;
1039 }
1040
1041 @Override
1042 public V getValue() {
1043 return HTreeMap.this.get(key);
1044 }
1045
1046 @Override
1047 public V setValue(V value) {
1048 return HTreeMap.this.put(key,value);
1049 }
1050
1051 @Override
1052 public boolean equals(Object o) {
1053 return (o instanceof Entry) && key.equals(((Entry) o).getKey());
1054 }
1055
1056 @Override
1057 public int hashCode() {
1058 final V value = HTreeMap.this.get(key);
1059 return (key == null ? 0 : key.hashCode()) ^
1060 (value == null ? 0 : value.hashCode());
1061 }
1062 }
1063
1064
1065 @Override
1066 public V putIfAbsent(K key, V value) {
1067 if(key==null||value==null) throw new NullPointerException();
1068 Utils.checkMapValueIsNotCollecion(value);
1069 final int segment = HTreeMap.this.hash(key) >>>28;
1070 try{
1071 segmentLocks[segment].writeLock().lock();
1072
1073 if (!containsKey(key))
1074 return put(key, value);
1075 else
1076 return get(key);
1077
1078 }finally {
1079 segmentLocks[segment].writeLock().unlock();
1080 }
1081 }
1082
1083 @Override
1084 public boolean remove(Object key, Object value) {
1085 if(key==null||value==null) throw new NullPointerException();
1086 final int segment = HTreeMap.this.hash(key) >>>28;
1087 try{
1088 segmentLocks[segment].writeLock().lock();
1089
1090 if (containsKey(key) && get(key).equals(value)) {
1091 remove(key);
1092 return true;
1093 }else
1094 return false;
1095
1096 }finally {
1097 segmentLocks[segment].writeLock().unlock();
1098 }
1099 }
1100
1101 @Override
1102 public boolean replace(K key, V oldValue, V newValue) {
1103 if(key==null||oldValue==null||newValue==null) throw new NullPointerException();
1104 final int segment = HTreeMap.this.hash(key) >>>28;
1105 try{
1106 segmentLocks[segment].writeLock().lock();
1107
1108 if (containsKey(key) && get(key).equals(oldValue)) {
1109 put(key, newValue);
1110 return true;
1111 } else
1112 return false;
1113
1114 }finally {
1115 segmentLocks[segment].writeLock().unlock();
1116 }
1117 }
1118
1119 @Override
1120 public V replace(K key, V value) {
1121 if(key==null||value==null) throw new NullPointerException();
1122 final int segment = HTreeMap.this.hash(key) >>>28;
1123 try{
1124 segmentLocks[segment].writeLock().lock();
1125
1126 if (containsKey(key))
1127 return put(key, value);
1128 else
1129 return null;
1130 }finally {
1131 segmentLocks[segment].writeLock().unlock();
1132 }
1133 }
1134
1135
/**
 * Make readonly snapshot view of current Map. Snapshot is immutable and not affected by modifications made by other threads.
 * Useful if you need consistent view on Map.
 * <p>
 * Maintaining snapshot has some overhead; the underlying Engine is closed after the Map view is GCed.
 * Please make sure to release the reference to this Map view, so the snapshot view can be garbage collected.
 *
 * @return snapshot
 */
public Map<K,V> snapshot(){
    // reopen this map's root record through a snapshot engine wrapper
    Engine snapshot = SnapshotEngine.createSnapshotFor(engine);
    return new HTreeMap<K, V>(snapshot,rootRecid, defaultSerialzierForSnapshots);
}
1149
1150
/** guards mutation of the modListeners array (copy-on-write) */
protected final Object modListenersLock = new Object();
/** copy-on-write listener array; slots may be null after a listener is removed */
protected Bind.MapListener<K,V>[] modListeners = new Bind.MapListener[0];
1153
1154 @Override
1155 public void addModificationListener(Bind.MapListener<K,V> listener) {
1156 synchronized (modListenersLock){
1157 Bind.MapListener<K,V>[] modListeners2 =
1158 Arrays.copyOf(modListeners,modListeners.length+1);
1159 modListeners2[modListeners2.length-1] = listener;
1160 modListeners = modListeners2;
1161 }
1162
1163 }
1164
1165 @Override
1166 public void removeModificationListener(Bind.MapListener<K,V> listener) {
1167 synchronized (modListenersLock){
1168 for(int i=0;i<modListeners.length;i++){
1169 if(modListeners[i]==listener) modListeners[i]=null;
1170 }
1171 }
1172 }
1173
1174 protected void notify(K key, V oldValue, V newValue) {
1175 Bind.MapListener<K,V>[] modListeners2 = modListeners;
1176 for(Bind.MapListener<K,V> listener:modListeners2){
1177 if(listener!=null)
1178 listener.update(key, oldValue, newValue);
1179 }
1180 }
1181
/**
 * Closes underlying storage and releases all resources.
 * Used mostly with temporary collections where engine is not accessible.
 * The map must not be used after this call.
 */
public void close(){
    engine.close();
}
1189
1190}
Note: See TracBrowser for help on using the repository browser.