
Implementing a Cache with HashMap


This class is still under development. Criticism is welcome; whoever lands a really heavy blow earns my sincere thanks!

 

Implementation:

 

package creative.air.datastructure.map;

import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;

import creative.air.datastructure.map.AirHashMap.AirEntry;

/**
 * 
 * @author Eric Han feuyeux@gmail.com 06/09/2012
 * @since 0.0.1
 * @version 0.0.1
 */
public class HashMapCache<H, L> {
	enum ConcurrentStrategy {
		NOTIFY, WAIT, TIMEOUT
	}

	enum FullStrategy {
		NOTIFY, DISCARD, REPLACE
	}

	static final Logger logger = Logger.getLogger(HashMapCache.class.getName());
	// cache full strategy
	private int capacity = 12;
	private FullStrategy fullStrategy = FullStrategy.NOTIFY;

	// cache lock strategy
	private static ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
	private static WriteLock wLock = lock.writeLock();
	private static ReadLock rLock = lock.readLock();
	private ConcurrentStrategy concurrentStrategy = ConcurrentStrategy.TIMEOUT;
	private long waitingLockTimeout = 500;

	private AirHashMap<H, L> map;

	public HashMapCache() {
		map = new AirHashMap<H, L>();
	}

	public HashMapCache(int capacity) {
		this.capacity = capacity;
		map = new AirHashMap<H, L>();
	}

	public HashMapCache(int capacity, int initialCapacity, float loadFactor) {
		this.capacity = capacity;
		map = new AirHashMap<H, L>(initialCapacity, loadFactor);
	}

	public void clear() {
		try {
			lock(wLock);// under the TIMEOUT strategy this uses tryLock(long timeout, TimeUnit unit)
			try {
				map.clear();
			} finally {
				wLock.unlock();
			}
		} catch (Exception e) {
			logger.log(Level.SEVERE, "clear error", e);
		}
	}

	public void remove(H key) {
		try {
			lock(wLock);
			try {
				map.remove(key);
			} finally {
				wLock.unlock();
			}
		} catch (Exception e) {
			logger.log(Level.SEVERE, "remove error", e);
		}
	}

	public L put(H key, L value) throws Exception {
		lock(wLock);
		try {
			if (this.capacity < map.size()) {
				switch (fullStrategy) {
				case DISCARD:
					return null;
				case REPLACE:
					// TODO this is a dangerous strategy:
					// there is no guarantee about which stored entry gets evicted
					AirEntry<H, L> entry = map.getTable()[0];
					if (entry != null) {
						remove(entry.getKey());
					}
					break;
				case NOTIFY:
				default:
					throw new Exception("100 reached the cache's maximum");
				}
			}
			return map.put(key, value);
		} catch (Exception e) {
			logger.log(Level.SEVERE, "put error", e);
			throw e;
		} finally {
			wLock.unlock();
		}
	}

	public L get(H key) {
		try {
			lock(rLock);
			try {
				return map.get(key);
			} finally {
				rLock.unlock();
			}
		} catch (Exception e) {
			logger.log(Level.SEVERE, "get error", e);
			return null;
		}
	}

	public Iterator<Map.Entry<H, L>> iterate() {
		try {
			lock(rLock);
			try {
				return map.entrySet().iterator();
			} finally {
				rLock.unlock();
			}
		} catch (Exception e) {
			logger.log(Level.SEVERE, "iterate error", e);
			return null;
		}
	}

	private void lock(Lock target) throws Exception {
		switch (concurrentStrategy) {
		case NOTIFY:
			throw new Exception("200 Cannot control the cache");
		case WAIT:
			target.lock();
			break;
		case TIMEOUT:
			if (!target.tryLock(waitingLockTimeout, TimeUnit.MILLISECONDS)) {
				throw new Exception("200 Cannot acquire the cache lock within " + waitingLockTimeout + " ms");
			}
			break;
		}
	}

	public Set<Map.Entry<H, L>> entrySet() {
		return map.entrySet();
	}

	public int getCapacity() {
		return capacity;
	}

	public ConcurrentStrategy getConcurrentStrategy() {
		return concurrentStrategy;
	}

	public FullStrategy getFullStrategy() {
		return fullStrategy;
	}

	public void setCapacity(int capacity) {
		this.capacity = capacity;
	}

	public void setConcurrentStrategy(ConcurrentStrategy concurrentStrategy) {
		this.concurrentStrategy = concurrentStrategy;
	}

	public void setFullStrategy(FullStrategy fullStrategy) {
		this.fullStrategy = fullStrategy;
	}

	public long getWaitingLockTimeout() {
		return waitingLockTimeout;
	}

	public void setWaitingLockTimeout(long waitingLockTimeout) {
		this.waitingLockTimeout = waitingLockTimeout;
	}
}
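
For orientation, a minimal usage sketch of the class above. The demo class itself is my own illustration (not part of the cache); it assumes it lives in the same package so it can see the package-private strategy enums.

package creative.air.datastructure.map;

/**
 * Minimal usage sketch of HashMapCache (illustrative only).
 */
public class HashMapCacheDemo {
	public static void main(String[] args) throws Exception {
		HashMapCache<String, Integer> cache = new HashMapCache<String, Integer>(100);
		cache.setFullStrategy(HashMapCache.FullStrategy.NOTIFY); // throw once the capacity is exceeded
		cache.setConcurrentStrategy(HashMapCache.ConcurrentStrategy.TIMEOUT);
		cache.setWaitingLockTimeout(200); // wait up to 200 ms for the lock

		cache.put("answer", 42);
		System.out.println(cache.get("answer")); // 42
		cache.remove("answer");
		System.out.println(cache.get("answer")); // null
	}
}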

 

 

The HashMap implementation (AirHashMap):

 

package creative.air.datastructure.map;

import java.io.IOException;
import java.io.Serializable;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

/**
 * 
 * @author Eric Han feuyeux@gmail.com 16/09/2012
 * @since 0.0.1
 * @version 0.0.1
 */
public class AirHashMap<K, V> extends AbstractMap<K, V> implements Map<K, V>, Cloneable, Serializable {
	private static final long serialVersionUID = 3476735979928755996L;
	static final int DEFAULT_INITIAL_CAPACITY = 16;// default initial capacity
	static final int MAXIMUM_CAPACITY = 1 << 30;
	static final float DEFAULT_LOAD_FACTOR = 0.75f;// default load factor
	transient AirEntry<K, V>[] table;// the bucket array
	transient int size;
	int threshold;// resize threshold (capacity * loadFactor)
	final float loadFactor;
	transient int modCount;// structural modification count, used by fail-fast iterators

	public AirHashMap(int initialCapacity, float loadFactor) {
		if (initialCapacity < 0)
			throw new IllegalArgumentException("Illegal initial capacity: " + initialCapacity);
		if (initialCapacity > MAXIMUM_CAPACITY)
			initialCapacity = MAXIMUM_CAPACITY;
		if (loadFactor <= 0 || Float.isNaN(loadFactor))
			throw new IllegalArgumentException("Illegal load factor: " + loadFactor);

		// Find a power of 2 >= initialCapacity
		int capacity = 1;
		while (capacity < initialCapacity)
			capacity <<= 1;

		this.loadFactor = loadFactor;
		threshold = (int) (capacity * loadFactor);
		table = new AirEntry[capacity];
		init();
	}

	public AirHashMap(int initialCapacity) {
		this(initialCapacity, DEFAULT_LOAD_FACTOR);
	}

	public AirHashMap() {
		this.loadFactor = DEFAULT_LOAD_FACTOR;
		threshold = (int) (DEFAULT_INITIAL_CAPACITY * DEFAULT_LOAD_FACTOR);
		table = new AirEntry[DEFAULT_INITIAL_CAPACITY];
		init();
	}

	public AirHashMap(Map<? extends K, ? extends V> m) {
		this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
		putAllForCreate(m);
	}

	void init() {
	}

	static int hash(int h) {
		// This function ensures that hashCodes that differ only by
		// constant multiples at each bit position have a bounded
		// number of collisions (approximately 8 at default load factor).
		h ^= (h >>> 20) ^ (h >>> 12);
		return h ^ (h >>> 7) ^ (h >>> 4);
	}

	static int indexFor(int h, int length) {
		return h & (length - 1);
	}

	public int size() {
		return size;
	}

	public boolean isEmpty() {
		return size == 0;
	}

	public V get(Object key) {
		if (key == null)
			return getForNullKey();
		int hash = hash(key.hashCode());
		for (AirEntry<K, V> e = table[indexFor(hash, table.length)]; e != null; e = e.next) {
			Object k;
			if (e.hash == hash && ((k = e.key) == key || key.equals(k)))
				return e.value;
		}
		return null;
	}

	private V getForNullKey() {
		for (AirEntry<K, V> e = table[0]; e != null; e = e.next) {
			if (e.key == null)
				return e.value;
		}
		return null;
	}

	public boolean containsKey(Object key) {
		return getEntry(key) != null;
	}

	final AirEntry<K, V> getEntry(Object key) {
		int hash = (key == null) ? 0 : hash(key.hashCode());
		for (AirEntry<K, V> e = table[indexFor(hash, table.length)]; e != null; e = e.next) {
			Object k;
			if (e.hash == hash && ((k = e.key) == key || (key != null && key.equals(k))))
				return e;
		}
		return null;
	}

	public V put(K key, V value) {
		if (key == null)
			return putForNullKey(value);
		int hash = hash(key.hashCode());
		int i = indexFor(hash, table.length);
		for (AirEntry<K, V> e = table[i]; e != null; e = e.next) {
			Object k;
			if (e.hash == hash && ((k = e.key) == key || key.equals(k))) {
				V oldValue = e.value;
				e.value = value;
				e.recordAccess(this);
				return oldValue;
			}
		}

		modCount++;
		addEntry(hash, key, value, i);
		return null;
	}

	private V putForNullKey(V value) {
		for (AirEntry<K, V> e = table[0]; e != null; e = e.next) {
			if (e.key == null) {
				V oldValue = e.value;
				e.value = value;
				e.recordAccess(this);
				return oldValue;
			}
		}
		modCount++;
		addEntry(0, null, value, 0);
		return null;
	}

	private void putForCreate(K key, V value) {
		int hash = (key == null) ? 0 : hash(key.hashCode());
		int i = indexFor(hash, table.length);

		/**
		 * Look for preexisting entry for key. This will never happen for clone or deserialize. It will only happen for construction if the input Map is a
		 * sorted map whose ordering is inconsistent w/ equals.
		 */
		for (AirEntry<K, V> e = table[i]; e != null; e = e.next) {
			Object k;
			if (e.hash == hash && ((k = e.key) == key || (key != null && key.equals(k)))) {
				e.value = value;
				return;
			}
		}

		createEntry(hash, key, value, i);
	}

	private void putAllForCreate(Map<? extends K, ? extends V> m) {
		for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
			putForCreate(e.getKey(), e.getValue());
	}

	void resize(int newCapacity) {
		AirEntry[] oldTable = table;
		int oldCapacity = oldTable.length;
		if (oldCapacity == MAXIMUM_CAPACITY) {
			threshold = Integer.MAX_VALUE;
			return;
		}

		AirEntry[] newTable = new AirEntry[newCapacity];
		transfer(newTable);
		table = newTable;
		threshold = (int) (newCapacity * loadFactor);
	}

	void transfer(AirEntry[] newTable) {
		AirEntry[] src = table;
		int newCapacity = newTable.length;
		for (int j = 0; j < src.length; j++) {
			AirEntry<K, V> e = src[j];
			if (e != null) {
				src[j] = null;
				do {
					AirEntry<K, V> next = e.next;
					int i = indexFor(e.hash, newCapacity);
					e.next = newTable[i];
					newTable[i] = e;
					e = next;
				} while (e != null);
			}
		}
	}

	public void putAll(Map<? extends K, ? extends V> m) {
		int numKeysToBeAdded = m.size();
		if (numKeysToBeAdded == 0)
			return;

		if (numKeysToBeAdded > threshold) {
			int targetCapacity = (int) (numKeysToBeAdded / loadFactor + 1);
			if (targetCapacity > MAXIMUM_CAPACITY)
				targetCapacity = MAXIMUM_CAPACITY;
			int newCapacity = table.length;
			while (newCapacity < targetCapacity)
				newCapacity <<= 1;
			if (newCapacity > table.length)
				resize(newCapacity);
		}

		for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
			put(e.getKey(), e.getValue());
	}

	public V remove(Object key) {
		AirEntry<K, V> e = removeEntryForKey(key);
		return (e == null ? null : e.value);
	}

	final AirEntry<K, V> removeEntryForKey(Object key) {
		int hash = (key == null) ? 0 : hash(key.hashCode());
		int i = indexFor(hash, table.length);
		AirEntry<K, V> prev = table[i];
		AirEntry<K, V> e = prev;

		while (e != null) {
			AirEntry<K, V> next = e.next;
			Object k;
			if (e.hash == hash && ((k = e.key) == key || (key != null && key.equals(k)))) {
				modCount++;
				size--;
				if (prev == e)
					table[i] = next;
				else
					prev.next = next;
				e.recordRemoval(this);
				return e;
			}
			prev = e;
			e = next;
		}

		return e;
	}

	final AirEntry<K, V> removeMapping(Object o) {
		if (!(o instanceof Map.Entry))
			return null;

		Map.Entry<K, V> entry = (Map.Entry<K, V>) o;
		Object key = entry.getKey();
		int hash = (key == null) ? 0 : hash(key.hashCode());
		int i = indexFor(hash, table.length);
		AirEntry<K, V> prev = table[i];
		AirEntry<K, V> e = prev;

		while (e != null) {
			AirEntry<K, V> next = e.next;
			if (e.hash == hash && e.equals(entry)) {
				modCount++;
				size--;
				if (prev == e)
					table[i] = next;
				else
					prev.next = next;
				e.recordRemoval(this);
				return e;
			}
			prev = e;
			e = next;
		}

		return e;
	}

	public void clear() {
		modCount++;
		AirEntry[] tab = table;
		for (int i = 0; i < tab.length; i++)
			tab[i] = null;
		size = 0;
	}

	public boolean containsValue(Object value) {
		if (value == null)
			return containsNullValue();

		AirEntry[] tab = table;
		for (int i = 0; i < tab.length; i++)
			for (AirEntry e = tab[i]; e != null; e = e.next)
				if (value.equals(e.value))
					return true;
		return false;
	}

	private boolean containsNullValue() {
		AirEntry[] tab = table;
		for (int i = 0; i < tab.length; i++)
			for (AirEntry e = tab[i]; e != null; e = e.next)
				if (e.value == null)
					return true;
		return false;
	}

	public Object clone() {
		AirHashMap<K, V> result = null;
		try {
			result = (AirHashMap<K, V>) super.clone();
		} catch (CloneNotSupportedException e) {
			// assert false;
		}
		result.table = new AirEntry[table.length];
		result.entrySet = null;
		result.modCount = 0;
		result.size = 0;
		result.init();
		result.putAllForCreate(this);

		return result;
	}

	void addEntry(int hash, K key, V value, int bucketIndex) {
		AirEntry<K, V> e = table[bucketIndex];
		table[bucketIndex] = new AirEntry<K, V>(hash, key, value, e);
		if (size++ >= threshold)
			resize(2 * table.length);
	}

	void createEntry(int hash, K key, V value, int bucketIndex) {
		AirEntry<K, V> e = table[bucketIndex];
		table[bucketIndex] = new AirEntry<K, V>(hash, key, value, e);
		size++;
	}

	private final class ValueIterator extends HashIterator<V> {
		public V next() {
			return nextEntry().value;
		}
	}

	private final class KeyIterator extends HashIterator<K> {
		public K next() {
			return nextEntry().getKey();
		}
	}

	private final class EntryIterator extends HashIterator<Map.Entry<K, V>> {
		public Map.Entry<K, V> next() {
			return nextEntry();
		}
	}

	// Subclass overrides these to alter behavior of views' iterator() method
	Iterator<K> newKeyIterator() {
		return new KeyIterator();
	}

	Iterator<V> newValueIterator() {
		return new ValueIterator();
	}

	Iterator<Map.Entry<K, V>> newEntryIterator() {
		return new EntryIterator();
	}

	private transient Set<Map.Entry<K, V>> entrySet = null;

	// public Set<K> keySet() {
	// Set<K> ks = keySet;
	// return (ks != null ? ks : (keySet = new KeySet()));
	// }

	private final class KeySet extends AbstractSet<K> {
		public Iterator<K> iterator() {
			return newKeyIterator();
		}

		public int size() {
			return size;
		}

		public boolean contains(Object o) {
			return containsKey(o);
		}

		public boolean remove(Object o) {
			return AirHashMap.this.removeEntryForKey(o) != null;
		}

		public void clear() {
			AirHashMap.this.clear();
		}
	}

	// public Collection<V> values() {
	// Collection<V> vs = values;
	// return (vs != null ? vs : (values = new Values()));
	// }

	private final class Values extends AbstractCollection<V> {
		public Iterator<V> iterator() {
			return newValueIterator();
		}

		public int size() {
			return size;
		}

		public boolean contains(Object o) {
			return containsValue(o);
		}

		public void clear() {
			AirHashMap.this.clear();
		}
	}

	public Set<Map.Entry<K, V>> entrySet() {
		return entrySet0();
	}

	private Set<Map.Entry<K, V>> entrySet0() {
		Set<Map.Entry<K, V>> es = entrySet;
		return es != null ? es : (entrySet = new EntrySet());
	}

	private final class EntrySet extends AbstractSet<Map.Entry<K, V>> {
		public Iterator<Map.Entry<K, V>> iterator() {
			return newEntryIterator();
		}

		public boolean contains(Object o) {
			if (!(o instanceof Map.Entry))
				return false;
			Map.Entry<K, V> e = (Map.Entry<K, V>) o;
			AirEntry<K, V> candidate = getEntry(e.getKey());
			return candidate != null && candidate.equals(e);
		}

		public boolean remove(Object o) {
			return removeMapping(o) != null;
		}

		public int size() {
			return size;
		}

		public void clear() {
			AirHashMap.this.clear();
		}
	}

	private void writeObject(java.io.ObjectOutputStream s) throws IOException {
		Iterator<Map.Entry<K, V>> i = (size > 0) ? entrySet0().iterator() : null;

		// Write out the threshold, loadfactor, and any hidden stuff
		s.defaultWriteObject();

		// Write out number of buckets
		s.writeInt(table.length);

		// Write out size (number of Mappings)
		s.writeInt(size);

		// Write out keys and values (alternating)
		if (i != null) {
			while (i.hasNext()) {
				Map.Entry<K, V> e = i.next();
				s.writeObject(e.getKey());
				s.writeObject(e.getValue());
			}
		}
	}

	private void readObject(java.io.ObjectInputStream s) throws IOException, ClassNotFoundException {
		// Read in the threshold, loadfactor, and any hidden stuff
		s.defaultReadObject();

		// Read in number of buckets and allocate the bucket array;
		int numBuckets = s.readInt();
		table = new AirEntry[numBuckets];

		init(); // Give subclass a chance to do its thing.

		// Read in size (number of Mappings)
		int size = s.readInt();

		// Read the keys and values, and put the mappings in the AirHashMap
		for (int i = 0; i < size; i++) {
			K key = (K) s.readObject();
			V value = (V) s.readObject();
			putForCreate(key, value);
		}
	}

	// These methods are used when serializing HashSets
	int capacity() {
		return table.length;
	}

	float loadFactor() {
		return loadFactor;
	}

	// ====Entry====
	static class AirEntry<K, V> implements Map.Entry<K, V> {
		final K key;
		V value;
		AirEntry<K, V> next;
		final int hash;

		/**
		 * Creates new entry.
		 */
		AirEntry(int h, K k, V v, AirEntry<K, V> n) {
			value = v;
			next = n;
			key = k;
			hash = h;
		}

		public final K getKey() {
			return key;
		}

		public final V getValue() {
			return value;
		}

		public final V setValue(V newValue) {
			V oldValue = value;
			value = newValue;
			return oldValue;
		}

		public final boolean equals(Object o) {
			if (!(o instanceof Map.Entry))
				return false;
			Map.Entry e = (Map.Entry) o;
			Object k1 = getKey();
			Object k2 = e.getKey();
			if (k1 == k2 || (k1 != null && k1.equals(k2))) {
				Object v1 = getValue();
				Object v2 = e.getValue();
				if (v1 == v2 || (v1 != null && v1.equals(v2)))
					return true;
			}
			return false;
		}

		public final int hashCode() {
			return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode());
		}

		public final String toString() {
			return getKey() + "=" + getValue();
		}

		/**
		 * This method is invoked whenever the value in an entry is overwritten by an invocation of put(k,v) for a key k that's already in the AirHashMap.
		 */
		void recordAccess(AirHashMap<K, V> m) {
		}

		/**
		 * This method is invoked whenever the entry is removed from the table.
		 */
		void recordRemoval(AirHashMap<K, V> m) {
		}
	}

	// ====HashIterator====
	private abstract class HashIterator<E> implements Iterator<E> {
		AirEntry<K, V> next; // next entry to return
		int expectedModCount; // For fast-fail
		int index; // current slot
		AirEntry<K, V> current; // current entry

		HashIterator() {
			expectedModCount = modCount;
			if (size > 0) { // advance to first entry
				AirEntry[] t = table;
				while (index < t.length && (next = t[index++]) == null)
					;
			}
		}

		public final boolean hasNext() {
			return next != null;
		}

		final AirEntry<K, V> nextEntry() {
			if (modCount != expectedModCount)
				throw new ConcurrentModificationException();
			AirEntry<K, V> e = next;
			if (e == null)
				throw new NoSuchElementException();

			if ((next = e.next) == null) {
				AirEntry[] t = table;
				while (index < t.length && (next = t[index++]) == null)
					;
			}
			current = e;
			return e;
		}

		public void remove() {
			if (current == null)
				throw new IllegalStateException();
			if (modCount != expectedModCount)
				throw new ConcurrentModificationException();
			Object k = current.key;
			current = null;
			AirHashMap.this.removeEntryForKey(k);
			expectedModCount = modCount;
		}
	}

	public AirEntry<K, V>[] getTable() {
		return table;
	}
}
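
Because the table length is always a power of two, indexFor() can replace a modulo with a bit mask (h & (length - 1)), and hash() mixes the high bits down so keys whose hashCodes differ only in the upper bits still spread across buckets. A small illustrative sketch (my addition; it assumes a class in the same package, since hash() and indexFor() are package-private):

package creative.air.datastructure.map;

// Illustrates AirHashMap's bucket selection: supplemental hash + power-of-two mask.
public class BucketIndexDemo {
	public static void main(String[] args) {
		int tableLength = 16; // always a power of two in AirHashMap
		for (String key : new String[] { "Gateway", "Thomson", "Technicolor" }) {
			int h = AirHashMap.hash(key.hashCode());          // bit-mixing step
			int bucket = AirHashMap.indexFor(h, tableLength); // h & (tableLength - 1)
			System.out.println(key + " -> bucket " + bucket);
		}
	}
}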
 

 

 

Tests:

package creative.air.datastructure.map;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.junit.Assert;

import org.junit.Test;

/**
 * Test HashMap
 * 1 map: key-value pairs; hash: hashCode
 * 2 not thread-safe; null keys and values are allowed; how a null key is handled
 * 3 array + linked-list structure
 * 4 loadFactor, threshold, the resize mechanism, and cache design
 * 5 iteration
 * 
 * Test Cache
 * @author Eric Han feuyeux@gmail.com 06/09/2012
 * @since 0.0.1
 * @version 0.0.1
 */
public class HashMapTest {
	static final Logger logger = Logger.getLogger(HashMapTest.class.getName());

	@Test
	public void test2() throws Exception {
		HashMapCache<String, Integer> cacheMap = new HashMapCache<String, Integer>();
		cacheMap.clear();
		cacheMap.put("" + 1, null);
		cacheMap.put(null, 1);
		cacheMap.put(null, null);
		Assert.assertNull(cacheMap.get(null));
	}

	@Test
	public void test5() {
		int n = 0;
		final int maximum = 5000;
		HashMapCache<String, Integer> cacheMap = new HashMapCache<String, Integer>(maximum);
		HashMapCache<String, Integer> retrievingMap = new HashMapCache<String, Integer>(maximum, maximum / 10, 0.5f);

		boolean inputRight = true;
		while (n < maximum) {
			String s = "" + n;
			try {
				cacheMap.put(s, n);
				retrievingMap.put(s, n++);
			} catch (Exception e) {
				e.printStackTrace();
				inputRight = false;
				break;
			}
		}
		Assert.assertTrue(inputRight);
		Object[] r1 = iterate(cacheMap, Level.INFO);
		Object[] r2 = iterate(retrievingMap, Level.INFO);
		logger.log(Level.INFO, "default map iterating elapse:{0}(start={1},end={2})", r1);
		logger.log(Level.INFO, "customize map iterating elapse:{0}(start={1},end={2})", r2);
		Assert.assertTrue((Long) r1[0] >= (Long) r2[0]);
	}

	/**
	 * @param map the cache whose entries are iterated
	 * @param level the log level used for each entry
	 * @return { elapsed ms, start time, end time }
	 */
	private Object[] iterate(HashMapCache<String, Integer> map, Level level) {
		Iterator<Map.Entry<String, Integer>> iter = map.entrySet().iterator();
		long startTime = System.currentTimeMillis();
		while (iter.hasNext()) {
			Map.Entry<String, Integer> entry = iter.next();
			String key = entry.getKey();
			Integer val = entry.getValue();
			logger.log(level, "{0}:{1}", new Object[] { key, val });
		}
		long endTime = System.currentTimeMillis();
		return new Object[] { endTime - startTime, startTime, endTime };
	}
	
	@Test
	public void testPutAll(){
		HashMap<String,String> map=new HashMap<String,String>();
		HashMap<String,HashMap<String,String>> map2=new HashMap<String,HashMap<String,String>>();
		
		map.put("Gateway", "Thomson");
		map2.put("patner", map);
		map.put("Gateway", "Technicolor");
		
		Iterator<Map.Entry<String,HashMap<String,String>>> iter = map2.entrySet().iterator();
		while (iter.hasNext()) {
			Map.Entry<String,HashMap<String,String>> entry = iter.next();
			String key = entry.getKey();
			HashMap<String,String> val = entry.getValue();
			
			Iterator<Map.Entry<String,String>> iter2 = val.entrySet().iterator();
			while (iter2.hasNext()) {
				Map.Entry<String,String> entry2 = iter2.next();
				String key2 = entry2.getKey();
				String val2 = entry2.getValue();
				Assert.assertEquals("Technicolor", val2);
				logger.log(Level.INFO, "{0}:{1}", new Object[] { key2, val2 });
			}
			logger.log(Level.INFO, "{0}:{1}", new Object[] { key, val });
		}
	}
}
 
Comments:
#16 marshan 2012-09-26
Kilin wrote:
This way of using the lock cannot guarantee thread safety.

Please give a counterexample, thanks.
#15 marshan 2012-09-26
montya wrote:
montya wrote:
get should not need any locking.

Try CopyOnWriteArrayList. I'll write one myself when I have time.

Correction: it should be ConcurrentHashMap.


After thinking it over, get does need the lock: if another thread removes an entry while get is in progress there will be a problem.
Read-read access is not mutually exclusive; read-write access is.
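(A rough sketch of the ConcurrentHashMap route suggested above, illustrative only and not the implementation from this post; note that ConcurrentHashMap rejects null keys and values, and the capacity check below is not atomic. The ConcurrentCache class name is made up.)

package creative.air.datastructure.map;

import java.util.concurrent.ConcurrentHashMap;

// Sketch of a ConcurrentHashMap-backed cache: no hand-rolled locking is needed
// because ConcurrentHashMap already provides thread-safe put/get/remove.
public class ConcurrentCache<K, V> {
	private final ConcurrentHashMap<K, V> map = new ConcurrentHashMap<K, V>();
	private final int capacity;

	public ConcurrentCache(int capacity) {
		this.capacity = capacity;
	}

	public V put(K key, V value) {
		// check-then-act is only approximate; a stricter bound would need extra coordination
		if (map.size() >= capacity && !map.containsKey(key)) {
			return null; // full: discard, the simplest strategy
		}
		return map.put(key, value);
	}

	public V get(K key) {
		return map.get(key);
	}

	public void remove(K key) {
		map.remove(key);
	}
}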
#14 marshan 2012-09-26
m_lixn wrote:
Once this cache runs in a cluster it will break. I suggest syncing the information in the other containers whenever the cache is refreshed.

Clustering is out of scope for the first stage. Thanks.
#13 marshan 2012-09-25
war0071 wrote:
A good chance to study the ReentrantReadWriteLock class; nicely written! But public Iterator<Map.Entry<H, L>> iterate() { 
        return map.entrySet().iterator(); 
    } 

will cause trouble. You are using a HashMap implementation whose iterator is fail-fast, so the iterator this method returns is not thread-safe: when another thread modifies the map the iterator becomes invalid, and whoever is still using it will run into problems.


I remember you raising this point. I'll find time to keep working on this utility class; feel free to share your own version. You know what I'm after, and I'm sure you can implement it better.
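(One way to sidestep the fail-fast problem, sketched as an extra method that could be added to HashMapCache; the iterateSnapshot name is hypothetical. The idea: copy the entries into a temporary map while holding the read lock, then hand out an iterator over the snapshot, so later modifications cannot invalidate it.)

	// Sketch: return an iterator over a snapshot taken under the read lock,
	// so callers are not affected by the fail-fast behaviour of the live map.
	public Iterator<Map.Entry<H, L>> iterateSnapshot() {
		try {
			lock(rLock);
			try {
				Map<H, L> copy = new java.util.HashMap<H, L>(); // snapshot taken under the read lock
				for (Map.Entry<H, L> e : map.entrySet()) {
					copy.put(e.getKey(), e.getValue());
				}
				return copy.entrySet().iterator();
			} finally {
				rLock.unlock();
			}
		} catch (Exception e) {
			logger.log(Level.SEVERE, "iterate error", e);
			return null;
		}
	}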
#12 war0071 2012-09-21
A good chance to study the ReentrantReadWriteLock class; nicely written! But public Iterator<Map.Entry<H, L>> iterate() { 
        return map.entrySet().iterator(); 
    } 

will cause trouble. You are using a HashMap implementation whose iterator is fail-fast, so the iterator this method returns is not thread-safe: when another thread modifies the map the iterator becomes invalid, and whoever is still using it will run into problems.
#11 marshan 2012-09-07
montya wrote:
montya wrote:
get should not need any locking.


Try CopyOnWriteArrayList. I'll write one myself when I have time.

Correction: it should be ConcurrentHashMap.


The point here is to practice with HashMap. JDK 1.6 already ships plenty of ready-made classes, and I have studied them.
#10 marshan 2012-09-07
thihy wrote:
Just use Google's Guava.

My goal with this post is to deepen my understanding of the JDK. I'll take your suggestion and have a look when I get time.
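(For reference, the Guava route suggested above looks roughly like this; the bound and expiry settings are illustrative.)

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class GuavaCacheDemo {
	public static void main(String[] args) {
		// Bounded cache with automatic eviction; no hand-rolled locking needed.
		Cache<String, Integer> cache = CacheBuilder.newBuilder()
				.maximumSize(5000)
				.expireAfterWrite(10, TimeUnit.MINUTES)
				.build();

		cache.put("answer", 42);
		System.out.println(cache.getIfPresent("answer"));  // 42
		System.out.println(cache.getIfPresent("missing")); // null
	}
}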
#9 marshan 2012-09-07
rockyzheng wrote:
Can you tell me how this example differs from the one in the JDK API?

You'll see once it's finished; it's a work in progress.
#8 marshan 2012-09-07
Kilin wrote:
This way of using the lock cannot guarantee thread safety.

Can you give a concrete solution?
#7 montya 2012-09-07
montya wrote:
get should not need any locking.


Try CopyOnWriteArrayList. I'll write one myself when I have time.

Correction: it should be ConcurrentHashMap.
#6 Kilin 2012-09-07
This way of using the lock cannot guarantee thread safety.
#5 m_lixn 2012-09-07
Once this cache runs in a cluster it will break. I suggest syncing the information in the other containers whenever the cache is refreshed.
#4 thihy 2012-09-07
Just use Google's Guava.
#3 rockyzheng 2012-09-07
Can you tell me how this example differs from the one in the JDK API?
#2 montya 2012-09-07
get should not need any locking.


Try CopyOnWriteArrayList. I'll write one myself when I have time.
#1 xihuyu2000 2012-09-07
Nice post, bookmarked.
