Friday, 3 November 2023

Implement a sharded reentrant lock

A sharded reentrant lock is useful when a large number of resources must be accessed concurrently by multiple threads and you want to minimize lock contention while still allowing parallel access. Instead of a single global lock, the key space is partitioned across a fixed array of ReentrantLocks, and each key is mapped to its shard by hashing: threads working on keys in different shards never block each other.

ShardedReentrantLock.java

package com.sample.app.concurrency;

import java.util.concurrent.locks.ReentrantLock;

public class ShardedReentrantLock {
	private final ReentrantLock[] locks;
	private final int mask;

	public ShardedReentrantLock(int count) {
		// The mask trick below (hash & (count - 1)) only spreads keys evenly
		// when count is a power of two, so reject anything else up front.
		if (count <= 0 || Integer.bitCount(count) != 1) {
			throw new IllegalArgumentException("count must be a positive power of two, got: " + count);
		}
		this.mask = count - 1;

		locks = new ReentrantLock[count];
		for (int i = 0; i < count; i++) {
			locks[i] = new ReentrantLock();
		}
	}

	public ReentrantLock get(String key) {
		// AND-ing with the mask keeps the index in [0, count) and non-negative,
		// even when hashCode() is negative.
		return locks[key.hashCode() & mask];
	}
}
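
Before wiring this into a cache, a quick sanity check (a minimal sketch, not part of the original code; "user_42" is an arbitrary example key): the same key always maps to the same shard, and the returned lock is reentrant, so a thread that already holds it can acquire it again, as long as every lock() is paired with an unlock().

ShardedReentrantLock shardedLock = new ShardedReentrantLock(16);

ReentrantLock first = shardedLock.get("user_42");
ReentrantLock second = shardedLock.get("user_42");
System.out.println(first == second); // true: identical key, identical shard

first.lock();
first.lock(); // reentrant: the owning thread may lock again without blocking
try {
	System.out.println(first.getHoldCount()); // 2
} finally {
	first.unlock();
	first.unlock(); // one unlock per lock call
}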

Let's consider a scenario where you want a thread-safe in-memory cache with minimal lock contention when accessing entries. In this example we'll build a simple cache on top of ShardedReentrantLock: every operation first acquires the shard lock for its key, so operations on the same key are serialized while operations on keys in different shards run in parallel. The backing map is a ConcurrentHashMap, because the shard locks alone do not make a shared plain HashMap safe.

ShardedCache.java

package com.sample.app.concurrency;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

public class ShardedCache<K, V> {
	private final ShardedReentrantLock lock;
	private final Map<K, V> cache;

	public ShardedCache(int shardCount) {
		this.lock = new ShardedReentrantLock(shardCount);
		// A plain HashMap would not be safe here: threads holding *different*
		// shard locks can still mutate the map concurrently. The shard lock only
		// serializes operations on keys that map to the same shard, so the
		// shared map itself must be thread-safe.
		this.cache = new ConcurrentHashMap<>();
	}

	public V get(K key) {
		ReentrantLock shardLock = lock.get(key.toString());
		shardLock.lock();
		try {
			System.out.printf("%s read by %s\n", key, Thread.currentThread().getName());
			return cache.get(key);
		} finally {
			shardLock.unlock();
		}
	}

	public void put(K key, V value) {
		ReentrantLock shardLock = lock.get(key.toString());
		shardLock.lock();
		try {
			System.out.printf("%s added/updated by %s\n", key, Thread.currentThread().getName());
			cache.put(key, value);
		} finally {
			shardLock.unlock();
		}
	}

	public void remove(K key) {
		ReentrantLock shardLock = lock.get(key.toString());
		shardLock.lock();
		try {
			System.out.printf("%s removed by %s\n", key, Thread.currentThread().getName());
			cache.remove(key);
		} finally {
			shardLock.unlock();
		}
	}
}
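
One thing the shard lock provides that the ConcurrentHashMap alone does not is atomicity for compound operations on a single key. As a sketch (merge here is a hypothetical method, not part of the class above), a read-modify-write could be added like this:

	// Hypothetical addition to ShardedCache: an atomic read-modify-write on one key.
	// Holding the shard lock makes the get-then-put sequence atomic for this key;
	// separate get() and put() calls would leave a window for lost updates.
	public void merge(K key, V value, java.util.function.BinaryOperator<V> remapping) {
		ReentrantLock shardLock = lock.get(key.toString());
		shardLock.lock();
		try {
			V current = cache.get(key);
			cache.put(key, current == null ? value : remapping.apply(current, value));
		} finally {
			shardLock.unlock();
		}
	}

With ShardedCache<String, String>, a call like merge(key, "1", (a, b) -> a + b) then behaves as an atomic per-key string append. (ConcurrentHashMap has its own merge(), but this version stays atomic even if you later add multi-step logic inside the locked section.)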

Let's test the application.

ShardedCacheDemo.java

package com.sample.app;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import com.sample.app.concurrency.ShardedCache;

public class ShardedCacheDemo {

	private static final int SHARD_COUNT = 16;
	private static final ShardedCache<String, String> SHARED_CACHE = new ShardedCache<>(SHARD_COUNT);

	private enum CacheTaskType {
		READ, UPDATE, REMOVE
	}

	private static class CacheTask implements Runnable {
		CacheTaskType cacheTaskType;
		String key;
		String value;

		public CacheTask(CacheTaskType cacheTaskType, String key) {
			this(cacheTaskType, key, null);
		}

		public CacheTask(CacheTaskType cacheTaskType, String key, String value) {
			this.cacheTaskType = cacheTaskType;
			this.key = key;
			this.value = value;
		}

		@Override
		public void run() {
			if (CacheTaskType.READ == cacheTaskType) {
				SHARED_CACHE.get(key);
				return;
			}

			if (CacheTaskType.UPDATE == cacheTaskType) {
				SHARED_CACHE.put(key, value);
				return;
			}

			if (CacheTaskType.REMOVE == cacheTaskType) {
				SHARED_CACHE.remove(key);
				return;
			}

		}

	}

	public static void main(String[] args) {

		// Fixed pool of SHARD_COUNT worker threads (equivalent to
		// Executors.newFixedThreadPool(SHARD_COUNT)); extra tasks queue up until a thread is free.
		ExecutorService executor = new ThreadPoolExecutor(SHARD_COUNT, SHARD_COUNT, 0L, TimeUnit.MILLISECONDS,
				new LinkedBlockingQueue<Runnable>());

		for (int i = 0; i < 100; i++) {
			String key = "key_" + i;
			String value = "value_" + i;

			if (i % 3 == 0) {
				CacheTask cacheTask = new CacheTask(CacheTaskType.READ, key);
				executor.execute(cacheTask);
			} else if (i % 3 == 1) {
				CacheTask cacheTask = new CacheTask(CacheTaskType.UPDATE, key, value);
				executor.execute(cacheTask);
			} else {
				CacheTask cacheTask = new CacheTask(CacheTaskType.REMOVE, key);
				executor.execute(cacheTask);
			}

		}

		// shutdown() stops accepting new tasks; already-submitted tasks still run to completion.
		executor.shutdown();

	}

}

Sample Output (thread scheduling is nondeterministic, so the interleaving differs from run to run)

key_0 read by pool-1-thread-1
key_10 added/updated by pool-1-thread-11
key_9 read by pool-1-thread-10
key_8 removed by pool-1-thread-9
key_7 added/updated by pool-1-thread-8
key_6 read by pool-1-thread-7
key_5 removed by pool-1-thread-6
key_4 added/updated by pool-1-thread-5
key_3 read by pool-1-thread-4
key_2 removed by pool-1-thread-3
key_1 added/updated by pool-1-thread-2
key_13 added/updated by pool-1-thread-14
key_14 removed by pool-1-thread-15
key_15 read by pool-1-thread-16
key_16 added/updated by pool-1-thread-1
key_17 removed by pool-1-thread-11
key_21 read by pool-1-thread-7
key_18 read by pool-1-thread-10
key_20 removed by pool-1-thread-8
key_19 added/updated by pool-1-thread-9
key_11 removed by pool-1-thread-12
key_31 added/updated by pool-1-thread-11
key_29 removed by pool-1-thread-16
key_32 removed by pool-1-thread-7
key_28 added/updated by pool-1-thread-15
key_27 read by pool-1-thread-14
key_30 read by pool-1-thread-1
key_26 removed by pool-1-thread-2
key_25 added/updated by pool-1-thread-3
key_24 read by pool-1-thread-4
key_12 read by pool-1-thread-13
key_35 removed by pool-1-thread-9
key_36 read by pool-1-thread-12
key_43 added/updated by pool-1-thread-2
key_37 added/updated by pool-1-thread-11
key_41 removed by pool-1-thread-14
key_42 read by pool-1-thread-1
key_38 removed by pool-1-thread-16
key_40 added/updated by pool-1-thread-15
key_39 read by pool-1-thread-7
key_22 added/updated by pool-1-thread-6
key_51 read by pool-1-thread-14
key_54 read by pool-1-thread-15
key_49 added/updated by pool-1-thread-2
key_53 removed by pool-1-thread-16
key_52 added/updated by pool-1-thread-1
key_50 removed by pool-1-thread-11
key_48 read by pool-1-thread-12
key_47 removed by pool-1-thread-9
key_46 added/updated by pool-1-thread-13
key_23 removed by pool-1-thread-5
key_57 read by pool-1-thread-14
key_65 removed by pool-1-thread-13
key_58 added/updated by pool-1-thread-15
key_64 added/updated by pool-1-thread-9
key_63 read by pool-1-thread-12
key_59 removed by pool-1-thread-2
key_62 removed by pool-1-thread-11
key_61 added/updated by pool-1-thread-1
key_74 removed by pool-1-thread-1
key_60 read by pool-1-thread-16
key_33 read by pool-1-thread-10
key_76 added/updated by pool-1-thread-16
key_71 removed by pool-1-thread-12
key_75 read by pool-1-thread-1
key_72 read by pool-1-thread-2
key_73 added/updated by pool-1-thread-11
key_70 added/updated by pool-1-thread-9
key_69 read by pool-1-thread-15
key_68 removed by pool-1-thread-13
key_34 added/updated by pool-1-thread-8
key_85 added/updated by pool-1-thread-13
key_79 added/updated by pool-1-thread-12
key_84 read by pool-1-thread-15
key_83 removed by pool-1-thread-9
key_81 read by pool-1-thread-2
key_82 added/updated by pool-1-thread-11
key_80 removed by pool-1-thread-1
key_77 removed by pool-1-thread-10
key_91 added/updated by pool-1-thread-2
key_93 read by pool-1-thread-1
key_92 removed by pool-1-thread-11
key_90 read by pool-1-thread-9
key_87 read by pool-1-thread-13
key_86 removed by pool-1-thread-8
key_45 read by pool-1-thread-4
key_97 added/updated by pool-1-thread-11
key_98 removed by pool-1-thread-9
key_96 read by pool-1-thread-1
key_95 removed by pool-1-thread-2
key_94 added/updated by pool-1-thread-10
key_44 removed by pool-1-thread-3
key_56 removed by pool-1-thread-6
key_55 added/updated by pool-1-thread-7
key_67 added/updated by pool-1-thread-14
key_66 read by pool-1-thread-5
key_78 read by pool-1-thread-16
key_88 added/updated by pool-1-thread-12
key_89 removed by pool-1-thread-15
key_99 read by pool-1-thread-13
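
To see the contention benefit directly, here is a rough micro-benchmark sketch (LockContentionDemo is a hypothetical class, not part of the original post, and timings are illustrative and vary by machine). Sixteen threads repeatedly lock and unlock: first all through one global ReentrantLock, then through the sharded lock, where different keys mostly land on different shards.

LockContentionDemo.java

package com.sample.app;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

import com.sample.app.concurrency.ShardedReentrantLock;

public class LockContentionDemo {

	private static final int THREADS = 16;
	private static final int ITERATIONS = 100_000;

	// Runs THREADS workers, each locking/unlocking on its own key, and returns elapsed millis.
	private static long run(Function<String, ReentrantLock> lockFor) throws InterruptedException {
		ExecutorService pool = Executors.newFixedThreadPool(THREADS);
		CountDownLatch done = new CountDownLatch(THREADS);
		long start = System.nanoTime();
		for (int t = 0; t < THREADS; t++) {
			String key = "key_" + t;
			pool.execute(() -> {
				for (int i = 0; i < ITERATIONS; i++) {
					ReentrantLock lock = lockFor.apply(key);
					lock.lock();
					try {
						// empty critical section: we are measuring pure lock overhead
					} finally {
						lock.unlock();
					}
				}
				done.countDown();
			});
		}
		done.await();
		pool.shutdown();
		return TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
	}

	public static void main(String[] args) throws InterruptedException {
		ReentrantLock global = new ReentrantLock();
		ShardedReentrantLock sharded = new ShardedReentrantLock(16);

		System.out.println("single lock : " + run(key -> global) + " ms");
		System.out.println("sharded lock: " + run(sharded::get) + " ms");
	}
}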
