ZooKeeper Notes: Implementing Distributed Locks with zk


 

1. Overview of Distributed Locks

Java provides a family of locks built on the AQS framework, but when a piece of code or a resource must be used mutually exclusively across multiple machines in a cluster, these single-JVM locks are of no use; a distributed lock is needed to coordinate the machines. Distributed locks have many implementations, based on Redis, on a database, and so on. This post discusses implementing one on top of ZooKeeper (zk).

Disclaimer: the distributed locks below are implementations I worked out from ZooKeeper's features while learning it. They do not represent authoritative industry practice and are offered only as small sparks struck from the collision of different ideas; corrections are very welcome.

2. A Non-Reentrant Distributed Lock (Herd Effect, Unfair)

The easiest approach to think of is to agree that creating a certain path, say /zk-lock/foo, is the condition for grabbing the lock. Every machine in the cluster tries to create this ephemeral node, and whoever succeeds holds the lock. Each machine that fails puts itself to sleep, but first adds a watcher on /zk-lock/foo; when the node is deleted, the watcher wakes the machine up to contend for the lock again. This repeats until every thread has held the lock.

As for the machine that grabbed the lock: it runs its business logic and whatever else it needs to do, then calls unlock to release the lock. Releasing here simply means deleting /zk-lock/foo. As mentioned, every machine that failed to grab the lock has a watcher on that node, so deleting /zk-lock/foo in effect wakes the sleeping machines so they can contend for the lock again.

Flowchart of the above process: [flowchart image not reproduced]
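
Before the full implementation, here is the core contract in isolation (a sketch only; /zk-lock/foo is the agreed path and zooKeeper is assumed to be a connected client):

// Whoever manages to create the ephemeral node owns the lock
try {
	zooKeeper.create("/zk-lock/foo", new byte[0],
			ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
	// lock acquired: run the critical section, then delete the node to release
} catch (KeeperException.NodeExistsException e) {
	// somebody else holds it: watch /zk-lock/foo and sleep until it is deleted
}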

Now the code. First, the interface definition for the distributed lock:

package cc11001100.zookeeper.lock;

/**
 * @author CC11001100
 */
public interface DistributeLock {

	/**
	 * Acquire the lock, blocking until it is held
	 */
	void lock() throws InterruptedException;

	/**
	 * Release the lock
	 */
	void unlock();

	/**
	 * Release the resources this distributed lock depends on, e.g. the ZooKeeper connection
	 */
	void releaseResource();

}
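
A typical call pattern against this interface looks like the following (a sketch with exception handling elided; the concrete class is the one implemented next):

DistributeLock lock = new NotReentrantDistributeLockWithHerdEffect("/zk-lock", "foo");
try {
	lock.lock();
	// critical section: code that must run on only one machine at a time
	lock.unlock();
} finally {
	lock.releaseResource();
}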

An implementation of the lock following the flowchart above:

package cc11001100.zookeeper.lock;

import cc11001100.zookeeper.utils.ZooKeeperUtil;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.UUID;

/**
 * Non-reentrant distributed lock with the herd effect (an unfair lock)
 *
 * @author CC11001100
 */
public class NotReentrantDistributeLockWithHerdEffect implements DistributeLock {

	private final Thread currentThread;

	private String lockBasePath;
	private String lockName;
	private String lockFullPath;

	private ZooKeeper zooKeeper;
	private String myId;
	private String myName;

	public NotReentrantDistributeLockWithHerdEffect(String lockBasePath, String lockName) throws IOException {
		this.lockBasePath = lockBasePath;
		this.lockName = lockName;
		this.lockFullPath = this.lockBasePath + "/" + lockName;
		this.zooKeeper = ZooKeeperUtil.getZooKeeper();
		this.currentThread = Thread.currentThread();
		this.myId = UUID.randomUUID().toString();
		this.myName = Thread.currentThread().getName();
		createLockBasePath();
	}

	private void createLockBasePath() {
		try {
			// Ensure the parent path exists; if two clients race here, the loser's
			// NodeExistsException is swallowed by the catch below, which is harmless
			if (zooKeeper.exists(lockBasePath, null) == null) {
				zooKeeper.create(lockBasePath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
			}
		} catch (KeeperException | InterruptedException ignored) {
		}
	}

	@Override
	public void lock() throws InterruptedException {
		try {
			zooKeeper.create(lockFullPath, myId.getBytes(StandardCharsets.UTF_8), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
			System.out.println(myName + ": get lock");
		} catch (KeeperException.NodeExistsException e) {
			// The node already exists, so watch it and contend again once it is deleted.
			// Note: exists() returns null for a missing node rather than throwing
			// NoNodeException, so the race is handled by checking the returned Stat.
			try {
				Stat stat = zooKeeper.exists(lockFullPath, event -> {
					synchronized (currentThread) {
						currentThread.notify();
						System.out.println(myName + ": wake");
					}
				});
				if (stat == null) {
					// In the blink of an eye between our failed create and the exists
					// call, the holder released the lock: contend again immediately
					lock();
					return;
				}
			} catch (KeeperException | InterruptedException e1) {
				e1.printStackTrace();
			}
			// Caveat: a deletion between the exists() call above and the wait() below can
			// be missed; a production version would guard this with a flag or latch
			synchronized (currentThread) {
				currentThread.wait();
			}
			lock();
		} catch (KeeperException | InterruptedException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void unlock() {
		try {
			byte[] nodeBytes = zooKeeper.getData(lockFullPath, false, null);
			String currentHoldLockNodeId = new String(nodeBytes, StandardCharsets.UTF_8);
			// Only the current holder of the lock (identified by myId) may delete the node
			if (myId.equalsIgnoreCase(currentHoldLockNodeId)) {
				zooKeeper.delete(lockFullPath, -1);
			}
		} catch (KeeperException | InterruptedException e) {
			e.printStackTrace();
		}
		System.out.println(myName + ": releaseResource lock");
	}

	@Override
	public void releaseResource() {
		try {
			// Close the ZooKeeper connection
			zooKeeper.close();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

}
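
All three lock classes in this post obtain their client from a ZooKeeperUtil helper that is not shown. A minimal sketch of what it might look like, assuming a ZooKeeper server at 127.0.0.1:2181 (the connect string and session timeout are my assumptions, not from the original code):

package cc11001100.zookeeper.utils;

import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;

import java.io.IOException;
import java.util.concurrent.CountDownLatch;

/**
 * Minimal ZooKeeper connection helper (a sketch; not part of the original post)
 */
public class ZooKeeperUtil {

	// Assumed connection parameters; adjust for your environment
	private static final String CONNECT_STRING = "127.0.0.1:2181";
	private static final int SESSION_TIMEOUT_MS = 30000;

	public static ZooKeeper getZooKeeper() throws IOException {
		CountDownLatch connected = new CountDownLatch(1);
		ZooKeeper zooKeeper = new ZooKeeper(CONNECT_STRING, SESSION_TIMEOUT_MS, event -> {
			// hand the client out only once the session is actually established
			if (event.getState() == Watcher.Event.KeeperState.SyncConnected) {
				connected.countDown();
			}
		});
		try {
			connected.await();
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}
		return zooKeeper;
	}

}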

The test code:

package cc11001100.zookeeper.lock;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * @author CC11001100
 */
public class NotReentrantDistributeLockWithHerdEffectTest {

	private static String now() {
		return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
	}

	public static void main(String[] args) {
		int nodeNum = 10;
		for (int i = 0; i < nodeNum; i++) {
			new Thread(() -> {
				DistributeLock lock = null;
				try {
					lock = new NotReentrantDistributeLockWithHerdEffect("/zk-lock", "foo");
					lock.lock();
					String myName = Thread.currentThread().getName();
					System.out.println(myName + ": hold lock, now=" + now());
					TimeUnit.SECONDS.sleep(3);
					lock.unlock();
				} catch (IOException | InterruptedException e) {
					e.printStackTrace();
				} finally {
					if (lock != null) {
						lock.releaseResource();
					}
				}
			}, "thread-name-" + i).start();
		}

//		output:
//		thread-name-8: get lock
//		thread-name-8: hold lock, now=2019-01-14 13:31:48
//		thread-name-8: releaseResource lock
//		thread-name-2: wake
//		thread-name-5: wake
//		thread-name-3: wake
//		thread-name-1: wake
//		thread-name-4: wake
//		thread-name-9: wake
//		thread-name-0: wake
//		thread-name-7: wake
//		thread-name-6: wake
//		thread-name-2: get lock
//		thread-name-2: hold lock, now=2019-01-14 13:31:51
//		thread-name-7: wake
//		thread-name-2: releaseResource lock
//		thread-name-6: wake
//		thread-name-0: wake
//		thread-name-9: wake
//		thread-name-1: wake
//		thread-name-3: wake
//		thread-name-5: wake
//		thread-name-4: wake
//		thread-name-0: get lock
//		thread-name-0: hold lock, now=2019-01-14 13:31:54
//		thread-name-6: wake
//		thread-name-1: wake
//		thread-name-7: wake
//		thread-name-4: wake
//		thread-name-3: wake
//		thread-name-5: wake
//		thread-name-0: releaseResource lock
//		thread-name-9: wake
//		thread-name-1: get lock
//		thread-name-1: hold lock, now=2019-01-14 13:31:57
//		thread-name-9: wake
//		thread-name-6: wake
//		thread-name-7: wake
//		thread-name-3: wake
//		thread-name-5: wake
//		thread-name-4: wake
//		thread-name-1: releaseResource lock
//		thread-name-9: get lock
//		thread-name-9: hold lock, now=2019-01-14 13:32:00
//		thread-name-7: wake
//		thread-name-4: wake
//		thread-name-5: wake
//		thread-name-3: wake
//		thread-name-9: releaseResource lock
//		thread-name-6: wake
//		thread-name-3: get lock
//		thread-name-3: hold lock, now=2019-01-14 13:32:03
//		thread-name-6: wake
//		thread-name-5: wake
//		thread-name-4: wake
//		thread-name-3: releaseResource lock
//		thread-name-7: wake
//		thread-name-6: get lock
//		thread-name-6: hold lock, now=2019-01-14 13:32:06
//		thread-name-7: wake
//		thread-name-6: releaseResource lock
//		thread-name-4: wake
//		thread-name-5: wake
//		thread-name-7: get lock
//		thread-name-7: hold lock, now=2019-01-14 13:32:09
//		thread-name-4: wake
//		thread-name-7: releaseResource lock
//		thread-name-5: wake
//		thread-name-5: get lock
//		thread-name-5: hold lock, now=2019-01-14 13:32:12
//		thread-name-5: releaseResource lock
//		thread-name-4: wake
//		thread-name-4: get lock
//		thread-name-4: hold lock, now=2019-01-14 13:32:15
//		thread-name-4: releaseResource lock

	}

}

 

3. A Non-Reentrant Distributed Lock (No Herd Effect, Fair)

The code above implements the basics of a lock, but it has a rather fatal problem: when the lock is released, every other machine is woken up, yet in the end only one of them grabs the lock and the rest have to go back to sleep. When there are many machines, you may wake up dozens of them only for a single one to win (contention on par with grabbing train tickets on 12306...). Then again, that is exactly what an unfair lock is: everyone scrambles together, otherwise where would the unfairness be? This phenomenon is known as the herd effect.

This problem can be worked around by borrowing from the CLH queue in Java's AQS framework: make the machines queue up. Every machine that comes for the lock takes a number, its ticket. Tickets are ordered and increase by 1 each time, so whoever releases the lock only needs to wake the first machine behind it. Concretely, right after taking its ticket, each machine adds a watcher on the ticket of the machine in front of it; when the machine in front finishes its business logic and releases the lock, the watcher wakes us up. If there is nobody in front of us, it is already our turn and we hold the lock directly. As for finding the ticket in front of us: since tickets increase by 1 each time, it can be computed from our own ticket, with no need to fetch all the children of the parent node. (A caveat: this assumes the sequence numbers never skip a value. ZooKeeper derives the suffix from the parent znode's child version, which deletions also bump, so gaps can appear under churn; the standard recipe of listing the children and watching the largest node smaller than one's own is more robust, but the ticket-minus-1 computation keeps this post simple.) As for issuing the numbers, ZooKeeper's ephemeral sequential nodes guarantee that each number handed out is ordered, and also release the lock automatically if the machine holding it crashes unexpectedly.

Flowchart of the above process: [flowchart image not reproduced]
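
To make the naming concrete, here is a sketch of how the path returned for a sequential ephemeral node turns into a ticket and a predecessor path (the prefix bar and base path /zk-lock are simply the values used in the test below):

// create() returns the actual path, e.g. /zk-lock/bar0000000172
String path = zooKeeper.create("/zk-lock/bar", new byte[0],
		ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);

// strip the directory and the prefix to get the ticket number, here 172
int myTicket = Integer.parseInt(
		path.substring(path.lastIndexOf("/") + 1).replace("bar", ""));

// the predecessor is the same prefix with the 10-digit zero-padded (ticket - 1)
String previousNode = String.format("/zk-lock/bar%010d", myTicket - 1);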

The code:

package cc11001100.zookeeper.lock;

import cc11001100.zookeeper.utils.ZooKeeperUtil;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;

import java.io.IOException;

/**
 * Non-reentrant lock without the herd effect (a fair lock)
 *
 * @author CC11001100
 */
public class NotReentrantDistributeLock implements DistributeLock {

	private final Thread currentThread;

	private String lockBasePath;
	private String lockPrefix;
	private String lockFullPath;

	private ZooKeeper zooKeeper;
	private String myName;
	private int myTicket;

	public NotReentrantDistributeLock(String lockBasePath, String lockPrefix) throws IOException {
		this.lockBasePath = lockBasePath;
		this.lockPrefix = lockPrefix;
		this.lockFullPath = lockBasePath + "/" + lockPrefix;
		this.zooKeeper = ZooKeeperUtil.getZooKeeper();
		this.currentThread = Thread.currentThread();
		this.myName = currentThread.getName();
		createLockBasePath();
	}

	private void createLockBasePath() {
		try {
			if (zooKeeper.exists(lockBasePath, null) == null) {
				zooKeeper.create(lockBasePath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
			}
		} catch (KeeperException | InterruptedException ignored) {
		}
	}

	@Override
	public void lock() throws InterruptedException {
		log("begin get lock");
		try {
			// Take a ticket: a sequential ephemeral node whose suffix is our queue number
			String path = zooKeeper.create(lockFullPath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
			myTicket = extractMyTicket(path);
			String previousNode = buildPath(myTicket - 1);
			// Watch only our immediate predecessor, so its release wakes just us
			Stat exists = zooKeeper.exists(previousNode, event -> {
				synchronized (currentThread) {
					currentThread.notify();
					log("wake");
				}
			});
			if (exists != null) {
				// The predecessor is still queued or holding the lock, so sleep until it
				// is deleted (same missed-notify caveat as the herd-effect version)
				synchronized (currentThread) {
					currentThread.wait();
				}
			}
			log("get lock success");
		} catch (KeeperException e) {
			e.printStackTrace();
		}
	}

	private int extractMyTicket(String path) {
		// e.g. /zk-lock/bar0000000171 -> 171
		int splitIndex = path.lastIndexOf("/");
		return Integer.parseInt(path.substring(splitIndex + 1).replace(lockPrefix, ""));
	}

	private String buildPath(int ticket) {
		// Sequential suffixes are zero-padded to 10 digits, e.g. bar0000000171
		return String.format("%s%010d", lockFullPath, ticket);
	}

	@Override
	public void unlock() {
		log("begin release lock");
		try {
			zooKeeper.delete(buildPath(myTicket), -1);
		} catch (KeeperException | InterruptedException e) {
			e.printStackTrace();
		}
		log("end release lock");
	}

	@Override
	public void releaseResource() {
		try {
			// Close the ZooKeeper connection
			zooKeeper.close();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	private void log(String msg) {
		System.out.printf("[%d] %s : %s\n", myTicket, myName, msg);
	}

}

The test code:

package cc11001100.zookeeper.lock;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * @author CC11001100
 */
public class NotReentrantDistributeLockTest {

	private static String now() {
		return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
	}

	public static void main(String[] args) {

		int nodeNum = 10;
		for (int i = 0; i < nodeNum; i++) {
			new Thread(() -> {
				DistributeLock lock = null;
				try {
					lock = new NotReentrantDistributeLock("/zk-lock", "bar");
					lock.lock();
					String myName = Thread.currentThread().getName();
					System.out.println(myName + ": get lock success, now=" + now());
					TimeUnit.SECONDS.sleep(3);
					lock.unlock();
				} catch (IOException | InterruptedException e) {
					e.printStackTrace();
				} finally {
					if (lock != null) {
						lock.releaseResource();
					}
				}
			}, "thread-name-" + i).start();
		}

//		output:
//		[0] thread-name-6 : begin get lock
//		[0] thread-name-3 : begin get lock
//		[0] thread-name-5 : begin get lock
//		[0] thread-name-4 : begin get lock
//		[0] thread-name-8 : begin get lock
//		[0] thread-name-9 : begin get lock
//		[0] thread-name-2 : begin get lock
//		[0] thread-name-7 : begin get lock
//		[0] thread-name-1 : begin get lock
//		[0] thread-name-0 : begin get lock
//		[170] thread-name-1 : get lock success
//		thread-name-1: get lock success, now=2019-01-14 17:59:56
//		[170] thread-name-1 : begin release lock
//		[170] thread-name-1 : end release lock
//		[171] thread-name-9 : wake
//		[171] thread-name-9 : get lock success
//		thread-name-9: get lock success, now=2019-01-14 17:59:59
//		[171] thread-name-9 : begin release lock
//		[171] thread-name-9 : end release lock
//		[172] thread-name-4 : wake
//		[172] thread-name-4 : get lock success
//		thread-name-4: get lock success, now=2019-01-14 18:00:02
//		[172] thread-name-4 : begin release lock
//		[173] thread-name-0 : wake
//		[172] thread-name-4 : end release lock
//		[173] thread-name-0 : get lock success
//		thread-name-0: get lock success, now=2019-01-14 18:00:05
//		[173] thread-name-0 : begin release lock
//		[174] thread-name-6 : wake
//		[173] thread-name-0 : end release lock
//		[174] thread-name-6 : get lock success
//		thread-name-6: get lock success, now=2019-01-14 18:00:08
//		[174] thread-name-6 : begin release lock
//		[175] thread-name-7 : wake
//		[174] thread-name-6 : end release lock
//		[175] thread-name-7 : get lock success
//		thread-name-7: get lock success, now=2019-01-14 18:00:11
//		[175] thread-name-7 : begin release lock
//		[176] thread-name-3 : wake
//		[175] thread-name-7 : end release lock
//		[176] thread-name-3 : get lock success
//		thread-name-3: get lock success, now=2019-01-14 18:00:14
//		[176] thread-name-3 : begin release lock
//		[177] thread-name-8 : wake
//		[176] thread-name-3 : end release lock
//		[177] thread-name-8 : get lock success
//		thread-name-8: get lock success, now=2019-01-14 18:00:17
//		[177] thread-name-8 : begin release lock
//		[178] thread-name-2 : wake
//		[177] thread-name-8 : end release lock
//		[178] thread-name-2 : get lock success
//		thread-name-2: get lock success, now=2019-01-14 18:00:20
//		[178] thread-name-2 : begin release lock
//		[179] thread-name-5 : wake
//		[178] thread-name-2 : end release lock
//		[179] thread-name-5 : get lock success
//		thread-name-5: get lock success, now=2019-01-14 18:00:23
//		[179] thread-name-5 : begin release lock
//		[179] thread-name-5 : end release lock

	}

}

 

4. A Reentrant Distributed Lock (No Herd Effect, Fair)

Our lock is getting better and better, but it still has one rather fatal problem: it is not reentrant. Reentrancy means acquiring the lock recursively: when a reentrant lock is already held and lock is called again, the acquisition succeeds immediately and a reentrancy count is incremented; each release decrements the count, and only when the count reaches 0 is the lock actually released. The locks above are non-reentrant, so trying to acquire one recursively deadlocks and waits forever, which is clearly unreasonable. This section discusses how to turn the previous section's non-reentrant lock into a reentrant one.
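
As a quick illustration of those semantics, using the ReentrantDistributeLock class implemented below:

lock.lock();     // acquires the ZooKeeper node, reentrantCount = 1
lock.lock();     // already held by this instance: no ZooKeeper call, reentrantCount = 2
lock.unlock();   // reentrantCount = 1, the node is not deleted yet
lock.unlock();   // reentrantCount = 0, the node is deleted and the next waiter wakes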

My idea is simple. The reentrancy count clearly has to live somewhere, but the count of a machine that already holds the lock does not need to be known by the others; they only need to know that the lock is currently clutched in that fellow's hands. So the count can simply be a member variable of the DistributeLock implementation, say reentrantCount. Each lock attempt first checks whether this variable is non-zero; if it is, the current machine already holds the lock and, per reentrant semantics, just increments the count. Only when reentrantCount is 0 does the machine need to queue up and wait for the lock. Releasing works symmetrically: unlock decrements reentrantCount, and a non-zero result means we are still inside nested acquisitions. Once the count reaches 0, every reentrant acquisition has been released, so the current ticket node is deleted; since the next machine in the queue has a watcher on our ticket, deleting it amounts to waking the next person in line.

Flowchart of the above process: [flowchart image not reproduced]

The code:

package cc11001100.zookeeper.lock;

import cc11001100.zookeeper.utils.ZooKeeperUtil;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

/**
 * Reentrant distributed lock without the herd effect (a fair lock)
 *
 * @author CC11001100
 */
public class ReentrantDistributeLock implements DistributeLock {

	private final Thread currentThread;

	private String lockBasePath;
	private String lockPrefix;
	private String lockFullPath;

	private ZooKeeper zooKeeper;
	private String myName;
	private int myTicket;

	// Track the reentrancy count in a plain local field rather than in ZooKeeper:
	// updating a znode would cost a network round-trip, and the other nodes only need
	// to know that somebody holds the lock, not how many times it has been re-entered.
	// This also means each instance is intended to be used by a single thread.
	private int reentrantCount = 0;

	public ReentrantDistributeLock(String lockBasePath, String lockName) throws IOException {
		this.lockBasePath = lockBasePath;
		this.lockPrefix = lockName;
		this.lockFullPath = lockBasePath + "/" + lockName;
		this.zooKeeper = ZooKeeperUtil.getZooKeeper();
		this.currentThread = Thread.currentThread();
		this.myName = currentThread.getName();
		createLockBasePath();
	}

	private void createLockBasePath() {
		try {
			if (zooKeeper.exists(lockBasePath, null) == null) {
				zooKeeper.create(lockBasePath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
			}
		} catch (KeeperException | InterruptedException ignored) {
		}
	}

	@Override
	public void lock() throws InterruptedException {
		log("begin get lock");

		// A non-zero reentrantCount means this instance already holds the lock,
		// so just bump the count without waiting
		if (reentrantCount != 0) {
			reentrantCount++;
			log("get lock success");
			return;
		}

		// Not holding the lock yet: take a ticket and watch the previous node for its release
		try {
			String path = zooKeeper.create(lockFullPath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
			myTicket = extractMyTicket(path);
			String previousNode = buildPathByTicket(myTicket - 1);
			Stat exists = zooKeeper.exists(previousNode, event -> {
				synchronized (currentThread) {
					currentThread.notify();
					log("wake");
				}
			});
			if (exists != null) {
				synchronized (currentThread) {
					currentThread.wait();
				}
			}
			reentrantCount++;
			log("get lock success");
		} catch (KeeperException e) {
			e.printStackTrace();
		}
	}

	private int extractMyTicket(String path) {
		int splitIndex = path.lastIndexOf("/");
		return Integer.parseInt(path.substring(splitIndex + 1).replace(lockPrefix, ""));
	}

	private String buildPathByTicket(int ticket) {
		return String.format("%s%010d", lockFullPath, ticket);
	}

	@Override
	public void unlock() {
		log("begin release lock");

		// Each unlock decrements the count by 1; non-zero means we are still inside nested acquisitions
		reentrantCount--;
		if (reentrantCount != 0) {
			log("end release lock");
			return;
		}

		// Only when the reentrancy count reaches 0 is the node deleted and the lock actually released
		try {
			zooKeeper.delete(buildPathByTicket(myTicket), -1);
		} catch (KeeperException | InterruptedException e) {
			e.printStackTrace();
		}
		log("end release lock");
	}

	@Override
	public void releaseResource() {
		try {
			// Close the ZooKeeper connection
			zooKeeper.close();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	private void log(String msg) {
		System.out.printf("[%s], ticket=%d, reentrantCount=%d, threadName=%s, msg=%s\n", now(), myTicket, reentrantCount, myName, msg);
	}

	private String now() {
		return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
	}

}

The test code:

package cc11001100.zookeeper.lock;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Tests the reentrant distributed lock: each thread re-enters a random number of times to check that nothing goes wrong
 *
 * @author CC11001100
 */
public class ReentrantDistributeLockTest {

	public static void main(String[] args) {

		int nodeNum = 10;
		for (int i = 0; i < nodeNum; i++) {
			new Thread(() -> {
				DistributeLock lock = null;
				try {
					lock = new ReentrantDistributeLock("/zk-lock", "bar");
					lock.lock();
					TimeUnit.SECONDS.sleep(1);

					int reentrantTimes = new Random().nextInt(10);
					int reentrantCount = 0;
					for (int j = 0; j < reentrantTimes; j++) {
						lock.lock();
						reentrantCount++;
						if (Math.random() < 0.5) {
							lock.unlock();
							reentrantCount--;
						}
					}
					while (reentrantCount-- > 0) {
						lock.unlock();
					}

					lock.unlock();
				} catch (IOException | InterruptedException e) {
					e.printStackTrace();
				} finally {
					if (lock != null) {
						lock.releaseResource();
					}
				}
			}, "thread-name-" + i).start();
		}

//		output:
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-4, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-5, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-0, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-6, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-1, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-9, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-3, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=0, reentrantCount=0, threadName=thread-name-2, msg=begin get lock
//		[2019-01-14 17:57:43], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=get lock success
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=begin get lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=get lock success
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=begin release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=end release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=begin get lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=get lock success
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=begin get lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=3, threadName=thread-name-0, msg=get lock success
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=3, threadName=thread-name-0, msg=begin release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=end release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=begin get lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=3, threadName=thread-name-0, msg=get lock success
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=3, threadName=thread-name-0, msg=begin release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=end release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=2, threadName=thread-name-0, msg=begin release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=end release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=1, threadName=thread-name-0, msg=begin release lock
//		[2019-01-14 17:57:44], ticket=160, reentrantCount=0, threadName=thread-name-0, msg=end release lock
//		[2019-01-14 17:57:44], ticket=161, reentrantCount=0, threadName=thread-name-6, msg=wake
//		[2019-01-14 17:57:44], ticket=161, reentrantCount=1, threadName=thread-name-6, msg=get lock success
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=1, threadName=thread-name-6, msg=begin get lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=2, threadName=thread-name-6, msg=get lock success
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=2, threadName=thread-name-6, msg=begin get lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=3, threadName=thread-name-6, msg=get lock success
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=3, threadName=thread-name-6, msg=begin release lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=2, threadName=thread-name-6, msg=end release lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=2, threadName=thread-name-6, msg=begin release lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=1, threadName=thread-name-6, msg=end release lock
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=1, threadName=thread-name-6, msg=begin release lock
//		[2019-01-14 17:57:45], ticket=162, reentrantCount=0, threadName=thread-name-5, msg=wake
//		[2019-01-14 17:57:45], ticket=161, reentrantCount=0, threadName=thread-name-6, msg=end release lock
//		[2019-01-14 17:57:45], ticket=162, reentrantCount=1, threadName=thread-name-5, msg=get lock success
//		[2019-01-14 17:57:46], ticket=162, reentrantCount=1, threadName=thread-name-5, msg=begin release lock
//		[2019-01-14 17:57:46], ticket=162, reentrantCount=0, threadName=thread-name-5, msg=end release lock
//		[2019-01-14 17:57:46], ticket=163, reentrantCount=0, threadName=thread-name-8, msg=wake
//		[2019-01-14 17:57:46], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=3, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=3, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=4, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=4, threadName=thread-name-8, msg=begin get lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=5, threadName=thread-name-8, msg=get lock success
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=5, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=4, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=4, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=3, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=3, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=2, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=1, threadName=thread-name-8, msg=begin release lock
//		[2019-01-14 17:57:47], ticket=164, reentrantCount=0, threadName=thread-name-2, msg=wake
//		[2019-01-14 17:57:47], ticket=163, reentrantCount=0, threadName=thread-name-8, msg=end release lock
//		[2019-01-14 17:57:47], ticket=164, reentrantCount=1, threadName=thread-name-2, msg=get lock success
//		[2019-01-14 17:57:48], ticket=164, reentrantCount=1, threadName=thread-name-2, msg=begin release lock
//		[2019-01-14 17:57:48], ticket=165, reentrantCount=0, threadName=thread-name-9, msg=wake
//		[2019-01-14 17:57:48], ticket=164, reentrantCount=0, threadName=thread-name-2, msg=end release lock
//		[2019-01-14 17:57:48], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=get lock success
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=begin get lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=get lock success
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=begin release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=end release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=begin get lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=get lock success
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=begin get lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=3, threadName=thread-name-9, msg=get lock success
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=3, threadName=thread-name-9, msg=begin release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=end release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=2, threadName=thread-name-9, msg=begin release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=end release lock
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=1, threadName=thread-name-9, msg=begin release lock
//		[2019-01-14 17:57:49], ticket=166, reentrantCount=0, threadName=thread-name-1, msg=wake
//		[2019-01-14 17:57:49], ticket=165, reentrantCount=0, threadName=thread-name-9, msg=end release lock
//		[2019-01-14 17:57:49], ticket=166, reentrantCount=1, threadName=thread-name-1, msg=get lock success
//		[2019-01-14 17:57:50], ticket=166, reentrantCount=1, threadName=thread-name-1, msg=begin release lock
//		[2019-01-14 17:57:50], ticket=167, reentrantCount=0, threadName=thread-name-4, msg=wake
//		[2019-01-14 17:57:50], ticket=166, reentrantCount=0, threadName=thread-name-1, msg=end release lock
//		[2019-01-14 17:57:50], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=get lock success
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=begin get lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=get lock success
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=begin release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=end release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=begin get lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=get lock success
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=begin get lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=3, threadName=thread-name-4, msg=get lock success
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=3, threadName=thread-name-4, msg=begin release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=end release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=2, threadName=thread-name-4, msg=begin release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=end release lock
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=1, threadName=thread-name-4, msg=begin release lock
//		[2019-01-14 17:57:51], ticket=168, reentrantCount=0, threadName=thread-name-3, msg=wake
//		[2019-01-14 17:57:51], ticket=167, reentrantCount=0, threadName=thread-name-4, msg=end release lock
//		[2019-01-14 17:57:51], ticket=168, reentrantCount=1, threadName=thread-name-3, msg=get lock success
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=1, threadName=thread-name-3, msg=begin get lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=2, threadName=thread-name-3, msg=get lock success
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=2, threadName=thread-name-3, msg=begin get lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=3, threadName=thread-name-3, msg=get lock success
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=3, threadName=thread-name-3, msg=begin release lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=2, threadName=thread-name-3, msg=end release lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=2, threadName=thread-name-3, msg=begin release lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=1, threadName=thread-name-3, msg=end release lock
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=1, threadName=thread-name-3, msg=begin release lock
//		[2019-01-14 17:57:52], ticket=169, reentrantCount=0, threadName=thread-name-7, msg=wake
//		[2019-01-14 17:57:52], ticket=168, reentrantCount=0, threadName=thread-name-3, msg=end release lock
//		[2019-01-14 17:57:52], ticket=169, reentrantCount=1, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=1, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=5, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=5, threadName=thread-name-7, msg=begin get lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=6, threadName=thread-name-7, msg=get lock success
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=6, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=5, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=5, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=4, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=3, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=2, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=1, threadName=thread-name-7, msg=end release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=1, threadName=thread-name-7, msg=begin release lock
//		[2019-01-14 17:57:53], ticket=169, reentrantCount=0, threadName=thread-name-7, msg=end release lock

	}

}
 
