Example source code for the Java class org.apache.hadoop.util.LightWeightCache
Project: hadoop
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      CommonConfigurationKeys.NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
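All of the NfsExports constructors in this listing pass the same four arguments to LightWeightCache: a recommended table length, a hard size limit, a creation-time expiration period in nanoseconds, and an access-time expiration of 0 (disabled). The standalone sketch below illustrates that call pattern outside NfsExports. It is only a sketch: the DemoEntry class, its field, and the sample values are hypothetical, and the members it implements (setExpirationTime, getExpirationTime, setNext, getNext) are assumptions about the LightWeightCache.Entry and LightWeightGSet.LinkedElement interfaces, which are not shown on this page.

import java.util.concurrent.TimeUnit;

import org.apache.hadoop.util.LightWeightCache;
import org.apache.hadoop.util.LightWeightGSet;

/** Hypothetical entry type; not part of Hadoop. The entry is its own key. */
class DemoEntry implements LightWeightCache.Entry {
  private final String host;                      // lookup key
  private LightWeightGSet.LinkedElement next;     // bucket chaining for the GSet
  private long expirationTimeNano;                // set by the cache on insertion

  DemoEntry(String host) { this.host = host; }

  // The GSet locates entries via hashCode/equals, so base them on the key.
  @Override public int hashCode() { return host.hashCode(); }
  @Override public boolean equals(Object o) {
    return o instanceof DemoEntry && host.equals(((DemoEntry) o).host);
  }

  // Assumed LinkedElement members: let the set chain entries in its buckets.
  @Override public void setNext(LightWeightGSet.LinkedElement n) { next = n; }
  @Override public LightWeightGSet.LinkedElement getNext() { return next; }

  // Assumed Entry members: the cache stores each entry's expiration time on it.
  @Override public void setExpirationTime(long timeNano) { expirationTimeNano = timeNano; }
  @Override public long getExpirationTime() { return expirationTimeNano; }
}

public class LightWeightCacheDemo {
  public static void main(String[] args) {
    // Same argument pattern as the NfsExports constructor above: recommended
    // length, size limit, creation expiration (ns), access expiration (0 = off).
    LightWeightCache<DemoEntry, DemoEntry> cache =
        new LightWeightCache<DemoEntry, DemoEntry>(
            512, 512, TimeUnit.MINUTES.toNanos(15), 0);

    cache.put(new DemoEntry("192.168.0.1"));
    DemoEntry hit = cache.get(new DemoEntry("192.168.0.1"));  // equal key -> cached entry
    System.out.println("cached: " + (hit != null));
  }
}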
Project: aliyun-oss-hadoop-fs
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      CommonConfigurationKeys.NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: big-c
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      CommonConfigurationKeys.NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hadoop-2.6.0-cdh5.4.3
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      CommonConfigurationKeys.NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hops
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      CommonConfigurationKeys.NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hadoop-TCP
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      Nfs3Constant.EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hardfs
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      Nfs3Constant.EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hadoop-on-lustre2
File: NfsExports.java
/**
 * Constructor.
 * @param cacheSize The size of the access privilege cache.
 * @param expirationPeriodNano The expiration period of cache entries, in nanoseconds.
 * @param matchHosts A string specifying one or multiple matchers.
 */
NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
  this.cacheExpirationPeriod = expirationPeriodNano;
  accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
      cacheSize, cacheSize, expirationPeriodNano, 0);
  String[] matchStrings = matchHosts.split(
      Nfs3Constant.EXPORTS_ALLOWED_HOSTS_SEPARATOR);
  mMatches = new ArrayList<Match>(matchStrings.length);
  for (String mStr : matchStrings) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing match string '" + mStr + "'");
    }
    mStr = mStr.trim();
    if (!mStr.isEmpty()) {
      mMatches.add(getMatch(mStr));
    }
  }
}
Project: hadoop-oss
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below MAX_CAPACITY entries
  capacity = capacity > MAX_CAPACITY ? capacity : MAX_CAPACITY;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
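Each RetryCache constructor in this listing first derives a capacity from a percentage of the JVM heap via LightWeightGSet.computeCapacity(percentage, cacheName), then clamps it to a floor (MAX_CAPACITY, written as the literal 16 in several of the forks). Below is a minimal sketch of just that sizing step; the 0.03 percentage and the cache name are illustrative values chosen for the example, not taken from this page.

import org.apache.hadoop.util.LightWeightGSet;

public class RetryCacheSizingSketch {
  // Floor taken from the literal 16 used in several forks above,
  // where the same value is referred to as MAX_CAPACITY.
  private static final int MIN_CAPACITY = 16;

  public static void main(String[] args) {
    // Derive the table size from a fraction of the heap, as RetryCache does.
    int capacity = LightWeightGSet.computeCapacity(0.03, "DemoRetryCache");
    capacity = Math.max(capacity, MIN_CAPACITY);  // never go below the floor
    System.out.println("retry cache capacity = " + capacity);
  }
}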
Project: hadoop
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
Project: aliyun-oss-hadoop-fs
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below MAX_CAPACITY entries
  capacity = capacity > MAX_CAPACITY ? capacity : MAX_CAPACITY;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
Project: big-c
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
Project: hadoop-2.6.0-cdh5.4.3
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
Project: hadoop-plus
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
}
Project: hops
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below MAX_CAPACITY entries
  capacity = capacity > MAX_CAPACITY ? capacity : MAX_CAPACITY;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}
Project: hadoop-TCP
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
}
Project: hardfs
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
}
Project: hadoop-on-lustre2
File: RetryCache.java
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCache(String cacheName, double percentage, long expirationTime) {
  int capacity = LightWeightGSet.computeCapacity(percentage, cacheName);
  // never size the cache below 16 entries
  capacity = capacity > 16 ? capacity : 16;
  this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity,
      expirationTime, 0);
  this.expirationTime = expirationTime;
  this.cacheName = cacheName;
  this.retryCacheMetrics = RetryCacheMetrics.create(this);
}