private LoadingCache<Integer, Bucket> createFilesCache(final MinebdConfig config) {
    Preconditions.checkNotNull(config.parentDirs);
    final Integer maxOpenFiles = config.maxOpenFiles;
    Preconditions.checkNotNull(maxOpenFiles);
    Preconditions.checkArgument(maxOpenFiles > 0);
    return CacheBuilder.newBuilder()
        .maximumSize(maxOpenFiles)
        .removalListener((RemovalListener<Integer, Bucket>) notification -> {
            logger.debug("no longer monitoring bucket {}", notification.getKey());
            try {
                notification.getValue().close();
            } catch (IOException e) {
                logger.warn("unable to flush and close file " + notification.getKey(), e);
            }
        })
        .build(new CacheLoader<Integer, Bucket>() {
            @Override
            public Bucket load(Integer key) throws Exception {
                return bucketFactory.create(key);
            }
        });
}
@Bean
public CasEventRepository casEventRepository() {
    final LoadingCache<String, CasEvent> storage = CacheBuilder.newBuilder()
        .initialCapacity(INITIAL_CACHE_SIZE)
        .maximumSize(MAX_CACHE_SIZE)
        .recordStats()
        .expireAfterWrite(EXPIRATION_TIME, TimeUnit.HOURS)
        .build(new CacheLoader<String, CasEvent>() {
            @Override
            public CasEvent load(final String s) throws Exception {
                LOGGER.error("Load operation of the cache is not supported.");
                return null;
            }
        });
    LOGGER.debug("Created an in-memory event repository to store CAS events for [{}] hours", EXPIRATION_TIME);
    return new InMemoryCasEventRepository(storage);
}
@ConditionalOnMissingBean(name = "mfaTrustEngine")
@Bean
@RefreshScope
public MultifactorAuthenticationTrustStorage mfaTrustEngine() {
    final LoadingCache<String, MultifactorAuthenticationTrustRecord> storage = CacheBuilder.newBuilder()
        .initialCapacity(INITIAL_CACHE_SIZE)
        .maximumSize(MAX_CACHE_SIZE)
        .recordStats()
        .expireAfterWrite(casProperties.getAuthn().getMfa().getTrusted().getExpiration(),
            casProperties.getAuthn().getMfa().getTrusted().getTimeUnit())
        .build(new CacheLoader<String, MultifactorAuthenticationTrustRecord>() {
            @Override
            public MultifactorAuthenticationTrustRecord load(final String s) throws Exception {
                LOGGER.error("Load operation of the cache is not supported.");
                return null;
            }
        });
    final InMemoryMultifactorAuthenticationTrustStorage m = new InMemoryMultifactorAuthenticationTrustStorage(storage);
    m.setCipherExecutor(mfaTrustCipherExecutor());
    return m;
}
/**
 * checks that the given pattern & rotation is at the block co-ordinates.
 */
@Nullable
private BlockPattern.PatternHelper checkPatternAt(BlockPos pos, EnumFacing finger, EnumFacing thumb,
    LoadingCache<BlockPos, BlockWorldState> lcache) {
    for (int i = 0; i < this.palmLength; ++i) {
        for (int j = 0; j < this.thumbLength; ++j) {
            for (int k = 0; k < this.fingerLength; ++k) {
                if (!this.blockMatches[k][j][i].apply(lcache.getUnchecked(translateOffset(pos, finger, thumb, i, j, k)))) {
                    return null;
                }
            }
        }
    }
    return new BlockPattern.PatternHelper(pos, finger, thumb, lcache, this.palmLength, this.thumbLength, this.fingerLength);
}
/**
 * checks that the given pattern & rotation is at the block co-ordinates.
 */
private BlockPattern.PatternHelper checkPatternAt(BlockPos pos, EnumFacing finger, EnumFacing thumb,
    LoadingCache<BlockPos, BlockWorldState> lcache) {
    for (int i = 0; i < this.palmLength; ++i) {
        for (int j = 0; j < this.thumbLength; ++j) {
            for (int k = 0; k < this.fingerLength; ++k) {
                if (!this.blockMatches[k][j][i].apply(lcache.getUnchecked(translateOffset(pos, finger, thumb, i, j, k)))) {
                    return null;
                }
            }
        }
    }
    return new BlockPattern.PatternHelper(pos, finger, thumb, lcache, this.palmLength, this.thumbLength, this.fingerLength);
}
@Override
public synchronized Optional<V> get(@NonNull String key) {
    checkNotNull(key);
    LoadingCache<String, Optional<ExpiringValue<V>>> c = cache.getIfPresent(CurrentInstitution.get());
    if( c != null ) {
        Optional<ExpiringValue<V>> op = c.getUnchecked(key);
        if( op.isPresent() ) {
            V ev = op.get().getValue();
            return Optional.fromNullable(ev);
        }
    }
    return Optional.absent();
}
@Override
public Map<String, Map<String, String>> getMetadata(File f) {
    LoadingCache<String, Map<String, String>> metadata = CacheBuilder.newBuilder().build(
        CacheLoader.from(new Function<String, Map<String, String>>() {
            @Override
            public Map<String, String> apply(String input) {
                return Maps.newHashMap();
            }
        }));
    for( MetadataHandler handler : pluginTracker.getBeanList() ) {
        handler.getMetadata(metadata, f);
    }
    return metadata.asMap();
}
/**
 * Returns a boolean to denote whether a cache file is visible to all (public)
 * or not.
 *
 * @return true if the given path is visible to all, false otherwise
 */
@Private
public static boolean isPublic(FileSystem fs, Path current, FileStatus sStat,
    LoadingCache<Path, Future<FileStatus>> statCache) throws IOException {
    current = fs.makeQualified(current);
    // the leaf level file should be readable by others
    if (!checkPublicPermsForAll(fs, sStat, FsAction.READ_EXECUTE, FsAction.READ)) {
        return false;
    }
    if (Shell.WINDOWS && fs instanceof LocalFileSystem) {
        // Relax the requirement for public cache on LFS on Windows since default
        // permissions are "700" all the way up to the drive letter. In this
        // model, the only requirement for a user is to give EVERYONE group
        // permission on the file and the file will be considered public.
        // This code path is only hit when fs.default.name is file:/// (mainly
        // in tests).
        return true;
    }
    return ancestorsHaveExecutePermissions(fs, current.getParent(), statCache);
}
/**
 * For each of the requested resources for a container, determines the
 * appropriate {@link LocalResourcesTracker} and forwards a
 * {@link LocalResourceRequest} to that tracker.
 */
private void handleInitContainerResources(ContainerLocalizationRequestEvent rsrcReqs) {
    Container c = rsrcReqs.getContainer();
    // create a loading cache for the file statuses
    LoadingCache<Path, Future<FileStatus>> statCache =
        CacheBuilder.newBuilder().build(FSDownload.createStatusCacheLoader(getConfig()));
    LocalizerContext ctxt = new LocalizerContext(
        c.getUser(), c.getContainerId(), c.getCredentials(), statCache);
    Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrcs =
        rsrcReqs.getRequestedResources();
    for (Map.Entry<LocalResourceVisibility, Collection<LocalResourceRequest>> e : rsrcs.entrySet()) {
        LocalResourcesTracker tracker = getLocalResourcesTracker(e.getKey(), c.getUser(),
            c.getContainerId().getApplicationAttemptId().getApplicationId());
        for (LocalResourceRequest req : e.getValue()) {
            tracker.handle(new ResourceRequestEvent(req, e.getKey(), ctxt));
        }
    }
}
/**
 * checks that the given pattern & rotation is at the block co-ordinates.
 */
@Nullable
private BlockPattern.PatternHelper checkPatternAt(BlockPos pos, EnumFacing finger, EnumFacing thumb,
    LoadingCache<BlockPos, BlockWorldState> lcache) {
    for (int i = 0; i < this.palmLength; ++i) {
        for (int j = 0; j < this.thumbLength; ++j) {
            for (int k = 0; k < this.fingerLength; ++k) {
                if (!this.blockMatches[k][j][i].apply(lcache.getUnchecked(translateOffset(pos, finger, thumb, i, j, k)))) {
                    return null;
                }
            }
        }
    }
    return new BlockPattern.PatternHelper(pos, finger, thumb, lcache, this.palmLength, this.thumbLength, this.fingerLength);
}
public OidcIdTokenSigningAndEncryptionService(final LoadingCache<String, Optional<RsaJsonWebKey>> defaultJsonWebKeystoreCache,
                                              final LoadingCache<OidcRegisteredService, Optional<RsaJsonWebKey>> serviceJsonWebKeystoreCache,
                                              final String issuer) {
    this.defaultJsonWebKeystoreCache = defaultJsonWebKeystoreCache;
    this.serviceJsonWebKeystoreCache = serviceJsonWebKeystoreCache;
    this.issuer = issuer;
}
@Bean
public LoadingCache<OidcRegisteredService, Optional<RsaJsonWebKey>> oidcServiceJsonWebKeystoreCache() {
    final OidcProperties oidc = casProperties.getAuthn().getOidc();
    final LoadingCache<OidcRegisteredService, Optional<RsaJsonWebKey>> cache =
        CacheBuilder.newBuilder().maximumSize(1)
            .expireAfterWrite(oidc.getJwksCacheInMinutes(), TimeUnit.MINUTES)
            .build(oidcServiceJsonWebKeystoreCacheLoader());
    return cache;
}
@Bean
public LoadingCache<String, Optional<RsaJsonWebKey>> oidcDefaultJsonWebKeystoreCache() {
    final OidcProperties oidc = casProperties.getAuthn().getOidc();
    final LoadingCache<String, Optional<RsaJsonWebKey>> cache =
        CacheBuilder.newBuilder().maximumSize(1)
            .expireAfterWrite(oidc.getJwksCacheInMinutes(), TimeUnit.MINUTES)
            .build(oidcDefaultJsonWebKeystoreCacheLoader());
    return cache;
}
public PatternHelper(BlockPos posIn, EnumFacing fingerIn, EnumFacing thumbIn,
    LoadingCache<BlockPos, BlockWorldState> lcacheIn, int p_i46378_5_, int p_i46378_6_, int p_i46378_7_) {
    this.frontTopLeft = posIn;
    this.forwards = fingerIn;
    this.up = thumbIn;
    this.lcache = lcacheIn;
    this.width = p_i46378_5_;
    this.height = p_i46378_6_;
    this.depth = p_i46378_7_;
}
private static <T> LoadingCache<Class<T>, AtomicInteger> createCache(Class<T> klass) {
    return CacheBuilder.newBuilder().build(
        new CacheLoader<Class<T>, AtomicInteger>() {
            @Override
            public AtomicInteger load(Class<T> key) throws Exception {
                return new AtomicInteger();
            }
        });
}
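A brief, hypothetical usage sketch for the factory above (not part of the original snippet): the class literal only drives type inference, and the loader hands back a fresh AtomicInteger on first access; the variable names are illustrative.

// Hypothetical caller of createCache(...); names are illustrative.
LoadingCache<Class<Runnable>, AtomicInteger> counters = createCache(Runnable.class);
// The first access creates the counter via the CacheLoader above; later accesses reuse it.
int seen = counters.getUnchecked(Runnable.class).incrementAndGet();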
EREBasedCorpusQueryExecutor(final Iterable<AlignmentConfiguration> alignmentConfigurations,
    final LoadingCache<Symbol, EREDocument> ereDocCache,
    final EREToKBPEventOntologyMapper ontologyMapper,
    final boolean requireBestCASType) {
    this.ereDocCache = checkNotNull(ereDocCache);
    this.ontologyMapper = checkNotNull(ontologyMapper);
    this.alignmentConfigurations = ImmutableList.copyOf(alignmentConfigurations);
    this.requireBestCASType = requireBestCASType;
}
/**
 * The default query matching strategy for the 2016 evaluation.
 */
public static EREBasedCorpusQueryExecutor createDefaultFor2016(
    final Map<Symbol, File> docIdToEREMap, final ERELoader ereLoader,
    final EREToKBPEventOntologyMapper ontologyMapper, int slack,
    double minNominalCASOverlap, boolean requireBestCASType) {
    final LoadingCache<Symbol, EREDocument> ereDocCache = CacheBuilder.newBuilder()
        .maximumSize(50)
        .build(new CacheLoader<Symbol, EREDocument>() {
            @Override
            public EREDocument load(final Symbol docID) throws Exception {
                final File ereFileName = docIdToEREMap.get(docID);
                if (ereFileName != null) {
                    return ereLoader.loadFrom(ereFileName);
                } else {
                    throw new TACKBPEALException("Cannot find ERE file for " + docID);
                }
            }
        });
    final ResponsePJContainsEntryPJWithSlack commonPJMatchStrategy =
        new ResponsePJContainsEntryPJWithSlack(slack);
    final ImmutableList<AlignmentConfiguration> alignmentConfigs = ImmutableList.of(
        AlignmentConfiguration.of(ExactCASMatch.INSTANCE, commonPJMatchStrategy),
        AlignmentConfiguration.of(QueryNameContainsSystemCAS.INSTANCE, commonPJMatchStrategy),
        AlignmentConfiguration.of(QueryNameContainedBySystemCAS.INSTANCE, commonPJMatchStrategy),
        AlignmentConfiguration.of(
            new NominalsContainOneAnotherWithMinimumOverlap(minNominalCASOverlap),
            commonPJMatchStrategy));
    return new EREBasedCorpusQueryExecutor(alignmentConfigs, ereDocCache, ontologyMapper, requireBestCASType);
}
/**
 * @param args
 * @throws ExecutionException
 * @throws InterruptedException
 */
public static void main(String[] args) throws ExecutionException, InterruptedException {
    LoadingCache<String, String> cache = null;
    cache = CacheBuilder.newBuilder()
        // concurrency level 200: the number of threads allowed to write to the cache concurrently
        .concurrencyLevel(200)
        // expire entries 1 second after they are written
        .expireAfterWrite(1, TimeUnit.SECONDS).initialCapacity(10).maximumSize(100)
        // record cache statistics such as the hit rate
        .recordStats()
        // register a removal listener to be notified when entries are removed
        .removalListener(new RemovalListener<String, String>() {
            @Override
            public void onRemoval(RemovalNotification<String, String> notification) {
                System.out.println(notification.getKey() + " was removed, caused by " + notification.getCause());
            }
        }).build(new CacheLoader<String, String>() {
            // build() takes a CacheLoader that loads the value automatically when a key is absent
            @Override
            public String load(String appIdSecret) throws Exception {
                return "";
            }
        });
    cache.put("key1", "value1");
    System.out.println(cache.get("key1"));
    Thread.sleep(2000);
    System.out.println(cache.get("key1"));
}
private LoadingCache<UUID, FactTypeEntity> createFactTypeByIdCache() {
    return CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<UUID, FactTypeEntity>() {
            @Override
            public FactTypeEntity load(UUID key) throws Exception {
                return ObjectUtils.notNull(factTypeMapper.get(key),
                    new Exception(String.format("FactType with id = %s does not exist.", key)));
            }
        });
}
private LoadingCache<String, FactTypeEntity> createFactTypeByNameCache() {
    return CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<String, FactTypeEntity>() {
            @Override
            public FactTypeEntity load(String key) throws Exception {
                return ObjectUtils.notNull(factTypeAccessor.getByName(key),
                    new Exception(String.format("FactType with name = %s does not exist.", key)));
            }
        });
}
private LoadingCache<UUID, ObjectTypeEntity> createObjectTypeByIdCache() {
    return CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<UUID, ObjectTypeEntity>() {
            @Override
            public ObjectTypeEntity load(UUID key) throws Exception {
                return ObjectUtils.notNull(objectTypeMapper.get(key),
                    new Exception(String.format("ObjectType with id = %s does not exist.", key)));
            }
        });
}
private LoadingCache<String, ObjectTypeEntity> createObjectTypeByNameCache() {
    return CacheBuilder.newBuilder()
        .expireAfterAccess(10, TimeUnit.MINUTES)
        .build(new CacheLoader<String, ObjectTypeEntity>() {
            @Override
            public ObjectTypeEntity load(String key) throws Exception {
                return ObjectUtils.notNull(objectTypeAccessor.getByName(key),
                    new Exception(String.format("ObjectType with name = %s does not exist.", key)));
            }
        });
}
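A minimal, hypothetical sketch (not from the original source) of how a caller might consume one of the four caches above: Guava's get() wraps the checked "does not exist" exception thrown by the loader in an ExecutionException, so the caller unwraps the cause. It assumes java.util.concurrent.ExecutionException is imported; the variable and key names are illustrative.

// Hypothetical caller; "report" is an illustrative key.
LoadingCache<String, ObjectTypeEntity> byNameCache = createObjectTypeByNameCache();
try {
    ObjectTypeEntity type = byNameCache.get("report");
    // ... use the resolved ObjectTypeEntity
} catch (ExecutionException e) {
    // The cause is the "does not exist" exception supplied to ObjectUtils.notNull above.
    throw new IllegalArgumentException(e.getCause().getMessage(), e.getCause());
}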
private static LoadingCache<UserAuth, String> initCache() {
    try {
        currentAuth = ApprcHolder.get().currentAuth();
        URI uri = new URI(currentAuth.getApiserver());
        logger.info(String.format("Connecting to apiserver: %s host: %s port: %s",
            currentAuth.getApiserver(), uri.getHost(), uri.getPort()));
        NettyChannelBuilder builder = NettyChannelBuilder
            .forAddress(uri.getHost(), uri.getPort())
            .nameResolverFactory(new DnsNameResolverProvider());
        if (useTLS(currentAuth)) {
            File trustCertCollectionFile = null;
            builder
                .sslContext(GrpcSslContexts.forClient().trustManager(trustCertCollectionFile).build())
                .negotiationType(NegotiationType.TLS);
        } else {
            builder.negotiationType(NegotiationType.PLAINTEXT);
        }
        channel = builder.build();
        return CacheBuilder.newBuilder()
            .expireAfterAccess(DESCRIPTOR.getAuthCacheTtl(), TimeUnit.SECONDS)
            .build(new CacheLoader<UserAuth, String>() {
                @Override
                public String load(UserAuth key) throws Exception {
                    if (isToken(key.getSecret())) {
                        return checkToken(key.getUsername(), key.getSecret().substring(BEARER_PREFIX.length()));
                    }
                    return checkPassword(key.getUsername(), key.getSecret());
                }
            });
    } catch (URISyntaxException | SSLException e) {
        logger.log(Level.SEVERE, e.getMessage());
    }
    return null;
}
private LoadingCache<String, FeatureRecord> buildCache(FeatureClient client, FeatureStoreLocal backingFeatureStore) {
    return CacheBuilder.newBuilder()
        .recordStats()
        .maximumSize(maxCacheSize)
        .refreshAfterWrite(refreshCacheAfterWriteSeconds, TimeUnit.SECONDS)
        .initialCapacity(initialCacheSize)
        .build(new HttpCacheLoader(client.resources(), backingFeatureStore));
}
@Override
public void bindTo(MeterRegistry registry) {
    Gauge.builder(name + ".estimated.size", cache, Cache::size)
        .tags(tags)
        .description("The approximate number of entries in this cache")
        .register(registry);
    FunctionCounter.builder(name + ".requests", cache, c -> c.stats().missCount())
        .tags(tags).tags("result", "miss")
        .description("The number of times cache lookup methods have returned an uncached (newly loaded) value, or null")
        .register(registry);
    FunctionCounter.builder(name + ".requests", cache, c -> c.stats().hitCount())
        .tags(tags).tags("result", "hit")
        .description("The number of times cache lookup methods have returned a cached value")
        .register(registry);
    FunctionCounter.builder(name + ".evictions", cache, c -> c.stats().evictionCount())
        .tags(tags)
        .description("Cache evictions")
        .register(registry);
    if (cache instanceof LoadingCache) {
        // dividing these gives you a measure of load latency
        TimeGauge.builder(name + ".load.duration", cache, TimeUnit.NANOSECONDS, c -> c.stats().totalLoadTime())
            .tags(tags)
            .description("The time the cache has spent loading new values")
            .register(registry);
        FunctionCounter.builder(name + ".load", cache, c -> c.stats().loadSuccessCount())
            .tags(tags).tags("result", "success")
            .description("The number of times cache lookup methods have successfully loaded a new value")
            .register(registry);
        FunctionCounter.builder(name + ".load", cache, c -> c.stats().loadExceptionCount())
            .tags(tags).tags("result", "failure")
            .description("The number of times cache lookup methods threw an exception while loading a new value")
            .register(registry);
    }
}
private static <T> Constructor<T> createProxyConstructor(@Nullable LoadingCache<CacheKey, ClassLoader> cache,
    Class<T> interfaceClass, Class<?>... otherInterfaceClasses) {
    Class<?>[] interfaceClasses = new Class[1 + otherInterfaceClasses.length];
    interfaceClasses[0] = interfaceClass;
    ClassLoader classLoader;
    if (otherInterfaceClasses.length == 0) {
        classLoader = interfaceClass.getClassLoader();
    } else {
        System.arraycopy(otherInterfaceClasses, 0, interfaceClasses, 1, otherInterfaceClasses.length);
        List<ClassLoader> classLoaders = extractClassloaderList(interfaceClasses);
        if (classLoaders.size() == 1) {
            classLoader = classLoaders.get(0);
        } else if (cache != null) {
            classLoader = cache.getUnchecked(new CacheKey(classLoaders));
        } else {
            classLoader = createClassLoader(classLoaders);
        }
    }
    Class<?> uncheckedProxyClass = Proxy.getProxyClass(classLoader, interfaceClasses);
    Class<T> proxyClass = interfaceClass.getClass().cast(uncheckedProxyClass);
    try {
        return proxyClass.getConstructor(InvocationHandler.class);
    } catch (NoSuchMethodException e) {
        throw new StoreException(e);
    }
}
static <K, V, E extends Exception> LoadingCache<K, V> newCache(CacheBuilder<K, V> builder,
    ThrowingFunction<K, V, E> loader) {
    return builder.build(new CacheLoader<K, V>() {
        @Override
        public V load(K key) throws E {
            return loader.applyThrows(key);
        }
    });
}
/**
 * Calculates whether the given world position matches the pattern. Warning, fairly heavy function.
 * @return a BlockPattern.PatternHelper if found, null otherwise.
 */
public BlockPattern.PatternHelper match(World worldIn, BlockPos pos) {
    LoadingCache<BlockPos, BlockWorldState> loadingcache = func_181627_a(worldIn, false);
    int i = Math.max(Math.max(this.palmLength, this.thumbLength), this.fingerLength);
    for (BlockPos blockpos : BlockPos.getAllInBox(pos, pos.add(i - 1, i - 1, i - 1))) {
        for (EnumFacing enumfacing : EnumFacing.values()) {
            for (EnumFacing enumfacing1 : EnumFacing.values()) {
                if (enumfacing1 != enumfacing && enumfacing1 != enumfacing.getOpposite()) {
                    BlockPattern.PatternHelper blockpattern$patternhelper = this.checkPatternAt(blockpos, enumfacing, enumfacing1, loadingcache);
                    if (blockpattern$patternhelper != null) {
                        return blockpattern$patternhelper;
                    }
                }
            }
        }
    }
    return null;
}
public PatternHelper(BlockPos p_i46378_1_, EnumFacing p_i46378_2_, EnumFacing p_i46378_3_,
    LoadingCache<BlockPos, BlockWorldState> p_i46378_4_, int p_i46378_5_, int p_i46378_6_, int p_i46378_7_) {
    this.pos = p_i46378_1_;
    this.finger = p_i46378_2_;
    this.thumb = p_i46378_3_;
    this.lcache = p_i46378_4_;
    this.field_181120_e = p_i46378_5_;
    this.field_181121_f = p_i46378_6_;
    this.field_181122_g = p_i46378_7_;
}
@Override
public synchronized void put(@NonNull String key, @NonNull V value) {
    checkNotNull(key);
    checkNotNull(value);
    LoadingCache<String, Optional<ExpiringValue<V>>> c = cache.getUnchecked(CurrentInstitution.get());
    // Do nothing if the value hasn't changed
    Optional<ExpiringValue<V>> opExVal = c.getIfPresent(key);
    if( opExVal != null && opExVal.isPresent() ) {
        V oldValue = opExVal.get().getValue();
        if( oldValue != null && oldValue.equals(value) ) {
            return;
        }
    }
    // Update the DB state if it's clustered
    if( zookeeperService.isCluster() ) {
        dao.put(cacheId, key, new Date(System.currentTimeMillis() + ttlUnit.toMillis(ttl)),
            PluginAwareObjectOutputStream.toBytes(value));
    }
    // Invalidate other servers' caches
    invalidateOthers(key);
    // Update our local cache
    c.put(key, Optional.of(ExpiringValue.expireAfter(value, ttl, ttlUnit)));
}
public void invalidateLocal(String... keys) {
    LoadingCache<String, ?> c = cache.getIfPresent(CurrentInstitution.get());
    if( c != null ) {
        for( String key : keys ) {
            c.invalidate(key);
        }
    }
}
@Override
public void getMetadata(LoadingCache<String, Map<String, String>> metadata, Attachment a) {
    if( Objects.equal(a.getAttachmentType(), AttachmentType.FILE) ) {
        String filename = a.getUrl();
        getMetadata(metadata, new File(filename));
    }
}