public MemoryStageController(Long pipelineId){
    super(pipelineId);
    replys = new MapMaker().makeComputingMap(new Function<StageType, ReplyProcessQueue>() {

        public ReplyProcessQueue apply(StageType input) {
            int size = ArbitrateConfigUtils.getParallelism(getPipelineId()) * 10;
            if (size < 100) {
                size = 100;
            }
            return new ReplyProcessQueue(size);
        }
    });

    progress = new MapMaker().makeMap();
    termins = new LinkedBlockingQueue<TerminEventData>(20);
}
public RpcStageController(Long pipelineId){
    super(pipelineId);
    replys = new MapMaker().makeComputingMap(new Function<StageType, ReplyProcessQueue>() {

        public ReplyProcessQueue apply(StageType input) {
            int size = ArbitrateConfigUtils.getParallelism(getPipelineId()) * 10;
            if (size < 100) {
                size = 100;
            }
            return new ReplyProcessQueue(size);
        }
    });

    progress = new MapMaker().makeMap();
    // register a listener for process event changes
    processMonitor = ArbitrateFactory.getInstance(pipelineId, ProcessMonitor.class);
    processMonitor.addListener(this);
    processMonitor.reload();
}
/**
 * Tests merging insert + update.
 */
@Test
public void testMergeWithSameKeyOfIU() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.INSERT, eventData.getEventType());
        Assert.assertEquals(eventData.getOldKeys().size(), 0); // no oldKeys expected

        List<EventColumn> columns = eventData.getColumns();
        Assert.assertEquals(3, columns.size());
    }
}
/**
 * Tests merging insert + update + delete.
 */
@Test
public void testMergeWithSameKeyOfIUD() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(), mergeMap);
    DbLoadMerger.merge(makeDeleteEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.DELETE, eventData.getEventType());
        Assert.assertEquals(eventData.getOldKeys().size(), 0); // no oldKeys expected

        List<EventColumn> columns = eventData.getColumns();
        Assert.assertEquals(0, columns.size());
    }
}
/**
 * Tests merging insert + update + delete + insert.
 */
@Test
public void testMergeWithSameKeyOfIUDI() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(), mergeMap);
    DbLoadMerger.merge(makeDeleteEventData(), mergeMap);
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.INSERT, eventData.getEventType());
        Assert.assertEquals(eventData.getOldKeys().size(), 0); // no oldKeys expected

        List<EventColumn> columns = eventData.getColumns();
        Assert.assertEquals(2, columns.size());
    }
}
/**
 * Tests merging after a primary-key change: update/update.
 */
@Test
public void testMergeWithUpdateKeyOfUU() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE, KEY_VALUE_NEW1), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW1, KEY_VALUE_NEW2), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE_NEW2, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.UPDATE, eventData.getEventType());

        List<EventColumn> oldKeys = eventData.getOldKeys();
        List<EventColumn> keys = eventData.getKeys();
        Assert.assertNotSame(oldKeys, keys);
    }
}
/**
 * Tests merging after a primary-key change: update/update/delete.
 */
@Test
public void testMergeWithUpdateKeyOfUUD() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW1, KEY_VALUE_NEW2), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW2, KEY_VALUE), mergeMap);
    DbLoadMerger.merge(makeDeleteEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE_NEW1, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.DELETE, eventData.getEventType());
        Assert.assertEquals(eventData.getOldKeys().size(), 0); // no oldKeys expected
    }
}
/**
 * Tests merging after a primary-key change: insert/update/update/update/update.
 */
@Test
public void testMergeWithUpdateKeyOfIUUUU() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE, KEY_VALUE_NEW1), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW1, KEY_VALUE_NEW2), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW2, KEY_VALUE_NEW3), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW3, KEY_VALUE_NEW4), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE_NEW4, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.INSERT, eventData.getEventType());
        Assert.assertEquals(eventData.getOldKeys().size(), 0); // no oldKeys expected
    }
}
/**
 * Tests merging after a primary-key change: update/update/insert.
 */
@Test
public void testMergeWithUpdateKeyOfUI() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW1, KEY_VALUE_NEW2), mergeMap);
    DbLoadMerger.merge(makeUpdateEventData(KEY_VALUE_NEW2, KEY_VALUE), mergeMap);
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.INSERT, eventData.getEventType());

        List<EventColumn> oldKeys = eventData.getOldKeys();
        List<EventColumn> keys = eventData.getKeys();
        Assert.assertNotSame(oldKeys, keys);
    }
}
/**
 * Tests merging after a primary-key change: insert/insert.
 */
@Test
public void testMergeWithUpdateKeyOfII() {
    Map<RowKey, EventData> mergeMap = new MapMaker().makeMap();
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    DbLoadMerger.merge(makeInsertEventData(), mergeMap);
    for (Entry<RowKey, EventData> entry : mergeMap.entrySet()) {
        RowKey key = entry.getKey();
        EventColumn keyColumn = key.getKeys().get(0);
        Assert.assertEquals(KEY_VALUE, keyColumn.getColumnValue());
        Assert.assertEquals(KEY_NAME, keyColumn.getColumnName());

        EventData eventData = entry.getValue();
        Assert.assertEquals(SCHEMA_NAME, eventData.getSchemaName());
        Assert.assertEquals(TABLE_NAME, eventData.getTableName());
        Assert.assertEquals(TABLE_ID, eventData.getTableId());
        Assert.assertEquals(EventType.INSERT, eventData.getEventType());

        List<EventColumn> oldKeys = eventData.getOldKeys();
        List<EventColumn> keys = eventData.getKeys();
        Assert.assertNotSame(oldKeys, keys);
    }
}
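// Added sketch (not from the source): taken together, the eight tests above pin down
// how DbLoadMerger combines event types for a single row key. Ignoring the key and
// column bookkeeping, the type transitions reduce to this small state machine:
enum Op { INSERT, UPDATE, DELETE }

static Op mergeType(Op prev, Op next) {
    switch (next) {
        case INSERT:
            return Op.INSERT;                                 // I+I, I+U+D+I, U+U+I -> INSERT
        case UPDATE:
            return prev == Op.INSERT ? Op.INSERT : Op.UPDATE; // I+U -> INSERT, U+U -> UPDATE
        case DELETE:
            return Op.DELETE;                                 // I+U+D, U+U+D -> DELETE
        default:
            throw new AssertionError(next);
    }
}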
/**
 * Constructor to create a new counter metric
 * @param nam the name to publish this metric under
 * @param registry where the metrics object will be registered
 * @param description metrics description
 * @param topN how many 'keys' to publish metrics on
 */
public ExactCounterMetric(final String nam, final MetricsRegistry registry,
        final String description, int topN) {
    super(nam, description);
    this.counts = new MapMaker().makeComputingMap(
        new Function<String, Counter>() {
            @Override
            public Counter apply(String input) {
                return new Counter();
            }
        });
    this.lock = new ReentrantReadWriteLock();
    this.topN = topN;
    if (registry != null) {
        registry.add(nam, this);
    }
}
public IPreferenceValues getPreferenceValues(Resource context) {
    final IProject project = getProject(context);
    final IPreferenceStore store = project != null
            ? access.getContextPreferenceStore(project)
            : access.getPreferenceStore();
    @SuppressWarnings("deprecation")
    final Map<String, String> computingMap = new MapMaker().makeComputingMap(
        new Function<String, String>() {
            public String apply(String input) {
                return store.getString(input);
            }
        });
    return new IPreferenceValues() {
        public String getPreference(PreferenceKey key) {
            try {
                final String string = computingMap.get(key.getId());
                return org.eclipse.jface.preference.IPreferenceStore.STRING_DEFAULT_DEFAULT.equals(string)
                        ? key.getDefaultValue()
                        : string;
            } catch (Exception e) {
                log.error("Error getting preference for key '" + key.getId() + "'.", e);
                return key.getDefaultValue();
            }
        }
    };
}
@BeforeExperiment
void setUp() {
    // random integers will be generated in this range, then raised to the
    // power of (1/concentration) and floor()ed
    max = Ints.checkedCast((long) Math.pow(distinctKeys, concentration));

    cache = new MapMaker()
        .concurrencyLevel(segments)
        .maximumSize(maximumSize)
        .makeComputingMap(
            new Function<Integer, Integer>() {
                @Override
                public Integer apply(Integer from) {
                    return (int) misses.incrementAndGet();
                }
            });

    // To start, fill up the cache.
    // Each miss both increments the counter and causes the map to grow by one,
    // so until evictions begin, the size of the map is the greatest return
    // value seen so far
    while (cache.get(nextRandomKey()) < maximumSize) {}

    requests.set(0);
    misses.set(0);
}
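// Added sketch (not from the source): nextRandomKey() is not shown, but the comment
// in setUp() implies something like the following, which skews uniform draws toward
// small key values (note that pow(max, 1/concentration) == distinctKeys):
private int nextRandomKey() {
    int r = java.util.concurrent.ThreadLocalRandom.current().nextInt(max);
    return (int) Math.pow(r, 1.0 / concentration);
}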
public SimpleScoreboardElement(Plugin plugin, int priority, long updateDelay,
        List<FrameSupply> contents) {
    this.priority = priority;
    this.contents = Lists.newCopyOnWriteArrayList(contents);
    this.plugin = plugin;
    // forced update delay so that players can be cleaned out
    // implementation specific
    this.updateDelay = updateDelay > 0 ? updateDelay : 20L * 10;
    this.isTitle = priority < 0;

    MapMaker mapMaker = new MapMaker().weakKeys();
    this.playerContentsMap = mapMaker.makeMap();
    this.cachedContents = mapMaker.makeMap();

    Bukkit.getScheduler()
        .runTaskTimerAsynchronously(plugin, this::updateAnimations, 1L, this.updateDelay);
}
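// Added note (not from the source): weakKeys() also switches the map to identity (==)
// key comparison, which fits live player objects like those cached above — an entry
// silently disappears once its key object becomes unreachable. For example:
ConcurrentMap<Object, String> byIdentity = new MapMaker().weakKeys().makeMap();
Object player = new Object();
byIdentity.put(player, "scoreboard state");
// only the same instance hits the entry; an equal-but-distinct key would not
assert byIdentity.get(player) != null;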
public PatternStatsCalculator(final AbstractJavaTreeExtractor treeFormat,
        final Set<TreeNode<Integer>> patterns, final File directory) {
    this.treeFormat = treeFormat;
    this.patterns = HashMultiset.create(patterns);

    int currentIdx = 0;
    for (final Multiset.Entry<TreeNode<Integer>> rule : this.patterns.entrySet()) {
        patternDictionary.put(rule.getElement(), currentIdx);
        patternSizes.put(currentIdx, rule.getElement().getTreeSize());
        currentIdx++;
    }

    allFiles = FileUtils.listFiles(directory, JavaTokenizer.javaCodeFileFilter,
            DirectoryFileFilter.DIRECTORY);
    fileSizes = new MapMaker()
        .concurrencyLevel(ParallelThreadPool.NUM_THREADS)
        .initialCapacity(allFiles.size())
        .makeMap();
    filePatterns = HashBasedTable.create(allFiles.size(), patterns.size() / 10);
    filePatternsCount = HashBasedTable.create(allFiles.size(), patterns.size() / 1);
}
private static void init() {
    if (!init) {
        synchronized (MetricsCache.class) {
            if (!init) {
                cache = new MapMaker().concurrencyLevel(4).weakValues().makeMap();
                scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {

                    @Override
                    public Thread newThread(Runnable r) {
                        Thread thread = new Thread(r);
                        thread.setDaemon(true);
                        thread.setName("watcher-cache-evict-thread");
                        return thread;
                    }
                });
                scheduledExecutorService.scheduleAtFixedRate(new Runnable() {

                    @Override
                    public void run() {
                        INSTANCE.evict();
                    }
                }, 1L, 1L, TimeUnit.SECONDS);
                init = true;
            }
        }
    }
}
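// Added note (not from the source): the double-checked locking above is only safe if
// the `init` flag is volatile. When lazy wiring is still wanted, the
// initialization-on-demand holder idiom needs no explicit synchronization at all:
private static final class CacheHolder {
    static final ConcurrentMap<String, Object> CACHE =
            new MapMaker().concurrencyLevel(4).weakValues().makeMap();
}
// CacheHolder.CACHE is created on first use; the JVM's class-initialization locking
// guarantees both safe publication and once-only construction.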
@Test
public void testIdentitySet() throws InterruptedException {
    IdentityHashMap<String, Object> m1 = new IdentityHashMap<String, Object>(1024);
    Map<String, Object> m2 = new HashMap<String, Object>(1024);
    Map<String, Object> m3 = new java.util.IdentityHashMap<String, Object>(1024);
    Map<String, Object> m4 = new MapMaker().concurrencyLevel(8).initialCapacity(1024).makeMap();
    Map<String, Object> m5 = new ConcurrentHashMap<String, Object>(1024);
    Map<String, Object> m6 = new Hashtable<String, Object>(1024);

    warmup();
    for (int i = 0; i < LOOP; i++) {
        xx[i] = RandomData.randomString(10);
    }

    testMap_(m1);
    if (threadCount == 1) {
        testMap(m2, "JDK HashMap"); // will deadlock under multiple threads
    }
    testMap(m3, "JDK IdentityHashMap ");
    testMap(m4, "Guava Mapmaker.concurrencyLevel(8)");
    testMap(m5, "JDK ConcurrentHashMap ");
    testMap(m6, "JDK Hashtable ");
}
@Test
public void testWeakRefsMap() throws Exception {
    ConcurrentMap<String, Object> objects = new MapMaker().weakValues().makeMap();
    objects.put("xxx", new Object());
    if (null == objects.get("xxx")) {
        Assert.fail("Reference should NOT be null");
    }
    try {
        // allocate far more than the heap allows to force collection of weak values
        @SuppressWarnings("unused")
        Object[] ignored = new Object[(int) Runtime.getRuntime().maxMemory()];
    } catch (Throwable e) {
        // Ignore OOM
    }
    if (null != objects.get("xxx")) {
        Assert.fail("Reference should be null");
    }
}
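// Added sketch (not from the source): a gentler way to provoke the same collection,
// without allocating until OutOfMemoryError. System.gc() is only a hint, so this
// variant is best-effort rather than deterministic:
ConcurrentMap<String, Object> weakObjects = new MapMaker().weakValues().makeMap();
weakObjects.put("xxx", new Object()); // the value is reachable only through the map
System.gc();                          // request a collection (not guaranteed to run)
// after a successful GC the weakly-held value is cleared and get("xxx") returns null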
/**
 * Private -- used solely for unit testing.
 *
 * @param connection
 * @param preparedStatementCache
 * @param callableStatementCache
 * @param pool
 * @return Connection Handle
 */
protected static ConnectionHandle createTestConnectionHandle(Connection connection,
        IStatementCache preparedStatementCache, IStatementCache callableStatementCache,
        BoneCP pool) {
    ConnectionHandle handle = new ConnectionHandle();
    handle.connection = connection;
    handle.preparedStatementCache = preparedStatementCache;
    handle.callableStatementCache = callableStatementCache;
    handle.connectionLastUsedInMs = System.currentTimeMillis();
    handle.connectionLastResetInMs = System.currentTimeMillis();
    handle.connectionCreationTimeInMs = System.currentTimeMillis();
    handle.recoveryResult = new TransactionRecoveryResult();
    handle.trackedStatement = new MapMaker().makeMap();
    handle.url = "foo";
    handle.closeOpenStatements = true;
    handle.pool = pool;
    handle.url = null;
    int cacheSize = pool.getConfig().getStatementsCacheSize();
    if (cacheSize > 0) {
        handle.statementCachingEnabled = true;
    }
    return handle;
}
/**
 * Builds the cache for the given class.
 *
 * @param target The target class
 */
public void build(Class<?> target) {
    if (this.cache.containsKey(target)) {
        return;
    }
    //System.out.println("Building cache for " + target.getName());
    Map<Class, List<Method>> anno = new MapMaker().weakKeys().makeMap();
    for (Method m : target.getMethods()) {
        for (Annotation a : m.getAnnotations()) {
            if (!anno.containsKey(a.annotationType())) {
                anno.put(a.annotationType(), Lists.<Method>newArrayList());
            }
            //System.out.println("\t" + a.annotationType().getSimpleName() + " - " + m.toGenericString());
            anno.get(a.annotationType()).add(m);
        }
    }
    this.cache.put(target, anno);
}
public static synchronized MethodHandle equal(Class<?> type) {
    checkArgument(type != void.class);
    Class<?> erased = type.isPrimitive() ? type : Object.class;
    if (cache == null) {
        cache = new MapMaker().concurrencyLevel(2).makeMap();
    }
    MethodHandle result = cache.get(erased);
    if (result == null) {
        try {
            result = lookup().findStatic(MethodHandleHelpers.class, "eq",
                    methodType(boolean.class, erased, erased));
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new AssertionError(); // these all exist
        }
        cache.put(erased, result);
    }
    return result.asType(methodType(boolean.class, type, type));
}
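// Added usage sketch (not from the source): the cached handle is adapted back to the
// requested type, so invokeExact must be called with the exact (boolean)(type, type)
// shape. Assuming the eq(int, int) overload exists, as the AssertionError comment claims:
static void demo() throws Throwable {
    MethodHandle intEq = equal(int.class);
    boolean same = (boolean) intEq.invokeExact(1, 1); // true
    boolean diff = (boolean) intEq.invokeExact(1, 2); // false
}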
public MemoryStageController(Long pipelineId){
    super(pipelineId);
    replys = OtterMigrateMap.makeComputingMap(new Function<StageType, ReplyProcessQueue>() {

        public ReplyProcessQueue apply(StageType input) {
            int size = ArbitrateConfigUtils.getParallelism(getPipelineId()) * 10;
            if (size < 100) {
                size = 100;
            }
            return new ReplyProcessQueue(size);
        }
    });

    progress = new MapMaker().makeMap();
    termins = new LinkedBlockingQueue<TerminEventData>(20);
}
public RpcStageController(Long pipelineId){
    super(pipelineId);
    replys = OtterMigrateMap.makeComputingMap(new Function<StageType, ReplyProcessQueue>() {

        public ReplyProcessQueue apply(StageType input) {
            int size = ArbitrateConfigUtils.getParallelism(getPipelineId()) * 10;
            if (size < 100) {
                size = 100;
            }
            return new ReplyProcessQueue(size);
        }
    });

    progress = new MapMaker().makeMap();
    // register a listener for process event changes
    processMonitor = ArbitrateFactory.getInstance(pipelineId, ProcessMonitor.class);
    processMonitor.addListener(this);
    processMonitor.reload();
}
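// Added sketch (not from the source): OtterMigrateMap above (and MigrateMap in the
// start() method below) bridges the deprecated MapMaker.makeComputingMap over to
// Guava's CacheBuilder. Its real code is not shown here; assuming Guava 11+, a
// minimal version of such a bridge could look like this:
public static <K, V> ConcurrentMap<K, V> makeComputingMap(final Function<K, V> computer) {
    final LoadingCache<K, V> loadingCache = CacheBuilder.newBuilder().build(new CacheLoader<K, V>() {

        @Override
        public V load(K key) {
            return computer.apply(key);
        }
    });
    return new ForwardingConcurrentMap<K, V>() {

        @Override
        protected ConcurrentMap<K, V> delegate() {
            return loadingCache.asMap();
        }

        @Override
        @SuppressWarnings("unchecked")
        public V get(Object key) {
            // unlike asMap().get(), compute the value on first access,
            // preserving the old computing-map contract
            return loadingCache.getUnchecked((K) key);
        }
    };
}
// Requires com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} and
// com.google.common.collect.ForwardingConcurrentMap.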
public void start() {
    super.start();

    batches = MigrateMap.makeComputingMap(new Function<ClientIdentity, MemoryClientIdentityBatch>() {

        public MemoryClientIdentityBatch apply(ClientIdentity clientIdentity) {
            return MemoryClientIdentityBatch.create(clientIdentity);
        }
    });

    cursors = new MapMaker().makeMap();

    destinations = MigrateMap.makeComputingMap(new Function<String, List<ClientIdentity>>() {

        public List<ClientIdentity> apply(String destination) {
            return Lists.newArrayList();
        }
    });
}
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    int size = input.readInt();
    vertexMap = new MapMaker()
        .concurrencyLevel(getConf().getNettyServerExecutionConcurrency())
        .initialCapacity(size)
        .makeMap();
    representativeVertex = getConf().createVertex();
    representativeVertex.initialize(
        getConf().createVertexId(),
        getConf().createVertexValue(),
        getConf().createOutEdges());
    useUnsafeSerialization = getConf().useUnsafeSerialization();
    for (int i = 0; i < size; ++i) {
        progress();
        I vertexId = getConf().createVertexId();
        vertexId.readFields(input);
        int vertexDataSize = input.readInt();
        byte[] vertexData = new byte[vertexDataSize];
        input.readFully(vertexData);
        if (vertexMap.put(vertexId, vertexData) != null) {
            throw new IllegalStateException("readFields: Already saw vertex " + vertexId);
        }
    }
}