public void testGet_concurrent() {
  assertTrue(ArbitraryInstances.get(BlockingDeque.class).isEmpty());
  assertTrue(ArbitraryInstances.get(BlockingQueue.class).isEmpty());
  assertTrue(ArbitraryInstances.get(DelayQueue.class).isEmpty());
  assertTrue(ArbitraryInstances.get(SynchronousQueue.class).isEmpty());
  assertTrue(ArbitraryInstances.get(PriorityBlockingQueue.class).isEmpty());
  assertTrue(ArbitraryInstances.get(ConcurrentMap.class).isEmpty());
  assertTrue(ArbitraryInstances.get(ConcurrentNavigableMap.class).isEmpty());
  ArbitraryInstances.get(Executor.class).execute(ArbitraryInstances.get(Runnable.class));
  assertNotNull(ArbitraryInstances.get(ThreadFactory.class));
  assertFreshInstanceReturned(
      BlockingQueue.class,
      BlockingDeque.class,
      PriorityBlockingQueue.class,
      DelayQueue.class,
      SynchronousQueue.class,
      ConcurrentMap.class,
      ConcurrentNavigableMap.class,
      AtomicReference.class,
      AtomicBoolean.class,
      AtomicInteger.class,
      AtomicLong.class,
      AtomicDouble.class);
}
private void runMaxPublishLatencyTest() throws Throwable {
  int highestRequestRate = 0;
  for (AtomicDouble publishLatency = new AtomicDouble(0);
      publishLatency.get() < maxPublishLatencyMillis;
      Client.requestRate = (int) (Client.requestRate * 1.1)) {
    Map<ClientType, Controller.LoadtestStats> statsMap = runTest(null);
    statsMap.forEach(
        (type, stats) -> {
          if (type.isPublisher()) {
            publishLatency.set(
                LatencyDistribution.getNthPercentileUpperBound(
                    stats.bucketValues, maxPublishLatencyPercentile));
          }
        });
    if (publishLatency.get() < maxPublishLatencyMillis) {
      highestRequestRate = Client.requestRate;
    }
  }
  log.info("Maximum Request Rate: " + highestRequestRate);
  controller.shutdown(null);
}
/**
 * Test of the forEachValueIn method, of class ReflectionHelper.
 */
@Test
public void testForEachValue() {
    // Test Integers
    final int int_sum = int_a + int_b + int_c;
    final AtomicInteger intCounter = new AtomicInteger();
    ReflectionHelper.forEachValueIn(this, Integer.class, intCounter::addAndGet);
    assertEquals("Sum all of type int.", int_sum, intCounter.get());

    // Test Doubles
    final double double_sum = double_a + double_b + double_c;
    final AtomicDouble doubleCounter = new AtomicDouble();
    ReflectionHelper.forEachValueIn(this, Double.class, doubleCounter::addAndGet);
    assertEquals("Sum all of type double.", double_sum, doubleCounter.get(), 0.0);

    // Test All
    final AtomicDouble numberCounter = new AtomicDouble();
    ReflectionHelper.forEachValueIn(this, Number.class, n -> numberCounter.addAndGet(n.doubleValue()));
    assertEquals("Sum all of numbers.", int_sum + double_sum, numberCounter.get(), 0.0);
}
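ReflectionHelper itself is not shown above. A minimal sketch of what a helper like forEachValueIn might do, assuming it scans the target's declared instance fields and passes every value assignable to the requested type to the consumer (the body below is an illustration, not the library's actual implementation):

import java.lang.reflect.Field;
import java.util.function.Consumer;

public final class ReflectionHelperSketch {
    // Hypothetical: visit every declared field value assignable to the given type.
    public static <T> void forEachValueIn(Object target, Class<T> type, Consumer<T> consumer) {
        for (Field field : target.getClass().getDeclaredFields()) {
            field.setAccessible(true);
            try {
                Object value = field.get(target);
                if (type.isInstance(value)) {
                    consumer.accept(type.cast(value));
                }
            } catch (IllegalAccessException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}

Note that primitive fields (int, double) box on field.get, so an Integer.class or Double.class filter still matches them, which is consistent with how the test above sums primitive fields.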
@Override
public double getValue() {
    final AtomicDouble logProbSum = new AtomicDouble(0);
    final ParallelThreadPool ptp = new ParallelThreadPool();
    for (final TreeNode<TSGNode> tree : treeCorpus) {
        for (final TreeNode<TSGNode> root : TSGNode.getAllRootsOf(tree)) {
            ptp.pushTask(new Runnable() {
                @Override
                public void run() {
                    logProbSum.addAndGet(computeLog2PosteriorProbabilityOfRule(root, true));
                }
            });
        }
    }
    ptp.waitForTermination();
    return logProbSum.get();
}
/**
 * Calculate the log-probability of the whole corpus, given the TSG.
 *
 * @return the corpus log2-probability together with the total node count
 */
protected SampleStats calculateCorpusLogProb() {
    final AtomicDouble logProbSum = new AtomicDouble(0.);
    final AtomicInteger nNodes = new AtomicInteger(0);
    final ParallelThreadPool ptp = new ParallelThreadPool();
    for (final TreeNode<TSGNode> tree : treeCorpus) {
        ptp.pushTask(new Runnable() {
            @Override
            public void run() {
                logProbSum.addAndGet(computePosteriorLog2ProbabilityForTree(tree));
                nNodes.addAndGet(tree.getTreeSize());
            }
        });
    }
    ptp.waitForTermination();
    return new SampleStats(logProbSum.get(), nNodes.get());
}
/**
 * Computes the coefficients a, b, c and d, from which the association
 * rate is derived.
 *
 * These coefficients are also made available through the non-thread-safe
 * <code>#getLastX()</code> methods.
 *
 * @param crossTable
 *            the cross table holding the marginal counts
 * @param x
 *            the base term
 * @param y
 *            the co term
 * @return
 *            the context data holding the four coefficients
 */
public ContextData computeContextData(CrossTable crossTable, Term x, Term y) {
    ContextData data = new ContextData();
    // A = (x & y)
    data.setA((int) x.getContext().getNbCooccs(y));
    // B = (x & not(y))
    AtomicDouble a_plus_b = crossTable.getAPlusB().get(x);
    data.setB(a_plus_b == null ? 0 : a_plus_b.intValue() - data.getA());
    // int b = x.getFrequency() - a;
    // C = (not(x) & y)
    AtomicDouble a_plus_c = crossTable.getAPlusC().get(y);
    data.setC(a_plus_c == null ? 0 : a_plus_c.intValue() - data.getA());
    // int c = y.getFrequency() - a;
    // D = (not(x) & not(y))
    data.setD(crossTable.getTotalCoOccurrences() - data.getA() - data.getB() - data.getC());
    return data;
}
public CrossTable computeCrossTable() {
    int totalCoOccurrences = 0;
    Map<Term, AtomicDouble> aPlusB = Maps.newConcurrentMap();
    Map<Term, AtomicDouble> aPlusC = Maps.newConcurrentMap();

    Term term;
    for (Iterator<TermService> it1 = terminology.getTerms().iterator(); it1.hasNext();) {
        term = it1.next().getTerm();
        // this.totalFrequency++;
        if (term.getContext() == null)
            continue;
        ContextVector context = term.getContext();
        for (ContextVector.Entry entry : context.getEntries()) {
            totalCoOccurrences += entry.getNbCooccs();
            getScoreFromMap(aPlusB, term).addAndGet(entry.getNbCooccs());
            getScoreFromMap(aPlusC, entry.getCoTerm()).addAndGet(entry.getNbCooccs());
        }
    }

    return new CrossTable(aPlusB, aPlusC, totalCoOccurrences);
}
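The getScoreFromMap helper is called but not shown in this snippet. A plausible minimal sketch, assuming it lazily creates a per-term counter in the concurrent map (the body is an assumption, not the project's actual code):

// Hypothetical helper: return the counter for the term, creating it on first use.
private static AtomicDouble getScoreFromMap(Map<Term, AtomicDouble> map, Term term) {
    return map.computeIfAbsent(term, t -> new AtomicDouble(0));
}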
private Leadership(WeightedGraph g, CountDownLatch authority, CountDownLatch hubness,
        CountDownLatch normalizationStep, double[] countHub, double[] countAuth,
        AtomicDouble SHub, AtomicDouble SAuth, int chunk, int runner) {
    this.g = g;
    this.chunk = chunk;
    this.runner = runner;
    this.authorityStep = authority;
    this.hubnessStep = hubness;
    this.normalizationStep = normalizationStep;
    this.hub = countHub;
    this.auth = countAuth;
    this.SHub = SHub;
    this.SAuth = SAuth;
}
private HubnessAuthority(WeightedGraph g, CountDownLatch authority, CountDownLatch hubness,
        CountDownLatch normalizationStep, double[] countHub, double[] countAuth,
        AtomicDouble SHub, AtomicDouble SAuth, int chunk, int runner) {
    this.g = g;
    this.chunk = chunk;
    this.runner = runner;
    this.authorityStep = authority;
    this.hubnessStep = hubness;
    this.normalizationStep = normalizationStep;
    this.hub = countHub;
    this.auth = countAuth;
    this.SHub = SHub;
    this.SAuth = SAuth;
}
private static Statistics getStatistics(Observable<CellValue> cells) {
    final AtomicDouble sum = new AtomicDouble(0);
    final AtomicDouble sumSquares = new AtomicDouble(0);
    log.info("calculating mean and sd");
    long count = cells
            // add to sum and sumSquares
            .doOnNext(new Action1<CellValue>() {
                @Override
                public void call(CellValue cell) {
                    sum.addAndGet(cell.getValue());
                    sumSquares.addAndGet(cell.getValue() * cell.getValue());
                }
            })
            // count
            .count()
            // block and get
            .toBlocking().single();
    double mean = sum.get() / count;
    double variance = sumSquares.get() / count - mean * mean;
    double sd = Math.sqrt(variance);
    log.info("calculated");
    Statistics stats = new Statistics(mean, sd, count);
    log.info(stats);
    return stats;
}
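The snippet above computes the population variance as E[X²] - E[X]², which can lose precision catastrophically when the mean is large relative to the spread. A minimal single-threaded sketch of Welford's online algorithm as a numerically stabler alternative (the class and method names are illustrative, not part of the library above):

// Welford's online mean/variance: one pass, no large intermediate sums.
final class RunningStats {
    private long count;
    private double mean;
    private double m2; // sum of squared deviations from the running mean

    void add(double x) {
        count++;
        double delta = x - mean;
        mean += delta / count;
        m2 += delta * (x - mean);
    }

    double mean() { return mean; }
    double variance() { return count > 0 ? m2 / count : Double.NaN; }
    double sd() { return Math.sqrt(variance()); }
}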
private void aggregate(Metric metric) {
    List<AggregationRule> rules = aggregationConfiguration.getRules().get(metric.getTenant());
    if (rules == null) {
        return;
    }
    ConcurrentMap<MetricKey, AtomicDouble> timestampMap = getTimestampMap(metric.getTimestamp());
    for (AggregationRule rule : rules) {
        Matcher m = rule.getSource().matcher(metric.getPath());
        if (m.matches()) {
            String destinationPath = rule.getDestination().replace("<data>", m.group("data"));
            MetricKey destinationKey = new MetricKey(
                    metric.getTenant(), destinationPath, metric.getRollup(),
                    metric.getPeriod(), metric.getTimestamp());
            getMetricValue(timestampMap, destinationKey).addAndGet(metric.getValue());
        }
    }
}
@Override
public Gradient gradient() {
    if (yIncs == null)
        yIncs = zeros(Y.shape());
    if (gains == null)
        gains = ones(Y.shape());

    AtomicDouble sumQ = new AtomicDouble(0);
    /* Calculate gradient based on barnes hut approximation with positive and negative forces */
    INDArray posF = Nd4j.create(Y.shape());
    INDArray negF = Nd4j.create(Y.shape());
    if (tree == null)
        tree = new SpTree(Y);
    tree.computeEdgeForces(rows, cols, vals, N, posF);

    for (int n = 0; n < N; n++)
        tree.computeNonEdgeForces(n, theta, negF.slice(n), sumQ);

    INDArray dC = posF.subi(negF.divi(sumQ));

    Gradient ret = new DefaultGradient();
    ret.gradientForVariable().put(Y_GRAD, dC);
    return ret;
}
/**
 * Exports an {@link AtomicDouble}, which will be included in time series tracking.
 *
 * @param name The name to export the stat with.
 * @param doubleVar The variable to export.
 * @return A reference to the {@link AtomicDouble} provided.
 */
public static AtomicDouble export(String name, final AtomicDouble doubleVar) {
    export(new StatImpl<Double>(name) {
        @Override
        public Double read() {
            return doubleVar.doubleValue();
        }
    });
    return doubleVar;
}
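Since the method returns the same reference it was given, a caller keeps the handle and mutates it; each time-series sample then reads the current value. A hypothetical usage, assuming the export method above lives on a class named Stats (the class name and stat name are illustrative):

// Export once, then update the exported variable freely.
AtomicDouble cacheHitRatio = Stats.export("cache_hit_ratio", new AtomicDouble(0.0));
cacheHitRatio.set(0.97); // subsequent samples of the stat observe 0.97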
static AtomicDouble gaugeFor(StatsReceiver stats, String scope) {
    final AtomicDouble result = new AtomicDouble();
    StatsReceivers.addGauge(stats, new Callable<Float>() {
        @Override
        public Float call() throws Exception {
            return result.floatValue();
        }
    }, scope);
    return result;
}
public SumMap<T> build(final double min) {
    final LinkedHashMap<T, Double> builder = new LinkedHashMap<>(map.size());
    for (final Entry<T, AtomicDouble> entry : map.entrySet()) {
        final double value = entry.getValue().get();
        if (value >= min) {
            builder.put(entry.getKey(), value);
        }
    }
    return new SumMap<>(builder, min);
}
public void clear() {
    this.sentenceCount = new AtomicInteger(0);
    this.stepsUntilMistake = new AtomicInteger(0);
    this.agendaCount = new AtomicInteger(0);
    this.chartCount = new AtomicInteger(0);
    this.neuralScore = new AtomicDouble(0);
    this.neuralCount = new AtomicInteger(0);
    this.goldParsesFound = new AtomicInteger(0);
}
private void initializeWeights() {
    // Note: both loops run to weights.length, so this assumes a square weight matrix.
    final int rows = weights.length;
    Random rand = new Random();
    for (int row = 0; row < rows; row++) {
        for (int column = 0; column < rows; column++) {
            weights[row][column] = new AtomicDouble(rand.nextDouble());
        }
    }
}
private static void visitCompressedTableEntries(Expression compressedTableExpression, AtomicDouble count) {
    if (IfThenElse.isIfThenElse(compressedTableExpression)) {
        visitCompressedTableEntries(IfThenElse.thenBranch(compressedTableExpression), count);
        visitCompressedTableEntries(IfThenElse.elseBranch(compressedTableExpression), count);
    } else {
        // We are at a leaf node, therefore increment the count
        count.addAndGet(1);
    }
}
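The recursion is single-threaded, so the AtomicDouble here serves purely as a mutable double box threaded through the call chain. A hypothetical caller (the expression variable is assumed):

AtomicDouble count = new AtomicDouble(0);
visitCompressedTableEntries(compressedTable, count);
// count.get() now holds the number of leaves, i.e. compressed table entries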
@VisibleForTesting
void validate() {
    final AtomicDouble sumOfWeights = new AtomicDouble();
    final AtomicInteger actualNodeCount = new AtomicInteger();
    final AtomicInteger actualNonZeroNodeCount = new AtomicInteger();

    if (root != null) {
        validateStructure(root);

        postOrderTraversal(root, new Callback() {
            @Override
            public boolean process(Node node) {
                sumOfWeights.addAndGet(node.weightedCount);
                actualNodeCount.incrementAndGet();

                if (node.weightedCount >= ZERO_WEIGHT_THRESHOLD) {
                    actualNonZeroNodeCount.incrementAndGet();
                }

                return true;
            }
        });
    }

    checkState(Math.abs(sumOfWeights.get() - weightedCount) < ZERO_WEIGHT_THRESHOLD,
            "Computed weight (%s) doesn't match summary (%s)",
            sumOfWeights.get(), weightedCount);

    checkState(actualNodeCount.get() == totalNodeCount,
            "Actual node count (%s) doesn't match summary (%s)",
            actualNodeCount.get(), totalNodeCount);

    checkState(actualNonZeroNodeCount.get() == nonZeroNodeCount,
            "Actual non-zero node count (%s) doesn't match summary (%s)",
            actualNonZeroNodeCount.get(), nonZeroNodeCount);
}
@Override
public double evaluate(IChromosome a_subject) {
    EditsChromosome ec = new EditsChromosome(a_subject);
    String key = ec.key();
    if (cache.containsKey(key))
        return cache.get(key);
    try {
        initializator.initialize(algorithm, ec);
        log.debug(AlgorithmInitializator.toString(algorithm));
        final AtomicDouble distd = new AtomicDouble(0);
        EditsThread<AnnotatedEntailmentPair> thread = new EditsThread<AnnotatedEntailmentPair>() {
            @Override
            public void process(AnnotatedEntailmentPair p) throws Exception {
                double score = Double.parseDouble(p.getAttributes().get("score"));
                double ns = 5 * (1 - algorithm.distance(p.getT().get(0), p.getH().get(0), p.getId()));
                distd.addAndGet(Math.abs(score - ns));
            }
        };
        thread.start(training.iterator());
        double dist = 5.0 - distd.get() / training.size();
        GeneticResult r = new GeneticResult(dist, ec);
        log.debug(r.getValue());
        if (result == null || result.getValue() < dist)
            result = r;
        cache.put(key, dist);
        results.add(r);
        return dist;
    } catch (Exception e1) {
        e1.printStackTrace();
        System.exit(1);
    }
    cache.put(key, 0.0);
    return 0;
}
public double getProjectionScore(TerminologyService referenceTerminology) {
    AtomicDouble sum = new AtomicDouble(0);
    AtomicDouble total = new AtomicDouble(0);
    List<Term> top100 = topN(100).collect(toList());
    for (Term docTerm : top100) {
        total.addAndGet(docTerm.getSpecificity());
        int baseRank = getBaseRankInRefTermino(referenceTerminology, docTerm);
        if (baseRank > 0 && baseRank < 500)
            sum.addAndGet(docTerm.getSpecificity());
    }
    return sum.doubleValue() / total.doubleValue();
}
public static void main(String[] args) throws IOException, InterruptedException {
    int worker = (int) (Runtime.getRuntime().availableProcessors());
    WeightedUndirectedGraph g = new WeightedUndirectedGraph(3000);
    GraphReader.readGraph(g, "src/main/resources/graphs/test-graph-dn-0.gz", true);

    AtomicDouble[] info = GraphInfo.getGraphInfo(g, worker);
    System.out.println(info[0]);
    System.out.println(info[1]);
    System.out.println(info[2]);

    Core c = CoreDecomposition.getInnerMostCore(g, worker);
    System.out.println(c.minDegree);
    System.out.println(c.seq.length);
    MemInfo.info();
}
private Assortativity(WeightedGraph g, CountDownLatch matrixStep, AtomicDouble[][] matrix,
        int[] partition, int classes, int chunk, int runner) {
    this.g = g;
    this.chunk = chunk;
    this.runner = runner;
    this.matrixStep = matrixStep;
    this.matrix = matrix;
    this.partition = partition;
    this.classes = classes;
}
private GraphInfo(WeightedGraph a, int[] vertices, AtomicDouble[] values,
        CountDownLatch cb, int chunk, int runner) {
    this.a = a;
    this.vertices = vertices;
    this.values = values;
    this.chunk = chunk;
    this.runner = runner;
    barrier = cb;
}
public static AtomicDouble[] getGraphInfo(final WeightedGraph a, int runner) {
    long time = System.currentTimeMillis();
    final CountDownLatch latch = new CountDownLatch(runner);
    int[] vertexs = a.getVertex();

    // Info Values TODO: convert to a map?!
    // 0 number of vertices
    // 1 number of edges
    // 2 Density (E)/(V*(V-1))
    AtomicDouble[] values = new AtomicDouble[3];
    for (int i = 0; i < values.length; i++) {
        values[i] = new AtomicDouble();
    }
    values[0].set(vertexs.length);

    Thread[] workers = new Thread[runner];
    for (int i = 0; i < runner; i++) {
        workers[i] = new Thread(new GraphInfo(a, vertexs, values, latch, i, runner));
        workers[i].setName("" + i);
        workers[i].start();
    }
    try {
        latch.await();
    } catch (InterruptedException e) {
        logger.debug(e);
    }

    values[0].set(vertexs.length);
    values[2].set((values[1].get()) / (values[0].get() * (values[0].get() - 1)));
    logger.info(((System.currentTimeMillis() - time) / 1000d) + "s");
    return values;
}
private PageRankPI(WeightedGraph g, CountDownLatch first, CountDownLatch second,
        CountDownLatch thirdStep, double[] counters, double beta, int realSize,
        AtomicDouble S, int chunk, int runner) {
    this.g = g;
    this.chunk = chunk;
    this.runner = runner;
    this.firstStep = first;
    this.secondStep = second;
    this.thirdStep = thirdStep;
    this.ranks = counters;
    this.beta = beta;
    this.realSize = realSize;
    this.S = S;
}
@Override
public void set(Metric<?> value) {
    if (value.getName().startsWith("histogram.")) {
        registry.distributionSummary(value.getName()).record(value.getValue().longValue());
    } else if (value.getName().startsWith("timer.")) {
        registry.timer(value.getName()).record(value.getValue().longValue(), TimeUnit.MILLISECONDS);
    } else {
        final Id id = registry.createId(value.getName());
        final AtomicDouble gauge = getGaugeStorage(id);
        gauge.set(value.getValue().doubleValue());
        registry.gauge(id, gauge);
    }
}
private AtomicDouble getGaugeStorage(Id id) {
    final AtomicDouble newGauge = new AtomicDouble(0);
    final AtomicDouble existingGauge = gauges.putIfAbsent(id, newGauge);
    if (existingGauge == null) {
        return newGauge;
    }
    return existingGauge;
}
public StatsRecord(long renderRequests, long renderPathsRead, long renderPointsRead,
        long pathsRequests, double throttled, long timedOut) {
    this.renderRequests = new AtomicLong(renderRequests);
    this.renderPathsRead = new AtomicLong(renderPathsRead);
    this.renderPointsRead = new AtomicLong(renderPointsRead);
    this.pathsRequests = new AtomicLong(pathsRequests);
    this.throttled = new AtomicDouble(throttled);
    this.timedOutRequests = new AtomicLong(timedOut);
}
public void test() throws InterruptedException {
    logMem();
    System.gc();
    Thread.sleep(3000);
    logMem();
    ConcurrentHashMap<Position, AtomicDouble> map = new ConcurrentHashMap<Position, AtomicDouble>();
    for (int i = 0; i < 1000000; i++) {
        Position p = new Position(i / Math.PI, i * Math.PI * Math.E);
        map.put(p, new AtomicDouble(Math.random() * 800));
    }
    System.gc();
    Thread.sleep(3000);
    logMem();
    System.out.println(map.size());
}
public Double getIndexHitRate() {
    final AtomicDouble hitRate = new AtomicDouble();
    selectWithConnection(PgStatusQueries.INDEX_HIT_RATE, (rs, cnt) -> {
        hitRate.set(rs.getDouble("rate"));
    });
    return hitRate.get();
}
public Double getTableHitRate() {
    final AtomicDouble hitRate = new AtomicDouble();
    selectWithConnection(PgStatusQueries.TABLE_HIT_RATE, (rs, cnt) -> {
        hitRate.set(rs.getDouble("rate"));
    });
    return hitRate.get();
}
private ConcurrentMap<MetricKey, AtomicDouble> getTimestampMap(long timestamp) {
    ConcurrentMap<MetricKey, AtomicDouble> timestampMap = accumulator.get(timestamp);
    if (timestampMap == null) {
        ConcurrentMap<MetricKey, AtomicDouble> newTimestampMap = new ConcurrentHashMap<>();
        timestampMap = accumulator.putIfAbsent(timestamp, newTimestampMap);
        if (timestampMap == null) {
            timestampMap = newTimestampMap;
        }
    }
    return timestampMap;
}
private AtomicDouble getMetricValue(ConcurrentMap<MetricKey, AtomicDouble> map, MetricKey metricKey) {
    AtomicDouble value = map.get(metricKey);
    if (value == null) {
        AtomicDouble newValue = new AtomicDouble(0);
        value = map.putIfAbsent(metricKey, newValue);
        if (value == null) {
            value = newValue;
        }
    }
    return value;
}
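The get-then-putIfAbsent pattern in getGaugeStorage, getTimestampMap and getMetricValue predates Java 8; on Java 8+ the same lazy initialization can be written with computeIfAbsent, as in the hypothetical getScoreFromMap sketch earlier. A minimal equivalent of the method above:

// Sketch: allocate the AtomicDouble only when the key is actually absent.
private AtomicDouble getMetricValue(ConcurrentMap<MetricKey, AtomicDouble> map, MetricKey metricKey) {
    return map.computeIfAbsent(metricKey, k -> new AtomicDouble(0));
}

One trade-off: ConcurrentHashMap.computeIfAbsent avoids creating a throwaway AtomicDouble on a lost race, at the cost of briefly locking the bin while the mapping function runs, whereas the putIfAbsent form may allocate an instance that is immediately discarded.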