@Setup(Level.Trial)
public void setup() {
    chunkStore = new ChunkImpl(
        new MetricsAndTagStoreImpl(new InvertedIndexTagStore(1_000_000, 1_000_000),
            new VarBitMetricStore()),
        null);
    try (Stream<String> lines = Files.lines(filePath, Charset.defaultCharset())) {
        lines.forEachOrdered(line -> {
            try {
                String[] words = line.split(" ");
                String metricName = words[1];
                counts.merge(metricName, 1, Integer::sum);
                MetricUtils.parseAndAddOpenTsdbMetric(line, chunkStore);
            } catch (Exception e) {
                // Skip lines that fail to parse.
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
@Setup(Level.Trial)
@Override
public void setUp() throws Exception {
    ListeningExecutorService dsExec = MoreExecutors.newDirectExecutorService();
    executor = MoreExecutors.listeningDecorator(MoreExecutors.getExitingExecutorService(
        (ThreadPoolExecutor) Executors.newFixedThreadPool(1), 1L, TimeUnit.SECONDS));

    InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", dsExec);
    InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", dsExec);
    Map<LogicalDatastoreType, DOMStore> datastores = ImmutableMap.of(
        LogicalDatastoreType.OPERATIONAL, (DOMStore) operStore,
        LogicalDatastoreType.CONFIGURATION, configStore);

    domBroker = new SerializedDOMDataBroker(datastores, executor);
    schemaContext = BenchmarkModel.createTestContext();
    configStore.onGlobalContextUpdated(schemaContext);
    operStore.onGlobalContextUpdated(schemaContext);
    initTestNode();
}
@Setup(Level.Trial)
public void setup() throws IOException {
    indexMemCache = new IndexExpiredMemCache(TimeUnit.MINUTES.toMillis(10), 1000 * 1024 * 1024);
    packMemCache = new PackExpiredMemCache(TimeUnit.MINUTES.toMillis(10), 1000 * 1024 * 1024);

    workDir = Files.createTempDirectory("dpsegment_bm_");
    String segmentId = "test_segment";
    DPSegment insertSegment = DPSegment.open(
        Version.LATEST_ID, SegmentMode.DEFAULT, workDir, segmentId, segmentSchema,
        OpenOption.Overwrite).update();
    addRows(insertSegment, genRows(rowCount));
    insertSegment.seal();

    IntegratedSegment.Fd.create(insertSegment, workDir.resolve("integreated"), false);
    IntegratedSegment.Fd fd = IntegratedSegment.Fd.create("aa", workDir.resolve("integreated"));
    segment = (IntegratedSegment) fd.open(indexMemCache, null, packMemCache);

    // Warm up.
    travel_stream_forEach();
    travel_by_pack();
}
@Setup(Level.Trial)
public void generateData() {
    array = new int[chunksCount][];
    int remaining = chunksCount;
    Random rnd = new Random(1);
    while (remaining > 0) {
        int n = rnd.nextInt(chunksCount);
        if (array[n] == null) {
            --remaining;
            int[] chunk = new int[chunkSize / 2 + rnd.nextInt(chunkSize)];
            array[n] = chunk;
            for (int i = 0; i != chunk.length; ++i) {
                array[n][i] = rnd.nextInt();
            }
            Arrays.sort(array[n]);
            // Sanity-check lookup() against Arrays.binarySearch() on a
            // likely-absent key (0) and a definitely-present key (the median).
            int b1 = Arrays.binarySearch(chunk, 0);
            int b2 = Arrays.binarySearch(chunk, chunk[chunk.length / 2]);
            int s1 = lookup(chunk, 0);
            int s2 = lookup(chunk, chunk[chunk.length / 2]);
            if (b1 != s1 || b2 != s2) {
                throw new IllegalStateException("Verification failed");
            }
        }
    }
}
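/**
 * A plausible shape for the lookup(int[], int) helper verified above: a
 * hand-rolled binary search with Arrays.binarySearch semantics, returning
 * "-(insertion point) - 1" when the key is absent. This is a hypothetical
 * reconstruction; the original implementation is not shown in this snippet.
 */
static int lookup(int[] a, int key) {
    int lo = 0;
    int hi = a.length - 1;
    while (lo <= hi) {
        int mid = (lo + hi) >>> 1;
        int v = a[mid];
        if (v < key) {
            lo = mid + 1;
        } else if (v > key) {
            hi = mid - 1;
        } else {
            return mid;
        }
    }
    return -(lo + 1); // key not found
}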
@Setup(Level.Iteration)
public void start() {
    counter = 0;
    if (log == null) {
        // A negative max queue size makes the async queue unbounded, so no
        // messages are ever ignored (dropped).
        com.comfortanalytics.alog.Alog.DEFAULT_MAX_QUEUE = -1;
        log = com.comfortanalytics.alog.Alog.getLogger(
            "Alog", new PrintStream(new NullOutputStream()));
        log.getHandlers()[0].setFormatter(new SimpleFormatter());
        log.setUseParentHandlers(false);
    }
}
@Setup(Level.Trial)
public void setup(BenchmarkParams params) throws Exception {
    DS = ManagedDataSourceBuilder.builder()
        .dataSource(new StubDataSource())
        .userName("gnodet")
        .password("")
        .transaction(TransactionSupport.TransactionSupportLevel.NoTransaction)
        .minIdle(0)
        .maxPoolSize(maxPoolSize)
        .connectionTimeout(8000)
        .build();
}
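/**
 * A minimal benchmark sketch for the pool configured above, assuming the
 * trial measures borrowing and returning a connection. The method name and
 * the use of org.openjdk.jmh.infra.Blackhole are illustrative; the original
 * benchmark body is not shown here.
 */
@Benchmark
public void borrowAndReturn(Blackhole bh) throws Exception {
    try (Connection connection = DS.getConnection()) {
        bh.consume(connection);
    }
}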
@Setup(Level.Trial)
public void setup() {
    ChunkManager chunkManager = new ChunkManager("test", 1_000_000);
    metricWriter = new FileMetricWriter(filePath, chunkManager);
    metricWriter.start();
    // Convert all data to the off-heap chunk map.
    chunkManager.toOffHeapChunkMap();
}
@Setup(Level.Trial)
public void setup() {
    chunkStore = new ChunkImpl(
        new MetricsAndTagStoreImpl(new InvertedIndexTagStore(1_000_000, 1_000_000),
            new VarBitMetricStore()),
        null);
    try (Stream<String> lines = Files.lines(filePath, Charset.defaultCharset())) {
        lines.forEachOrdered(line -> {
            try {
                String[] words = line.split(" ");
                String metricName = words[1];
                if (metricName != null && !metricName.isEmpty()) {
                    counts.merge(metricName, 1, Integer::sum);
                    MetricUtils.parseAndAddOpenTsdbMetric(line, chunkStore);
                }
            } catch (Exception e) {
                System.err.println("Error ingesting metric: " + e.getMessage());
                e.printStackTrace();
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
@Setup(Level.Invocation)
public void setup() {
    switch (msType) {
        case "InvertedIndexTagStore":
            int initialMapSize = 10000;
            ms = new InvertedIndexTagStore(initialMapSize, initialMapSize);
            metrics = new ArrayList<>();
            for (int i = 0; i < initialMapSize; i++) {
                metrics.add(randomMetric(numMetrics, numKeys, numValues));
            }
            break;
        default:
            throw new RuntimeException("invalid msType: " + msType);
    }
}
/**
 * No need to rebuild the graph for every invocation since it is not altered by the backend.
 */
@Setup(Level.Trial)
public void setupGraph() {
    initializeMethod();
    prepareRequest();
    emitFrontEnd();
}
@Setup(Level.Trial)
public void beforeBenchmark() {
    // Set up the graph.
    initializeMethod();
    prepareRequest();
    emitFrontEnd();
    generateLIR();
    // Compute the control-flow graph.
    this.cfg = (ControlFlowGraph) getLIR().getControlFlowGraph();
}
@Setup(Level.Trial)
public void setup() {
    initializeMethod();
    prepareRequest();
    emitFrontEnd();
    generateLIR();
    preAllocationStage();
    // Context shared by all allocation phases.
    allocationContext = createAllocationContext();
    applyLIRPhase(TRACE_BUILDER_PHASE, allocationContext);
    applyLIRPhase(LIVENESS_ANALYSIS_PHASE, allocationContext);
}
@TearDown(Level.Invocation)
public void afterInvocation() {
    if (invocation == 0) {
        // Only need to check the first invocation.
        invocation++;
        for (int i = 0; i < nodes.length; i++) {
            if (nodes[i] != originalNodes[i]) {
                throw new InternalError(String.format(
                    "Benchmark method mutated node %d: original=%s, current=%s",
                    i, originalNodes[i], nodes[i]));
            }
        }
    }
}
@Setup(Level.Trial)
public void doSetup() {
    try {
        intValField = Foo.class.getDeclaredField("intVal");
        intValField.setAccessible(true);
    } catch (NoSuchFieldException | SecurityException e) {
        throw new RuntimeException(e);
    }
}
@Setup(Level.Trial)
public void doSetup() {
    try {
        intValAccessorMethod = Foo.class.getDeclaredMethod("getIntVal");
        intValAccessorMethod.setAccessible(true);
    } catch (NoSuchMethodException | SecurityException e) {
        throw new RuntimeException(e);
    }
}
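/**
 * Minimal benchmark sketches for the two reflective accessors prepared in
 * the setups above, assuming a pre-built Foo instance held in a field named
 * "foo" (hypothetical; the original benchmark bodies are not shown).
 */
@Benchmark
public int reflectiveFieldGet() throws IllegalAccessException {
    return intValField.getInt(foo);
}

@Benchmark
public int reflectiveMethodInvoke() throws Exception {
    return (int) intValAccessorMethod.invoke(foo);
}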
/**
 * Ensures Accumulo and the test are ready.
 */
@Setup(Level.Trial)
public void setupTrial() throws Exception {
    AccumuloInstance.setup();
    connector = AccumuloInstance.getConnector(USER_NAME);
    User user = AccumuloInstance.getUser(USER_NAME);
    authorizations = user.authorizations;
    encryptionKeys = user.encryptionKeys;
    signatureKeys = user.signatureKeys;
    for (String table : tables) {
        AccumuloInstance.createTable(table);
    }
}
/**
 * Ensures Accumulo and the test are cleaned up.
 */
@TearDown(Level.Trial)
public void teardownTrial() throws Exception {
    for (String table : tables) {
        AccumuloInstance.deleteTable(table);
    }
    AccumuloInstance.teardown();
}
@Setup(Level.Trial)
public void setup() {
    ThreadLocalRandom random = ThreadLocalRandom.current();
    input = random.longs(0, 10000).limit(10000).toArray();
    output = new long[input.length];
    compressed = new long[input.length];
    amount = Simple8RLE.compress(input, compressed);
    decompressed = new long[input.length];
}
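/**
 * A minimal benchmark sketch using only the compression call already shown
 * in the setup; a decompression benchmark is omitted because the decompress
 * signature does not appear in this snippet. The method name is illustrative.
 */
@Benchmark
public int compress() {
    return Simple8RLE.compress(input, compressed);
}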
@Override
@Setup(Level.Trial)
public void setUp() throws Exception {
    final String name = "DS_BENCHMARK";
    final ExecutorService dataChangeListenerExecutor =
        SpecialExecutors.newBlockingBoundedFastThreadPool(
            MAX_DATA_CHANGE_EXECUTOR_POOL_SIZE, MAX_DATA_CHANGE_EXECUTOR_QUEUE_SIZE,
            name + "-DCL");
    domStore = new InMemoryDOMDataStore("SINGLE_THREADED_DS_BENCHMARK",
        dataChangeListenerExecutor);
    schemaContext = BenchmarkModel.createTestContext();
    domStore.onGlobalContextUpdated(schemaContext);
    initTestNode();
}
@Override
@Setup(Level.Trial)
public void setUp() throws Exception {
    domStore = new InMemoryDOMDataStore("SINGLE_THREADED_DS_BENCHMARK",
        Executors.newSingleThreadExecutor());
    schemaContext = BenchmarkModel.createTestContext();
    domStore.onGlobalContextUpdated(schemaContext);
    initTestNode();
}
@Setup(Level.Trial)
public void setup() throws Exception {
    if (array == null) {
        array = new int[size];
        slice = ByteSlice.allocateDirect(size << 2);
    } else {
        throw new RuntimeException("setup() should only run once per trial");
    }
    for (int i = 0; i < size; i++) {
        array[i] = i;
        slice.putInt(i << 2, i);
    }
}
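/**
 * Minimal benchmark sketches contrasting the two layouts prepared above.
 * This assumes ByteSlice exposes a getInt(offset) accessor mirroring the
 * putInt(offset, value) call in the setup; method names are illustrative,
 * not from the original source.
 */
@Benchmark
public long sumArray() {
    long sum = 0;
    for (int i = 0; i < size; i++) {
        sum += array[i];
    }
    return sum;
}

@Benchmark
public long sumSlice() {
    long sum = 0;
    for (int i = 0; i < size; i++) {
        sum += slice.getInt(i << 2);
    }
    return sum;
}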
@Setup(Level.Iteration)
public void setup() {
    hub = new UniJavaRx2PubHub(PublishProcessorProxy);
    obs = hub.getPub(tag);
    upstream = Flowable.range(0, count).publish();
    hub.addUpstream(tag, upstream);
}
@Setup(Level.Iteration)
public void setup() {
    hub = new UniJavaRx1Hub(RxJava1ProxyType.PublishSubjectProxy);
    obs = hub.getPub(tag);
    upstream = Observable.range(0, count).publish();
    hub.addUpstream(tag, upstream);
}
@Setup(Level.Iteration)
public void setup() {
    hub = new UniReactorHub(EmitterProcessorProxy);
    obs = hub.getPub(tag);
    upstream = Flux.range(0, count).publish();
    hub.addUpstream(tag, upstream);
}
@TearDown(Level.Iteration)
public void tearDownIteration() {
    storage.write((NoResult.Quiet) storeProvider -> {
        storeProvider.getJobUpdateStore().deleteAllUpdatesAndEvents();
        storeProvider.getLockStore().deleteLocks();
    });
}
/**
 * Runs before each invocation of the benchmark in order to store the tasks
 * that we will transition in the benchmark.
 */
@Setup(Level.Invocation)
public void createTasks() {
    tasks = new Tasks.Builder()
        .setScheduleStatus(ScheduleStatus.STARTING)
        .build(NUM_TASKS);
    storage.write(
        (NoResult.Quiet) storeProvider -> storeProvider.getUnsafeTaskStore().saveTasks(tasks));
    countDownLatch = new CountDownLatch(tasks.size());
}
@Setup(Level.Trial)
@Override
public void setUp() {
    storage = Guice.createInjector(
        Modules.combine(
            DbModule.testModuleWithWorkQueue(PLAIN, Optional.of(new InMemStoresModule(PLAIN))),
            new AbstractModule() {
                @Override
                protected void configure() {
                    bind(StatsProvider.class).toInstance(new FakeStatsProvider());
                }
            }))
        .getInstance(Storage.class);
}
@Setup(Level.Invocation)
public void setup() {
    items = new ArrayList<>(inputs.size());
    for (Input input : inputs) {
        final Object item;
        if (cachingOn) {
            item = Cached.create(input.accountId, input.messageIds, input.users);
        } else {
            item = NotCached.create(input.accountId, input.messageIds, input.users);
        }
        items.add(item);
    }
}
@Setup(Level.Invocation)
public void setup() throws IOException {
    this.heap = new int[count];
    this.offheap = MemoryUtil.allocate(count * 4);
    this.heap2 = new int[count];
    this.offheap2 = MemoryUtil.allocate(count * 4);
}
@TearDown(Level.Trial)
public void tearDown() {
    byteData.free();
    byteCmpData.free();
    shortData.free();
    shortCmpData.free();
    intData.free();
    intCmpData.free();
    longData.free();
    longCmpData.free();
}
@Setup(Level.Trial)
public void setup() throws IOException {
    workDir = Files.createTempDirectory("dpsegment_bm_");
    System.out.println(workDir);
    String segmentId = "test_segment";
    DPSegment insertSegment = DPSegment.open(
        Version.LATEST_ID, SegmentMode.DEFAULT, workDir, segmentId, segmentSchema,
        OpenOption.Overwrite).update();
    addRows(insertSegment, genRows(rowCount));
    insertSegment.seal();
    IntegratedSegment.Fd.create(insertSegment, workDir.resolve("integreated"), false);
}
@Setup(Level.Trial)
public void init() {
    this.data = createDoubleArrayGaussian((int) 1E8);
    this.target = new double[data.length];
}
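/**
 * A minimal benchmark sketch, assuming the trial measures bulk copying of
 * the Gaussian-distributed data into the preallocated target; the method
 * name is illustrative, not from the original source.
 */
@Benchmark
public double[] copy() {
    System.arraycopy(data, 0, target, 0, data.length);
    return target;
}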
@Benchmark
public void exception() {
    counter++;
    log.log(java.util.logging.Level.SEVERE, "Message", exception);
}
@Benchmark
public void parameter() {
    counter++;
    log.log(java.util.logging.Level.SEVERE, "Parameter {0}", param);
}