private String generatePost(final List<SlackMessagePosted> publicaties) {
    String content = "";
    content += "Hieronder is de lijst met #publicaties van deze week (" + WeekUtil.getWeekNumber()
            + ") te vinden. Veel leesplezier!";
    content += System.lineSeparator();
    content += IntStream.range(0, publicaties.size())
            .mapToObj(index -> (index + 1) + ". " + messageParser.parse(publicaties.get(index).getMessageContent()))
            .reduce("", (acc, next) -> acc + System.lineSeparator() + next);
    content += System.lineSeparator() + System.lineSeparator();

    // safe to unwrap: this method is only invoked with a non-empty list of publications
    String publicators = publicaties.stream()
            .map(message -> message.getSender().getUserName())
            .distinct()
            .reduce((acc, next) -> acc + ", @" + next).get();
    content += "De lijst is deze week mogelijk gemaakt door @" + publicators + ". Bedankt!";
    content += System.lineSeparator() + System.lineSeparator();
    content += "Kom je een interessante publicatie (foto/video/textueel) tegen? Deel deze dan in het #publicaties kanaal zodat iedereen er van kan leren :slightly_smiling_face:";
    content += System.lineSeparator() + System.lineSeparator();
    content += "Nog een fijne zondag!";
    return content;
}
@Override
public String toString() {
    StringBuilder result = new StringBuilder(); // single-threaded use, so StringBuilder suffices
    result.append(String.format("Column name : %s\n", getColumnName()));
    result.append(String.format("Column type : %s<", getColumnType()));
    result.append(columnContainer.entrySet().stream()
            .map(entry -> String.format("%s", entry.getValue().getColumnType()))
            .collect(Collectors.joining(",")));
    result.append(">\n");
    result.append("--------------------------\n");
    IntStream.range(0, size())
            .forEach(i -> result.append(String.format("CELL-%d: %s\n", i, get(i).toString())));
    return result.toString();
}
private void test(List<Integer> values, List<Double> probabilities) {
    final Map<Integer, AtomicInteger> results = new ConcurrentHashMap<>(
            values.stream()
                    .map(integer -> new AbstractMap.SimpleImmutableEntry<>(integer, new AtomicInteger(0)))
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
    );

    IntStream.range(0, N)
            .parallel()
            .forEach(i -> results.get(GenerateNonuniformRandomNumbers.getRandom(values, probabilities)).incrementAndGet());

    IntStream.range(0, values.size())
            .parallel()
            .forEach(i -> {
                double expectedValue = N * probabilities.get(i);
                // compare the observed count as a double so the Hamcrest matchers type-check
                assertThat(results.get(values.get(i)).doubleValue(),
                        allOf(greaterThan(expectedValue - 50), lessThan(expectedValue + 50)));
            });
}
@Test
public void clientToServerBackpressure() throws InterruptedException {
    ReactorNumbersGrpc.ReactorNumbersStub stub = ReactorNumbersGrpc.newReactorStub(channel);

    Flux<NumberProto.Number> reactorRequest = Flux
            .fromIterable(IntStream.range(0, NUMBER_OF_STREAM_ELEMENTS)::iterator)
            .doOnNext(i -> System.out.println(i + " --> "))
            .doOnNext(i -> updateNumberOfWaits(lastValueTime, numberOfWaits))
            .map(BackpressureIntegrationTest::protoNum);

    Mono<NumberProto.Number> reactorResponse = stub.requestPressure(reactorRequest);

    StepVerifier.create(reactorResponse)
            .expectNextMatches(v -> v.getNumber(0) == NUMBER_OF_STREAM_ELEMENTS - 1)
            .expectComplete()
            .verify(Duration.ofSeconds(5));

    assertThat(numberOfWaits.get()).isEqualTo(1);
}
/**
 * <hr>
 * <h3>
 * Created : Alexandre Bolot 10/01 <br>
 * Modified : Alexandre Bolot 10/01
 * </h3>
 * <hr>
 */
@Test
public void apply_Right() {
    long youngAmount;
    long oldAmount;

    for (int i = 0; i < 1000; i++) {
        people = new ArrayList8<Person>() {{
            IntStream.range(0, randBetween(5, 20)).forEach(j -> add(randPerson(lifespan, true)));
            IntStream.range(0, randBetween(5, 20)).forEach(k -> add(randPerson(lifespan, false)));
        }};

        youngAmount = people.stream().filter(person -> person.data().age() < lifespan).count();
        oldAmount = people.stream().filter(person -> person.data().age() >= lifespan).count();

        assertEquals(youngAmount + oldAmount, people.size());

        lifespanRule.apply(new Context(people));

        assertEquals(youngAmount, people.size());
    }
}
/**
 * Algorithm 7.42: GenPermutation
 * <p>Generates a random permutation psy ∈ upper_psy_n following Knuth’s shuffle algorithm</p>
 *
 * @param upper_n the permutation size
 * @return a random permutation following Knuth's shuffle algorithm (permutation is 0 based, to mirror java indices)
 */
public List<Integer> genPermutation(int upper_n) {
    Integer[] upper_i = IntStream.range(0, upper_n).boxed().toArray(Integer[]::new);

    List<Integer> psy = new ArrayList<>();
    // indices are 0 based, as opposed to the 1 based indices in the algorithm
    for (int i = 0; i < upper_n; i++) {
        int k = randomGenerator.randomIntInRange(i, upper_n - 1);
        psy.add(upper_i[k]);
        upper_i[k] = upper_i[i];
    }
    return psy;
}
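// A minimal usage sketch (not from the original source; `items` is a hypothetical list):
// applying the 0-based permutation returned by genPermutation to reorder another list.
// The i-th element of the result is items.get(psy.get(i)).
private <T> List<T> applyPermutation(List<T> items, List<Integer> psy) {
    return psy.stream().map(items::get).collect(Collectors.toList());
}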
public static void main(String[] args) throws IOException {
    File mainDir = new File("mturk/annotation-task/data/final");

    File trainingData = new File(mainDir, "train.tsv");
    File devData = new File(mainDir, "dev.tsv");
    File testData = new File(mainDir, "test.tsv");

    // random
    IntStream.range(0, 3).parallel().forEach(value -> {
        try {
            runExperiment(new RandomClassifier((long) value), trainingData, devData, testData);
        } catch (IOException e) {
            e.printStackTrace();
        }
    });

    // LM
    runExperiment(new LMClassifier("apu", 8090), trainingData, devData, testData);
}
protected void verifyConfigReports(ConfigurationReport configuration, String nameSuffix, List<Report> merged) {
    // check the expected number of configuration reports - should be same as the number of generated sections
    assertThatReport(configuration)
            .as("Configuration report should contain an exact number of test config sub-reports")
            .hasNumberOfSubReports(EXPECTED_NUMBER_OF_SECTIONS);

    // for each configuration sub-report
    IntStream.range(0, EXPECTED_NUMBER_OF_SECTIONS).forEach(index -> {
        Report reportOnIndex = configuration.getSubReports().get(index);

        defaultCheckOfIfMergedOrContainsGeneratedSubReports(merged, index, reportOnIndex);

        assertThatReport(reportOnIndex)
                .as("The config report should have same name that was generated for the index <%s>", index)
                .hasName(getConfigReportName(index, nameSuffix));
    });
}
public List<Long> getTally() throws InvalidDecryptionProofException {
    TallyData tallyData = bulletinBoardService.getTallyData();
    List<DecryptionProof> decryptionProofs = tallyData.getDecryptionProofs();
    List<BigInteger> publicKeyShares = tallyData.getPublicKeyShares();
    List<Encryption> finalShuffle = tallyData.getFinalShuffle();
    List<List<BigInteger>> partialDecryptions = tallyData.getPartialDecryptions();

    Stopwatch decryptionProofCheckWatch = Stopwatch.createStarted();
    if (!tallyingAuthoritiesAlgorithm.checkDecryptionProofs(decryptionProofs, publicKeyShares, finalShuffle,
            partialDecryptions)) {
        throw new InvalidDecryptionProofException("An invalid decryption proof was found");
    }
    decryptionProofCheckWatch.stop();
    perfLog.info(String.format("Administration : checked decryption proofs in %dms",
            decryptionProofCheckWatch.elapsed(TimeUnit.MILLISECONDS)));

    List<BigInteger> decryptions = tallyingAuthoritiesAlgorithm.getDecryptions(finalShuffle, partialDecryptions);
    List<List<Boolean>> votes = tallyingAuthoritiesAlgorithm.getVotes(decryptions, totalCandidateCount);
    // Additional verifications on the votes validity may be performed here.

    return IntStream.range(0, totalCandidateCount)
            .mapToLong(i -> votes.stream().filter(vote -> vote.get(i)).count())
            .boxed().collect(Collectors.toList());
}
@Test(dataProvider = "cases") public void testBitsetStream(String name, IntStream data) { BitSet bs = new BitSet(); long setBits = data.distinct() .peek(i -> bs.set(i)) .count(); assertEquals(bs.cardinality(), setBits); assertEquals(bs.cardinality(), bs.stream().reduce(0, (s, i) -> s+1)); PrimitiveIterator.OfInt it = bs.stream().iterator(); for (int i = bs.nextSetBit(0); i >= 0; i = bs.nextSetBit(i+1)) { assertTrue(it.hasNext()); assertEquals(it.nextInt(), i); } assertFalse(it.hasNext()); }
/**
 * Returns an {@link Optional} of a {@link List} of {@link Player} objects, containing all
 * players on the server.
 *
 * @return an {@link Optional} containing a {@link List} of {@link Player Players} or an empty
 *         {@link Optional} in case the query failed.
 */
public Optional<List<Player>> getBasicPlayerInfo() {
    List<Player> players = null;

    if (send(PACKET_GET_BASIC_PLAYERINFO)) {
        final byte[] reply = receiveBytes();
        if (Objects.nonNull(reply)) {
            final ByteBuffer buffer = wrapReply(reply);
            final int numberOfPlayers = buffer.getShort();
            players = new ArrayList<>();

            for (int i = 0; i < numberOfPlayers; i++) {
                final byte len = buffer.get();
                final byte[] playerName = new byte[len];
                IntStream.range(0, len).forEach(j -> playerName[j] = buffer.get());
                players.add(new Player(new String(playerName), buffer.getInt()));
            }
        }
    }
    return Optional.ofNullable(players);
}
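// A brief usage sketch (`query` is a hypothetical instance of the enclosing class): the
// Optional return value forces callers to handle a failed query explicitly instead of null.
// query.getBasicPlayerInfo()
//      .map(List::size)
//      .ifPresent(count -> System.out.println(count + " players online"));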
/**
 * Returns the header string tokens for the frame
 *
 * @param frame the frame to create header tokens for
 * @return the header tokens
 */
private String[] getHeaderTokens(DataFrame<?,?> frame) {
    final int colCount = frame.colCount() + 1;
    final String[] header = new String[colCount];
    final Class<?> columnKeyType = frame.cols().keyType();
    final Printer<Object> printer = formats.getPrinterOrFail(columnKeyType, Object.class);
    IntStream.range(0, colCount).forEach(colIndex -> {
        if (colIndex == 0) {
            header[colIndex] = "Index";
        } else {
            final Object colKey = frame.cols().key(colIndex - 1);
            final String colText = printer.apply(colKey);
            header[colIndex] = colText;
        }
    });
    return header;
}
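// An illustrative result (the frame is hypothetical): for a frame with column keys "price"
// and "volume", the returned tokens are {"Index", "price", "volume"}, i.e. a synthetic
// "Index" label followed by each printed column key.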
@Test
public void testClose() {
    AtomicInteger before = new AtomicInteger();
    AtomicInteger onClose = new AtomicInteger();
    Supplier<Stream<Integer>> s = () -> {
        before.set(0);
        onClose.set(0);
        return Stream.of(1, 2).peek(e -> before.getAndIncrement());
    };

    s.get().flatMap(i -> Stream.of(i, i).onClose(onClose::getAndIncrement)).count();
    assertEquals(before.get(), onClose.get());

    s.get().flatMapToInt(i -> IntStream.of(i, i).onClose(onClose::getAndIncrement)).count();
    assertEquals(before.get(), onClose.get());

    s.get().flatMapToLong(i -> LongStream.of(i, i).onClose(onClose::getAndIncrement)).count();
    assertEquals(before.get(), onClose.get());

    s.get().flatMapToDouble(i -> DoubleStream.of(i, i).onClose(onClose::getAndIncrement)).count();
    assertEquals(before.get(), onClose.get());
}
@Test(dataProvider = "provider") public void testCharAt(String str, char[] expected) { map.get(str) .forEach( (source, data) -> { IntStream .range(0, str.length()) .forEach( i -> assertEquals( str.charAt(i), expected[i], String.format( "testing String(%s).charAt(%d), source : %s, ", escapeNonASCIIs(data), i, source))); }); }
/**
 * Chunkify strings to a set length, but do not break words
 *
 * @param string The string to chunkify
 * @param length The max length of the chunks
 * @return A list containing the resulting chunks
 */
public static List<String> chunkify(String string, int length) {
    List<String> chunks = new ArrayList<>();
    if (StringUtils.isEmpty(string) || string.length() <= length) {
        chunks.add(string);
        return chunks;
    }

    Iterator<String> words = Iterators.forArray(string.split(" "));
    int chunkCount = (int) Math.ceil(((double) string.length()) / length);

    IntStream.rangeClosed(1, chunkCount).forEach(ci -> {
        StringBuilder newChunk = new StringBuilder();
        while (newChunk.length() < length) {
            if (!words.hasNext()) break;
            newChunk.append(" ").append(words.next());
        }
        if (StringUtils.isNotEmpty(newChunk)) chunks.add(newChunk.toString().trim());
    });
    return chunks;
}
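// A minimal usage sketch (the input values are illustrative only): splitting a sentence
// into chunks of at most 20 characters without breaking words.
// chunkify("the quick brown fox jumps over the lazy dog", 20)
// yields ["the quick brown fox", "jumps over the lazy", "dog"].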
/**
 * Generate a BIG xml file: temporary/bigfile.xml
 */
@Test
@Ignore
public void generateBigXmlFile() throws IOException {
    // make temporary/ dir if it doesn't exist
    Path temporaryPath = Paths.get(Constants.TMP_PATH);
    if (!Files.exists(temporaryPath)) {
        Files.createDirectory(temporaryPath);
    }

    // dump loads of xml in temporary/bigfile.xml
    BufferedWriter bufferedWriter = Files.newBufferedWriter(Paths.get(Constants.BIG_XML_FILE_PATH));
    PrintWriter printWriter = new PrintWriter(bufferedWriter);

    printWriter.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    printWriter.println("<rootelement>");
    IntStream
            .rangeClosed(1, Integer.parseInt(TestProperties.getProperty("com.guido.test.upload.bigfile")))
            .forEach(i -> printWriter.println(
                    String.format("<childelement id=\"%d\">HELLO%10d</childelement>", i, i)));
    printWriter.println("</rootelement>");

    printWriter.flush();
    printWriter.close();
}
@Test
public void checkPerfWithPoolThread() {
    Thread[] threads = new Thread[10];
    IntStream.range(0, threads.length).forEach(i ->
            threads[i] = new Thread(() -> {
                try {
                    DBpediaMock dBpediaMock = new DBpediaMock();
                    dBpediaMock.load(dataProducer -> {
                        // Do nothing
                    });
                } catch (IllegalArgumentException e) {
                    // Do nothing
                }
            }));

    Arrays.stream(threads).forEach(Thread::start);
    try {
        Thread.sleep(100);
        Arrays.stream(threads).forEach(Thread::interrupt);
    } catch (InterruptedException e) {
        // Do nothing
    }
}
/**
 * Returns a map of node IDs to byte offsets, sorted by byte offset.
 *
 * @param batchIndex the index of the batch to generate mappings for
 * @return the sorted map of node IDs to byte offsets
 */
private Map<Integer, Long> getByteOffsetsBatch(final int batchIndex) {
    // clamp each batch to BATCH_SIZE node IDs and to the last real node (numberOfNodesInGraph - 2);
    // the original bound `Math.max(batchIndex * (BATCH_SIZE + 1), numberOfNodesInGraph - 2)`
    // appears to misplace the parentheses and never limits the batch
    final List<Integer> batchNodeIds = IntStream
            .rangeClosed(batchIndex * BATCH_SIZE + 1,
                    Math.min((batchIndex + 1) * BATCH_SIZE, numberOfNodesInGraph - 2))
            .boxed().collect(Collectors.toList());

    return batchNodeIds.stream()
            .sorted(Comparator.comparingLong(graph::getByteOffset))
            .collect(Collectors.toMap(
                    nodeId -> nodeId,
                    graph::getByteOffset,
                    (oldValue, newValue) -> oldValue,
                    LinkedHashMap::new
            ));
}
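// A worked example of the batch bounds above (the numbers are hypothetical): with
// BATCH_SIZE = 100 and numberOfNodesInGraph = 250, batch 0 covers node IDs 1..100,
// batch 1 covers 101..200, and batch 2 is clamped to 201..248 (numberOfNodesInGraph - 2).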
/**
 * Extracts connectivity intent specific attributes from a JSON object
 * and adds them to a builder.
 *
 * @param json root JSON object
 * @param context codec context
 * @param builder builder to use for storing the attributes. Constraints,
 *                selector and treatment are modified by this call.
 */
public static void intentAttributes(ObjectNode json, CodecContext context,
                                    ConnectivityIntent.Builder builder) {
    JsonNode constraintsJson = json.get(CONSTRAINTS);
    if (constraintsJson != null) {
        JsonCodec<Constraint> constraintsCodec = context.codec(Constraint.class);
        ArrayList<Constraint> constraints = new ArrayList<>(constraintsJson.size());
        IntStream.range(0, constraintsJson.size())
                .forEach(i -> constraints.add(
                        constraintsCodec.decode(get(constraintsJson, i), context)));
        builder.constraints(constraints);
    }

    ObjectNode selectorJson = get(json, SELECTOR);
    if (selectorJson != null) {
        JsonCodec<TrafficSelector> selectorCodec = context.codec(TrafficSelector.class);
        TrafficSelector selector = selectorCodec.decode(selectorJson, context);
        builder.selector(selector);
    }

    ObjectNode treatmentJson = get(json, TREATMENT);
    if (treatmentJson != null) {
        JsonCodec<TrafficTreatment> treatmentCodec = context.codec(TrafficTreatment.class);
        TrafficTreatment treatment = treatmentCodec.decode(treatmentJson, context);
        builder.treatment(treatment);
    }
}
@Test
public void testMergingRawMetricFail() {
    GroupData data = make(new GroupOperation(GroupOperationType.COUNT, null, null));
    BulletRecord someRecord = RecordBox.get().add("foo", 1).getRecord();
    IntStream.range(0, 10).forEach(i -> data.consume(someRecord));

    // Not a serialized GroupData
    data.combine(String.valueOf(242).getBytes());

    // Unchanged count
    BulletRecord expected = RecordBox.get().add(GroupOperationType.COUNT.getName(), 10L).getRecord();
    Assert.assertEquals(data.getMetricsAsBulletRecord(), expected);
}
/**
 * Returns a stream of indices for which this {@code BitSet}
 * contains a bit in the set state. The indices are returned
 * in order, from lowest to highest. The size of the stream
 * is the number of bits in the set state, equal to the value
 * returned by the {@link #cardinality()} method.
 *
 * <p>The bit set must remain constant during the execution of the
 * terminal stream operation. Otherwise, the result of the terminal
 * stream operation is undefined.
 *
 * @return a stream of integers representing set indices
 * @since 1.8
 */
public IntStream stream() {
    class BitSetIterator implements PrimitiveIterator.OfInt {
        int next = nextSetBit(0);

        @Override
        public boolean hasNext() {
            return next != -1;
        }

        @Override
        public int nextInt() {
            if (next != -1) {
                int ret = next;
                next = nextSetBit(next + 1);
                return ret;
            } else {
                throw new NoSuchElementException();
            }
        }
    }

    return StreamSupport.intStream(
            () -> Spliterators.spliterator(
                    new BitSetIterator(), cardinality(),
                    Spliterator.ORDERED | Spliterator.DISTINCT | Spliterator.SORTED),
            Spliterator.SIZED | Spliterator.SUBSIZED |
                    Spliterator.ORDERED | Spliterator.DISTINCT | Spliterator.SORTED,
            false);
}
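// A short usage example of the standard BitSet.stream() API documented above: the stream
// yields the set indices from lowest to highest, so toArray() and sum() need no sorting.
public static void bitSetStreamExample() {
    java.util.BitSet bits = new java.util.BitSet();
    bits.set(1);
    bits.set(4);
    bits.set(6);
    int[] indices = bits.stream().toArray(); // [1, 4, 6]
    int sum = bits.stream().sum();           // 11
    System.out.println(java.util.Arrays.toString(indices) + " sum=" + sum);
}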
@ParameterizedRobolectricTestRunner.Parameters(name = "Data to save and read = {0}")
public static Iterable<Object[]> data() {
    return Arrays.asList(new Object[][]{
            {Collections.emptyList()},
            {Collections.singletonList("")},
            {Collections.singletonList("val1")},
            {Arrays.asList("val1", "val2", "val3")},
            {IntStream.range(0, 101).mapToObj(String::valueOf).collect(Collectors.toList())}
    });
}
/**
 * Creates a tile pane with the given number of columns and rows.
 *
 * @param numColumns the number of columns in the grid. Must be >= 1
 * @param numRows    the number of rows in the grid. Must be >= 1
 */
public TilePane(int numColumns, int numRows) {
    this.numColumns.addListener((obs, oldCount, newCount) -> {
        if (newCount > oldCount) {
            IntStream.range(oldCount, newCount)
                    .mapToObj(__ -> createColumnConstraint())
                    .forEach(getColumnConstraints()::add);
        } else {
            getColumnConstraints().remove(newCount, oldCount);
        }
    });
    this.numRows.addListener((obs, oldCount, newCount) -> {
        if (newCount > oldCount) {
            IntStream.range(oldCount, newCount)
                    .mapToObj(__ -> createRowConstraint())
                    .forEach(getRowConstraints()::add);
        } else {
            getRowConstraints().remove(newCount, oldCount);
        }
    });
    setNumColumns(numColumns);
    setNumRows(numRows);

    // Make sure the tile size is always at least the minimum allowable
    tileSize.addListener((__, oldSize, newSize) -> {
        if (newSize.doubleValue() < MIN_TILE_SIZE) {
            if (oldSize.doubleValue() < MIN_TILE_SIZE) {
                setTileSize(MIN_TILE_SIZE);
            } else {
                setTileSize(oldSize.doubleValue());
            }
        }
    });
}
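// A short usage sketch (values illustrative; assumes the setters used in the constructor
// above): growing the column count at runtime fires the listener, which appends one new
// column constraint per added column.
// TilePane pane = new TilePane(3, 2);
// pane.setNumColumns(4); // listener adds a constraint for the new fourth column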
public static long part1(String movements) {
    return Arrays.stream(Arrays.stream(movements.split(","))    // read the input
            .map(m -> hops.get(m))                              // map into movement rules
            .reduce(new long[] { 0, 0, 0 },                     // begins from grid's origin
                    (a, b) -> IntStream.range(0, 3)             // for i = 0 to 2
                            .mapToLong(i -> a[i] + b[i])        // update coord i according to its movement rule
                            .toArray()                          // wrap the 3 coords into the accumulation array
            )).max().getAsLong();                               // get the distance from the origin
}
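// An illustrative trace (assuming `hops` maps each hex direction to a cube-coordinate
// delta, e.g. "ne" -> {1, 0, -1}): for the input "ne,ne,ne" the reduction accumulates
// {3, 0, -3}, and the final max over the three coordinates yields a distance of 3.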
private void initUi() {
    interpolatorComboBox.setCellFactory(listView -> new InterpolatorSelectionListCell());
    interpolatorComboBox.setButtonCell(new InterpolatorSelectionListCell());

    root.getChildren().add(1, interpolatorPlotComponent.getRoot());
    interpolatorPlotComponent.getRoot().setMinHeight(Region.USE_PREF_SIZE);

    IntStream.range(0, NUMBER_OF_SLIDERS).forEach(i -> addSlider());

    FocusHelper.suppressFocusStyleOnRelease(interpolatorComboBox);
}
public String[] getEntryNames() {
    int[] attributeOffsets = new int[offsets.capacity()];
    offsets.get(attributeOffsets);
    return IntStream.of(attributeOffsets)
            .filter(o -> o != 0)
            .mapToObj(o -> ImageLocation.readFrom(this, o).getFullName())
            .sorted()
            .toArray(String[]::new);
}
@Test
public void resolveCategoryReferences_WithKeysAsUuidSetAndNotAllowed_ShouldNotResolveReferences() {
    final ProductSyncOptions productSyncOptions = ProductSyncOptionsBuilder.of(mock(SphereClient.class))
                                                                           .allowUuidKeys(false)
                                                                           .build();
    final int nCategories = 10;
    final List<Category> categories = IntStream.range(0, nCategories)
                                               .mapToObj(i -> UUID.randomUUID().toString())
                                               .map(key -> getMockCategory(key, key))
                                               .collect(Collectors.toList());
    final CategoryService mockCategoryService = mockCategoryService(new HashSet<>(categories), emptySet());
    final List<Reference<Category>> categoryReferences = categories.stream()
                                                                   .map(Category::toReference)
                                                                   .collect(Collectors.toList());
    final Map<String, String> categoryOrderHintValues = categories.stream()
        .collect(Collectors.toMap(Category::getKey, Resource::getId));
    final CategoryOrderHints categoryOrderHints = CategoryOrderHints.of(categoryOrderHintValues);

    final ProductDraftBuilder productBuilder = getBuilderWithRandomProductTypeUuid()
        .categories(categoryReferences).categoryOrderHints(categoryOrderHints);

    final ProductReferenceResolver productReferenceResolver =
        new ProductReferenceResolver(productSyncOptions, getMockProductTypeService(PRODUCT_TYPE_ID),
            mockCategoryService, getMockTypeService(),
            getMockChannelService(getMockSupplyChannel(CHANNEL_ID, CHANNEL_KEY)),
            getMockTaxCategoryService(TAX_CATEGORY_ID), getMockStateService(STATE_ID),
            getMockProductService(PRODUCT_ID));

    assertThat(productReferenceResolver.resolveCategoryReferences(productBuilder).toCompletableFuture())
        .hasFailed()
        .hasFailedWithThrowableThat()
        .isExactlyInstanceOf(ReferenceResolutionException.class)
        .hasMessage("Failed to resolve reference 'category' on ProductDraft"
            + " with key:'" + productBuilder.getKey() + "'. Reason: Found a UUID"
            + " in the id field. Expecting a key without a UUID value. If you want to"
            + " allow UUID values for reference keys, please use the "
            + "allowUuidKeys(true) option in the sync options.");
}
/**
 * Repeats the elements of this tuple a given number of times.
 *
 * @param times the number of repetitions; must be >= 0
 * @return the resulting tuple
 */
public final Tuple repeat(final int times) {
    if (times < 0) throw new IllegalArgumentException("times must >= 0");
    if (times == 0) return this;

    return TupleN.with(IntStream.range(0, times)
            .mapToObj(i -> this.valueList.toArray())
            .reduce((a, b) -> {
                Object[] temp = new Object[a.length + b.length];
                System.arraycopy(a, 0, temp, 0, a.length);
                System.arraycopy(b, 0, temp, a.length, b.length);
                return temp;
            }).get());
}
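// A hypothetical usage sketch (Tuple/TupleN are the library's own types; the values are
// illustrative): repeating a two-element tuple twice yields a four-element tuple.
// Tuple pair = TupleN.with("a", 1);
// Tuple repeated = pair.repeat(2); // ("a", 1, "a", 1)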
private static ImmutableList<Player> createPlayersInfo(GameStartApiResponse startData,
                                                       GameUpdateApiResponse firstUpdateData) {
    int numberOfPlayers = startData.getUsernames().length;
    Map<Integer, ScoreApiResponse> scoresMap = createScoresMap(firstUpdateData);

    List<Player> players = IntStream.range(0, numberOfPlayers).mapToObj(playerIndex -> {
        Optional<ScoreApiResponse> scoreForPlayer = Optional.ofNullable(scoresMap.get(playerIndex));
        return new Player(playerIndex,
                startData.getUsernames()[playerIndex],
                Optional.ofNullable(startData.getTeams()).map(teams -> teams[playerIndex]).orElse(playerIndex),
                scoreForPlayer.map(ScoreApiResponse::getTiles).orElse(null),
                scoreForPlayer.map(ScoreApiResponse::getTotal).orElse(null),
                scoreForPlayer.map(ScoreApiResponse::isDead).orElse(false));
    }).collect(Collectors.toList());

    return ImmutableList.copyOf(players);
}
/**
 * Returns list of rows in resulting table for a given header/data row. A single header/data row may produce
 * multiple rows in the resulting table if:
 * - Contents of a row exceed maxCharInLine for that row
 * - Contents of a row were already multiline
 */
private static List<String> dataRow(int[] colWidths, HorizontalAlign[] horizontalAligns, String[] contents,
                                    Character left, Character columnSeparator, Character right) {
    final List<List<String>> linesContents = IntStream.range(0, colWidths.length)
            .mapToObj(i -> {
                String text = i < contents.length && contents[i] != null ? contents[i] : "";
                String[] paragraphs = text.split(System.lineSeparator());
                return Arrays.stream(paragraphs)
                        .flatMap(paragraph -> splitTextIntoLinesOfMaxLength(paragraph, colWidths[i] - 2 * MIN_PADDING).stream())
                        .collect(Collectors.toList());
            })
            .collect(Collectors.toList());
    final int numLines = linesContents.stream()
            .mapToInt(List::size)
            .max().orElse(0);

    final StringBuilder row = new StringBuilder(getTableWidth(colWidths));
    final List<String> lines = new LinkedList<>();
    for (int line = 0; line < numLines; line++) {
        if (left != null) row.append((char) left);
        for (int col = 0; col < colWidths.length; col++) {
            String item = linesContents.get(col).size() <= line ? "" : linesContents.get(col).get(line);
            row.append(justify(item, horizontalAligns[col], colWidths[col], MIN_PADDING));
            if (columnSeparator != null && col != colWidths.length - 1) row.append((char) columnSeparator);
        }
        if (right != null) row.append((char) right);

        lines.add(row.toString());
        row.setLength(0);
    }
    return lines;
}
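// An illustrative example (widths and border characters hypothetical): with colWidths
// {6, 12}, '|' as the left/separator/right characters, and a second column whose content
// contains a line separator, that column is already multiline, so dataRow emits two
// bordered lines and pads the first column with blanks on the second line.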
@Override
public void deserialize(ByteBuf buf) {
    int dim = buf.readInt();
    int length = buf.readInt();
    Int2FloatOpenHashMap data = new Int2FloatOpenHashMap(length);
    // read `length` (index, value) pairs into the sparse map
    IntStream.range(0, length).forEach(i -> data.put(buf.readInt(), buf.readFloat()));
    this.dim = dim;
    this.hashMap = data;
}
@Override
public void deserialize(ByteBuf buf) {
    int dim = buf.readInt();
    int length = buf.readInt();
    int[] data = new int[length];
    IntStream.range(0, length).forEach(i -> data[i] = buf.readInt());
    this.dim = dim;
    this.values = data;
}
/**
 * Clear data from projections cache related to given training.
 *
 * @param featuresCnt Features count.
 * @param regs Regions count.
 * @param aff Affinity function.
 * @param uuid UUID of training.
 * @param ignite Ignite instance.
 */
public static void clear(int featuresCnt, int regs, IgniteBiFunction<Integer, Ignite, Object> aff,
                         UUID uuid, Ignite ignite) {
    Set<RegionKey> toRmv = IntStream.range(0, featuresCnt).boxed()
            .flatMap(fIdx -> IntStream.range(0, regs).boxed().map(reg -> new IgniteBiTuple<>(fIdx, reg)))
            .map(t -> key(t.get1(), t.get2(), aff.apply(t.get1(), ignite), uuid))
            .collect(Collectors.toSet());

    getOrCreate(ignite).removeAll(toRmv);
}
@Test
public void fromStream2() {
    Folyam<Integer> f = Folyam.fromStream(IntStream.range(1, 6).boxed(), false);

    f.test().assertResult(1, 2, 3, 4, 5);

    f.test().assertFailure(IllegalStateException.class);
}
@Test(dataProvider = "sizes") public void testIntAfterBuilding(int size) { IntStream.Builder sb = IntStream.builder(); IntStream.range(0, size).forEach(sb); sb.build(); checkISE(() -> sb.accept(1)); checkISE(() -> sb.add(1)); checkISE(() -> sb.build()); }
@Test
public void shouldSerializeConcurrentStreamAppendingWithAutoGeneratedSequence() throws Exception {
    // When appending a number of events concurrently
    IntStream.range(0, 10).parallel().forEach(i -> {
        try {
            save(textAppended(String.valueOf(i)), AUTO_GENERATE_SEQUENCE);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });

    // Then expect it to write them all successfully
    List<Event> events = eventStore.getEventsForStream(streamId, 0, 100);
    assertThat(events.size(), is(10));
}
public List<Path> extractAllKeysFor(Path path) {
    final List<Object> internal = (List<Object>) get(path);
    if (internal == null) {
        return Collections.emptyList();
    }
    return IntStream.range(0, internal.size())
            .mapToObj(i -> Path.concat(path, i))
            .collect(Collectors.toList());
}
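// An illustrative note (the data is hypothetical): if get(path) returns a three-element
// list, the result is [Path.concat(path, 0), Path.concat(path, 1), Path.concat(path, 2)],
// i.e. one indexed child key per element of the list stored at `path`.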
/**
 * Tests GET of all Load statistics objects.
 */
@Test
public void testLoadsGet() throws UnsupportedEncodingException {
    final WebTarget wt = target();
    final String response = wt.path("statistics/flows/link/").request().get(String.class);
    final JsonObject result = Json.parse(response).asObject();
    assertThat(result, notNullValue());

    assertThat(result.names(), hasSize(1));
    assertThat(result.names().get(0), is("loads"));

    final JsonArray jsonLoads = result.get("loads").asArray();
    assertThat(jsonLoads, notNullValue());
    assertThat(jsonLoads.size(), is(3));

    // Hash the loads by the "latest" field to allow easy lookup if the
    // order changes.
    HashMap<Integer, JsonObject> currentMap = new HashMap<>();
    IntStream.range(0, jsonLoads.size())
            .forEach(index -> currentMap.put(
                    jsonLoads.get(index).asObject().get("latest").asInt(),
                    jsonLoads.get(index).asObject()));

    JsonObject load1 = currentMap.get(2);
    checkValues(load1, 1, 2, true, "src1");

    JsonObject load2 = currentMap.get(22);
    checkValues(load2, 11, 22, true, "src2");

    JsonObject load3 = currentMap.get(222);
    checkValues(load3, 111, 222, true, "src3");
}
default Stream<ChunkPosition> chunkPositions() {
    final Cuboid bounds = getBounds();
    if (!bounds.isBlockFinite()) {
        throw new UnsupportedOperationException("Cannot enumerate chunks in unbounded region type " +
                getClass().getSimpleName());
    }
    final ChunkPosition min = ChunkPosition.ofBlock(bounds.minimumBlockInside()),
                        max = ChunkPosition.ofBlock(bounds.maximumBlockInside());
    return IntStream.rangeClosed(min.x(), max.x())
            .mapToObj(x -> IntStream.rangeClosed(min.z(), max.z())
                    .mapToObj(z -> new ChunkPosition(x, z)))
            .flatMap(Function.identity());
}
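// A minimal usage sketch (Region/ChunkPosition are the project's own types; `region` is a
// hypothetical instance): enumerating and counting the chunks covered by a finite region.
// long chunkCount = region.chunkPositions().count();
// region.chunkPositions().forEach(pos -> System.out.println(pos.x() + ", " + pos.z()));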
@Test
public void testCounting() {
    GroupAll groupAll = makeGroupAll(makeGroupOperation(GroupOperationType.COUNT, null, "count"));
    BulletRecord someRecord = RecordBox.get().add("foo", 1).getRecord();

    IntStream.range(0, 10).forEach(i -> groupAll.consume(someRecord));
    Assert.assertNotNull(groupAll.getSerializedAggregation());

    List<BulletRecord> aggregate = groupAll.getAggregation().getRecords();
    Assert.assertEquals(aggregate.size(), 1);

    BulletRecord actual = aggregate.get(0);
    // the COUNT metric is stored as a long, so the expected value must be 10L
    BulletRecord expected = RecordBox.get().add("count", 10L).getRecord();
    Assert.assertEquals(actual, expected);
}