private boolean identicalFileIsAlreadyGenerated(CharSequence sourceCode) {
  try {
    String existingContent = new CharSource() {
      final String packagePath =
          !key.packageName.isEmpty()
              ? (key.packageName.replace('.', '/') + '/')
              : "";
      final String filename = key.relativeName + ".java";

      @Override
      public Reader openStream() throws IOException {
        return getFiler()
            .getResource(StandardLocation.SOURCE_OUTPUT, "", packagePath + filename)
            .openReader(true);
      }
    }.read();

    if (existingContent.contentEquals(sourceCode)) {
      // We are ok, for some reason the same file is already generated,
      // happens in Eclipse for example.
      return true;
    }
  } catch (Exception ignoredAttemptToGetExistingFile) {
    // we have some other problem, not an existing file
  }
  return false;
}
public static void main(String[] argv) throws IOException {
  if (argv.length != 1) {
    usage();
  }
  final Parameters params = Parameters.loadSerifStyle(new File(argv[0]));
  final File docIdToFileMapFile = params.getExistingFile("docIdToFileMap");
  final File filterFile = params.getCreatableFile("quoteFilter");
  final Map<Symbol, CharSource> docIdToFileMap = FileUtils.loadSymbolToFileCharSourceMap(
      Files.asCharSource(docIdToFileMapFile, Charsets.UTF_8));

  log.info("Building quote filter from {} documents in {}", docIdToFileMap.size(),
      docIdToFileMapFile);
  final QuoteFilter quoteFilter = QuoteFilter.createFromOriginalText(docIdToFileMap);

  log.info("Writing quote filter to {}", filterFile);
  quoteFilter.saveTo(Files.asByteSink(filterFile));
}
@Override
public CorpusEventLinking loadCorpusEventFrames(final CharSource source) throws IOException {
  int lineNo = 1;
  try (final BufferedReader in = source.openBufferedStream()) {
    final ImmutableSet.Builder<CorpusEventFrame> ret = ImmutableSet.builder();
    String line;
    while ((line = in.readLine()) != null) {
      if (!line.isEmpty() && !line.startsWith("#")) {
        // skip blank or comment lines
        ret.add(parseLine(line));
      }
      ++lineNo; // track the line number so the error message below is accurate
    }
    return CorpusEventLinking.of(ret.build());
  } catch (Exception e) {
    throw new IOException("Error on line " + lineNo + " of " + source, e);
  }
}
@Test
public void testQuotedRegionComputation() throws IOException {
  final Map<String, ImmutableRangeSet<Integer>> testCases = ImmutableMap.of(
      "Foo <quote>bar <quote>baz</quote> <quote>meep</quote></quote> blah <quote>another</quote>",
      ImmutableRangeSet.<Integer>builder().add(Range.closed(4, 60)).add(Range.closed(67, 88))
          .build(),
      "<quote>lalala</quote>",
      ImmutableRangeSet.of(Range.closed(0, 20)),
      "No quotes!",
      ImmutableRangeSet.<Integer>of());

  for (final Map.Entry<String, ImmutableRangeSet<Integer>> entry : testCases.entrySet()) {
    final Symbol docid = Symbol.from("dummy");
    final QuoteFilter reference =
        QuoteFilter.createFromBannedRegions(ImmutableMap.of(docid, entry.getValue()));
    final QuoteFilter computed =
        QuoteFilter.createFromOriginalText(ImmutableMap.of(docid,
            CharSource.wrap(entry.getKey())));
    assertEquals(reference, computed);
  }
}
/**
 * Loads the word frequency list, skipping words that are not in the given whitelist.
 *
 * @param whitelist the whitelist of words to keep, or {@code null} to keep all words
 * @return a Dictomaton dictionary with the word probabilities
 * @throws IOException if the word frequency resource cannot be read
 * @throws DictionaryBuilderException if the dictionary cannot be built
 */
public static DictomatonDictionary loadDictionary(Set<String> whitelist)
    throws IOException, DictionaryBuilderException {
  CharSource source = getResource(WORD_FREQUENCIES_FILE);
  DictomatonDictionary dictomatonDictionary;
  BufferedReader reader = source.openBufferedStream();
  try {
    if (whitelist == null) {
      dictomatonDictionary = DictomatonDictionary.read(reader);
    } else {
      dictomatonDictionary = DictomatonDictionary.read(reader, whitelist);
    }
  } finally {
    reader.close();
  }
  return dictomatonDictionary;
}
public void testGet_io() throws IOException {
  assertEquals(-1, ArbitraryInstances.get(InputStream.class).read());
  assertEquals(-1, ArbitraryInstances.get(ByteArrayInputStream.class).read());
  assertEquals(-1, ArbitraryInstances.get(Readable.class).read(CharBuffer.allocate(1)));
  assertEquals(-1, ArbitraryInstances.get(Reader.class).read());
  assertEquals(-1, ArbitraryInstances.get(StringReader.class).read());
  assertEquals(0, ArbitraryInstances.get(Buffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(CharBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(ByteBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(ShortBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(IntBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(LongBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(FloatBuffer.class).capacity());
  assertEquals(0, ArbitraryInstances.get(DoubleBuffer.class).capacity());
  ArbitraryInstances.get(PrintStream.class).println("test");
  ArbitraryInstances.get(PrintWriter.class).println("test");
  assertNotNull(ArbitraryInstances.get(File.class));
  assertFreshInstanceReturned(
      ByteArrayOutputStream.class, OutputStream.class,
      Writer.class, StringWriter.class,
      PrintStream.class, PrintWriter.class);
  assertEquals(ByteSource.empty(), ArbitraryInstances.get(ByteSource.class));
  assertEquals(CharSource.empty(), ArbitraryInstances.get(CharSource.class));
  assertNotNull(ArbitraryInstances.get(ByteSink.class));
  assertNotNull(ArbitraryInstances.get(CharSink.class));
}
@Override
public String call() throws Exception {
  try {
    final URL resource = Resources.getResource("configurable.txt");
    final File f = new File(resource.toURI());
    if( !f.exists() ) {
      return NO_CONTENT;
    }
    if( lastMod == 0 || lastMod < f.lastModified() ) {
      final CharSource charSource = Resources.asCharSource(resource, Charset.forName("utf-8"));
      final StringWriter sw = new StringWriter();
      charSource.copyTo(sw);
      lastContent = sw.toString();
      lastMod = f.lastModified();
    }
    return lastContent;
  } catch( Exception e ) {
    return NO_CONTENT;
  }
}
@SuppressWarnings("unchecked") public ModAccessTransformer() throws Exception { super(ModAccessTransformer.class); //We are in the new ClassLoader here, so we need to get the static field from the other ClassLoader. ClassLoader classLoader = this.getClass().getClassLoader().getClass().getClassLoader(); //Bit odd but it gets the class loader that loaded our current class loader yay java! Class<?> otherClazz = Class.forName(this.getClass().getName(), true, classLoader); Field otherField = otherClazz.getDeclaredField("embedded"); otherField.setAccessible(true); embedded = (Map<String, String>)otherField.get(null); for (Map.Entry<String, String> e : embedded.entrySet()) { int old_count = getModifiers().size(); processATFile(CharSource.wrap(e.getValue())); int added = getModifiers().size() - old_count; if (added > 0) { FMLRelaunchLog.fine("Loaded %d rules from AccessTransformer mod jar file %s\n", added, e.getKey()); } } }
/**
 * @param backingFile a file that need not exist, but if it does, contains
 *     the content of the renaming map as formatted by
 *     {@link OutputRenamingMapFormat#JSON}.
 */
public StableCssSubstitutionMapProvider(File backingFile) throws IOException {
  CharSource renameMapJson = Files.asCharSource(backingFile, Charsets.UTF_8);
  RecordingSubstitutionMap.Builder substitutionMapBuilder = new RecordingSubstitutionMap.Builder()
      .withSubstitutionMap(new MinimalSubstitutionMap());
  ImmutableMap<String, String> mappings = ImmutableMap.of();
  try {
    try (Reader reader = renameMapJson.openBufferedStream()) {
      mappings = OutputRenamingMapFormat.JSON.readRenamingMap(reader);
    }
  } catch (@SuppressWarnings("unused") FileNotFoundException ex) {
    // Ok. Start with an empty map.
  }
  substitutionMapBuilder.withMappings(mappings);
  this.substitutionMap = substitutionMapBuilder.build();
  this.backingFile = backingFile;
  this.originalMappings = mappings;
}
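The constructor above tolerates a missing backing file by falling back to an empty map. A minimal self-contained sketch of that read-if-present pattern, using only Guava and the JDK (the file name is illustrative, not taken from the original project):

import com.google.common.base.Charsets;
import com.google.common.io.CharSource;
import com.google.common.io.Files;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;

public class ReadIfPresentExample {
  public static void main(String[] args) throws IOException {
    File backingFile = new File("rename-map.json"); // illustrative path; may not exist
    CharSource source = Files.asCharSource(backingFile, Charsets.UTF_8);
    String content;
    try {
      content = source.read();
    } catch (FileNotFoundException ex) {
      content = ""; // missing file: start from an empty state, as the constructor above does
    }
    System.out.println("loaded " + content.length() + " chars");
  }
}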
/** Reads the CSS rename map */
@Provides
@Singleton
public SoyCssRenamingMap provideCssRenamingMap() throws IOException {
  ImmutableMap.Builder<String, String> cssMapBuilder = ImmutableMap.builder();
  URL crUrl = getClass().getResource(CSS_RENAMING_MAP_RESOURCE_PATH);
  if (crUrl != null) {
    CharSource cssRenamingMapJson = Resources.asCharSource(crUrl, Charsets.UTF_8);
    JsonElement json;
    try (Reader jsonIn = cssRenamingMapJson.openStream()) {
      json = new JsonParser().parse(jsonIn);
    }
    for (Map.Entry<String, JsonElement> e : json.getAsJsonObject().entrySet()) {
      cssMapBuilder.put(e.getKey(), e.getValue().getAsString());
    }
  }
  return new SoyCssRenamingMapImpl(cssMapBuilder.build());
}
public static void injectAccessTransformer(File file, String atName, LaunchClassLoader loader) throws IOException {
  if (!AlchemyEngine.isRuntimeDeobfuscationEnabled() || file == null) {
    return;
  }
  String at = null;
  try (JarFile jar = new JarFile(file)) {
    ZipEntry entry = jar.getEntry("META-INF/" + atName);
    if (entry != null) {
      at = Tool.read(jar.getInputStream(entry));
    }
  }
  if (at != null) {
    List<IClassTransformer> transformers = $(loader, "transformers");
    for (IClassTransformer t : transformers) {
      if (t instanceof AccessTransformer) {
        AccessTransformer transformer = (AccessTransformer) t;
        $(transformer, "processATFile", CharSource.wrap(at));
        break;
      }
    }
  }
}
public void processLines( final Object stream, final Consumer<String> callback )
    throws Exception {
  final CharSource source = toCharSource( stream );
  source.readLines( new LineProcessor<Object>() {
    @Override
    public boolean processLine( final String line ) throws IOException {
      callback.accept( line );
      return true;
    }

    @Override
    public Object getResult() {
      return null;
    }
  } );
}
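For a self-contained illustration of the same LineProcessor pattern against a plain Guava CharSource (the input text and class name are made up for the example):

import com.google.common.io.CharSource;
import com.google.common.io.LineProcessor;

import java.io.IOException;

public class LineProcessorExample {
  public static void main(String[] args) throws IOException {
    CharSource source = CharSource.wrap("alpha\nbeta\ngamma\n");
    // Count non-empty lines while streaming; readLines drives processLine once per line.
    int count = source.readLines(new LineProcessor<Integer>() {
      private int nonEmpty = 0;

      @Override
      public boolean processLine(String line) {
        if (!line.isEmpty()) {
          nonEmpty++;
        }
        return true; // keep reading
      }

      @Override
      public Integer getResult() {
        return nonEmpty;
      }
    });
    System.out.println(count); // prints 3
  }
}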
public static Properties scanProperties(String... patterns) {
  Properties p = new Properties();
  Arrays.asList(patterns).forEach(pattern -> {
    Resources.scan(pattern).forEach(url -> {
      CharSource src = Resources.asCharSource(url);
      try (Reader r = src.openBufferedStream()) {
        p.load(r);
        log.info("load property source: <{}>", url.toExternalForm());
      } catch (IOException e) {
        log.error("fail to load property source: <{}> [{}]", url.toExternalForm(), e.getMessage());
      }
    });
  });
  return encryptedProperties(p);
}
public static List<Rewrite> fromCharSource(CharSource source) throws IOException {
  return source.readLines(new LineProcessor<List<Rewrite>>() {
    private List<Rewrite> refactorings = new ArrayList<>();
    private final Splitter SPLITTER = Splitter.on("->").trimResults().omitEmptyStrings();

    @Override
    public List<Rewrite> getResult() {
      return Collections.unmodifiableList(refactorings);
    }

    @Override
    public boolean processLine(String line) {
      List<String> parts = SPLITTER.splitToList(line);
      refactorings.add(Rewrite.of(parts.get(0), parts.get(1), Rewrite.Visit.Expressions));
      return true;
    }
  });
}
static Tree parseTree(CharSource input) throws IOException {
  try (Reader reader = input.openStream()) {
    ThriftLexer lexer = new ThriftLexer(new ANTLRReaderStream(reader));
    ThriftParser parser = new ThriftParser(new CommonTokenStream(lexer));
    try {
      Tree tree = (Tree) parser.document().getTree();
      if (parser.getNumberOfSyntaxErrors() > 0) {
        throw new IllegalArgumentException("syntax error");
      }
      return tree;
    } catch (RecognitionException e) {
      throw new IllegalArgumentException(e);
    }
  }
}
/**
 * Converts a {@code ByteSource} to a {@code CharSource}.
 * <p>
 * This ensures that any Unicode byte order mark is handled correctly.
 * The encoding defaults to UTF-8 if no BOM is found.
 *
 * @param byteSource the byte source
 * @return the char source, which uses the BOM to determine the encoding
 */
public static CharSource toCharSource(ByteSource byteSource) {
  return new CharSource() {
    @Override
    public ByteSource asByteSource(Charset charset) {
      return byteSource;
    }

    @Override
    public Reader openStream() throws IOException {
      return toReader(byteSource.openStream());
    }

    @Override
    public String toString() {
      return "UnicodeBom.toCharSource(" + byteSource.toString() + ")";
    }
  };
}
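A minimal usage sketch of the adapter above, assuming it lives in the enclosing UnicodeBom utility class named in its toString(); the file path is illustrative:

import com.google.common.io.ByteSource;
import com.google.common.io.CharSource;
import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;

public class UnicodeBomExample {
  public static void main(String[] args) throws IOException {
    // The file may begin with a Unicode BOM; the adapter inspects it to choose the charset,
    // falling back to UTF-8 when no BOM is present.
    ByteSource bytes = Files.asByteSource(new File("data/prices.csv")); // illustrative path
    CharSource chars = UnicodeBom.toCharSource(bytes);
    String content = chars.read();
    System.out.println(content.length());
  }
}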
public static <NodeT extends ConstituentNode<NodeT, ?>> HeadFinder<NodeT> createEnglishPTBFromResources()
    throws IOException {
  final boolean headInitial = true;
  final CharSource resource = Resources
      .asCharSource(EnglishAndChineseHeadRules.class.getResource("en_heads.collins.txt"),
          Charsets.UTF_8);
  final HeadRule<NodeT> englishNPHandling = EnglishNPHeadRules();
  final ImmutableMap<Symbol, HeadRule<NodeT>> headRules =
      headRulesFromResources(headInitial, resource);
  final ImmutableMap.Builder<Symbol, HeadRule<NodeT>> ruleB = ImmutableMap.builder();
  ruleB.putAll(headRules);
  // english NP rules get an NP key...
  ruleB.put(NP, englishNPHandling);
  return MapHeadFinder.create(ruleB.build());
}
static <NodeT extends ConstituentNode<NodeT, ?>> HeadFinder<NodeT> createFromResources()
    throws IOException {
  final boolean headInitial = true;
  final CharSource resource = Resources
      .asCharSource(SpanishHeadRules.class.getResource("es_heads.opennlp.txt"), Charsets.UTF_8);
  final ImmutableMap<Symbol, HeadRule<NodeT>> headRules = headRulesFromResources(resource);
  final ImmutableMap.Builder<Symbol, HeadRule<NodeT>> rules = ImmutableMap.builder();
  rules.putAll(headRules);
  for (final Symbol tag : new Symbol[]{GRUP_NOM, SN}) {
    final ImmutableList<HeadRule<NodeT>> ruleList =
        ImmutableList.<HeadRule<NodeT>>of(
            new SpanishABVNPRule<NodeT>(headInitial),
            new SpanishSNNOMRule<NodeT>(headInitial),
            new SpanishAdjRule<NodeT>(headInitial),
            new SpanishMoreADJRule<NodeT>(headInitial),
            new SpanishFallbackRule<NodeT>(headInitial));
    rules.put(tag, CompositeHeadRule.create(ruleList));
  }
  return MapHeadFinder.create(rules.build());
}
/**
 * Parses one or more CSV format trade files with an error-creating type filter.
 * <p>
 * A list of types is specified to filter the trades.
 * Trades that do not match the type will be included in the failure list.
 * <p>
 * CSV files sometimes contain a Unicode Byte Order Mark.
 * Callers are responsible for handling this, such as by using {@link UnicodeBom}.
 *
 * @param charSources the CSV character sources
 * @param tradeTypes the trade types to return
 * @return the loaded trades, all errors are captured in the result
 */
public ValueWithFailures<List<Trade>> parse(
    Collection<CharSource> charSources,
    List<Class<? extends Trade>> tradeTypes) {

  ValueWithFailures<List<Trade>> parsed = parse(charSources, Trade.class);
  List<Trade> valid = new ArrayList<>();
  List<FailureItem> failures = new ArrayList<>(parsed.getFailures());
  for (Trade trade : parsed.getValue()) {
    if (tradeTypes.contains(trade.getClass())) {
      valid.add(trade);
    } else {
      failures.add(FailureItem.of(
          FailureReason.PARSING,
          "Trade type not allowed {}, only these types are supported: {}",
          trade.getClass().getName(),
          tradeTypes.stream().map(t -> t.getSimpleName()).collect(joining(", "))));
    }
  }
  return ValueWithFailures.of(valid, failures);
}
public void test_equalsHashCodeToString() {
  CsvFile a1 = CsvFile.of(CharSource.wrap(CSV1), true);
  CsvFile a2 = CsvFile.of(CharSource.wrap(CSV1), true);
  CsvFile b = CsvFile.of(CharSource.wrap(CSV2), true);
  CsvFile c = CsvFile.of(CharSource.wrap(CSV3), false);
  // file
  assertEquals(a1.equals(a1), true);
  assertEquals(a1.equals(a2), true);
  assertEquals(a1.equals(b), false);
  assertEquals(a1.equals(c), false);
  assertEquals(a1.equals(null), false);
  assertEquals(a1.equals(""), false);
  assertEquals(a1.hashCode(), a2.hashCode());
  assertNotNull(a1.toString());
  // row
  assertEquals(a1.row(0).equals(a1.row(0)), true);
  assertEquals(a1.row(0).equals(a2.row(0)), true);
  assertEquals(a1.row(0).equals(b.row(0)), false);
  assertEquals(c.row(0).equals(c.row(1)), false);
  assertEquals(a1.row(0).equals(""), false);
  assertEquals(a1.row(0).equals(null), false);
  assertEquals(a1.row(0).hashCode(), a2.row(0).hashCode());
  assertNotNull(a1.row(0).toString());
}
/**
 * Parses one or more CSV format curve files for all available dates.
 * <p>
 * A predicate is specified that is used to filter the dates that are returned.
 * This could match a single date, a set of dates or all dates.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param datePredicate the predicate used to select the dates
 * @param groupsCharSource the curve groups CSV character source
 * @param settingsCharSource the curve settings CSV character source
 * @param curveValueCharSources the CSV character sources for curves
 * @return the loaded curves, mapped by date and identifier
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableListMultimap<LocalDate, CurveGroup> parse(
    Predicate<LocalDate> datePredicate,
    CharSource groupsCharSource,
    CharSource settingsCharSource,
    Collection<CharSource> curveValueCharSources) {

  List<CurveGroupDefinition> curveGroups =
      CurveGroupDefinitionCsvLoader.parseCurveGroupDefinitions(groupsCharSource);
  Map<LocalDate, Map<CurveName, Curve>> curves =
      parseCurves(datePredicate, settingsCharSource, curveValueCharSources);
  ImmutableListMultimap.Builder<LocalDate, CurveGroup> builder = ImmutableListMultimap.builder();

  for (CurveGroupDefinition groupDefinition : curveGroups) {
    for (Map.Entry<LocalDate, Map<CurveName, Curve>> entry : curves.entrySet()) {
      CurveGroup curveGroup = CurveGroup.ofCurves(groupDefinition, entry.getValue().values());
      builder.put(entry.getKey(), curveGroup);
    }
  }
  return builder.build();
}
public LocalDate get() {
  LocalDate res = null;
  try {
    CharSource charSource = CharSource.wrap(text);
    String line = charSource.readFirstLine();
    line = Strings.nullToEmpty(line);
    Matcher matcher = REGEX.matcher(line);
    if (matcher.find()) {
      String data = matcher.group(0);
      res = new LocalDateCsvConverter("dd/MM/yyyy").convert(data);
    }
  } catch (IOException e) {
    // no date could be read from the text; fall through and return null
  }
  return res;
}
public void test_of_comment_blank_no_header() {
  try (CsvIterator csvFile = CsvIterator.of(CharSource.wrap(CSV2), false)) {
    assertEquals(csvFile.headers().size(), 0);
    assertEquals(csvFile.hasNext(), true);
    CsvRow row0 = csvFile.next();
    assertEquals(row0.lineNumber(), 1);
    assertEquals(row0.fieldCount(), 2);
    assertEquals(row0.field(0), "h1");
    assertEquals(row0.field(1), "h2");
    CsvRow row1 = csvFile.next();
    assertEquals(row1.lineNumber(), 5);
    assertEquals(row1.fieldCount(), 2);
    assertEquals(row1.field(0), "r21");
    assertEquals(row1.field(1), "r22");
    assertEquals(csvFile.hasNext(), false);
  }
}
public List<InputRecord> read(CharSource source) throws IOException {
  return source.readLines(new LineProcessor<List<InputRecord>>() {
    private final List<InputRecord> recs = Lists.newArrayList();

    @Override
    public boolean processLine(String line) throws IOException {
      if (isBlank(line)) {
        return true;
      }
      if (isComment(line)) {
        return true;
      }
      InputRecord maybe = reader.parse(line);
      if (maybe != null) {
        recs.add(maybe);
      }
      return true;
    }

    @Override
    public List<InputRecord> getResult() {
      return recs;
    }
  });
}
/**
 * Parses one or more CSV format FX rate files.
 * <p>
 * A predicate is specified that is used to filter the dates that are returned.
 * This could match a single date, a set of dates or all dates.
 * <p>
 * If the files contain a duplicate entry an exception will be thrown.
 *
 * @param datePredicate the predicate used to select the dates
 * @param charSources the CSV character sources
 * @return the loaded FX rates, mapped by {@link LocalDate} and {@linkplain FxRateId rate ID}
 * @throws IllegalArgumentException if the files contain a duplicate entry
 */
public static ImmutableMap<LocalDate, ImmutableMap<FxRateId, FxRate>> parse(
    Predicate<LocalDate> datePredicate,
    Collection<CharSource> charSources) {

  // builder ensures keys can only be seen once
  Map<LocalDate, ImmutableMap.Builder<FxRateId, FxRate>> mutableMap = new HashMap<>();
  for (CharSource charSource : charSources) {
    parseSingle(datePredicate, charSource, mutableMap);
  }
  ImmutableMap.Builder<LocalDate, ImmutableMap<FxRateId, FxRate>> builder = ImmutableMap.builder();
  for (Entry<LocalDate, Builder<FxRateId, FxRate>> entry : mutableMap.entrySet()) {
    builder.put(entry.getKey(), entry.getValue().build());
  }
  return builder.build();
}
private static List<NistInstance> readInstances(CharSource input) throws IOException {
  return input.readLines().stream().map(line -> {
    List<String> tokens = PARSER.splitToList(line);
    List<Integer> values = tokens.stream().map(Integer::parseInt).collect(Collectors.toList());
    List<Integer> instanceInput = values.subList(0, values.size() - 1);
    int instanceOutput = values.get(values.size() - 1);

    ImmutableTable.Builder<Integer, Integer, Integer> instance = ImmutableTable.builder();
    for (int i = 0; i < instanceInput.size(); i++) {
      int value = instanceInput.get(i);
      int row = i / COLUMNS;
      int column = i % COLUMNS;
      instance.put(row, column, value);
    }
    return NistInstance.create(
        instanceInput,
        instance
            .orderRowsBy(Comparator.<Integer>naturalOrder())
            .orderColumnsBy(Comparator.<Integer>naturalOrder())
            .build(),
        instanceOutput);
  }).collect(Collectors.toList());
}
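For reference, a minimal self-contained sketch of the flat-list-to-ordered-table step used above, assuming a 3-column grid (the class name and values are made up for the example):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableTable;

import java.util.Comparator;
import java.util.List;

public class GridTableExample {
  private static final int COLUMNS = 3; // assumed grid width

  public static void main(String[] args) {
    List<Integer> flat = ImmutableList.of(1, 2, 3, 4, 5, 6); // two rows of three values
    ImmutableTable.Builder<Integer, Integer, Integer> builder = ImmutableTable.builder();
    for (int i = 0; i < flat.size(); i++) {
      builder.put(i / COLUMNS, i % COLUMNS, flat.get(i)); // row, column, value
    }
    ImmutableTable<Integer, Integer, Integer> grid = builder
        .orderRowsBy(Comparator.<Integer>naturalOrder())
        .orderColumnsBy(Comparator.<Integer>naturalOrder())
        .build();
    System.out.println(grid.row(1)); // prints {0=4, 1=5, 2=6}
  }
}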
public void test_nextBatch2() {
  try (CsvIterator csvFile = CsvIterator.of(CharSource.wrap(CSV1), true)) {
    ImmutableList<String> headers = csvFile.headers();
    assertEquals(headers.size(), 2);
    assertEquals(headers.get(0), "h1");
    assertEquals(headers.get(1), "h2");

    List<CsvRow> a = csvFile.nextBatch(3);
    assertEquals(a.size(), 2);
    CsvRow row0 = a.get(0);
    assertEquals(row0.headers(), headers);
    assertEquals(row0.fieldCount(), 2);
    assertEquals(row0.field(0), "r11");
    assertEquals(row0.field(1), "r12");
    CsvRow row1 = a.get(1);
    assertEquals(row1.headers(), headers);
    assertEquals(row1.fieldCount(), 2);
    assertEquals(row1.field(0), "r21");
    assertEquals(row1.field(1), "r22");

    List<CsvRow> d = csvFile.nextBatch(2);
    assertEquals(d.size(), 0);
    assertEquals(csvFile.hasNext(), false);
    assertEquals(csvFile.hasNext(), false);
  }
}
@Test
public void testWhitelistEntriesAreSubstrings() {
  assertNoViolation(
      "requirement: {\n"
          + " banned_function {\n"
          + " function: 'quoteKeysIfJs'\n"
          + " }\n"
          + " error_message: 'foo'"
          + " whitelist: 'c/foo/bar/baz.soy'\n"
          + "}",
      new StableSoyFileSupplier(
          CharSource.wrap(
              "{namespace ns}\n"
                  + "{template .foo}\n"
                  + "{quoteKeysIfJs(['xxx': 'bar', 'yyy': 'baz'])}\n"
                  + "{/template}"),
          SoyFileKind.SRC,
          "a/b/c/foo/bar/baz.soy"));
}
private static Map<LocalDate, Map<CurveName, Curve>> parseCurves(
    Predicate<LocalDate> datePredicate,
    CharSource settingsResource,
    Collection<CharSource> curvesResources) {

  // load curve settings
  Map<CurveName, LoadedCurveSettings> settingsMap = parseCurveSettings(settingsResource);

  // load curves, ensuring each curve is only seen once within a date
  Map<LocalDate, Map<CurveName, Curve>> resultMap = new TreeMap<>();
  for (CharSource curvesResource : curvesResources) {
    Multimap<LocalDate, Curve> fileCurvesByDate = parseSingle(datePredicate, curvesResource, settingsMap);
    // ensure curve names are unique, with a good error message
    for (LocalDate date : fileCurvesByDate.keySet()) {
      Collection<Curve> fileCurves = fileCurvesByDate.get(date);
      Map<CurveName, Curve> resultCurves = resultMap.computeIfAbsent(date, d -> new HashMap<>());
      for (Curve fileCurve : fileCurves) {
        if (resultCurves.put(fileCurve.getName(), fileCurve) != null) {
          throw new IllegalArgumentException(
              "Rates curve loader found multiple curves with the same name: " + fileCurve.getName());
        }
      }
    }
  }
  return resultMap;
}
public void test_of_simple_no_header() {
  CsvFile csvFile = CsvFile.of(CharSource.wrap(CSV1), false);
  assertEquals(csvFile.headers().size(), 0);
  assertEquals(csvFile.rowCount(), 4);
  assertEquals(csvFile.row(0).lineNumber(), 1);
  assertEquals(csvFile.row(1).lineNumber(), 2);
  assertEquals(csvFile.row(2).lineNumber(), 3);
  assertEquals(csvFile.row(3).lineNumber(), 4);

  assertEquals(csvFile.row(0).headers().size(), 0);
  assertEquals(csvFile.row(0).fieldCount(), 2);
  assertEquals(csvFile.row(0).field(0), "h1");
  assertEquals(csvFile.row(0).field(1), "h2");
  assertEquals(csvFile.row(1).headers().size(), 0);
  assertEquals(csvFile.row(1).fieldCount(), 2);
  assertEquals(csvFile.row(1).field(0), "r11");
  assertEquals(csvFile.row(1).field(1), "r12");
  assertEquals(csvFile.row(2).headers().size(), 0);
  assertEquals(csvFile.row(2).fieldCount(), 2);
  assertEquals(csvFile.row(2).field(0), "r21");
  assertEquals(csvFile.row(2).field(1), "r22");

  assertEquals(csvFile.row(0).subRow(0).fieldCount(), 2);
  assertEquals(csvFile.row(0).subRow(1).fieldCount(), 1);
  assertEquals(csvFile.row(0).subRow(2).fieldCount(), 0);
}
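The tests above build a CsvFile from a wrapped CharSource. A minimal usage sketch of that pattern, assuming the CsvFile and CsvRow types from OpenGamma Strata's strata-collect module and a made-up CSV string (only methods exercised in the tests above are used):

import com.google.common.io.CharSource;
import com.opengamma.strata.collect.io.CsvFile;
import com.opengamma.strata.collect.io.CsvRow;

public class CsvFileExample {
  public static void main(String[] args) {
    // Two data rows under a header row; the content is illustrative.
    CharSource source = CharSource.wrap("h1,h2\nr11,r12\nr21,r22\n");
    CsvFile file = CsvFile.of(source, true); // true: treat the first line as headers
    for (int i = 0; i < file.rowCount(); i++) {
      CsvRow row = file.row(i);
      System.out.println(row.field(0) + " / " + row.field(1));
    }
  }
}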
public AG501PosFileHeader(File file) throws IOException {
  CharSource source = Files.asCharSource(file, Charsets.UTF_8);
  BufferedReader stream = source.openBufferedStream();
  while (lines.size() < NUM_LINES) {
    String line = stream.readLine();
    lines.add(line);
  }
  // set number of channels
  matcher = LINE_3.matcher(lines.get(2));
  matcher.find();
  numChannels = Integer.parseInt(matcher.group(1));
  // set sampling frequency
  matcher = LINE_4.matcher(lines.get(3));
  matcher.find();
  samplingFrequency = Integer.parseInt(matcher.group(1));
}
private static ImmutableMap<CurveGroupName, CurveGroupDefinition> parse0(
    CharSource groupsCharSource,
    CharSource settingsCharSource,
    Map<CurveName, SeasonalityDefinition> seasonality,
    Collection<CharSource> curveNodeCharSources) {

  // load curve groups and settings
  List<CurveGroupDefinition> curveGroups =
      CurveGroupDefinitionCsvLoader.parseCurveGroupDefinitions(groupsCharSource);
  Map<CurveName, LoadedCurveSettings> settingsMap =
      RatesCurvesCsvLoader.parseCurveSettings(settingsCharSource);

  // load curve definitions
  List<CurveDefinition> curveDefinitions = curveNodeCharSources.stream()
      .flatMap(res -> parseSingle(res, settingsMap).stream())
      .collect(toImmutableList());

  // add the curve definitions to the curve group definitions
  return curveGroups.stream()
      .map(groupDefinition -> groupDefinition
          .withCurveDefinitions(curveDefinitions)
          .withSeasonalityDefinitions(seasonality))
      .collect(toImmutableMap(groupDefinition -> groupDefinition.getName()));
}
public static String normalize(String output) {
  StringBuilder result = new StringBuilder();
  List<String> lines;
  try {
    lines = CharSource.wrap(output).readLines();
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  int i = 0;
  while (i < lines.size()) {
    String line = lines.get(i);
    if (line.contains(DaemonStartupMessage.STARTING_DAEMON_MESSAGE)) {
      // Remove the "daemon starting" message
      i++;
    } else if (line.contains(DaemonStateCoordinator.DAEMON_WILL_STOP_MESSAGE)) {
      // Remove the "Daemon will be shut down" message
      i++;
    } else if (i == lines.size() - 1 && line.matches("Total time: [\\d\\.]+ secs")) {
      result.append("Total time: 1 secs");
      result.append('\n');
      i++;
    } else {
      result.append(line);
      result.append('\n');
      i++;
    }
  }
  return result.toString();
}