public static Table<String, String, String> assignmentsToTable(
        SortedMap<String, SortedSet<SingleWorkerAssignment<Step2bGoldReasonAnnotator.SentenceLabel>>> assignments) {
    TreeBasedTable<String, String, String> result = TreeBasedTable.create();

    assignments.forEach((unitID, singleWorkerAssignments) -> {
        singleWorkerAssignments.forEach(sentenceLabelSingleWorkerAssignment -> {
            String workerID = sentenceLabelSingleWorkerAssignment.getWorkerID();
            String label = sentenceLabelSingleWorkerAssignment.getLabel().toString();

            // update the table
            result.put(unitID, workerID, label);
        });
    });

    return result;
}
static void printTable2(TreeBasedTable<Integer, Integer, CorrelationVectors> table) {
    System.out.printf("\t%s%n", StringUtils.join(table.columnKeySet(), "\t\t\t"));
    for (Map.Entry<Integer, Map<Integer, CorrelationVectors>> entry : table.rowMap().entrySet()) {
        System.out.printf("%s\t", entry.getKey());

        List<Double> allX = new ArrayList<>();
        List<Double> allY = new ArrayList<>();

        for (CorrelationVectors ds : entry.getValue().values()) {
            allX.addAll(ds.x);
            allY.addAll(ds.y);

            double[] correlation = computeCorrelation(allX, allY);
            System.out.printf("%.2f\t%.2f\t\t", correlation[0], correlation[1]);
        }
        System.out.println();
    }
}
public static void main(String[] args) {
    TreeBasedTable<Integer, Integer, Integer> table = TreeBasedTable.create();
    table.put(2, 0, 6);
    table.put(3, 2, 4);
    table.put(0, 0, 5);
    table.put(0, 3, 2);
    table.put(4, 1, 2);
    table.put(4, 4, 9);

    CSRSparseMatrix csr = new CSRSparseMatrix(table, 5);

    for (Table.Cell<Integer, Integer, Integer> cell : table.cellSet()) {
        if (csr.get(cell.getRowKey(), cell.getColumnKey()) == cell.getValue()) {
            System.out.println(String.format("%d->%d = %d",
                    cell.getRowKey(), cell.getColumnKey(), cell.getValue()));
        } else {
            System.out.println("ERROR");
        }
    }
}
public static void main(String[] args) {
    Table<String, String, String> table = TreeBasedTable.create();
    table.put("Row1", "Column1", "Data1");
    table.put("Row1", "Column2", "Data2");
    table.put("Row2", "Column1", "Data3");
    table.put("Row2", "Column2", "Data4");
    table.put("Row3", "Column1", "Data5");
    table.put("Row3", "Column2", "Data6");
    table.put("Row3", "Column3", "Data7");

    // Let's use a Guava Joiner to illustrate that
    Joiner.MapJoiner mapJoiner = Joiner.on(',').withKeyValueSeparator("=");
    table.rowKeySet().forEach(r -> {
        System.out.println(r + "->" + mapJoiner.join(table.row(r)));
    });
}
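A small companion sketch (not part of the original snippet) that reuses the table and mapJoiner above to print the complementary column view; Table.column and columnKeySet are standard Guava APIs, and the sample output line is only illustrative.

// Iterate the same table by column instead of by row; each column maps row keys to values,
// and missing cells (e.g. Row1/Column3) are simply absent from that column's map.
table.columnKeySet().forEach(c ->
        System.out.println(c + "->" + mapJoiner.join(table.column(c))));
// e.g. Column1->Row1=Data1,Row2=Data3,Row3=Data5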
public TargetSet(Collection<T> targets) {
    targetPositions = TreeBasedTable.create();
    for (T target : targets) {
        // group targets that share a position under the same (y, x) cell
        ArrayList<T> positionTargets = targetPositions.get(target.getY(), target.getX());
        if (positionTargets == null) {
            positionTargets = new ArrayList<>();
            targetPositions.put(target.getY(), target.getX(), positionTargets);
        }
        positionTargets.add(target);
    }
    targetList = new CircularArrayList<>(this.targetPositions.values().stream()
            .flatMap(t -> t.stream())
            .collect(Collectors.toList()));
    Collections.sort(targetList, new Comparator<T>() {
        @Override
        public int compare(T o1, T o2) {
            return o1.getPosition().squareDistance(new Point(0, 0))
                    - o2.getPosition().squareDistance(new Point(0, 0));
        }
    });
}
public static Table<String, String, Long> loadTable(InputStream stream) throws IOException {
    Table<String, String, Long> result = TreeBasedTable.create();

    LineIterator lineIterator = IOUtils.lineIterator(stream, "utf-8");
    while (lineIterator.hasNext()) {
        String line = lineIterator.next();

        System.out.println(line);

        String[] split = line.split("\t");

        String language = split[0];
        String license = split[1];
        Long documents = Long.valueOf(split[2]);
        Long tokens = Long.valueOf(split[3]);

        result.put(language, "docs " + license, documents);
        result.put(language, "tokens " + license, tokens);
    }

    return result;
}
@Override
public Options get() {
    final Colors color = this.context.side();
    final Board board = this.context.board();
    final Iterable<IPosition> lights = board.getLights(color);
    final Pieces remainingPieces = this.context.getPlayer().remainingPieces();
    final Table<IPosition, Polyomino, List<Set<IPosition>>> table = TreeBasedTable.create();
    for (int radius = MIN_RADIUS; radius <= MAX_RADIUS; ++radius) {
        final Map<IPosition, Set<IPosition>> potentialPositions =
                this.getPotentialPositionsByLight(board, color, lights, radius);
        final Set<Polyomino> polyominos = POLYOMINOS_BY_RADIUS.get(radius);
        for (final Polyomino polyomino : polyominos) {
            if (remainingPieces.contains(polyomino)) {
                final Iterable<PolyominoInstance> instances = polyomino.get();
                for (final Entry<IPosition, Set<IPosition>> entry : potentialPositions.entrySet()) {
                    final IPosition position = entry.getKey();
                    final List<Set<IPosition>> options = Lists.newArrayList();
                    for (final IPosition potentialPosition : entry.getValue())
                        options.addAll(this.getLegalPositions(color, board, position, potentialPosition, instances));
                    if (!options.isEmpty())
                        table.put(position, polyomino, options);
                }
            }
        }
    }
    return new Options(table);
}
private static Collection<SRLParse> getPropbankSection(final String section) throws IOException {
    final Table<String, Integer, TreebankParse> PTB = new PennTreebank().readCorpus(WSJ);
    final Table<String, Integer, SRLParse> srlParses = SRLParse.parseCorpus(PTB,
            Util.readFileLineByLine(new File(PROPBANK, "prop.txt")),
            USING_NOMBANK ? Util.readFileLineByLine(NOMBANK) : null);

    final Table<String, Integer, SRLParse> goldParses = TreeBasedTable.create();
    for (final Cell<String, Integer, TreebankParse> cell : PTB.cellSet()) {
        // Propbank files skip sentences with no SRL deps. Add a default
        // empty parse for all sentences.
        goldParses.put(cell.getRowKey(), cell.getColumnKey(), new SRLParse(cell.getValue().getWords()));
    }
    goldParses.putAll(srlParses);

    final Collection<SRLParse> result = new ArrayList<>();
    for (final Cell<String, Integer, SRLParse> entry : goldParses.cellSet()) {
        if (entry.getRowKey().startsWith("wsj_" + section)) {
            result.add(entry.getValue());
        }
    }
    return result;
}
void put(double dyn, long urg, double scl, T value) {
    synchronized (data) {
        checkArgument(!valuesSet.contains(value), "Value %s already exists.", value);
        if (!data.containsKey(dyn)) {
            data.put(dyn, TreeBasedTable.<Long, Double, SortedSet<T>>create());
        }
        if (!data.get(dyn).contains(urg, scl)) {
            data.get(dyn).put(urg, scl, new TreeSet<>(comparator));
        }
        checkArgument(!data.get(dyn).get(urg, scl).contains(value),
                "At (%s,%s,%s) value %s already exists.", dyn, urg, scl, value);
        data.get(dyn).get(urg, scl).add(value);
        valuesSet.add(value);
    }
}
private Set<SourceSpecificContext> getRequiredSourcesFromLib() {
    checkState(currentPhase == ModelProcessingPhase.SOURCE_PRE_LINKAGE,
            "Required library sources can be collected only in ModelProcessingPhase.SOURCE_PRE_LINKAGE phase,"
                    + " but current phase was %s", currentPhase);
    final TreeBasedTable<String, Optional<Revision>, SourceSpecificContext> libSourcesTable =
            TreeBasedTable.create(String::compareTo, Revision::compare);

    for (final SourceSpecificContext libSource : libSources) {
        final SourceIdentifier libSourceIdentifier = requireNonNull(libSource.getRootIdentifier());
        libSourcesTable.put(libSourceIdentifier.getName(), libSourceIdentifier.getRevision(), libSource);
    }

    final Set<SourceSpecificContext> requiredLibs = new HashSet<>();
    for (final SourceSpecificContext source : sources) {
        collectRequiredSourcesFromLib(libSourcesTable, requiredLibs, source);
        removeConflictingLibSources(source, requiredLibs);
    }
    return requiredLibs;
}
public static Table<Integer, Integer, DownloadURL> generateMapping() throws IOException, ParseException {
    Table<Integer, Integer, DownloadURL> hits = TreeBasedTable.create();
    for (DownloadURL url : DownloadURL.values()) {
        File index = url.getIndexLocal();
        System.out.println("reading " + index.toString());
        // Document doc =
        //         Jsoup.connect(index).userAgent("Mozilla").timeout(8000).get();
        Document doc = Jsoup.parse(index, null);
        Elements links = doc.select("ul > li > a[href]");
        for (Element link : links) {
            String hit = link.attr("href");
            if (hit.endsWith("hgt.zip")) {
                String name = hit.substring(hit.lastIndexOf('/'));
                CoordinateInt coord = TileDownload.parseCoordinate(name);
                hits.put(coord.lat, coord.lon, url);
            }
        }
    }
    return hits;
}
@SuppressWarnings("rawtypes") // TreeBasedTable.create() is defined as such
@Generates
private static <R extends Comparable, C extends Comparable, V> TreeBasedTable<R, C, V> generateTreeBasedTable(
        R row, C column, V value) {
    TreeBasedTable<R, C, V> table = TreeBasedTable.create();
    table.put(row, column, value);
    return table;
}
public static void main(String[] args) throws Exception {
    final File csvFile = new File(
            "mturk/annotation-task/21-pilot-stance-task.output.csv");
    final File argumentsFile = new File(
            "mturk/annotation-task/data/arguments-with-full-segmentation-rfd.xml.gz");

    TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();

    for (int crowdSize = 1; crowdSize <= 9; crowdSize++) {
        for (Double maceThreshold : Arrays.asList(0.85, 0.9, 0.95, 1.0)) {
            // twenty random repeats
            for (int i = 0; i < 20; i++) {
                Random random = new Random(i);

                File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
                File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");

                annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert1, maceThreshold,
                        new WorkerAssignmentFilterRandomized(18, 1, crowdSize, random));
                annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert2, maceThreshold,
                        new WorkerAssignmentFilterRandomized(18, 2, crowdSize, random));

                double kappa = computeKappa(crowdExpert1, crowdExpert2);

                if (!table.contains(crowdSize, maceThreshold)) {
                    table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
                }
                table.get(crowdSize, maceThreshold).addValue(kappa);

                FileUtils.forceDelete(crowdExpert1);
                FileUtils.forceDelete(crowdExpert2);
            }
        }
    }

    printTable(table);
}
public static void printTable(TreeBasedTable<Integer, Double, DescriptiveStatistics> table) {
    System.out.printf("\t%s%n", StringUtils.join(table.columnKeySet(), "\t\t"));
    for (Map.Entry<Integer, Map<Double, DescriptiveStatistics>> entry : table.rowMap().entrySet()) {
        System.out.printf("%s\t", entry.getKey());
        for (DescriptiveStatistics ds : entry.getValue().values()) {
            System.out.printf("%.2f\t%.2f\t", ds.getMean(), ds.getStandardDeviation());
        }
        System.out.println();
    }
}
static void printTable(TreeBasedTable<Integer, Integer, DescriptiveStatistics> table) {
    System.out.printf("\t%s%n", StringUtils.join(table.columnKeySet(), "\t\t\t"));
    for (Map.Entry<Integer, Map<Integer, DescriptiveStatistics>> entry : table.rowMap().entrySet()) {
        System.out.printf("%s\t", entry.getKey());
        for (DescriptiveStatistics ds : entry.getValue().values()) {
            System.out.printf("%.2f\t%.2f\t%d\t", ds.getMean(), ds.getStandardDeviation(), ds.getN());
        }
        System.out.println();
    }
}
public CSRSparseMatrix(TreeBasedTable<Integer, Integer, Integer> table, int totalRow) {
    int size = table.size();
    values = new int[size];
    columnIndices = new int[size];
    this.rowOffset = new int[totalRow + 1];
    //rowOffset[0]=0;

    int point = -1;
    int inSize = 0;
    for (Integer rowNum : table.rowKeySet()) {
        Map<Integer, Integer> row = table.row(rowNum);
        inSize += row.size();
        rowOffset[rowNum + 1] = inSize;
        for (Map.Entry<Integer, Integer> entry : row.entrySet()) {
            point++;
            columnIndices[point] = entry.getKey();
            values[point] = entry.getValue();
        }
    }

    // fill in offsets for empty rows so that rowOffset is non-decreasing
    int x = 0;
    for (int i = 0; i < this.rowOffset.length; i++) {
        int p = this.rowOffset[i];
        if (p > 0) {
            x = p;
        } else {
            this.rowOffset[i] = x;
        }
    }
}
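The get used by the round-trip check in the earlier main example is not shown in this listing; the following is a minimal sketch of a lookup that matches the layout built by the constructor above (the actual class may implement it differently, and the zero default for absent cells is an assumption).

// Entries of row r occupy values[rowOffset[r] .. rowOffset[r + 1] - 1],
// with their column indices stored at the same positions in columnIndices.
public int get(int row, int column) {
    for (int i = rowOffset[row]; i < rowOffset[row + 1]; i++) {
        if (columnIndices[i] == column) {
            return values[i];
        }
    }
    return 0; // assumed: absent cells of the sparse matrix read as zero
}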
@Override
public void loadFromRealData() throws Exception {
    MynlpResource source = environment.loadResource(coreDictNgramSetting);

    TreeBasedTable<Integer, Integer, Integer> table = TreeBasedTable.create();

    Pattern pattern = Pattern.compile("^(.+)@(.+)\\s+(\\d+)$");

    try (CharSourceLineReader reader = source.openLineReader()) {
        while (reader.hasNext()) {
            String line = reader.next();

            Matcher matcher = pattern.matcher(line);
            if (matcher.find()) {
                String wordA = matcher.group(1);
                String wordB = matcher.group(2);
                String num = matcher.group(3);

                int idA = coreDictionary.indexOf(wordA);
                if (idA >= 0) {
                    int idB = coreDictionary.indexOf(wordB);
                    if (idB >= 0) {
                        table.put(idA, idB, Ints.tryParse(num));
                    }
                }
            }
        }
    }

    CSRSparseMatrix matrix = new CSRSparseMatrix(table, coreDictionary.size());
    this.matrix = matrix;
}
protected static Gson makeSerializerGson(Type type, Boolean pretty) {
    GsonBuilder gsonBuilder = new GsonBuilder();
    if (pretty)
        gsonBuilder.setPrettyPrinting();
    switch (type) {
        case DEFAULT:
        default:
            gsonBuilder.setDateFormat("yyyy-MM-dd");
            gsonBuilder.registerTypeAdapter(HashBasedTable.class, new GuavaTableSerializer());
            gsonBuilder.registerTypeAdapter(TreeBasedTable.class, new GuavaTableSerializer());
            break;
    }
    return gsonBuilder.create();
}
protected static Gson makeDeserializerGson(Type type) {
    GsonBuilder gsonBuilder = new GsonBuilder();
    switch (type) {
        case DEFAULT:
        default:
            gsonBuilder.setDateFormat("yyyy-MM-dd");
            gsonBuilder.registerTypeAdapter(HashBasedTable.class, new GuavaTableDeserializer());
            gsonBuilder.registerTypeAdapter(TreeBasedTable.class, new GuavaTableDeserializer());
            break;
    }
    return gsonBuilder.create();
}
@Override
public Table deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
        throws JsonParseException {
    Table<Date, String, Double> table = TreeBasedTable.create();
    try {
        JsonObject parent = (JsonObject) json;
        JsonArray colArray = parent.get("columns").getAsJsonArray();
        JsonArray rowArray = parent.get("data").getAsJsonArray();
        for (int i = 0; i < rowArray.size(); i++) {
            JsonArray row = rowArray.get(i).getAsJsonArray();
            Date rowDate = null;
            for (int j = 0; j < row.size(); j++) {
                if (j == 0) {
                    //table.put("","",row.get(j));
                    rowDate = DateUtils.DRONZE_DATE.parse(row.get(j).toString().replace("\"", ""));
                } else if (row.get(j) != null && !row.get(j).toString().equals("null")) {
                    table.put(rowDate,
                            colArray.get(j).toString().replace("\"", ""),
                            Double.parseDouble(row.get(j).toString()));
                }
            }
        }
    } catch (ParseException e) {
        throw new JsonParseException(e);
    }
    return table;
}
@SuppressWarnings("rawtypes")
public static <R extends Comparable, C extends Comparable, V> GTable<R, C, V, RowSortedTable<R, C, V>> createSorted(
        QTriplet<R, C, V> expr) {
    return new GTable<R, C, V, RowSortedTable<R, C, V>>(expr) {
        private static final long serialVersionUID = 1L;

        @Override
        protected RowSortedTable<R, C, V> createTable() {
            return TreeBasedTable.<R, C, V>create();
        }
    };
}
public static <R, C, V> GTable<R, C, V, RowSortedTable<R, C, V>> createSorted(QTriplet<R, C, V> expr,
        final Comparator<? super R> rowComparator, final Comparator<? super C> columnComparator) {
    return new GTable<R, C, V, RowSortedTable<R, C, V>>(expr) {
        private static final long serialVersionUID = 1L;

        @Override
        protected RowSortedTable<R, C, V> createTable() {
            return TreeBasedTable.<R, C, V>create(rowComparator, columnComparator);
        }
    };
}
public Options decode(final JsonObject data) {
    final Table<IPosition, Polyomino, List<Set<IPosition>>> table = TreeBasedTable.create();
    final Set<Entry<String, JsonElement>> entrySet = data.entrySet();
    for (final Entry<String, JsonElement> entry : entrySet) {
        final int id = Integer.parseInt(entry.getKey());
        final IPosition light = Position(id / 20, id % 20); // TODO !!!
        final JsonObject polyominos = entry.getValue().getAsJsonObject();
        for (final Entry<String, JsonElement> positionsByPolyominos : polyominos.entrySet()) {
            final String ordinal = positionsByPolyominos.getKey();
            final Polyomino polyomino = Polyomino.values()[Integer.parseInt(ordinal)];
            final JsonArray positions = positionsByPolyominos.getValue().getAsJsonArray();
            final List<Set<IPosition>> list = Lists.newArrayList();
            for (final JsonElement jsonElement : positions) {
                final Set<IPosition> set = Sets.newHashSet();
                final JsonArray asJsonArray = jsonElement.getAsJsonArray();
                for (final JsonElement jsonElement2 : asJsonArray) {
                    final int asInt = jsonElement2.getAsInt();
                    final IPosition p = Position(asInt / 20, asInt % 20); // TODO !!!
                    set.add(p);
                }
                list.add(set);
            }
            table.put(light, polyomino, list);
        }
    }
    return new Options(table);
}
private Table<String, String, String> getTenantPartitions(MiruTenantId tenantId) throws Exception {
    final Table<String, String, String> tenantPartitions = TreeBasedTable.create(); // tree for order
    MiruPartitionId latestPartitionId = miruWALClient.getLargestPartitionId(tenantId);
    if (latestPartitionId != null) {
        for (MiruPartitionId latest = latestPartitionId; latest != null; latest = latest.prev()) {
            tenantPartitions.put(tenantId.toString(), latest.toString(), "");
        }
    }
    return tenantPartitions;
}
private void collectRequiredSourcesFromLib(
        final TreeBasedTable<String, Optional<Revision>, SourceSpecificContext> libSourcesTable,
        final Set<SourceSpecificContext> requiredLibs, final SourceSpecificContext source) {
    for (final SourceIdentifier requiredSource : source.getRequiredSources()) {
        final SourceSpecificContext libSource = getRequiredLibSource(requiredSource, libSourcesTable);
        if (libSource != null && requiredLibs.add(libSource)) {
            collectRequiredSourcesFromLib(libSourcesTable, requiredLibs, libSource);
        }
    }
}
/**
 * Generates the Cocke-Younger-Kasami recognition matrix.
 */
private void generate() {
    int size = (this.getWord().length() == 0) ? 1 : this.getWord().length();
    this.matrix = TreeBasedTable.create();
    for (int r = 1; r <= size; r++) {
        for (int c = 1; c <= size; c++) {
            this.matrix.put(r, c, new Alphabet());
        }
    }
}
/**
 * Adds a value to the HistoricalData structure for that security / field / date combination.
 */
@Override
synchronized void add(LocalDate date, String security, String field, Object value) {
    Table<LocalDate, String, TypedObject> securityTable = data.get(security);
    if (securityTable == null) {
        securityTable = TreeBasedTable.create(); //to have the dates in order
        data.put(security, securityTable);
    }
    securityTable.put(date, field, TypedObject.of(value));
}
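The trailing comment above is the reason a TreeBasedTable is used here: its row keys (the dates) stay sorted. A hypothetical read-back method (the name history and its signature are not from the original class) illustrating that chronological iteration:

// Walk one security/field series in date order; TreeBasedTable keeps rowKeySet() sorted.
synchronized SortedMap<LocalDate, TypedObject> history(String security, String field) {
    SortedMap<LocalDate, TypedObject> result = new TreeMap<>();
    Table<LocalDate, String, TypedObject> securityTable = data.get(security);
    if (securityTable != null) {
        for (LocalDate date : securityTable.rowKeySet()) {
            TypedObject value = securityTable.get(date, field);
            if (value != null) {
                result.put(date, value);
            }
        }
    }
    return result;
}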
@Before
public void setUp() throws Exception {
    rowMeta = new RowMeta();
    data = TreeBasedTable.create();
    variables = new Variables();
    aggregates = Maps.newHashMap( default_aggregates );
}
public void testTreeBasedTable() { assertFreshInstance(new TypeToken<TreeBasedTable<String, ?, ?>>() {}); }
public static void main(String[] args) throws Exception {
    final File csvFile = new File(
            "mturk/annotation-task/95-validation-task-pilot-task.output.csv");
    final File argumentsFile = new File(
            "mturk/annotation-task/data/92-original-warrant-batch-0001-5000-2447-good-reason-claim-pairs.xml.gz");

    TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();

    final int requiredAssignmentsSize = 18;

    IntStream.range(7, 8).parallel().forEach(crowdSize -> {
        Arrays.asList(0.95).parallelStream().forEach(maceThreshold -> {
            // random repeats (a single run here)
            for (int i = 0; i < 1; i++) {
                Random random = new Random(i);
                try {
                    File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
                    File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");

                    SortedMap<String, String> goldEstimationResult1 = Step8bTaskValidationGoldAnnotator
                            .annotateWithGoldLabels(Collections.singletonList(csvFile), argumentsFile,
                                    crowdExpert1, maceThreshold,
                                    new WorkerAssignmentFilterRandomized(requiredAssignmentsSize, 1,
                                            crowdSize, random));
                    SortedMap<String, String> goldEstimationResult2 = Step8bTaskValidationGoldAnnotator
                            .annotateWithGoldLabels(Collections.singletonList(csvFile), argumentsFile,
                                    crowdExpert2, maceThreshold,
                                    new WorkerAssignmentFilterRandomized(requiredAssignmentsSize, 2,
                                            crowdSize, random));

                    double score = computeScore(goldEstimationResult1, goldEstimationResult2);

                    synchronized (table) {
                        if (!table.contains(crowdSize, maceThreshold)) {
                            table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
                        }
                        table.get(crowdSize, maceThreshold).addValue(score);
                    }

                    FileUtils.forceDelete(crowdExpert1);
                    FileUtils.forceDelete(crowdExpert2);

                    synchronized (table) {
                        System.out.println("===================================");
                        printTable(table);
                        System.out.println("===================================");
                    }
                }
                catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }

            printTable(table);
        });
    });
}
public static void main(String[] args) throws Exception {
    final File csvFile = new File(
            "mturk/annotation-task/80-aw-validation-pilot-task.output.csv");
    final File argumentsFile = new File(
            "mturk/annotation-task/data/71-alternative-warrants-batch-0001-5000-001-600aw-batch-2390reason-claim-pairs-with-distracting-reasons.xml.gz");

    TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();

    final int requiredAssignmentsSize = 14;

    IntStream.range(1, 8).parallel().forEach(crowdSize -> {
        Arrays.asList(0.75, 0.80, 0.85, 0.9, 0.95, 1.0).parallelStream().forEach(maceThreshold -> {
            // twenty random repeats
            for (int i = 0; i < 20; i++) {
                Random random = new Random(i);
                try {
                    File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
                    File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");

                    SortedMap<String, String> goldEstimationResult1 = Step6bAlternativeWarrantValidationHITGoldAnnotator
                            .annotateWithGoldLabels(Collections.singletonList(csvFile),
                                    Arrays.asList(argumentsFile), crowdExpert1, null, maceThreshold,
                                    new WorkerAssignmentFilterRandomized(requiredAssignmentsSize, 1,
                                            crowdSize, random));
                    SortedMap<String, String> goldEstimationResult2 = Step6bAlternativeWarrantValidationHITGoldAnnotator
                            .annotateWithGoldLabels(Collections.singletonList(csvFile),
                                    Arrays.asList(argumentsFile), crowdExpert2, null, maceThreshold,
                                    new WorkerAssignmentFilterRandomized(requiredAssignmentsSize, 2,
                                            crowdSize, random));

                    double kappa = computeKappa(goldEstimationResult1, goldEstimationResult2);

                    synchronized (table) {
                        if (!table.contains(crowdSize, maceThreshold)) {
                            table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
                        }
                        table.get(crowdSize, maceThreshold).addValue(kappa);
                    }

                    FileUtils.forceDelete(crowdExpert1);
                    FileUtils.forceDelete(crowdExpert2);

                    synchronized (table) {
                        System.out.println("===================================");
                        printTable(table);
                        System.out.println("===================================");
                    }
                }
                catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }

            // System.out.println("Kappas:");
            // for (Map.Entry<Integer, Double> entry : kappas.entrySet()) {
            //     System.out.printf("%d\t%.2f%n", entry.getKey(), entry.getValue());
            // }

            printTable(table);
        });
    });
}
public static void main(String[] args) throws Exception {
    final File csvFile = new File(
            "mturk/annotation-task/60-pilot-reason-disambiguation-task.output.csv");

    TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();

    for (int crowdSize = 5; crowdSize <= 5; crowdSize++) {
        for (Double maceThreshold : Arrays.asList(0.85, 0.9, 0.95, 1.0)) {
            // twenty random repeats
            for (int i = 0; i < 20; i++) {
                Random random = new Random(i);

                SortedMap<String, String> gold1 = Step4bReasonDisambiguationGoldAnnotator
                        .annotateWithGoldLabels(csvFile, null, null, maceThreshold,
                                // new WorkerAssignmentsFilterSubsetByTime(0, crowdSize, true));
                                new WorkerAssignmentFilterRandomized(18, 1, crowdSize, random));
                SortedMap<String, String> gold2 = Step4bReasonDisambiguationGoldAnnotator
                        .annotateWithGoldLabels(csvFile, null, null, maceThreshold,
                                // new WorkerAssignmentsFilterSubsetByTime(crowdSize, crowdSize * 2,
                                //         false));
                                new WorkerAssignmentFilterRandomized(18, 2, crowdSize, random));

                gold1 = filterOutNullValueEntries(gold1);
                gold2 = filterOutNullValueEntries(gold2);

                double kappa = computeKappa(gold1, gold2);

                if (!table.contains(crowdSize, maceThreshold)) {
                    table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
                }
                table.get(crowdSize, maceThreshold).addValue(kappa);
            }
        }
    }

    printTable(table);
}
@Test
public void testTimeseriesClassifierBasic() {
    try {
        Table<Date, String, Double> trainingTable = GsonFactory.fromJson(
                IOUtils.toString(getClass().getResourceAsStream("/data/01/train/trainTable01.json"), "UTF-8"),
                TreeBasedTable.class, GsonFactory.Type.DEFAULT); //make the training model

        SequenceNetworkModel trainingModel = GsonFactory.fromJson(
                IOUtils.toString(getClass().getResourceAsStream("/data/01/train/trainModel01.json"), "UTF-8"),
                SequenceNetworkModel.class, GsonFactory.Type.DEFAULT); //get the training model

        Table<Date, String, Double> testingTable = GsonFactory.fromJson(
                IOUtils.toString(getClass().getResourceAsStream("/data/01/test/testTable01.json"), "UTF-8"),
                TreeBasedTable.class, GsonFactory.Type.DEFAULT);

        //get the config from the classpath
        Config conf = ConfigFactory.load();

        ClassifierNetwork network = new TimeseriesClassifierNetwork.TimeseriesClassifierNetworkBuilder()
                .setNetworkClasses(trainingModel.getNetworkClasses())
                .setTrainClassifications(trainingModel.getNetworkClassifications())
                .setTrainTable(trainingTable)
                .setTestTable(testingTable)
                .setConfig(conf, "TimeseriesClassifierNetwork")
                .build();

        Map<String, Object> model = new HashedMap();
        model.put("startDate", DateUtils.min(testingTable.rowKeySet()));
        model.put("endDate", DateUtils.max(testingTable.rowKeySet()));

        Table<Integer, String, Object> result = network.evaluate();

        log.info(MustacheUtils.merge("start:{{startDate}}, end:{{endDate}}", model));
        result.rowKeySet().forEach(rowId -> {
            log.info(MustacheUtils.merge("{{seriesNumber}},{{seriesName}},{{classificationName}}", result.row(rowId)));
        });
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        Assert.fail(e.getMessage());
    }
}
public ReflexSparseMatrixValue(int dimension) {
    table = TreeBasedTable.create();
    rowOrder = new ArrayList<ReflexValue>();
    colOrder = new ArrayList<ReflexValue>();
}
/**
 * Outputs the set of options available to be set for the passed in {@link PipelineOptions}
 * interface. The output is in a human readable format. The format is:
 * <pre>
 * OptionGroup:
 *     ... option group description ...
 *
 *  --option1={@code <type>} or list of valid enum choices
 *     Default: value (if available, see {@link Default})
 *     ... option description ... (if available, see {@link Description})
 *     Required groups (if available, see {@link Required})
 *  --option2={@code <type>} or list of valid enum choices
 *     Default: value (if available, see {@link Default})
 *     ... option description ... (if available, see {@link Description})
 *     Required groups (if available, see {@link Required})
 * </pre>
 * This method will attempt to format its output to be compatible with a terminal window.
 */
public static void printHelp(PrintStream out, Class<? extends PipelineOptions> iface) {
    checkNotNull(out);
    checkNotNull(iface);
    validateWellFormed(iface, REGISTERED_OPTIONS);

    Set<PipelineOptionSpec> properties = PipelineOptionsReflector.getOptionSpecs(iface);

    RowSortedTable<Class<?>, String, Method> ifacePropGetterTable = TreeBasedTable.create(
            ClassNameComparator.INSTANCE, Ordering.natural());
    for (PipelineOptionSpec prop : properties) {
        ifacePropGetterTable.put(prop.getDefiningInterface(), prop.getName(), prop.getGetterMethod());
    }

    for (Map.Entry<Class<?>, Map<String, Method>> ifaceToPropertyMap : ifacePropGetterTable.rowMap().entrySet()) {
        Class<?> currentIface = ifaceToPropertyMap.getKey();
        Map<String, Method> propertyNamesToGetters = ifaceToPropertyMap.getValue();

        SortedSetMultimap<String, String> requiredGroupNameToProperties =
                getRequiredGroupNamesToProperties(propertyNamesToGetters);

        out.format("%s:%n", currentIface.getName());
        prettyPrintDescription(out, currentIface.getAnnotation(Description.class));

        out.println();

        List<String> lists = Lists.newArrayList(propertyNamesToGetters.keySet());
        Collections.sort(lists, String.CASE_INSENSITIVE_ORDER);
        for (String propertyName : lists) {
            Method method = propertyNamesToGetters.get(propertyName);
            String printableType = method.getReturnType().getSimpleName();
            if (method.getReturnType().isEnum()) {
                printableType = Joiner.on(" | ").join(method.getReturnType().getEnumConstants());
            }
            out.format(" --%s=<%s>%n", propertyName, printableType);
            Optional<String> defaultValue = getDefaultValueFromAnnotation(method);
            if (defaultValue.isPresent()) {
                out.format(" Default: %s%n", defaultValue.get());
            }
            prettyPrintDescription(out, method.getAnnotation(Description.class));
            prettyPrintRequiredGroups(out, method.getAnnotation(Validation.Required.class),
                    requiredGroupNameToProperties);
        }
        out.println();
    }
}