@Override
public TinkerGraph deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
    final Configuration conf = new BaseConfiguration();
    conf.setProperty("gremlin.tinkergraph.defaultVertexPropertyCardinality", "list");
    final TinkerGraph graph = TinkerGraph.open(conf);
    while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
        if (jsonParser.getCurrentName().equals("vertices")) {
            while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                if (jsonParser.currentToken() == JsonToken.START_OBJECT) {
                    final DetachedVertex v = (DetachedVertex) deserializationContext.readValue(jsonParser, Vertex.class);
                    v.attach(Attachable.Method.getOrCreate(graph));
                }
            }
        } else if (jsonParser.getCurrentName().equals("edges")) {
            while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                if (jsonParser.currentToken() == JsonToken.START_OBJECT) {
                    final DetachedEdge e = (DetachedEdge) deserializationContext.readValue(jsonParser, Edge.class);
                    e.attach(Attachable.Method.getOrCreate(graph));
                }
            }
        }
    }
    return graph;
}
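A minimal sketch of how a deserializer like the one above might be wired into a plain Jackson ObjectMapper. The class name TinkerGraphJacksonDeserializer and the json variable are illustrative assumptions, not taken from the source.

// Hedged sketch: registering the TinkerGraph deserializer with Jackson.
// "TinkerGraphJacksonDeserializer" is a hypothetical name for the class
// that contains the deserialize() method above.
final SimpleModule module = new SimpleModule();
module.addDeserializer(TinkerGraph.class, new TinkerGraphJacksonDeserializer());
final ObjectMapper mapper = new ObjectMapper();
mapper.registerModule(module);
final TinkerGraph graph = mapper.readValue(json, TinkerGraph.class); // json: a GraphSON document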
@Test
public void shouldPersistToGraphML() {
    final String graphLocation = TestHelper.makeTestDataDirectory(TinkerGraphTest.class) + "shouldPersistToGraphML.xml";
    final File f = new File(graphLocation);
    if (f.exists() && f.isFile()) f.delete();

    final Configuration conf = new BaseConfiguration();
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "graphml");
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);
    final TinkerGraph graph = TinkerGraph.open(conf);
    TinkerFactory.generateModern(graph);
    graph.close();

    final TinkerGraph reloadedGraph = TinkerGraph.open(conf);
    IoTest.assertModernGraph(reloadedGraph, true, true);
    reloadedGraph.close();
}
@Test
public void shouldPersistToGraphSON() {
    final String graphLocation = TestHelper.makeTestDataDirectory(TinkerGraphTest.class) + "shouldPersistToGraphSON.json";
    final File f = new File(graphLocation);
    if (f.exists() && f.isFile()) f.delete();

    final Configuration conf = new BaseConfiguration();
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "graphson");
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);
    final TinkerGraph graph = TinkerGraph.open(conf);
    TinkerFactory.generateModern(graph);
    graph.close();

    final TinkerGraph reloadedGraph = TinkerGraph.open(conf);
    IoTest.assertModernGraph(reloadedGraph, true, false);
    reloadedGraph.close();
}
@Test
public void shouldPersistToGryo() {
    final String graphLocation = TestHelper.makeTestDataDirectory(TinkerGraphTest.class) + "shouldPersistToGryo.kryo";
    final File f = new File(graphLocation);
    if (f.exists() && f.isFile()) f.delete();

    final Configuration conf = new BaseConfiguration();
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);
    final TinkerGraph graph = TinkerGraph.open(conf);
    TinkerFactory.generateModern(graph);
    graph.close();

    final TinkerGraph reloadedGraph = TinkerGraph.open(conf);
    IoTest.assertModernGraph(reloadedGraph, true, false);
    reloadedGraph.close();
}
@Test
public void shouldPersistToGryoAndHandleMultiProperties() {
    final String graphLocation = TestHelper.makeTestDataDirectory(TinkerGraphTest.class) + "shouldPersistToGryoMulti.kryo";
    final File f = new File(graphLocation);
    if (f.exists() && f.isFile()) f.delete();

    final Configuration conf = new BaseConfiguration();
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);
    final TinkerGraph graph = TinkerGraph.open(conf);
    TinkerFactory.generateTheCrew(graph);
    graph.close();

    // the crew graph uses multi-properties, so reload with list cardinality
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_DEFAULT_VERTEX_PROPERTY_CARDINALITY,
            VertexProperty.Cardinality.list.toString());
    final TinkerGraph reloadedGraph = TinkerGraph.open(conf);
    IoTest.assertCrewGraph(reloadedGraph, false);
    reloadedGraph.close();
}
@Test
public void shouldPersistWithRelativePath() {
    final String graphLocation = TestHelper.convertToRelative(TinkerGraphTest.class,
            new File(TestHelper.makeTestDataDirectory(TinkerGraphTest.class))) + "shouldPersistToGryoRelative.kryo";
    final File f = new File(graphLocation);
    if (f.exists() && f.isFile()) f.delete();

    final Configuration conf = new BaseConfiguration();
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
    conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, graphLocation);
    final TinkerGraph graph = TinkerGraph.open(conf);
    TinkerFactory.generateModern(graph);
    graph.close();

    final TinkerGraph reloadedGraph = TinkerGraph.open(conf);
    IoTest.assertModernGraph(reloadedGraph, true, false);
    reloadedGraph.close();
}
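The five tests above share one persistence pattern: TinkerGraph writes its contents to GREMLIN_TINKERGRAPH_GRAPH_LOCATION in the configured GREMLIN_TINKERGRAPH_GRAPH_FORMAT when close() is called, and a later open() with the same configuration reloads the data. A minimal sketch of that round-trip, with an illustrative file path not taken from the source:

// Minimal sketch of the persistence round-trip used by the tests above.
// "/tmp/modern.kryo" is an illustrative path, not from the source.
final Configuration conf = new BaseConfiguration();
conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_FORMAT, "gryo");
conf.setProperty(TinkerGraph.GREMLIN_TINKERGRAPH_GRAPH_LOCATION, "/tmp/modern.kryo");

final TinkerGraph graph = TinkerGraph.open(conf);
TinkerFactory.generateModern(graph);
graph.close(); // persists the graph to the configured location

final TinkerGraph reloaded = TinkerGraph.open(conf); // reloads from the same location
assert reloaded.traversal().V().count().next() == 6L; // the modern toy graph has 6 vertices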
public DataStore(Configuration conf) throws QonduitException {
    try {
        final BaseConfiguration apacheConf = new BaseConfiguration();
        Configuration.Accumulo accumuloConf = conf.getAccumulo();
        apacheConf.setProperty("instance.name", accumuloConf.getInstanceName());
        apacheConf.setProperty("instance.zookeeper.host", accumuloConf.getZookeepers());
        final ClientConfiguration aconf = new ClientConfiguration(Collections.singletonList(apacheConf));
        final Instance instance = new ZooKeeperInstance(aconf);
        connector = instance.getConnector(accumuloConf.getUsername(), new PasswordToken(accumuloConf.getPassword()));
    } catch (Exception e) {
        throw new QonduitException(HttpResponseStatus.INTERNAL_SERVER_ERROR.code(),
                "Error creating DataStoreImpl", e.getMessage(), e);
    }
}
@Override
public Configuration configuration() {
    if (this.origConfig != null) {
        return this.origConfig;
    } else {
        Configuration ans = new BaseConfiguration();
        ans.setProperty(DB_PATH_KEY, dbPath.toString());
        ans.setProperty(ALLOW_FULL_GRAPH_SCANS_KEY, allowFullGraphScans);
        ans.setProperty(DEFAULT_ISOLATION_LEVEL_KEY, defaultIsolationLevel.toString());
        ans.setProperty(TX_LOG_THRESHOLD_KEY, getTxLogThreshold());
        ans.setProperty(REORG_FACTOR_KEY, getReorgFactor());
        ans.setProperty(CREATE_DIR_IF_MISSING_KEY, createDirIfMissing);
        ans.setProperty(VERTEX_INDICES_KEY, String.join(",", getIndexedKeys(Vertex.class)));
        // edge indices must be derived from Edge.class; the original passed Vertex.class here
        ans.setProperty(EDGE_INDICES_KEY, String.join(",", getIndexedKeys(Edge.class)));
        return ans;
    }
}
public static void main(String[] args) throws Exception {
    try (ConfigurableApplicationContext ctx = new SpringApplicationBuilder(SpringBootstrap.class)
            .bannerMode(Mode.OFF).web(false).run(args)) {
        Configuration conf = ctx.getBean(Configuration.class);
        final BaseConfiguration apacheConf = new BaseConfiguration();
        Configuration.Accumulo accumuloConf = conf.getAccumulo();
        apacheConf.setProperty("instance.name", accumuloConf.getInstanceName());
        apacheConf.setProperty("instance.zookeeper.host", accumuloConf.getZookeepers());
        final ClientConfiguration aconf = new ClientConfiguration(Collections.singletonList(apacheConf));
        final Instance instance = new ZooKeeperInstance(aconf);
        Connector con = instance.getConnector(accumuloConf.getUsername(),
                new PasswordToken(accumuloConf.getPassword()));
        Scanner s = con.createScanner(conf.getMetaTable(),
                con.securityOperations().getUserAuthorizations(con.whoami()));
        try {
            s.setRange(new Range(Meta.METRIC_PREFIX, true, Meta.TAG_PREFIX, false));
            for (Entry<Key, Value> e : s) {
                System.out.println(e.getKey().getRow().toString().substring(Meta.METRIC_PREFIX.length()));
            }
        } finally {
            s.close();
        }
    }
}
@Test
public void shouldConfigPoolOnConstructionWithPoolSizeOneAndNoIoRegistry() throws Exception {
    final Configuration conf = new BaseConfiguration();
    final GryoPool pool = GryoPool.build().poolSize(1)
            .ioRegistries(conf.getList(GryoPool.CONFIG_IO_REGISTRY, Collections.emptyList())).create();
    final GryoReader reader = pool.takeReader();
    final GryoWriter writer = pool.takeWriter();

    pool.offerReader(reader);
    pool.offerWriter(writer);

    for (int ix = 0; ix < 100; ix++) {
        final GryoReader r = pool.takeReader();
        final GryoWriter w = pool.takeWriter();
        assertReaderWriter(w, r, 1, Integer.class);

        // should always return the same original instance
        assertEquals(reader, r);
        assertEquals(writer, w);

        pool.offerReader(r);
        pool.offerWriter(w);
    }
}
@Override
public Configuration newGraphConfiguration(final String graphName, final Class<?> test,
                                           final String testMethodName,
                                           final Map<String, Object> configurationOverrides,
                                           final LoadGraphWith.GraphData loadGraphWith) {
    final Configuration conf = new BaseConfiguration();
    getBaseConfiguration(graphName, test, testMethodName, loadGraphWith).entrySet().stream()
            .forEach(e -> conf.setProperty(e.getKey(), e.getValue()));

    // assign overrides but don't allow the gremlin.graph setting to be overridden.
    // the test suite should not be able to override that.
    configurationOverrides.entrySet().stream()
            .filter(c -> !c.getKey().equals(Graph.GRAPH))
            .forEach(e -> conf.setProperty(e.getKey(), e.getValue()));
    return conf;
}
@Override
public Iterator<Vertex> head(final String location, final Class readerClass, final int totalLines) {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, location);
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, readerClass.getCanonicalName());
    try {
        if (InputRDD.class.isAssignableFrom(readerClass)) {
            return IteratorUtils.map(((InputRDD) readerClass.getConstructor().newInstance())
                    .readGraphRDD(configuration, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> tuple._2().get());
        } else if (InputFormat.class.isAssignableFrom(readerClass)) {
            return IteratorUtils.map(new InputFormatRDD()
                    .readGraphRDD(configuration, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> tuple._2().get());
        }
    } catch (final Exception e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    throw new IllegalArgumentException("The provided readerClass must be an "
            + InputFormat.class.getCanonicalName() + " or an "
            + InputRDD.class.getCanonicalName() + ": " + readerClass.getCanonicalName());
}
@Override
public <K, V> Iterator<KeyValue<K, V>> head(final String location, final String memoryKey,
                                            final Class readerClass, final int totalLines) {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, location);
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, readerClass.getCanonicalName());
    try {
        if (InputRDD.class.isAssignableFrom(readerClass)) {
            return IteratorUtils.map(((InputRDD) readerClass.getConstructor().newInstance())
                    .readMemoryRDD(configuration, memoryKey, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> new KeyValue(tuple._1(), tuple._2()));
        } else if (InputFormat.class.isAssignableFrom(readerClass)) {
            return IteratorUtils.map(new InputFormatRDD()
                    .readMemoryRDD(configuration, memoryKey, new JavaSparkContext(Spark.getContext()))
                    .take(totalLines).iterator(), tuple -> new KeyValue(tuple._1(), tuple._2()));
        }
    } catch (final Exception e) {
        throw new IllegalArgumentException(e.getMessage(), e);
    }
    throw new IllegalArgumentException("The provided readerClass must be an "
            + InputFormat.class.getCanonicalName() + " or an "
            + InputRDD.class.getCanonicalName() + ": " + readerClass.getCanonicalName());
}
@Test
public void shouldWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_INPUT_LOCATION, SparkHadoopGraphProvider.PATHS.get("tinkerpop-modern.kryo"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, GryoInputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER, ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, TestHelper.makeTestDataDirectory(this.getClass(), "shouldWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class)
            .result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(Computer.compute(SparkGraphComputer.class)),
                            "gremlin-groovy",
                            "g.V()").create(graph)).submit().get();
}
@Test
public void shouldSupportHadoopGraphOLTP() {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER, GryoOutputFormat.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, TestHelper.makeTestDataDirectory(this.getClass(), "shouldSupportHadoopGraphOLTP"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    GraphTraversalSource g = graph.traversal(); // OLTP
    assertEquals("person", g.V().has("age", 29).next().label());
    assertEquals(Long.valueOf(4), g.V().count().next());
    assertEquals(Long.valueOf(0), g.E().count().next());
    assertEquals(Long.valueOf(2), g.V().has("age", P.gt(30)).count().next());
}
@Test
public void shouldReadFromWriteToArbitraryRDD() throws Exception {
    final Configuration configuration = new BaseConfiguration();
    configuration.setProperty("spark.master", "local[4]");
    configuration.setProperty("spark.serializer", GryoSerializer.class.getCanonicalName());
    configuration.setProperty(Graph.GRAPH, HadoopGraph.class.getName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_READER, ExampleInputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_GRAPH_WRITER, ExampleOutputRDD.class.getCanonicalName());
    configuration.setProperty(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, TestHelper.makeTestDataDirectory(this.getClass(), "shouldReadFromWriteToArbitraryRDD"));
    configuration.setProperty(Constants.GREMLIN_HADOOP_JARS_IN_DISTRIBUTED_CACHE, false);
    ////////
    Graph graph = GraphFactory.open(configuration);
    graph.compute(SparkGraphComputer.class)
            .result(GraphComputer.ResultGraph.NEW)
            .persist(GraphComputer.Persist.EDGES)
            .program(TraversalVertexProgram.build()
                    .traversal(graph.traversal().withComputer(SparkGraphComputer.class),
                            "gremlin-groovy",
                            "g.V()").create(graph)).submit().get();
}
public Configuration build() {
    // create configuration instance
    Configuration configuration = new BaseConfiguration();
    // url
    configuration.setProperty(Neo4JUrlConfigurationKey, "bolt://" + hostname + ":" + port);
    // hostname
    configuration.setProperty(Neo4JHostnameConfigurationKey, hostname);
    // port
    configuration.setProperty(Neo4JPortConfigurationKey, port);
    // username
    configuration.setProperty(Neo4JUsernameConfigurationKey, username);
    // password
    configuration.setProperty(Neo4JPasswordConfigurationKey, password);
    // graph name
    configuration.setProperty(Neo4JGraphNameConfigurationKey, graphName);
    // vertex id provider, falling back to the element id provider
    configuration.setProperty(Neo4JVertexIdProviderClassNameConfigurationKey,
            vertexIdProviderClassName != null ? vertexIdProviderClassName : elementIdProviderClassName);
    // edge id provider, falling back to the element id provider
    configuration.setProperty(Neo4JEdgeIdProviderClassNameConfigurationKey,
            edgeIdProviderClassName != null ? edgeIdProviderClassName : elementIdProviderClassName);
    // property id provider, falling back to the element id provider
    configuration.setProperty(Neo4JPropertyIdProviderClassNameConfigurationKey,
            propertyIdProviderClassName != null ? propertyIdProviderClassName : elementIdProviderClassName);
    return configuration;
}
@Test
public void testLocalNodeUsingExt() throws BackendException, InterruptedException {
    String baseDir = Joiner.on(File.separator).join("target", "es", "jvmlocal_ext");
    assertFalse(new File(baseDir + File.separator + "data").exists());

    CommonsConfiguration cc = new CommonsConfiguration(new BaseConfiguration());
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.data", "true");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.client", "false");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.local", "true");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.path.data", baseDir + File.separator + "data");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.path.work", baseDir + File.separator + "work");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.path.logs", baseDir + File.separator + "logs");

    ModifiableConfiguration config = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS,
            cc, BasicConfiguration.Restriction.NONE);
    config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME);
    Configuration indexConfig = config.restrictTo(INDEX_NAME);

    IndexProvider idx = new ElasticSearchIndex(indexConfig);
    simpleWriteAndQuery(idx);
    idx.close();

    assertTrue(new File(baseDir + File.separator + "data").exists());
}
@Test
public void testLocalNodeUsingExtAndIndexDirectory() throws BackendException, InterruptedException {
    String baseDir = Joiner.on(File.separator).join("target", "es", "jvmlocal_ext2");
    assertFalse(new File(baseDir + File.separator + "data").exists());

    CommonsConfiguration cc = new CommonsConfiguration(new BaseConfiguration());
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.data", "true");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.client", "false");
    cc.set("index." + INDEX_NAME + ".elasticsearch.ext.node.local", "true");

    ModifiableConfiguration config = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS,
            cc, BasicConfiguration.Restriction.NONE);
    config.set(INTERFACE, ElasticSearchSetup.NODE.toString(), INDEX_NAME);
    config.set(INDEX_DIRECTORY, baseDir, INDEX_NAME);
    Configuration indexConfig = config.restrictTo(INDEX_NAME);

    IndexProvider idx = new ElasticSearchIndex(indexConfig);
    simpleWriteAndQuery(idx);
    idx.close();

    assertTrue(new File(baseDir + File.separator + "data").exists());
}
private static ReadConfiguration getLocalConfiguration(String shortcutOrFile) {
    File file = new File(shortcutOrFile);
    if (file.exists()) return getLocalConfiguration(file);
    else {
        // Not a file: treat the argument as a backend shorthand of the form "backend:arg",
        // where the optional second argument configures the backend's directory, config
        // file, or hosts, depending on which option the backend declares for its shorthand.
        int pos = shortcutOrFile.indexOf(':');
        if (pos < 0) pos = shortcutOrFile.length();
        String backend = shortcutOrFile.substring(0, pos);
        Preconditions.checkArgument(StandardStoreManager.getAllManagerClasses().containsKey(backend.toLowerCase()),
                "Backend shorthand unknown: %s", backend);
        String secondArg = null;
        if (pos + 1 < shortcutOrFile.length()) secondArg = shortcutOrFile.substring(pos + 1).trim();
        BaseConfiguration config = new BaseConfiguration();
        ModifiableConfiguration writeConfig = new ModifiableConfiguration(ROOT_NS,
                new CommonsConfiguration(config), BasicConfiguration.Restriction.NONE);
        writeConfig.set(STORAGE_BACKEND, backend);
        ConfigOption option = Backend.getOptionForShorthand(backend);
        if (option == null) {
            Preconditions.checkArgument(secondArg == null);
        } else if (option == STORAGE_DIRECTORY || option == STORAGE_CONF_FILE) {
            Preconditions.checkArgument(StringUtils.isNotBlank(secondArg),
                    "Need to provide additional argument to initialize storage backend");
            writeConfig.set(option, getAbsolutePath(secondArg));
        } else if (option == STORAGE_HOSTS) {
            Preconditions.checkArgument(StringUtils.isNotBlank(secondArg),
                    "Need to provide additional argument to initialize storage backend");
            writeConfig.set(option, new String[]{secondArg});
        } else {
            throw new IllegalArgumentException("Invalid configuration option for backend " + option);
        }
        return new CommonsConfiguration(config);
    }
}
/**
 * Asserts that when a property is requested from the configuration and it fires an error event
 * (e.g. the database is not available), the previously stored values are not cleared.
 */
@Test
public void testAssertGetPropertyErrorReturnPreviousValue() throws Exception {
    // Get a reloadable property source that loads properties from the configuration every time a property is read.
    BaseConfiguration configuration = new BaseConfiguration() {
        @Override
        public Object getProperty(String key) {
            fireError(EVENT_READ_PROPERTY, key, null, new IllegalStateException("test exception"));
            return null;
        }
    };
    configuration.addProperty(TEST_KEY, TEST_VALUE_1);

    ReloadablePropertySource reloadablePropertySource = getNewReloadablePropertiesSource(0L, configuration);
    verifyPropertySourceValue(reloadablePropertySource, TEST_VALUE_1);
}
@Before
public void setUp() throws Exception {
    final String clientPort = "21818";
    final String dataDirectory = System.getProperty("java.io.tmpdir");
    zookeeperHost = "localhost:" + clientPort;

    ServerConfig config = new ServerConfig();
    config.parse(new String[] { clientPort, dataDirectory });

    testConfig = new BaseConfiguration();
    testConfig.setProperty("quorum", zookeeperHost);
    testConfig.setProperty("znode", "/config");
    testConfig.setProperty(APPNAME_PROPERTY, "test");
    testConfig.setProperty(ROOTCONFIG_PROPERTY, "test");

    zkServer = new ZookeeperTestUtil.ZooKeeperThread(config);
    server = new Thread(zkServer);
    server.start();

    zookeeper = connect(zookeeperHost);
}
@Test
public void testLoad() throws URISyntaxException, InterruptedException, IOException, ConfigurationException {
    final String INPUT = "test.yml";
    URL testUrl = getClass().getResource("/" + INPUT);
    final String testYaml = testUrl.toURI().getPath();

    FileBasedConfigSource source = new FileBasedConfigSource();
    Configuration config = new BaseConfiguration();
    config.setProperty(ROOTCONFIG_PROPERTY, testYaml);
    source.configure(config, new HierarchicalConfiguration(), null);

    HierarchicalConfigurationDeserializer deserializer = new YamlDeserializer();
    InputStream is = source.load("test.yml");
    ConfigurationResult result = deserializer.deserialize(is);
    Configuration configuration = result.getConfiguration();
    assertThat(configuration.getString("type.unicodeString"), is("€"));
}
@Test
public void testMultiFileLoad() throws Exception {
    final String INPUT = "multiple-files/root.yaml";
    URL testUrl = getClass().getResource("/" + INPUT);
    final String testYaml = testUrl.toURI().getPath();

    FileBasedConfigSource source = new FileBasedConfigSource();
    Configuration config = new BaseConfiguration();
    config.setProperty(ROOTCONFIG_PROPERTY, testYaml);
    source.configure(config, new HierarchicalConfiguration(), null);

    HierarchicalConfigurationDeserializer deserializer = new YamlDeserializer();
    InputStream is = source.load("root.yaml");
    ConfigurationResult result = deserializer.deserialize(is);
    HierarchicalConfiguration configuration = result.getConfiguration();

    YamlSerializer serializer = new YamlSerializer();
    serializer.serialize(configuration, System.out);
    // assertThat(configuration.getString("type.unicodeString"), is("€"));
}
@Test
public void basicTest() {
    Configuration configuration = new BaseConfiguration();
    configuration.setProperty("a", "XXX");
    configuration.setProperty("b", "YYY");
    configuration.setProperty("c", 1);

    Precomputed<String> precomputed = Precomputed.monitorByKeys(
            configuration,
            config -> config.getString("a") + "--" + config.getString("b") + "--" + config.getInt("c", 0),
            "a", "b");
    assertThat(precomputed.get(), is("XXX--YYY--1"));

    // Not a monitored value, so no update.
    configuration.setProperty("c", 2);
    assertThat(precomputed.get(), is("XXX--YYY--1"));

    // Monitored value; update.
    configuration.setProperty("a", "ZZZ");
    assertThat(precomputed.get(), is("ZZZ--YYY--2"));

    // Monitored value; update.
    configuration.setProperty("b", "XXX");
    assertThat(precomputed.get(), is("ZZZ--XXX--2"));
}
@Test
public void timestampTest() {
    Configuration configuration = new BaseConfiguration();
    configuration.setProperty("a", "AAA");
    configuration.setProperty(ConcurrentConfiguration.MODIFICATION_TIMESTAMP, System.nanoTime());

    Precomputed<String> precomputed = Precomputed.monitorByUpdate(
            configuration,
            config -> config.getString("a")
    );
    assertThat(precomputed.get(), is("AAA"));

    // Timestamp unchanged, so no update.
    configuration.setProperty("a", "BBB");
    assertThat(precomputed.get(), is("AAA"));

    // Touch the timestamp so an update will be required.
    configuration.setProperty(ConcurrentConfiguration.MODIFICATION_TIMESTAMP, System.nanoTime());
    assertThat(precomputed.get(), is("BBB"));
}
protected Configuration getConfiguration(boolean create, boolean open, boolean transactional) {
    if (configuration != null)
        return configuration;
    else
        return new BaseConfiguration() {
            {
                setProperty(Graph.GRAPH, OrientGraph.class.getName());
                setProperty(OrientGraph.CONFIG_URL, url);
                setProperty(OrientGraph.CONFIG_USER, user);
                setProperty(OrientGraph.CONFIG_PASS, password);
                setProperty(OrientGraph.CONFIG_CREATE, create);
                setProperty(OrientGraph.CONFIG_OPEN, open);
                setProperty(OrientGraph.CONFIG_TRANSACTIONAL, transactional);
                setProperty(OrientGraph.CONFIG_LABEL_AS_CLASSNAME, labelAsClassName);
            }
        };
}
@Test
public void indexCollation() {
    OrientGraph graph = newGraph();

    String label = "VC1";
    String key = "name";
    String value = "bob";

    Configuration config = new BaseConfiguration();
    config.setProperty("type", "UNIQUE");
    config.setProperty("keytype", OType.STRING);
    config.setProperty("collate", "ci");
    graph.createVertexIndex(key, label, config);

    graph.addVertex(label, label, key, value);

    // TODO: test with a "has" traversal, if/when that supports a case-insensitive match predicate
    // OrientIndexQuery indexRef = new OrientIndexQuery(true, Optional.of(label), key, value.toUpperCase());
    // Iterator<OrientVertex> result = graph.getIndexedVertices(indexRef).iterator();
    // Assert.assertEquals(result.hasNext(), true);
}
@Test
public void testFind() throws Exception {
    String host = System.getProperty("Z3950CatalogTest.host");
    String port = System.getProperty("Z3950CatalogTest.port");
    String base = System.getProperty("Z3950CatalogTest.base");
    String recordCharset = System.getProperty("Z3950CatalogTest.recordCharset");
    Assume.assumeNotNull(host, port, base);

    String fieldName = "sys";
    String value = "001704913";
    Locale locale = null;
    final String catalogId = "catalogId";
    CatalogConfiguration c = new CatalogConfiguration(catalogId, "", new BaseConfiguration() {{
        addProperty(CatalogConfiguration.PROPERTY_FIELDS, "sys");
        addProperty(CatalogConfiguration.FIELD_PREFIX + '.' + "sys" + '.' + Z3950Catalog.PROPERTY_FIELD_QUERY,
                "@attrset bib-1 @attr 1=12 @attr 4=1 \"%s\"");
    }});
    Z3950Catalog instance = new Z3950Catalog(host, Integer.parseInt(port), base,
            recordCharset == null ? null : Charset.forName(recordCharset),
            Z3950Catalog.readFields(c));
    List<MetadataItem> result = instance.find(fieldName, value, locale);
    assertFalse(result.isEmpty());
}
@Before
public void setUp() {
    conf = new BaseConfiguration();
    conf.setProperty(DesaServices.PROPERTY_DESASERVICES, "ds1, dsNulls");

    String prefix = DesaServices.PREFIX_DESA + '.' + "ds1" + '.';
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_USER, "ds1user");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_PASSWD, "ds1passwd");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_PRODUCER, "ds1producer");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_OPERATOR, "ds1operator");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_EXPORTMODELS, "model:id1, model:id2");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_RESTAPI, "https://SERVER/dea-frontend/rest/sipsubmission");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_WEBSERVICE, "https://SERVER/dea-frontend/ws/SIPSubmissionService");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_NOMENCLATUREACRONYMS, "acr1, acr2");

    prefix = DesaServices.PREFIX_DESA + '.' + "dsNulls" + '.';
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_USER, null);
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_PASSWD, "");
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_EXPORTMODELS, null);
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_NOMENCLATUREACRONYMS, null);

    prefix = DesaServices.PREFIX_DESA + '.' + "dsNotActive" + '.';
    conf.setProperty(prefix + DesaConfiguration.PROPERTY_USER, "NA");

    desaServices = new DesaServices(conf);
}