public Builder withConfiguration(Configuration config) {
    ConfigurationConverter.getMap(config.subset("embeddedClass"))
        .forEach((k, v) -> addEmbeddedClass(k.toString(), v.toString()));
    ConfigurationConverter.getMap(config.subset("class"))
        .forEach((k, v) -> addClass(k.toString(), v.toString()));
    ConfigurationConverter.getMap(config.subset("dataProperties"))
        .forEach((k, v) -> addDataProperty(k.toString(), v.toString()));
    ConfigurationConverter.getMap(config.subset("objectProperties"))
        .forEach((k, v) -> addObjectProperty(k.toString(), v.toString()));
    if (config.containsKey("partitionStrategy.partitionKey")) {
        partitionStrategy(PartitionStrategy.create(config.subset("partitionStrategy")));
    }
    return metadataLabel(config.getString("metadataLabel", null))
        .rootLabel(config.getString("rootLabel", null))
        .identifierKey(config.getString("identifierKey", null))
        .unknownKey(config.getString("unknownKey", null))
        .implicitKey(config.getString("implicitKey", null));
}
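For reference, Configuration.subset(prefix) strips the prefix and its delimiter from key names before ConfigurationConverter.getMap sees them, so the callbacks above receive keys like "person" rather than "class.person". A minimal sketch of that behavior (the keys and values are illustrative, not taken from the original code):

import java.util.Map;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.ConfigurationConverter;

public class SubsetDemo {
    public static void main(String[] args) {
        BaseConfiguration config = new BaseConfiguration();
        config.addProperty("class.person", "org.example.Person");    // illustrative entries
        config.addProperty("class.company", "org.example.Company");

        // subset("class") exposes the entries with the "class." prefix removed
        Map<Object, Object> classes = ConfigurationConverter.getMap(config.subset("class"));
        classes.forEach((k, v) -> System.out.println(k + " -> " + v));
        // prints (in some order): person -> org.example.Person, company -> org.example.Company
    }
}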
/**
 * Load a given job configuration file.
 *
 * @param properties Gobblin framework configuration properties
 * @param jobConfigFile job configuration file to be loaded
 * @param jobConfigFileDir root job configuration file directory
 * @return a job configuration in the form of {@link java.util.Properties}
 */
public static Properties loadJobConfig(Properties properties, File jobConfigFile, File jobConfigFileDir)
    throws ConfigurationException, IOException {
  List<Properties> commonPropsList = Lists.newArrayList();
  getCommonProperties(commonPropsList, jobConfigFileDir, jobConfigFile.getParentFile());
  // Add the framework configuration properties to the end
  commonPropsList.add(properties);

  Properties jobProps = new Properties();
  // Include common properties in reverse order
  for (Properties commonProps : Lists.reverse(commonPropsList)) {
    jobProps.putAll(commonProps);
  }

  // Then load the job configuration properties defined in the job configuration file
  jobProps.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(jobConfigFile)));
  jobProps.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, jobConfigFile.getAbsolutePath());

  return jobProps;
}
private static void getCommonProperties(List<Properties> commonPropsList, File jobConfigFileDir, File dir)
    throws ConfigurationException, IOException {
  // Make sure the given starting directory is under the job configuration file directory
  Preconditions.checkArgument(dir.getCanonicalPath().startsWith(jobConfigFileDir.getCanonicalPath()),
      String.format("%s is not an ancestor directory of %s", jobConfigFileDir, dir));

  // Traverse backward until the parent of the root job configuration file directory is reached
  while (!dir.equals(jobConfigFileDir.getParentFile())) {
    // Get the properties file that ends with .properties, if any
    String[] propertiesFiles = dir.list(PROPERTIES_FILE_FILTER);
    if (propertiesFiles != null && propertiesFiles.length > 0) {
      // There should be a single properties file in each directory (or subdirectory)
      if (propertiesFiles.length != 1) {
        throw new RuntimeException("Found more than one .properties file in directory: " + dir);
      }
      commonPropsList.add(
          ConfigurationConverter.getProperties(new PropertiesConfiguration(new File(dir, propertiesFiles[0]))));
    }

    dir = dir.getParentFile();
  }
}
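The reverse iteration in loadJobConfig above is what makes properties defined closer to the job file win over ancestor defaults: getCommonProperties collects properties leaf-first, and reversing the list before putAll lets later entries overwrite earlier ones. A tiny sketch of that precedence rule, using made-up keys and paths:

import java.util.Arrays;
import java.util.Properties;

public class OverridePrecedenceDemo {
    public static void main(String[] args) {
        Properties rootProps = new Properties();   // e.g. loaded from <root>/common.properties
        rootProps.setProperty("writer.staging.dir", "/tmp/root");

        Properties leafProps = new Properties();   // e.g. loaded from <root>/team/common.properties
        leafProps.setProperty("writer.staging.dir", "/tmp/team");

        // Applying putAll root-first lets the leaf value overwrite the root value.
        Properties merged = new Properties();
        for (Properties p : Arrays.asList(rootProps, leafProps)) {
            merged.putAll(p);
        }
        System.out.println(merged.getProperty("writer.staging.dir")); // /tmp/team
    }
}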
public static void main(String[] args) throws Exception {
    if (args.length < 1 || args.length > 2) {
        System.err.println(
            "Usage: SchedulerDaemon <default configuration properties file> [custom configuration properties file]");
        System.exit(1);
    }

    // Load default framework configuration properties
    Properties defaultProperties = ConfigurationConverter.getProperties(new PropertiesConfiguration(args[0]));

    // Load custom framework configuration properties (if any)
    Properties customProperties = new Properties();
    if (args.length == 2) {
        customProperties.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(args[1])));
    }

    // Start the scheduler daemon
    new SchedulerDaemon(defaultProperties, customProperties).start();
}
/**
 * The database supplied property sources placeholder configurer that allows access to externalized properties from a database. This method also adds a new
 * property source that contains the database properties to the environment.
 *
 * @return the property sources placeholder configurer.
 */
@Bean
public static PropertySourcesPlaceholderConfigurer databasePropertySourcesPlaceholderConfigurer() {
    // Get the configurable environment and add a new property source to it that contains the database properties.
    // That way, the properties can be accessed via the environment or via an injected @Value annotation.
    // We are adding this property source last so other property sources (e.g. system properties, environment variables) can be used
    // to override the database properties.
    Environment environment = ApplicationContextHolder.getApplicationContext().getEnvironment();
    if (environment instanceof ConfigurableEnvironment) {
        ConfigurableEnvironment configurableEnvironment = (ConfigurableEnvironment) environment;
        ReloadablePropertySource reloadablePropertySource =
            new ReloadablePropertySource(ReloadablePropertySource.class.getName(),
                ConfigurationConverter.getProperties(getPropertyDatabaseConfiguration()),
                getPropertyDatabaseConfiguration());
        configurableEnvironment.getPropertySources().addLast(reloadablePropertySource);
    }

    return new PropertySourcesPlaceholderConfigurer();
}
private void setPamProperties() {
    try {
        this.groupsFromUGI =
            ApplicationProperties.get().getBoolean("atlas.authentication.method.pam.ugi-groups", true);
        Properties properties = ConfigurationConverter.getProperties(ApplicationProperties.get()
            .subset("atlas.authentication.method.pam"));
        for (String key : properties.stringPropertyNames()) {
            String value = properties.getProperty(key);
            options.put(key, value);
        }
        if (!options.containsKey("service")) {
            options.put("service", "atlas-login");
        }
    } catch (Exception e) {
        LOG.error("Exception while setPamProperties", e);
    }
}
@Override
public synchronized Sail getSail() throws SailException {
    if (sail == null) {
        try {
            config = PlatformConfigHelper.getConfig();
            Properties properties = ConfigurationConverter.getProperties(loadProperties());
            // BigdataSailRepository repository = new BigdataSailRepository(new com.bigdata.rdf.sail.BigdataSail(properties));
            // repo = new BigdataSail(repository);
            sail = new SimpleTypeInferencingSail(
                // new SmartSailWrapper(
                new BigdataSail(properties));
            // sail.setPipelineTypes(Arrays.asList(
            //     "getReadOnlyConnection",
            //     "getReadWriteConnection",
            //     "getUnisolatedConnection"));
        } catch (Exception e) {
            throw new SailException(e);
        }
    }
    return sail;
}
/**
 * Load a {@link Properties} compatible path using fallback as fallback.
 *
 * @return The {@link Config} in path with fallback as fallback.
 * @throws IOException
 */
private Config loadJavaPropsWithFallback(Path propertiesPath, Config fallback) throws IOException {
  PropertiesConfiguration propertiesConfiguration = new PropertiesConfiguration();
  try (InputStreamReader inputStreamReader = new InputStreamReader(this.fs.open(propertiesPath), Charsets.UTF_8)) {
    propertiesConfiguration.setDelimiterParsingDisabled(ConfigUtils.getBoolean(fallback,
        PROPERTY_DELIMITER_PARSING_ENABLED_KEY, DEFAULT_PROPERTY_DELIMITER_PARSING_ENABLED_KEY));
    propertiesConfiguration.load(inputStreamReader);

    Config configFromProps =
        ConfigUtils.propertiesToConfig(ConfigurationConverter.getProperties(propertiesConfiguration));

    return ConfigFactory.parseMap(ImmutableMap.of(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY,
            PathUtils.getPathWithoutSchemeAndAuthority(propertiesPath).toString()))
        .withFallback(configFromProps)
        .withFallback(fallback);
  } catch (ConfigurationException ce) {
    throw new IOException(ce);
  }
}
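The setDelimiterParsingDisabled call above decides whether values containing commas survive intact. A short sketch of the two modes under the Commons Configuration 1.x API (loading from an in-memory string rather than a file system, for brevity):

import java.io.StringReader;
import org.apache.commons.configuration.PropertiesConfiguration;

public class DelimiterParsingDemo {
    public static void main(String[] args) throws Exception {
        String source = "input.paths=/data/a,/data/b";

        PropertiesConfiguration split = new PropertiesConfiguration();
        split.load(new StringReader(source));
        System.out.println(split.getString("input.paths"));    // "/data/a" -- value was split on the comma

        PropertiesConfiguration verbatim = new PropertiesConfiguration();
        verbatim.setDelimiterParsingDisabled(true);             // must be set before load()
        verbatim.load(new StringReader(source));
        System.out.println(verbatim.getString("input.paths"));  // "/data/a,/data/b" -- kept verbatim
    }
}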
public static void main(String[] args) throws Exception {
    if (args.length < 1 || args.length > 2) {
        System.err.println(
            "Usage: SchedulerDaemon <default configuration properties file> [custom configuration properties file]");
        System.exit(1);
    }

    // Load default framework configuration properties
    Properties defaultProperties = ConfigurationConverter.getProperties(new PropertiesConfiguration(args[0]));

    // Load custom framework configuration properties (if any)
    Properties customProperties = new Properties();
    if (args.length == 2) {
        customProperties.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(args[1])));
    }

    log.debug("Scheduler Daemon::main starting with defaultProperties: {}, customProperties: {}",
        defaultProperties, customProperties);

    // Start the scheduler daemon
    new SchedulerDaemon(defaultProperties, customProperties).start();
}
public static DbDataComparisonConfig createFromProperties(final Configuration config) {
    // Use a Properties view because config.getString() automatically parses for commas,
    // which we would like to avoid here
    Properties propsView = ConfigurationConverter.getProperties(config);

    DbDataComparisonConfig compConfig = new DbDataComparisonConfig();
    compConfig.setInputTables(Lists.mutable.with(propsView.getProperty("tables.include").split(",")));
    compConfig.setExcludedTables(Lists.mutable.with(propsView.getProperty("tables.exclude").split(",")).toSet());

    String comparisonsStr = propsView.getProperty("comparisons");
    MutableList<Pair<String, String>> compCmdPairs = Lists.mutable.empty();
    MutableSet<String> dsNames = UnifiedSet.newSet();
    for (String compPairStr : comparisonsStr.split(";")) {
        String[] pairParts = compPairStr.split(",");
        // note - if I knew where the Pair.TO_ONE TO_TWO selectors were, I'd use those
        compCmdPairs.add(Tuples.pair(pairParts[0], pairParts[1]));
        dsNames.add(pairParts[0]);
        dsNames.add(pairParts[1]);
    }
    compConfig.setComparisonCommandNamePairs(compCmdPairs);

    MutableList<DbDataSource> dbDataSources = dsNames.toList().collect(new Function<String, DbDataSource>() {
        @Override
        public DbDataSource valueOf(String dsName) {
            Configuration dsConfig = config.subset(dsName);
            DbDataSource dbDataSource = new DbDataSource();
            dbDataSource.setName(dsName);
            dbDataSource.setUrl(dsConfig.getString("url"));
            dbDataSource.setSchema(dsConfig.getString("schema"));
            dbDataSource.setUsername(dsConfig.getString("username"));
            dbDataSource.setPassword(dsConfig.getString("password"));
            dbDataSource.setDriverClassName(dsConfig.getString("driverClass"));
            return dbDataSource;
        }
    });
    compConfig.setDbDataSources(dbDataSources);
    return compConfig;
}
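The comment above points at a real Commons Configuration 1.x default: string values are split on commas when stored, getString() then returns only the first element, while ConfigurationConverter.getProperties() joins the list back together. A sketch of the difference (assuming the 1.x default list delimiter; the key is illustrative):

import java.util.Properties;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.ConfigurationConverter;

public class CommaHandlingDemo {
    public static void main(String[] args) {
        BaseConfiguration config = new BaseConfiguration();
        // With delimiter parsing enabled (the 1.x default) this stores a two-element list.
        config.setProperty("tables.include", "TABLE_A,TABLE_B");

        // getString() returns only the first list element...
        System.out.println(config.getString("tables.include"));       // TABLE_A

        // ...whereas the Properties view rejoins the list, which is what
        // createFromProperties relies on before calling split(",") itself.
        Properties propsView = ConfigurationConverter.getProperties(config);
        System.out.println(propsView.getProperty("tables.include"));  // TABLE_A,TABLE_B
    }
}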
@Override
public File exportSQLFileFromDatabase(int dbId) throws Exception {
    OrdsPhysicalDatabase database = this.getPhysicalDatabaseFromID(dbId);
    DatabaseServer server =
        ServerConfigurationService.Factory.getInstance().getDatabaseServer(database.getDatabaseServer());

    // create the file
    String databaseName = database.getDbConsumedName();
    File file = File.createTempFile("dump_" + databaseName, ".sql");

    Properties properties = ConfigurationConverter.getProperties(MetaConfiguration.getConfiguration());
    String postgres_bin = "";
    if (properties.containsKey("ords.postgresql.bin.path")) {
        postgres_bin = properties.getProperty("ords.postgresql.bin.path");
    }

    ProcessBuilder processBuilder = new ProcessBuilder(postgres_bin + "pg_dump",
        "-f", file.toString(),
        "-v", "-o",
        "-h", database.getDatabaseServer(),
        "-U", server.getUsername(),
        database.getDbConsumedName());
    processBuilder.environment().put("PGPASSWORD", server.getPassword());

    Process process = processBuilder.start();
    try {
        InputStream is = process.getInputStream();
        InputStreamReader reader = new InputStreamReader(is);
        BufferedReader buffer = new BufferedReader(reader);
        String line;
        while ((line = buffer.readLine()) != null) {
            System.out.println(line);
            if (log.isDebugEnabled()) {
                log.debug(line);
            }
        }
    } catch (Exception e) {
        log.error("ERROR", e);
    }
    return file;
}
protected Neo4jGraph(final Configuration configuration) {
    this.configuration.copy(configuration);
    final String directory = this.configuration.getString(CONFIG_DIRECTORY);
    final Map neo4jSpecificConfig = ConfigurationConverter.getMap(this.configuration.subset(CONFIG_CONF));
    this.baseGraph = Neo4jFactory.Builder.open(directory, neo4jSpecificConfig);
    this.initialize(this.baseGraph, configuration);
}
/**
 * Parse command line arguments and return a {@link java.util.Properties} object for the gobblin job found.
 *
 * @param caller Class of the calling main method. Used for error logs.
 * @param args Command line arguments.
 * @return Instance of {@link Properties} for the Gobblin job to run.
 * @throws IOException
 */
public static Properties parseArgs(Class<?> caller, String[] args) throws IOException {
  try {
    // Parse command-line options
    CommandLine cmd = new DefaultParser().parse(options(), args);

    if (cmd.hasOption(HELP_OPTION.getOpt())) {
      printUsage(caller);
      System.exit(0);
    }

    if (!cmd.hasOption(SYS_CONFIG_OPTION.getLongOpt()) || !cmd.hasOption(JOB_CONFIG_OPTION.getLongOpt())) {
      printUsage(caller);
      System.exit(1);
    }

    // Load system and job configuration properties
    Properties sysConfig = ConfigurationConverter
        .getProperties(new PropertiesConfiguration(cmd.getOptionValue(SYS_CONFIG_OPTION.getLongOpt())));
    Properties jobConfig = ConfigurationConverter
        .getProperties(new PropertiesConfiguration(cmd.getOptionValue(JOB_CONFIG_OPTION.getLongOpt())));

    return JobConfigurationUtils.combineSysAndJobProperties(sysConfig, jobConfig);
  } catch (ParseException pe) {
    throw new IOException(pe);
  } catch (ConfigurationException ce) {
    throw new IOException(ce);
  }
}
private void setADProperties() {
    try {
        Configuration configuration = ApplicationProperties.get();
        Properties properties =
            ConfigurationConverter.getProperties(configuration.subset("atlas.authentication.method.ldap.ad"));

        this.adDomain = properties.getProperty("domain");
        this.adURL = properties.getProperty("url");
        this.adBindDN = properties.getProperty("bind.dn");
        this.adBindPassword = properties.getProperty("bind.password");
        this.adUserSearchFilter = properties.getProperty("user.searchfilter");
        this.adBase = properties.getProperty("base.dn");
        this.adReferral = properties.getProperty("referral");
        this.adDefaultRole = properties.getProperty("default.role");
        this.groupsFromUGI = configuration.getBoolean("atlas.authentication.method.ldap.ugi-groups", true);

        if (LOG.isDebugEnabled()) {
            LOG.debug("AtlasADAuthenticationProvider{"
                + "adURL='" + adURL + '\''
                + ", adDomain='" + adDomain + '\''
                + ", adBindDN='" + adBindDN + '\''
                + ", adUserSearchFilter='" + adUserSearchFilter + '\''
                + ", adBase='" + adBase + '\''
                + ", adReferral='" + adReferral + '\''
                + ", adDefaultRole='" + adDefaultRole + '\''
                + ", groupsFromUGI=" + groupsFromUGI
                + '}');
        }
    } catch (Exception e) {
        LOG.error("Exception while setADProperties", e);
    }
}
private void setLdapProperties() {
    try {
        Configuration configuration = ApplicationProperties.get();
        Properties properties =
            ConfigurationConverter.getProperties(configuration.subset("atlas.authentication.method.ldap"));

        ldapURL = properties.getProperty("url");
        ldapUserDNPattern = properties.getProperty("userDNpattern");
        ldapGroupSearchBase = properties.getProperty("groupSearchBase");
        ldapGroupSearchFilter = properties.getProperty("groupSearchFilter");
        ldapGroupRoleAttribute = properties.getProperty("groupRoleAttribute");
        ldapBindDN = properties.getProperty("bind.dn");
        ldapBindPassword = properties.getProperty("bind.password");
        ldapDefaultRole = properties.getProperty("default.role");
        ldapUserSearchFilter = properties.getProperty("user.searchfilter");
        ldapReferral = properties.getProperty("referral");
        ldapBase = properties.getProperty("base.dn");
        groupsFromUGI = configuration.getBoolean("atlas.authentication.method.ldap.ugi-groups", true);

        if (LOG.isDebugEnabled()) {
            LOG.debug("AtlasLdapAuthenticationProvider{"
                + "ldapURL='" + ldapURL + '\''
                + ", ldapUserDNPattern='" + ldapUserDNPattern + '\''
                + ", ldapGroupSearchBase='" + ldapGroupSearchBase + '\''
                + ", ldapGroupSearchFilter='" + ldapGroupSearchFilter + '\''
                + ", ldapGroupRoleAttribute='" + ldapGroupRoleAttribute + '\''
                + ", ldapBindDN='" + ldapBindDN + '\''
                + ", ldapDefaultRole='" + ldapDefaultRole + '\''
                + ", ldapUserSearchFilter='" + ldapUserSearchFilter + '\''
                + ", ldapReferral='" + ldapReferral + '\''
                + ", ldapBase='" + ldapBase + '\''
                + ", groupsFromUGI=" + groupsFromUGI
                + '}');
        }
    } catch (Exception e) {
        LOG.error("Exception while setLdapProperties", e);
    }
}
public static void init(org.apache.commons.configuration.Configuration atlasConfiguration) throws AtlasException {
    LOG.debug("==> InMemoryJAASConfiguration.init()");

    if (atlasConfiguration != null && !atlasConfiguration.isEmpty()) {
        Properties properties = ConfigurationConverter.getProperties(atlasConfiguration);
        init(properties);
    } else {
        throw new AtlasException("Failed to load JAAS application properties: configuration NULL or empty!");
    }

    LOG.debug("<== InMemoryJAASConfiguration.init()");
}
/**
 * Construct a KafkaNotification.
 *
 * @param applicationProperties the application properties used to configure Kafka
 *
 * @throws AtlasException if the notification interface can not be created
 */
@Inject
public KafkaNotification(Configuration applicationProperties) throws AtlasException {
    super(applicationProperties);

    Configuration subsetConfiguration =
        ApplicationProperties.getSubsetConfiguration(applicationProperties, PROPERTY_PREFIX);
    properties = ConfigurationConverter.getProperties(subsetConfiguration);

    // override to store offset in kafka
    // todo do we need ability to replay?

    // Override default configs
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    pollTimeOutMs = subsetConfiguration.getLong("poll.timeout.ms", 1000);

    boolean oldApiCommitEnableFlag = subsetConfiguration.getBoolean("auto.commit.enable", false);
    // set the old auto-commit value if the new autoCommit property is not set
    properties.put("enable.auto.commit",
        subsetConfiguration.getBoolean("enable.auto.commit", oldApiCommitEnableFlag));
    properties.put("session.timeout.ms", subsetConfiguration.getString("session.timeout.ms", "30000"));
}
@Override
public void serialize(final TraversalStrategy traversalStrategy, final JsonGenerator jsonGenerator,
                      final SerializerProvider serializerProvider) throws IOException {
    jsonGenerator.writeStartObject();
    for (final Map.Entry<Object, Object> entry :
            ConfigurationConverter.getMap(traversalStrategy.getConfiguration()).entrySet()) {
        jsonGenerator.writeObjectField((String) entry.getKey(), entry.getValue());
    }
    jsonGenerator.writeEndObject();
}
/**
 * Read a filter into the properties we wish to substitute in templates.
 *
 * Uses Apache Commons Configuration to load filters.
 */
private Properties readFilterIntoProperties(final FileInfo filter) throws ConfigurationException, IOException {
    final CompositeConfiguration composite = new CompositeConfiguration();
    final List<File> files = filter.getFiles();
    for (final File file : files) {
        final PropertiesConfiguration config = new PropertiesConfiguration(file);
        config.setEncoding(configGeneratorParameters.getEncoding());
        composite.addConfiguration(config);
    }
    if (StringUtils.isNotBlank(configGeneratorParameters.getFilterSourcePropertyName())) {
        composite.setProperty(configGeneratorParameters.getFilterSourcePropertyName(), filter.getAllSources());
    }
    return ConfigurationConverter.getProperties(composite);
}
/**
 * Load the properties from the specified file into a {@link Properties} object.
 *
 * @param fileName the name of the file to load properties from
 * @param conf configuration object to determine the file system to be used
 * @return a new {@link Properties} instance
 */
public static Properties fileToProperties(String fileName, Configuration conf)
    throws IOException, ConfigurationException {
  PropertiesConfiguration propsConfig = new PropertiesConfiguration();
  Path filePath = new Path(fileName);
  URI fileURI = filePath.toUri();

  if (fileURI.getScheme() == null && fileURI.getAuthority() == null) {
    propsConfig.load(FileSystem.getLocal(conf).open(filePath));
  } else {
    propsConfig.load(filePath.getFileSystem(conf).open(filePath));
  }

  return ConfigurationConverter.getProperties(propsConfig);
}
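The scheme/authority check above routes bare paths to the local file system and fully qualified URIs to their own file system. A small sketch of what the URI inspection yields for each kind of path (the paths are illustrative):

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class PathSchemeDemo {
    public static void main(String[] args) {
        URI local = new Path("/etc/jobs/job.properties").toUri();
        System.out.println(local.getScheme());    // null -> FileSystem.getLocal(conf) is used

        URI remote = new Path("hdfs://namenode:8020/jobs/job.properties").toUri();
        System.out.println(remote.getScheme());   // "hdfs" -> filePath.getFileSystem(conf) is used
    }
}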
@SuppressWarnings("unchecked") protected static final Context configToContext(Configuration configuration) { if (configuration == null) { return new Context(); } return new Context(ConfigurationConverter.getMap(configuration)); }
/**
 * Copy constructor
 *
 * @param other
 *            the configuration to copy
 */
public StormSinkConfiguration(final StormSinkConfiguration other) {
    batchSize = other.batchSize;
    locationServiceFactoryClassName = other.locationServiceFactoryClassName;
    serviceProviderSerializationClassName = other.serviceProviderSerializationClassName;
    connectionParametersFactoryClassName = other.connectionParametersFactoryClassName;
    eventSenderFactoryClassName = other.eventSenderFactoryClassName;
    configuration = new MapConfiguration(ConfigurationConverter.getMap(other.configuration));
}
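One subtlety worth knowing when copying this way: in Commons Configuration 1.x, ConfigurationConverter.getMap returns a ConfigurationMap view backed by the source configuration rather than a detached snapshot, so the MapConfiguration above decorates that view. A sketch that makes the behavior observable (worth verifying against the library version in use):

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.configuration.MapConfiguration;

public class ConfigurationCopyDemo {
    public static void main(String[] args) {
        BaseConfiguration original = new BaseConfiguration();
        original.setProperty("batchSize", "100");

        MapConfiguration copy = new MapConfiguration(ConfigurationConverter.getMap(original));
        System.out.println(copy.getString("batchSize")); // 100

        // Because getMap returns a live view, updates to the original
        // can show through the "copy".
        original.setProperty("batchSize", "200");
        System.out.println(copy.getString("batchSize")); // 200 with a view-backed map
    }
}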
@Override
public synchronized void createSchema() {
    final Properties props = ConfigurationConverter.getProperties(
        configuration.subset(String.format(CONFIG_PREFIX_FORMAT, schemaName.toLowerCase())));
    try {
        props.setProperty("name", props.getProperty("keyspace"));
        LOG.info("Creating schema: " + schemaName + " " + props);
        this.keyspace.createKeyspace(props);
    } catch (ConnectionException e) {
        LOG.error("Failed to create schema '{}' with properties '{}'",
            new Object[]{schemaName, props.toString(), e});
        throw new RuntimeException("Failed to create keyspace " + keyspace.getKeyspaceName(), e);
    }
}
public SendMailTLS() {
    props = ConfigurationConverter.getProperties(MetaConfiguration.getConfiguration());
}
@Override
public void importSQLFileToDatabase(String hostName, String databaseName, File sqlFile, int databaseId)
        throws Exception {
    DatabaseServer server = ServerConfigurationService.Factory.getInstance().getDatabaseServer(hostName);

    //Subject s = SecurityUtils.getSubject();
    //String principalName = s.getPrincipal().toString();
    //User u = this.getUserByPrincipal(principalName);
    //RestoreEmailService emailService = RestoreEmailService.Factory.getInstance();
    //emailService.setEmail(u.getEmail());
    //emailService.setDatabaseName(databaseName);

    Properties properties = ConfigurationConverter.getProperties(MetaConfiguration.getConfiguration());
    String postgres_bin = "";
    if (properties.containsKey("ords.postgresql.bin.path")) {
        postgres_bin = properties.getProperty("ords.postgresql.bin.path");
    }

    ProcessBuilder processBuilder = new ProcessBuilder(postgres_bin + "psql",
        "-d", databaseName,
        "-h", hostName,
        "-U", server.getUsername(),
        "-f", sqlFile.toString());
    processBuilder.environment().put("PGPASSWORD", server.getPassword());

    DatabaseUploadService uploadService = DatabaseUploadService.Factory.getInstance();
    try {
        Process process = processBuilder.start();
        uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.IN_PROGRESS);
        InputStream is = process.getInputStream();
        InputStreamReader reader = new InputStreamReader(is);
        BufferedReader buffer = new BufferedReader(reader);
        String line;
        while ((line = buffer.readLine()) != null) {
            System.out.println(line);
            if (log.isDebugEnabled()) {
                log.debug(line);
            }
        }
        //emailService.sendRestoreSuccessfulMessage();
        uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.FINISHED);
    } catch (Exception e) {
        log.error("ERROR", e);
        try {
            //emailService.sendRestoreUnsuccessfulMessage(e.toString());
            uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.FAILED);
        } catch (Exception e1) {
            log.error("ERROR", e1);
            e1.printStackTrace();
        }
    }
}
@Override
public void run() {
    Properties properties = ConfigurationConverter.getProperties(MetaConfiguration.getConfiguration());
    String postgres_bin = "";
    if (properties.containsKey("ords.postgresql.bin.path")) {
        postgres_bin = properties.getProperty("ords.postgresql.bin.path");
    }

    /*
     * ProcessBuilder processBuilder = new ProcessBuilder(postgres_bin+"pg_dump",
     *     "-f", file.toString(), "-v", "-o",
     *     "-h", database.getDatabaseServer(),
     *     "-U", server.getUsername(),
     *     database.getDbConsumedName());
     */
    ProcessBuilder processBuilder = new ProcessBuilder(postgres_bin + "psql",
        "-d", this.databaseName,
        "-h", this.server,
        "-U", this.databaseRole,
        "-f", this.dbFile.toString());
    processBuilder.environment().put("PGPASSWORD", this.databasePwd);

    DatabaseUploadService uploadService = DatabaseUploadService.Factory.getInstance();
    try {
        Process process = processBuilder.start();
        uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.IN_PROGRESS);
        InputStream is = process.getInputStream();
        InputStreamReader reader = new InputStreamReader(is);
        BufferedReader buffer = new BufferedReader(reader);
        String line;
        while ((line = buffer.readLine()) != null) {
            System.out.println(line);
            if (log.isDebugEnabled()) {
                log.debug(line);
            }
        }
        emailService.sendRestoreSuccessfulMessage();
        uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.FINISHED);
    } catch (Exception e) {
        log.error("ERROR", e);
        try {
            emailService.sendRestoreUnsuccessfulMessage(e.toString());
            uploadService.setImportProgress(databaseId, OrdsPhysicalDatabase.ImportType.FAILED);
        } catch (Exception e1) {
            log.error("ERROR", e1);
            e1.printStackTrace();
        }
    }
}
/**
 * Recursively load job configuration files under the given directory.
 */
private static void loadJobConfigsRecursive(List<Properties> jobConfigs, Properties rootProps,
    Set<String> jobConfigFileExtensions, File jobConfigDir) throws ConfigurationException {

  // Get the properties file that ends with .properties if any
  String[] propertiesFiles = jobConfigDir.list(PROPERTIES_FILE_FILTER);
  if (propertiesFiles != null && propertiesFiles.length > 0) {
    // There should be a single properties file in each directory (or sub directory)
    if (propertiesFiles.length != 1) {
      throw new RuntimeException("Found more than one .properties file in directory: " + jobConfigDir);
    }
    // Load the properties, which may overwrite the same properties defined in the parent or ancestor directories.
    rootProps.putAll(ConfigurationConverter
        .getProperties(new PropertiesConfiguration(new File(jobConfigDir, propertiesFiles[0]))));
  }

  // Get all non-properties files
  String[] names = jobConfigDir.list(NON_PROPERTIES_FILE_FILTER);
  if (names == null || names.length == 0) {
    return;
  }

  for (String name : names) {
    File file = new File(jobConfigDir, name);
    if (file.isDirectory()) {
      Properties rootPropsCopy = new Properties();
      rootPropsCopy.putAll(rootProps);
      loadJobConfigsRecursive(jobConfigs, rootPropsCopy, jobConfigFileExtensions, file);
    } else {
      if (!jobConfigFileExtensions.contains(Files.getFileExtension(file.getName()).toLowerCase())) {
        LOGGER.warn("Skipped file " + file + " that has an unsupported extension");
        continue;
      }

      File doneFile = new File(file + ".done");
      if (doneFile.exists()) {
        // Skip the job configuration file when a .done file with the same name exists,
        // which means the job configuration file is for a one-time job and the job has
        // already run and finished.
        LOGGER.info("Skipped job configuration file " + file + " for which a .done file exists");
        continue;
      }

      Properties jobProps = new Properties();
      // Put all parent/ancestor properties first
      jobProps.putAll(rootProps);
      // Then load the job configuration properties defined in the job configuration file
      jobProps.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(file)));
      jobProps.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, file.getAbsolutePath());
      jobConfigs.add(jobProps);
    }
  }
}
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  // Parse generic options
  String[] genericCmdLineOpts = new GenericOptionsParser(conf, args).getCommandLine().getArgs();

  // Build command-line options
  Option sysConfigOption = OptionBuilder
      .withArgName("system configuration file")
      .withDescription("Gobblin system configuration file")
      .hasArgs()
      .withLongOpt("sysconfig")
      .create();
  Option jobConfigOption = OptionBuilder
      .withArgName("job configuration file")
      .withDescription("Gobblin job configuration file")
      .hasArgs()
      .withLongOpt("jobconfig")
      .create();
  Option helpOption = OptionBuilder
      .withArgName("help")
      .withDescription("Display usage information")
      .withLongOpt("help")
      .create('h');

  Options options = new Options();
  options.addOption(sysConfigOption);
  options.addOption(jobConfigOption);
  options.addOption(helpOption);

  // Parse command-line options
  CommandLine cmd = new BasicParser().parse(options, genericCmdLineOpts);

  if (cmd.hasOption('h')) {
    printUsage(options);
    System.exit(0);
  }

  if (!cmd.hasOption("sysconfig") || !cmd.hasOption("jobconfig")) {
    printUsage(options);
    System.exit(1);
  }

  // Load system and job configuration properties
  Properties sysConfig =
      ConfigurationConverter.getProperties(new PropertiesConfiguration(cmd.getOptionValue("sysconfig")));
  Properties jobConfig =
      ConfigurationConverter.getProperties(new PropertiesConfiguration(cmd.getOptionValue("jobconfig")));

  // Launch and run the job
  System.exit(ToolRunner.run(conf, new CliMRJobLauncher(sysConfig, jobConfig), args));
}
/**
 * Refreshes the properties from the configuration if it's time to.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
protected void refreshPropertiesIfNeeded() {
    // Ensure we update the properties in a synchronized fashion to avoid possibly corrupting the properties.
    synchronized (this) {
        // See if it's time to refresh the properties (i.e. the elapsed time is greater than the configured refresh interval).
        LOGGER.debug(
            "Checking if properties need to be refreshed. currentTime={} lastRefreshTime={} millisecondsSinceLastPropertiesRefresh={}",
            System.currentTimeMillis(), lastRefreshTime, System.currentTimeMillis() - lastRefreshTime);
        if (System.currentTimeMillis() - lastRefreshTime >= refreshIntervalMillis) {
            // Enough time has passed so refresh the properties.
            LOGGER.debug("Refreshing properties...");

            // Get the latest properties from the configuration.
            Properties properties = ConfigurationConverter.getProperties(configuration);

            if (lastConfigurationErrorEvent != null) {
                LOGGER.error(
                    "An error occurred while retrieving configurations. Previous values are retained. See cause for details.",
                    lastConfigurationErrorEvent.getCause());
                lastConfigurationErrorEvent = null;
            } else {
                // Log the properties we just retrieved from the configuration.
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("New properties just retrieved.");
                    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
                        LOGGER.debug("{}=\"{}\"", entry.getKey(), entry.getValue());
                    }
                }

                // Update our property sources properties with the ones just read by clearing and adding in the new ones since the "source" is final.
                this.source.clear();
                this.source.putAll((Map) properties);

                // Log the properties we have in our property source.
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Updated reloadable properties.");
                    for (Object key : source.keySet()) {
                        LOGGER.debug("{}=\"{}\"", key, properties.get(key));
                    }
                }
            }

            // Update the last refresh time and refresh interval.
            updateLastRefreshTime();
            updateRefreshInterval();

            LOGGER.debug("The properties have been refreshed from the configuration.");
        }
    }
}
/**
 * Initialize the filter.
 *
 * @param filterConfig filter configuration.
 * @throws ServletException thrown if the filter could not be initialized.
 */
@Override
public void init(FilterConfig filterConfig) throws ServletException {
    LOG.info("AtlasAuthenticationFilter initialization started");

    final FilterConfig globalConf = filterConfig;
    final Map<String, String> params = new HashMap<>();

    try {
        configuration = ApplicationProperties.get();
    } catch (Exception e) {
        throw new ServletException(e);
    }

    if (configuration != null) {
        headerProperties = ConfigurationConverter.getProperties(configuration.subset("atlas.headers"));
    }

    FilterConfig filterConfig1 = new FilterConfig() {
        @Override
        public ServletContext getServletContext() {
            if (globalConf != null) {
                return globalConf.getServletContext();
            } else {
                return nullContext;
            }
        }

        @SuppressWarnings("unchecked")
        @Override
        public Enumeration<String> getInitParameterNames() {
            return new IteratorEnumeration(params.keySet().iterator());
        }

        @Override
        public String getInitParameter(String param) {
            return params.get(param);
        }

        @Override
        public String getFilterName() {
            return "AtlasAuthenticationFilter";
        }
    };

    super.init(filterConfig1);

    optionsServlet = new HttpServlet() {
    };
    optionsServlet.init();
}
/**
 * @see org.springframework.beans.factory.FactoryBean#getObject()
 */
public java.util.Properties getObject() throws Exception {
    return (configuration != null) ? ConfigurationConverter.getProperties(configuration) : null;
}
public Server(PropertiesConfiguration properties) throws IOException {
    m_properties = properties;
    m_httpServer = new HttpServer();

    int port = properties.getInt(PROPERTY_PORT, DEFAULT_PORT);
    String host = properties.getString(PROPERTY_HOST, DEFAULT_HOST);
    final NetworkListener networkListener = new NetworkListener(NET_LISTENER_NAME, host, port);

    // Enable SSL on the listener
    networkListener.setSecure(true);
    networkListener.setSSLEngineConfig(makeSSLConfig(m_properties));

    CompressionConfig compressionConfig = networkListener.getCompressionConfig();
    compressionConfig.setCompressionMode(CompressionConfig.CompressionMode.ON); // the mode
    compressionConfig.setCompressionMinSize(100); // the min amount of bytes to compress
    compressionConfig.setCompressableMimeTypes("text/plain", "text/html", "application/x-protobuf",
        "application/pdf"); // the mime types to compress

    m_httpServer.addListener(networkListener);

    // Create a concurrent, nonblocking, asynchronous, batching JPA-based store for persistence
    // of request data. Async is OK, as persistence failures do not need to be handled by the client.
    m_store = new JPABatchStore(ConfigurationConverter.getMap(m_properties));
    m_batchPersister = new AsyncConcurrentBatchingProcessor<PersistEntityEvent>(
        m_store, PersistEntityEvent::new, PersistEntityEvent::translate);

    final ServerConfiguration config = m_httpServer.getServerConfiguration();
    config.setMaxPostSize(MAX_POST_SIZE);

    AsyncPostHandler.ErrorHandler errorHandler = (ByteBuffer postBytes, Response resp, Throwable t) -> {
        LogManager.getLogger(this).warn("Invalid submission.", t);
        resp.sendError(300);
        resp.finish();
    };

    AsyncPostHandler certHandler = new AsyncPostHandler(
        new CertificateHandler(m_batchPersister, CERT_TEMPLATE_PATH, errorHandler), errorHandler);
    AsyncPostHandler versionHandler = new AsyncPostHandler(
        new VersionCheckHandler(m_batchPersister, properties), errorHandler);
    config.addHttpHandler(certHandler, PATH_SUBMIT);
    config.addHttpHandler(versionHandler, PATH_VERSION_CHECK);
}
@Override
public void configurationChanged(ConfigurationEvent event) {
    LOG.debug(String.format("configurationChanged(%s)", event.getPropertyValue()));
    processProperties(ConfigurationConverter.getProperties((Configuration) event.getSource()));
}
/**
 * Returns the pool configuration of the scorer.
 *
 * @return a map from configuration key to configuration value
 */
@NotNull
public Map<Object, Object> getPoolConfiguration() {
    return ConfigurationConverter.getMap(configuration.subset(GenericObjectPoolConfig.class.getName()));
}
/**
 * Get a {@link java.util.Properties} view of all current properties.
 *
 * @return the properties converted from the composite configuration
 */
public static java.util.Properties getAllCurrentProperties() {
    return ConfigurationConverter.getProperties(compositeConfiguration);
}
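For context, a CompositeConfiguration resolves each key against its member configurations in the order they were added, so the Properties returned above reflect that precedence. A minimal sketch (the key names are illustrative):

import java.util.Properties;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.ConfigurationConverter;

public class CompositePrecedenceDemo {
    public static void main(String[] args) {
        BaseConfiguration overrides = new BaseConfiguration();
        overrides.setProperty("timeout", "10");

        BaseConfiguration defaults = new BaseConfiguration();
        defaults.setProperty("timeout", "30");
        defaults.setProperty("retries", "3");

        // Configurations added first win on key conflicts.
        CompositeConfiguration composite = new CompositeConfiguration();
        composite.addConfiguration(overrides);
        composite.addConfiguration(defaults);

        Properties all = ConfigurationConverter.getProperties(composite);
        System.out.println(all.getProperty("timeout")); // 10
        System.out.println(all.getProperty("retries")); // 3
    }
}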