/**
 * Copies any IDEA license key files ({@code idea<N>.key}) from the running
 * IDE's system directory into the sandbox's {@code system} directory,
 * skipping licenses that already exist there.
 *
 * @param sandboxHome root directory of the sandbox installation
 */
public static void copyIDEALicense(final String sandboxHome) {
    final File sandboxSystem = new File(sandboxHome, "system");
    final File ideaSystem = new File(PathManager.getSystemPath());
    final File[] licenses = ideaSystem.listFiles(new PatternFilenameFilter("idea\\d+\\.key"));
    if (licenses == null) {
        // listFiles() returns null when the system path is missing/unreadable.
        return;
    }
    for (final File license : licenses) {
        final File target = new File(sandboxSystem, license.getName());
        if (target.exists()) {
            continue; // never overwrite an existing sandbox license
        }
        try {
            FileUtil.copy(license, target);
        } catch (IOException e) {
            LOG.error(e);
        }
    }
}
/** * Method to get file names from the util package * * @return File names as List<> */ public static List<String> getExampleTurtleFileNames () { List<String> fileNames = new ArrayList(); URL fdpFileURL = ExampleTurtleFiles.class.getResource( EXAMPLE_FDP_METADATA_FILE); String sourceFileURI = fdpFileURL.getPath(); sourceFileURI = sourceFileURI.replace(EXAMPLE_FDP_METADATA_FILE, ""); // Matches only turtle files Pattern pattern = Pattern.compile("^.*.ttl"); FilenameFilter filterByExtension = new PatternFilenameFilter(pattern); File dir = new File(sourceFileURI); File[] files = dir.listFiles(filterByExtension); for (File file: files) { fileNames.add(file.getName()); } LOGGER.info("Turtle files in util packaage " + fileNames.toString()); return fileNames; }
@Override @Async public void signalEnd(final List<RecorderInfo> recorders, final File workingDir) { // let's merge the results String fileName = recorders.get(0).getOutputFile().getName(); int i = 1; File[] genDirs = workingDir.listFiles(new PatternFilenameFilter(".*gen-\\d+")); for (File genDir : genDirs) { File file = new File(genDir, fileName); try { Files.copy(file, new File(fileName + ".part" + i++)); } catch (IOException e) { log.error("Could not copy '" + file.getName() + "' to '" + fileName + ".part" + i , e); } } ResultFileMerger merger = new ResultFileMerger(); merger.merge(recorders, workingDir); context.publishEvent(new ClusterMain.ExperimentEndedEvent(this)); }
/**
 * Run scripts in the directory if the file names match the pattern. This
 * will try to run all scripts, even if some fail. Each script runs
 * independently in its own environment.
 *
 * TODO: Add boolean recursive option
 *
 * @param map
 *            initial environment
 * @param directoryName
 *            directory where scripts should be found
 * @param pattern
 *            Only run scripts whose name match this file name pattern.
 */
public static void runScriptsInDirectory(Map<String, Object> map,
        String directoryName, final String pattern) {
    File dir = new File(directoryName);
    // Guard clause: fail fast when the directory is absent.
    if (!dir.exists() || !dir.isDirectory()) {
        throw new AssertionError(String.format(
                "Warning: directory %s does not exist.", dir));
    }
    FilenameFilter filter = (pattern == null)
            ? unravlScriptFile
            : new PatternFilenameFilter(pattern);
    ArrayList<String> fileNames = new ArrayList<String>();
    for (File candidate : dir.listFiles(filter)) {
        if (!candidate.isDirectory())
            fileNames.add(candidate.getAbsolutePath());
    }
    int count = runScriptFiles(map,
            fileNames.toArray(new String[fileNames.size()]));
    System.out.println(String.format("Ran %s scripts in %s%s", count,
            directoryName,
            pattern == null ? "" : " matching pattern " + pattern));
}
/**
 * Runs all scripts in {@code directoryName} whose names match {@code pattern}
 * (or the default UnRAVL script filter when the pattern is null), using the
 * supplied runtime.
 *
 * @param runtime       the UnRAVL runtime used to execute the scripts
 * @param directoryName directory where scripts should be found
 * @param pattern       only run scripts whose name match this pattern; may be null
 */
public static void runScriptsInDirectory(UnRAVLRuntime runtime,
        String directoryName, final String pattern) {
    File dir = new File(directoryName);
    // Guard clause: fail fast when the directory is absent.
    if (!dir.exists() || !dir.isDirectory()) {
        throw new AssertionError(String.format(
                "Warning: directory %s does not exist.", dir));
    }
    FilenameFilter filter = (pattern == null)
            ? unravlScriptFile
            : new PatternFilenameFilter(pattern);
    ArrayList<String> fileNames = new ArrayList<String>();
    for (File candidate : dir.listFiles(filter)) {
        if (!candidate.isDirectory())
            fileNames.add(candidate.getAbsolutePath());
    }
    int count = runScriptFiles(runtime,
            fileNames.toArray(new String[fileNames.size()]));
    System.out.println(String.format("Ran %s scripts in %s%s", count,
            directoryName,
            pattern == null ? "" : " matching pattern " + pattern));
}
/**
 * Tries all scripts in {@code directoryName} whose names match {@code pattern}
 * (or the default UnRAVL script filter when the pattern is null). If no
 * runtime is supplied, one is created from the given environment.
 *
 * @param runtime       the runtime to use, or null to build one from {@code env}
 * @param env           environment used when constructing a fresh runtime
 * @param directoryName directory where scripts should be found
 * @param pattern       only try scripts whose name match this pattern; may be null
 */
public static void tryScriptsInDirectory(UnRAVLRuntime runtime,
        Map<String, Object> env, String directoryName, String pattern) {
    File dir = new File(directoryName);
    // Guard clause: fail fast when the directory is absent.
    if (!dir.exists() || !dir.isDirectory()) {
        throw new AssertionError(String.format(
                "Warning: directory %s does not exist.", dir));
    }
    FilenameFilter filter = (pattern == null)
            ? unravlScriptFile
            : new PatternFilenameFilter(pattern);
    ArrayList<String> fileNames = new ArrayList<String>();
    for (File candidate : dir.listFiles(filter)) {
        if (!candidate.isDirectory())
            fileNames.add(candidate.getAbsolutePath());
    }
    int count = JUnitWrapper.tryScriptFiles(
            runtime == null ? new UnRAVLRuntime(env) : runtime,
            fileNames.toArray(new String[fileNames.size()]));
    System.out.println(String.format("Tried %s scripts in %s%s", count,
            directoryName,
            pattern == null ? "" : " matching pattern " + pattern));
}
/**
 * Opens a sequential reader over the vector block files in {@code dirName}
 * whose names match the directory's base name followed by {@code filterPattern}.
 *
 * @param dirName       directory containing the block files and the ".sta" stat file
 * @param filterPattern regex suffix appended to the directory's base name
 * @param shuffle       if true, read blocks in random order
 * @throws IOException if the stat file or the first block cannot be opened
 */
public SeqVectorReader(String dirName, String filterPattern, boolean shuffle) throws IOException {
    dir = new File(dirName);
    listFiles = dir.listFiles(new PatternFilenameFilter(dir.getName() + filterPattern));
    // Validate BEFORE shuffling: the original shuffled first, so a missing
    // directory caused a NullPointerException instead of this error message.
    if (listFiles == null || listFiles.length == 0) {
        throw new RuntimeException("blocks not found!");
    }
    if (shuffle) {
        shuffle(listFiles);
        System.out.println(Arrays.toString(listFiles));
    } else {
        // sort by blockid
    }
    msgpack = new MessagePack();
    unpacker = msgpack.createUnpacker(new BufferedInputStream(
            new FileInputStream(listFiles[blockIt]), 1024 * 1024 * 2));
    stat = DataSetInfo.load(new File(dirName + "/" + dir.getName() + ".sta"));
    vt = VectorType.valueOf(stat.getProperty("vectorType"));
}
public MultiVectorReader(String dirName, int... parts) throws IOException{ if (parts == null){ //all File dir = new File(dirName); File[] listFiles = dir.listFiles(new PatternFilenameFilter(dir.getName() + "\\.v\\.\\d+\\.dat")); parts = new int[listFiles.length]; for(int i = 0 ; i < listFiles.length; i++){ String blockPart = listFiles[i].getName().split("\\.v\\.")[1]; int blockId = Integer.parseInt(blockPart.substring(0, blockPart.indexOf(".dat"))); parts[i] = blockId; } }else{ this.parts = parts; } readers = new ArrayList<SeqVectorReader>(parts.length); currentVectors = new ArrayList<Vector>(parts.length); for(int i = 0 ; i < parts.length; i++){ readers.add(new SeqVectorReader(dirName, "\\.v\\." + parts[i] +"\\.dat")); } vt = readers.get(0).getVectorType(); dir = readers.get(0).getDataDir(); }
/**
 * {@inheritDoc}
 */
@Override
public Iterable<StorageMetadata> list() {
    final Set<StorageMetadata> result = Sets.newHashSet();
    // Each ".unc" file in the base storage dir represents one stored item.
    final File[] uncompressed = baseStorageDir().listFiles(new PatternFilenameFilter(".*\\.unc"));
    if (uncompressed == null) {
        // Directory missing or unreadable: report nothing stored.
        return result;
    }
    for (final File candidate : uncompressed) {
        final StorageMetadata meta = new StorageMetadata();
        meta.setCreationTimestamp(new Date(candidate.lastModified()));
        meta.setHandle(handleFromFile(candidate));
        meta.setUncompressedSize(candidate.length());
        result.add(meta);
    }
    return result;
}
/**
 * Resolves a (sanitized) player name against the world's player-data files,
 * populating the cache on first use. Names whose sanitized form collides
 * with a different raw name are marked broken.
 *
 * @param check sanitized player name to look up
 * @return the raw player name, {@code BROKENNAME} on collision, or null if unknown
 */
private String getPlayer(String check) {
    synchronized (players) {
        if (players.isEmpty()) {
            File worldFolder = ess.getServer().getWorlds().get(0).getWorldFolder();
            File playersFolder = new File(worldFolder, "players");
            String[] datFiles = playersFolder.list(new PatternFilenameFilter(".+\\.dat"));
            // list() returns null when the folder is missing or unreadable;
            // the original would have thrown a NullPointerException here.
            if (datFiles != null) {
                for (String filename : datFiles) {
                    // Strip the ".dat" extension (4 chars).
                    String name = filename.substring(0, filename.length() - 4).toLowerCase(Locale.ENGLISH);
                    String sanitizedName = Util.sanitizeFileName(name);
                    String mapName = players.get(sanitizedName);
                    if (mapName != null && !mapName.equals(name)) {
                        // Two distinct raw names sanitize to the same key.
                        players.put(sanitizedName, BROKENNAME);
                    } else {
                        players.put(sanitizedName, name);
                    }
                }
            }
        }
    }
    return players.get(check);
}
/**
 * @param client recommender to load
 * @param dataDirectory a directory containing data files from which user and item IDs should be read
 * @param steps number of load steps to run
 */
public LoadRunner(MyrrixRecommender client, File dataDirectory, int steps) throws IOException {
    Preconditions.checkNotNull(client);
    Preconditions.checkNotNull(dataDirectory);
    Preconditions.checkArgument(steps > 0);
    log.info("Reading IDs...");
    FastIDSet userIDsSet = new FastIDSet();
    FastIDSet itemIDsSet = new FastIDSet();
    Splitter comma = Splitter.on(',');
    File[] dataFiles = dataDirectory.listFiles(new PatternFilenameFilter(".+\\.csv(\\.(zip|gz))?"));
    // listFiles() returns null when dataDirectory is missing or not a
    // directory; the original would have thrown a NullPointerException.
    Preconditions.checkArgument(dataFiles != null, "Not a readable directory: %s", dataDirectory);
    for (File f : dataFiles) {
        // Each line is "userID,itemID[,...]"; only the first two fields are used.
        for (CharSequence line : new FileLineIterable(f)) {
            Iterator<String> it = comma.split(line).iterator();
            userIDsSet.add(Long.parseLong(it.next()));
            itemIDsSet.add(Long.parseLong(it.next()));
        }
    }
    this.client = client;
    this.uniqueUserIDs = userIDsSet.toArray();
    this.uniqueItemIDs = itemIDsSet.toArray();
    this.steps = steps;
}
/** Generates classpath string using paths of all plugins available and the Bukkit JAR */
private String generateClasspath() {
    File pluginDir = new File("plugins/").getAbsoluteFile();
    File[] plugins = pluginDir.listFiles(new PatternFilenameFilter(".+\\.jar"));
    // StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder classpath = new StringBuilder(BUKKIT_JAR);
    if (plugins != null) { // listFiles() returns null if plugins/ is missing
        for (File plugin : plugins) {
            // File.pathSeparator is ';' on Windows and ':' elsewhere; the
            // original hard-coded ';' and produced a broken classpath on Unix.
            classpath.append(File.pathSeparator).append(plugin.getAbsolutePath());
        }
    }
    return classpath.toString();
}
/**
 * Loads all definition files from the definition directory once the bean's
 * properties are set: first the JSON definitions, then the YAML ones, each
 * parsed with its matching object mapper.
 *
 * @throws IOException if a definition file cannot be read
 */
@Override
public void afterPropertiesSet() throws IOException {
    configure(defDir.listFiles(new PatternFilenameFilter(".*\\.json$")), jsonObjectMapper);
    configure(defDir.listFiles(new PatternFilenameFilter(".*\\.yaml$")), yamlObjectMapper);
}
void doPartition(VectorReader reader, int shards, String suffix) throws IOException{ assert (isPowerOfTwo(shards) == true); File[] dats = reader.getDataDir().listFiles(new PatternFilenameFilter(".*\\.v\\.\\d+\\.dat")); for(File dat : dats){ System.out.print(dat.getName() + ";"); dat.delete(); } String dataName = reader.getDataDir().getName(); HashPartitioner partitioner = new HashPartitioner(shards); String blockPathTemplate = reader.getDataDir().getAbsolutePath() + "/" + dataName + ".v.%d.dat" + suffix; ArrayList<Packer> packers = new ArrayList<Packer>(shards); MessagePack messagePack = new MessagePack(); for(int i = 0 ; i < shards;i++){ //output packers.add(messagePack.createPacker( new BufferedOutputStream(new FileOutputStream(String.format(blockPathTemplate, i)), 1024 * 1024 * 2))); } int count = 0; for(Vector v = reader.getNextVector(); v!= null; v = reader.getNextVector()){ List<Vector> divided = v.divideByFeature(partitioner); for(int i = 0 ; i < divided.size(); i++){ packers.get(i).write(true); divided.get(i).pack(packers.get(i)); } count ++; if (count % 2000 == 0){ for(int i = 0 ; i < packers.size(); i++){ packers.get(i).flush(); } System.out.print("*"); } } for(int i = 0 ; i < packers.size(); i++){ packers.get(i).write(false).close(); } }
/**
 * Opens a sequential reader over the vector block files in {@code dirName}
 * whose names match the directory's base name followed by {@code filterPattern},
 * reading blocks in directory-listing order.
 *
 * @param dirName       directory containing the block files and the ".sta" stat file
 * @param filterPattern regex suffix appended to the directory's base name
 * @throws IOException if the stat file or the first block cannot be opened
 */
public SeqVectorReader(String dirName, String filterPattern) throws IOException {
    dir = new File(dirName);
    listFiles = dir.listFiles(new PatternFilenameFilter(dir.getName() + filterPattern));
    if (listFiles == null || listFiles.length == 0) {
        throw new RuntimeException("blocks not found!");
    }
    msgpack = new MessagePack();
    unpacker = msgpack.createUnpacker(new BufferedInputStream(
            new FileInputStream(listFiles[blockIt]), 1024 * 1024 * 2));
    stat = new Properties();
    FileInputStream fis = new FileInputStream(dirName + "/" + dir.getName() + ".sta");
    // Close in finally: the original leaked the stream if Properties.load threw.
    try {
        stat.load(fis);
    } finally {
        fis.close();
    }
    vt = VectorType.valueOf(stat.getProperty("vectorType"));
}
/**
 * @return the availableSettingsFiles
 */
public Set<String> getAvailableSettingsFiles() {
    if (availableSettingsFiles == null) {
        // Lazily scan the settings directory for *.xml files, sorted by name.
        String[] xmlNames = settingsDir.list(new PatternFilenameFilter(".*\\.xml"));
        if (xmlNames == null) {
            // Directory missing or unreadable: cache an empty set.
            availableSettingsFiles = newTreeSet();
        } else {
            availableSettingsFiles = newTreeSet(asList(xmlNames));
        }
    }
    // Defensive immutable copy so callers cannot mutate the cache.
    return ImmutableSet.copyOf(availableSettingsFiles);
}
/**
 * Deletes previously generated extractor sources ("*<classSuffix>.java")
 * from {@code dir}, then ensures the directory exists for fresh output.
 *
 * @param dir         directory containing generated extractor sources
 * @param classSuffix class-name suffix of the generated extractors
 */
private void deleteOldExtractors(File dir, String classSuffix) {
    // Escape the extension's dot: the original ".*" + classSuffix + ".java"
    // let '.' match any character, e.g. "FooExtractorXjava" also matched.
    File[] files = dir.listFiles(new PatternFilenameFilter(".*" + classSuffix + "\\.java"));
    if (files != null)
        for (File e : files)
            e.delete();
    dir.mkdirs();
}
/**
 * Custom separate load-report component, so it can be called elsewhere, or overwritten by child Providers. Checks
 * the "report" to ensure it is a directory then looks for reportconf.yaml or reportconf.json inside the file. If it
 * exists loads it.
 * @param result The collection of reports to load the contents into.
 * @param report The directory that contains the report files.
 * @param reportId The report id
 * @throws IOException
 */
protected void loadReport(ReportsConfig result, File report, String reportId) throws IOException {
    if (report.isDirectory()) {
        // Escape the dot: the original "^reportconf.(yaml|json)$" also
        // matched names like "reportconfXyaml".
        FilenameFilter configYamlFilter = new PatternFilenameFilter("^reportconf\\.(yaml|json)$");
        File[] selectYaml = report.listFiles(configYamlFilter);
        // Only load when exactly one config file is present.
        if (selectYaml != null && selectYaml.length == 1) {
            File selectedYaml = selectYaml[0];
            loadReport(result, FileUtils.openInputStream(selectedYaml), reportId);
        }
    }
}
private File getStructureFile(File dirWithDb) throws InvalidFileException { //Look for the STR file File[] dbfiles = dirWithDb.listFiles(new PatternFilenameFilter(Pattern.compile(".*\\.STR"))); if (dbfiles == null || dbfiles.length != 1) { throw new InvalidFileException("I expected one .STR file in the zip"); } return dbfiles[0]; }
private static Multimap<Long,RecommendedItem> readAndCopyDataFiles(File dataDir, File tempDir) throws IOException { Multimap<Long,RecommendedItem> data = ArrayListMultimap.create(); for (File dataFile : dataDir.listFiles(new PatternFilenameFilter(".+\\.csv(\\.(zip|gz))?"))) { log.info("Reading {}", dataFile); int count = 0; for (CharSequence line : new FileLineIterable(dataFile)) { Iterator<String> parts = COMMA_TAB_SPLIT.split(line).iterator(); long userID = Long.parseLong(parts.next()); long itemID = Long.parseLong(parts.next()); if (parts.hasNext()) { String token = parts.next().trim(); if (!token.isEmpty()) { data.put(userID, new GenericRecommendedItem(itemID, LangUtils.parseFloat(token))); } // Ignore remove lines } else { data.put(userID, new GenericRecommendedItem(itemID, 1.0f)); } if (++count % 1000000 == 0) { log.info("Finished {} lines", count); } } Files.copy(dataFile, new File(tempDir, dataFile.getName())); } return data; }
/**
 * Factory for Chrome WebDriver executables. Wires four strategies into the
 * superclass: locating an already-downloaded chromedriver, querying the
 * latest release version, downloading/installing a specific version, and the
 * system property Selenium uses to find the binary.
 */
public ChromeWebDriverFactory() {
    super( //
        BrowserType.CHROME, //
        // Strategy 1: find an already-downloaded "chromedriver-<version>"
        // executable in targetDirectory; empty if not exactly one candidate.
        (targetDirectory) -> {
            final String extension = getExecutableExtension(true);
            final Pattern executablePattern = Pattern.compile("chromedriver-(?<version>.*)" + extension);
            final File[] files = targetDirectory.listFiles(new PatternFilenameFilter(executablePattern));
            if (files == null || files.length != 1) {
                return Optional.empty();
            }
            final File executable = files[0];
            // Re-match to extract the embedded version string from the file name.
            final Matcher executableMatcher = executablePattern.matcher(executable.getName());
            if (!executableMatcher.find()) {
                throw new IllegalStateException(
                    String.format("Unable to determine version of executable %s", executable));
            }
            final String version = executableMatcher.group("version");
            return Optional
                .of(new DownloadWebDriverExecutable.WebDriverExecutable(executable, version));
        }, //
        // Strategy 2: fetch the latest released chromedriver version string.
        () -> getString(LATEST_RELEASE_URL), //
        // Strategy 3: download the zip for the requested version/platform,
        // extract it, rename the binary to include the version, mark executable.
        (version, targetDirectory) -> {
            final String system;
            if (SystemUtils.IS_OS_WINDOWS) {
                system = "win32";
            } else if (SystemUtils.IS_OS_MAC) {
                system = "mac64";
            } else if (SystemUtils.IS_OS_LINUX) {
                final StringBuilder linuxBuilder = new StringBuilder("linux");
                // TODO this property does not reflect the OS architecture, it is the VM`s "bitness"
                if (SystemUtils.OS_ARCH.contains("64")) {
                    linuxBuilder.append("64");
                } else {
                    linuxBuilder.append("32");
                }
                system = linuxBuilder.toString();
            } else {
                throw new UnsupportedOperationException(
                    String.format("Unsupported operation system: %s %s", SystemUtils.OS_NAME,
                        SystemUtils.OS_VERSION));
            }
            LOGGER.debug("System '{}' detected.", system);
            final String downloadUrl =
                DOWNLOAD_URL + String.format("/%s/chromedriver_%s.zip", version, system);
            LOGGER.debug("Download chromedriver from {}", downloadUrl);
            downloadZipAndExtract(downloadUrl, targetDirectory);
            // Rename "chromedriver" -> "chromedriver-<version>" so strategy 1
            // can recover the version from the file name later.
            final File chromeDriverFile = new File(targetDirectory,
                String.format("chromedriver%s", getExecutableExtension(false)));
            final File chromeDriverFileWithVersion = new File(targetDirectory,
                String.format("chromedriver-%s%s", version, getExecutableExtension(false)));
            if (!chromeDriverFile.renameTo(chromeDriverFileWithVersion)) {
                throw new RuntimeException();
            }
            if (!chromeDriverFileWithVersion.setExecutable(true, false)) {
                throw new RuntimeException();
            }
            return new DownloadWebDriverExecutable.WebDriverExecutable(chromeDriverFileWithVersion,
                version);
        }, //
        // System property Selenium reads to locate the chromedriver binary.
        ChromeDriverService.CHROME_DRIVER_EXE_PROPERTY //
    );
}
public Observable<Status> downloadUpdate(final String uri) { return Observable.unsafeCreate(subscriber -> { try { File directory = new File(context.getCacheDir(), "updates"); if (!directory.exists() && !directory.mkdirs()) { Crashlytics.log("Could not create apk directory: " + directory); } // clear all previous files File[] files = directory.listFiles(new PatternFilenameFilter("bs-update.*apk")); for (File file : files) { if (!file.delete()) {Crashlytics.log("Could not delete file: " + file); } } // and download the new file. File tempFile = File.createTempFile( "bs-update", ".apk", directory); try (OutputStream output = new FileOutputStream(tempFile)) { Request request = new Request.Builder().url(uri).build(); Call call = okHttpClient.newCall(request); subscriber.add(Subscriptions.create(() -> call.cancel())); Response response = call.execute(); Interval interval = new Interval(250); try (CountingInputStream input = new CountingInputStream(response.body().byteStream())) { int count; byte[] buffer = new byte[1024 * 32]; while ((count = ByteStreams.read(input, buffer, 0, buffer.length)) > 0) { output.write(buffer, 0, count); if (interval.check()) { float progress = input.getCount() / (float) response.body().contentLength(); subscriber.onNext(new Status(progress, null)); } } } } subscriber.onNext(new Status(1, tempFile)); subscriber.onCompleted(); } catch (Throwable error) { subscriber.onError(error); } }); }
/**
 * Builds a tagger from the model files in {@code modelFolder}: sparse
 * POS/lexical features, word/capitalization/suffix embeddings, the classifier
 * weight matrix, and the bias vector.
 *
 * @param modelFolder    directory containing the trained model files
 * @param beta           beam threshold passed to the superclass
 * @param maxTagsPerWord maximum categories kept per word
 * @param cutoffs        cutoffs dictionary passed to the superclass
 * @throws IOException declared for model loading; note all load failures are
 *         currently rethrown as RuntimeException by the catch below
 */
public TaggerEmbeddings(final File modelFolder, final double beta, final int maxTagsPerWord,
        final CutoffsDictionaryInterface cutoffs) throws IOException {
    super(cutoffs, beta, loadCategories(new File(modelFolder, "categories")), maxTagsPerWord);
    try {
        final FilenameFilter embeddingsFileFilter = new PatternFilenameFilter("embeddings.*");
        // If we're using POS tags or lexical features, load l.
        this.posFeatures = loadSparseFeatures(new File(modelFolder + "/postags"));
        this.lexicalFeatures = loadSparseFeatures(new File(modelFolder + "/frequentwords"));
        // Load word embeddings.
        embeddingsFeatures = loadEmbeddings(true, modelFolder.listFiles(embeddingsFileFilter));
        // Load embeddings for capitalization and suffix features.
        discreteFeatures = new HashMap<>();
        discreteFeatures.putAll(loadEmbeddings(false, new File(modelFolder, "capitals")));
        discreteFeatures.putAll(loadEmbeddings(false, new File(modelFolder, "suffix")));
        // Total feature width = (embedding + caps + suffix + POS + lexical)
        // per token, over the full context window of 2*contextWindow+1 tokens.
        totalFeatures = (embeddingsFeatures.get(unknownLower).length
                + discreteFeatures.get(unknownSuffix).length
                + discreteFeatures.get(capsLower).length
                + posFeatures.size() + lexicalFeatures.size()) * (2 * contextWindow + 1);
        // Load the list of categories used by the model.
        categoryToIndex = new HashMap<>();
        for (int i = 0; i < lexicalCategories.size(); i++) {
            categoryToIndex.put(lexicalCategories.get(i), i);
        }
        // Load the weight matrix used by the classifier.
        weightMatrix = new DenseMatrix(lexicalCategories.size(), totalFeatures);
        loadMatrix(weightMatrix, new File(modelFolder, "classifier"));
        // Cache each matrix row as a vector for fast per-category scoring.
        weightMatrixRows = new ArrayList<>(lexicalCategories.size());
        for (int i = 0; i < lexicalCategories.size(); i++) {
            final Vector row = new DenseVector(totalFeatures);
            for (int j = 0; j < totalFeatures; j++) {
                row.set(j, weightMatrix.get(i, j));
            }
            weightMatrixRows.add(row);
        }
        bias = new DenseVector(lexicalCategories.size());
        loadVector(bias, new File(modelFolder, "bias"));
    } catch (final Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Builds a tagger from the model files in {@code modelFolder}: sparse
 * POS/lexical features, word/capitalization/suffix embeddings, the classifier
 * weight matrix, the bias vector, and the tag dictionary.
 *
 * @param modelFolder       directory containing the trained model files
 * @param maxSentenceLength maximum sentence length for the terminal factory
 * @param beta              beam threshold
 * @param maxTagsPerWord    maximum categories kept per word
 */
public TaggerEmbeddings(File modelFolder, int maxSentenceLength, double beta, int maxTagsPerWord) {
    try {
        FilenameFilter embeddingsFileFilter = new PatternFilenameFilter("embeddings.*");
        // If we're using POS tags or lexical features, load l.
        this.posFeatures = loadSparseFeatures(new File(modelFolder + "/postags"));
        this.lexicalFeatures = loadSparseFeatures(new File(modelFolder + "/frequentwords"));
        // Load word embeddings.
        embeddingsFeatures = loadEmbeddings(true, modelFolder.listFiles(embeddingsFileFilter));
        // Load embeddings for capitalization and suffix features.
        discreteFeatures = new HashMap<String, double[]>();
        discreteFeatures.putAll(loadEmbeddings(false, new File(modelFolder, "capitals")));
        discreteFeatures.putAll(loadEmbeddings(false, new File(modelFolder, "suffix")));
        // Total feature width = (embedding + caps + suffix + POS + lexical)
        // per token, over the full context window of 2*contextWindow+1 tokens.
        totalFeatures = (embeddingsFeatures.get(unknownLower).length
                + discreteFeatures.get(unknownSuffix).length
                + discreteFeatures.get(capsLower).length
                + posFeatures.size() + lexicalFeatures.size()) * (2 * contextWindow + 1);
        // Load the list of categories used by the model.
        lexicalCategories = loadCategories(new File(modelFolder, "categories"));
        // Load the weight matrix used by the classifier.
        weightMatrix = new DenseMatrix(lexicalCategories.size(), totalFeatures);
        loadMatrix(weightMatrix, new File(modelFolder, "classifier"));
        // Cache each matrix row as a vector for fast per-category scoring.
        weightMatrixRows = new ArrayList<Vector>(lexicalCategories.size());
        for (int i=0; i<lexicalCategories.size(); i++) {
            Vector row = new DenseVector(totalFeatures);
            for (int j=0; j<totalFeatures; j++) {
                row.set(j, weightMatrix.get(i, j));
            }
            weightMatrixRows.add(row);
        }
        bias = new DenseVector(lexicalCategories.size());
        this.beta = beta;
        this.maxTagsPerWord = maxTagsPerWord;
        // Highest category ID bounds the terminal factory's category table.
        int maxCategoryID = 0;
        for (Category c : lexicalCategories) {
            maxCategoryID = Math.max(maxCategoryID, c.getID());
        }
        this.tagDict = ImmutableMap.copyOf(loadTagDictionary(modelFolder));
        terminalFactory = new SyntaxTreeNodeFactory(maxSentenceLength, maxCategoryID);
        loadVector(bias, new File(modelFolder, "bias"));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * End-to-end test of ManifestMergerAction: merges one merger manifest with
 * two mergee manifests and compares the (whitespace-normalized) output
 * against the single expected manifest shipped with the test data.
 */
@Test
public void testMerge() throws Exception {
    final Path workingDir = Paths.get(System.getProperty("user.dir"));
    assertThat(workingDir.toFile().exists()).isTrue();
    assertThat(workingDir.toFile().isDirectory()).isTrue();
    // Normalize the data dir to always end with '/'.
    String dataDir = System.getProperty("testdatadir");
    if (dataDir.charAt(dataDir.length() - 1) != '/') {
        dataDir = dataDir + '/';
    }
    final Path mergerManifest = rlocation(dataDir + "merger/AndroidManifest.xml");
    final Path mergeeManifestOne = rlocation(dataDir + "mergeeOne/AndroidManifest.xml");
    final Path mergeeManifestTwo = rlocation(dataDir + "mergeeTwo/AndroidManifest.xml");
    assertThat(mergerManifest.toFile().exists()).isTrue();
    assertThat(mergeeManifestOne.toFile().exists()).isTrue();
    assertThat(mergeeManifestTwo.toFile().exists()).isTrue();
    // The following code retrieves the path of the only AndroidManifest.xml in the expected/
    // manifests directory. Unfortunately, this test runs internally and externally and the files
    // have different names.
    final File expectedManifestDirectory =
        mergerManifest.getParent().resolveSibling("expected").toFile();
    // Diagnostic listing only; the actual lookup below accepts any file name.
    final String[] debug =
        expectedManifestDirectory.list(new PatternFilenameFilter(".*AndroidManifest\\.xml$"));
    assertThat(debug).isNotNull();
    final File[] expectedManifestDirectoryManifests =
        expectedManifestDirectory.listFiles((File dir, String name) -> true);
    assertThat(expectedManifestDirectoryManifests).isNotNull();
    assertThat(expectedManifestDirectoryManifests).hasLength(1);
    final Path expectedManifest = expectedManifestDirectoryManifests[0].toPath();
    Files.createDirectories(working.resolve("output"));
    final Path mergedManifest = working.resolve("output/mergedManifest.xml");
    List<String> args =
        generateArgs(
            mergerManifest,
            ImmutableMap.of(mergeeManifestOne, "mergeeOne", mergeeManifestTwo, "mergeeTwo"),
            false, /* isLibrary */
            ImmutableMap.of("applicationId", "com.google.android.apps.testapp"),
            "", /* custom_package */
            mergedManifest);
    ManifestMergerAction.main(args.toArray(new String[0]));
    // Compare with all runs of whitespace collapsed, so formatting
    // differences between merged and expected manifests are ignored.
    assertThat(
            Joiner.on(" ")
                .join(Files.readAllLines(mergedManifest, UTF_8))
                .replaceAll("\\s+", " ")
                .trim())
        .isEqualTo(
            Joiner.on(" ")
                .join(Files.readAllLines(expectedManifest, UTF_8))
                .replaceAll("\\s+", " ")
                .trim());
}
/**
 * Prepares an end-to-end test environment: clears per-test "model.*" system
 * properties, stages data (or a saved model) into a temp dir, starts a
 * Runner serving the recommender, and connects a ClientRecommender to it.
 */
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Clear some sys properties that may have been set per test
    for (Iterator<?> it = System.getProperties().keySet().iterator(); it.hasNext();) {
        if (it.next().toString().startsWith("model.")) {
            it.remove();
        }
    }
    File tempDir = getTestTempDir();
    File testDataDir = new File(getTestDataPath());
    Preconditions.checkState(testDataDir.exists() && testDataDir.isDirectory(),
        "%s is not an existing directory", testDataDir.getAbsolutePath());
    log.info("Copying files to {}", tempDir);
    if (savedModelFile == null) {
        log.info("No saved model file, building model");
        // "[^.].*" skips hidden (dot-prefixed) files in the test data dir.
        File[] srcDataFiles = testDataDir.listFiles(new PatternFilenameFilter("[^.].*"));
        if (srcDataFiles != null) {
            for (File srcDataFile : srcDataFiles) {
                File destFile = new File(tempDir, srcDataFile.getName());
                Files.copy(srcDataFile, destFile);
            }
        }
    } else {
        log.info("Found saved model file {} (size {})", savedModelFile, savedModelFile.length());
        Files.copy(savedModelFile, new File(tempDir, "model.bin.gz"));
    }
    log.info("Configuring recommender...");
    RunnerConfiguration runnerConfig = new RunnerConfiguration();
    runnerConfig.setInstanceID("test");
    runnerConfig.setPort(8090); // Odd ports to avoid conflicts
    if (useSecurity()) {
        runnerConfig.setSecurePort(8453); // Odd ports to avoid conflicts
        runnerConfig.setKeystorePassword("changeit");
        runnerConfig.setKeystoreFile(new File("testdata/keystore"));
        runnerConfig.setUserName("foo");
        runnerConfig.setPassword("bar");
    }
    runnerConfig.setLocalInputDir(tempDir);
    runner = new Runner(runnerConfig);
    runner.call();
    // Mirror the server's security settings on the client side.
    boolean clientSecure = runnerConfig.getKeystoreFile() != null;
    int clientPort = clientSecure ? runnerConfig.getSecurePort() : runnerConfig.getPort();
    MyrrixClientConfiguration clientConfig = new MyrrixClientConfiguration();
    clientConfig.setHost("localhost");
    clientConfig.setPort(clientPort);
    clientConfig.setSecure(clientSecure);
    clientConfig.setKeystorePassword(runnerConfig.getKeystorePassword());
    clientConfig.setKeystoreFile(runnerConfig.getKeystoreFile());
    clientConfig.setUserName(runnerConfig.getUserName());
    clientConfig.setPassword(runnerConfig.getPassword());
    client = new ClientRecommender(clientConfig);
    if (callAwait()) {
        log.info("Waiting for client...");
        client.await();
        log.info("Client ready");
    }
}
/**
 * Verifies that filtering a directory with Guava's PatternFilenameFilter
 * finds at least four ".txt" files in the source directory.
 */
@Test
public void find_files_by_extension_guava() {
    // Escape the dot: the original "^.*.txt" let '.' match any character,
    // so names like "notes_txt" also matched.
    Pattern pattern = Pattern.compile(".*\\.txt");
    FilenameFilter filterByExtension = new PatternFilenameFilter(pattern);
    File dir = new File(sourceFileURI);
    File[] files = dir.listFiles(filterByExtension);
    logger.info(Arrays.toString(files));
    assertTrue(files.length >= 4);
}