@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    extractTemplatesFromJar();
    try {
        PublishProvider provider;
        switch (publish.getProvider()) {
            default:
                provider = new ConfluenceProvider(publish.getEndpoint(), publish.getUsername(), publish.getPassword());
        }
        List<Page> pages = readHtmlPages();
        publish(provider, pages);
    } catch (Exception e) {
        throw new MojoExecutionException("Unexpected error", e);
    }
}
@Test
public void assureLinuxWebApp() throws Exception {
    final SiteInner siteInner = mock(SiteInner.class);
    doReturn("app,linux").when(siteInner).kind();
    final WebApp app = mock(WebApp.class);
    doReturn(siteInner).when(app).inner();

    // Linux Web App
    WebAppUtils.assureLinuxWebApp(app);

    // Non-Linux Web App
    doReturn("app").when(siteInner).kind();
    MojoExecutionException exception = null;
    try {
        WebAppUtils.assureLinuxWebApp(app);
    } catch (MojoExecutionException e) {
        exception = e;
    } finally {
        assertNotNull(exception);
    }
}
public void execute() throws MojoExecutionException {
    this.sourcesDirectory = Paths.get(outputDirectory).resolve("generated-sources").resolve("sdk");
    this.testsDirectory = Paths.get(outputDirectory).resolve("generated-test-sources").resolve("sdk-tests");

    findModelRoots().forEach(p -> {
        try {
            getLog().info("Loading from: " + p.toString());
            generateCode(C2jModels.builder()
                    .codeGenConfig(loadCodeGenConfig(p))
                    .customizationConfig(loadCustomizationConfig(p))
                    .serviceModel(loadServiceModel(p))
                    .waitersModel(loadWaiterModel(p))
                    .paginatorsModel(loadPaginatorModel(p))
                    .examplesModel(loadExamplesModel(p))
                    .build());
        } catch (MojoExecutionException e) {
            throw new RuntimeException(e);
        }
    });

    project.addCompileSourceRoot(sourcesDirectory.toFile().getAbsolutePath());
    project.addTestCompileSourceRoot(testsDirectory.toFile().getAbsolutePath());
}
public boolean isMarkerOlder( Artifact artifact1 )
    throws MojoExecutionException
{
    File marker = getMarkerFile();
    if ( marker.exists() )
    {
        long artifactLastModified = artifact1.getFile().lastModified();
        long markerLastModified = marker.lastModified();
        return artifactLastModified > markerLastModified;
    }
    else
    {
        // if the marker doesn't exist, we want to copy so assume it is
        // infinitely older
        return true;
    }
}
@Override
public void execute () throws MojoExecutionException, MojoFailureException
{
    getLog ().debug ( "START HERE" );

    try
    {
        final Document doc = XmlHelper.parse ( this.sourceFile );
        updateFile ( doc );
        XmlHelper.write ( doc, this.targetFile );
    }
    catch ( final Exception e )
    {
        throw new MojoExecutionException ( "Failed to update version", e );
    }
}
public String generate() throws MojoExecutionException {
    final ClassPath classPath = initClassPath();
    final Set<ClassInfo> allClasses = classPath.getTopLevelClassesRecursive(prefix);
    String diagram = classDiagramBuilder
            .addClasse(allClasses.stream()
                    // apply filters
                    .filter(defaultFilter())
                    .filter(additionalClassPredicate)
                    .map(ClassInfo::load)
                    .collect(Collectors.toList()))
            .excludes(excludes)
            .setHeader(readHeader())
            .setFooter(readFooter())
            .withNamesMapper(namesMapper)
            .withLinkMaker(this)
            .withDependencies(diagramWithDependencies)
            .build();
    return diagram;
}
public void execute() throws MojoExecutionException {
    for (String inputDirectory : getChartDirectories(getChartDirectory())) {
        if (getExcludes() != null && Arrays.asList(getExcludes()).contains(inputDirectory)) {
            getLog().debug("Skip excluded directory " + inputDirectory);
            continue;
        }
        getLog().info("Packaging chart " + inputDirectory + "...");

        String helmCommand = getHelmExecuteable() + " package " + inputDirectory + " -d " + getOutputDirectory();

        if (getChartVersion() != null) {
            getLog().info(String.format("Setting chart version to %s", getChartVersion()));
            helmCommand = helmCommand + " --version " + getChartVersion();
        }

        callCli(helmCommand, "Unable to package chart at " + inputDirectory, true);
    }
}
public void testMarkerTimeStamp()
    throws MojoExecutionException, IOException, InterruptedException
{
    File theFile = new File( outputFolder, "theFile.jar" );
    outputFolder.mkdirs();
    theFile.createNewFile();
    ArtifactItem theArtifactItem = (ArtifactItem) artifactItems.get( 0 );
    Artifact theArtifact = theArtifactItem.getArtifact();
    theArtifact.setFile( theFile );
    UnpackFileMarkerHandler handler = new UnpackFileMarkerHandler( theArtifactItem, this.outputFolder );
    assertFalse( handler.isMarkerSet() );
    // if the marker is not set, assume it is infinitely older than the
    // artifact.
    assertTrue( handler.isMarkerOlder( theArtifact ) );
    handler.setMarker();
    assertFalse( handler.isMarkerOlder( theArtifact ) );
    theFile.setLastModified( theFile.lastModified() + 60000 );
    assertTrue( handler.isMarkerOlder( theArtifact ) );
    theFile.delete();
    handler.clearMarker();
    assertFalse( handler.isMarkerSet() );
}
@Override
public void execute() throws MojoExecutionException {
    final Log log = getLog();
    if (skipGenTemplate) {
        log.info("skipGenTemplate is true. Skipping generate template step.");
        return;
    }
    try {
        log.info("Generating default Dockerrun.aws.json template...");
        Map<String, Object> templateArgs = getTemplateArgs();
        FileGenerator.generateDefaultDockerrunFile(dockerrunDest, templateArgs);
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to generate file", e);
    }
}
public void testIncludesExcludesMarker()
    throws MojoExecutionException, IOException
{
    UnpackFileMarkerHandler handler = new UnpackFileMarkerHandler( (ArtifactItem) artifactItems.get( 3 ), outputFolder );

    File handle = handler.getMarkerFile();
    assertFalse( handle.exists() );
    assertFalse( handler.isMarkerSet() );

    handler.setMarker();
    assertTrue( handler.isMarkerSet() );
    assertTrue( handle.exists() );

    String hashCode = "" + ( 0 + "**/*.class".hashCode() + "**/*.xml".hashCode() );
    assertTrue( handle.getName().indexOf( hashCode ) > -1 );

    handle.delete();
    assertFalse( handler.isMarkerSet() );

    handle.createNewFile();
    assertTrue( handler.isMarkerSet() );

    handler.clearMarker();
    assertFalse( handle.exists() );
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    MetricFilterPublisher publisher = dryRun
            ? new DryRunMetricFilterPublisher(getLog())
            : new CloudwatchMetricFilterPublisher(getLog());

    // Get a map of fully-namespaced metric names mapped to the metric fields in classes that extend LambdaMetricSet
    Map<String, Field> metricFields = new HashMap<>();
    try {
        metricFields.putAll(new MetricsFinder(project).find());
    } catch (DependencyResolutionRequiredException | MalformedURLException e) {
        throw new MojoExecutionException("Could not scan classpath for metric fields", e);
    }

    if (!metricFields.isEmpty()) {
        getLog().info(String.format("Found [%d] metric fields in classpath.", metricFields.size()));
        int removed = publisher.removeMetricFilters(getMetricFilters(metricFields));
        getLog().info(String.format("Removed [%d] metric filters.", removed));
    } else {
        getLog().warn("Did not find any metric fields in classpath.");
    }
}
void extractJar(File jar, ManifestTransformer manifestTransformer) throws MojoExecutionException {
    try (JarFile jarFile = new JarFile(jar)) {
        for (Enumeration<JarEntry> jarEntries = jarFile.entries(); jarEntries.hasMoreElements(); ) {
            JarEntry jarEntry = jarEntries.nextElement();
            if (manifestTransformer.canTransform(jarEntry)) {
                jarEntry = manifestTransformer.transform(jarEntry);
            }
            if (!jarEntry.isDirectory() && !content.contains(jarEntry.getName())) {
                content.add(jarEntry.getName());
                makeDirsRecursively(jarEntry.getName());
                try (InputStream in = getInputStream(jarEntry, jarFile, manifestTransformer)) {
                    jarOutputStream.putNextEntry(jarEntry);
                    IOUtil.copy(in, jarOutputStream);
                }
            }
        }
    } catch (IOException e) {
        throw new MojoExecutionException("Error adding " + jar.getName() + " to target JAR: " + e.getMessage(), e);
    }
}
private void processRootAnnotation(Map<String, String> paths, String className, AnnotationInstance rootAnnotation)
        throws MojoExecutionException {
    if (rootAnnotation == null) {
        return;
    }
    List<MethodInfo> methods = rootAnnotation.target().asClass().methods();
    for (MethodInfo method : methods) {
        if (!containsPathAnnotation(method)) {
            String httpVerb = getHttpVerb(method);
            if (httpVerb != null) {
                String path = httpVerb;
                path = addProducer(path, method);
                path = addConsumer(path, method);
                path = path.concat("/").concat(getRootPath(rootAnnotation));
                addInPaths(paths, className, path);
            }
        }
    }
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    // Create test context
    try (final GenericXmlApplicationContext generateContext = new GenericXmlApplicationContext()) {
        generateContext.load("classpath:generate-context.xml");
        generateContext.refresh();

        final DataSource dataSource = generateContext.getBean(DataSource.class);
        final Generator generator = new Generator(getLog(), this::processRecord);

        getLog().info("Loading templates from: " + source.getAbsolutePath());
        getLog().info("Package: " + packageName);

        for (final File file : source.listFiles(this::acceptAllFiles)) {
            generator.generate(dataSource, file, destination, packageName);
        }
    }
}
/**
 * Checks hierarchical intra-file and inter-file type linking.
 */
@Test
public void file1UsesFile2TypeDefFile3Type() throws IOException, ParserException, MojoExecutionException {
    String searchDir = "src/test/resources/file1UsesFile2TypeDefFile3Type";
    utilManager.createYangFileInfoSet(YangFileScanner.getYangFiles(searchDir));
    utilManager.parseYangFileInfoSet();
    utilManager.resolveDependenciesUsingLinker();

    String userDir = System.getProperty("user.dir");
    YangPluginConfig yangPluginConfig = new YangPluginConfig();
    yangPluginConfig.setCodeGenDir("target/file1UsesFile2TypeDefFile3Type/");
    utilManager.translateToJava(utilManager.getYangFileInfoSet(), yangPluginConfig);

    deleteDirectory(userDir + "/target/file1UsesFile2TypeDefFile3Type/");
}
private void setupHintedShader()
    throws MojoExecutionException
{
    if ( shaderHint != null )
    {
        try
        {
            shader = (Shader) plexusContainer.lookup( Shader.ROLE, shaderHint );
        }
        catch ( ComponentLookupException e )
        {
            throw new MojoExecutionException( "unable to lookup own Shader implementation with hint:'" + shaderHint + "'", e );
        }
    }
}
/**
 * Called by the project to let the task do its work.
 * This will invoke PatternGenerator for each file in the nested FileSet(s).
 *
 * @throws MojoExecutionException if something goes wrong.
 */
public void execute() throws MojoExecutionException {
    PatternGenerator pg = null;
    try {
        File directory = new File(fileSet.getDirectory());
        String includes = StringUtils.join(fileSet.getIncludes(), ",");
        String excludes = StringUtils.join(fileSet.getExcludes(), ",");
        List<File> fileList = FileUtils.getFiles(directory, includes, excludes);
        for (File file : fileList) {
            pg = new PatternGenerator(file.toURI().toURL());
            pg.setData(fileList);
            pg.processPattern();
            pg.destroy();
            pg = null;
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new MojoExecutionException("Error in executing PatternGeneratorMojo: " + e.getMessage(), e);
    } finally {
        if (pg != null) {
            pg.destroy();
        }
    }
}
public void testUnpackDontOverWriteReleases()
    throws IOException, MojoExecutionException, InterruptedException
{
    stubFactory.setCreateFiles( true );
    Artifact release = stubFactory.getReleaseArtifact();
    release.getFile().setLastModified( System.currentTimeMillis() - 2000 );

    ArtifactItem item = new ArtifactItem( release );

    ArrayList list = new ArrayList( 1 );
    list.add( item );
    mojo.setArtifactItems( list );
    mojo.setOverWriteIfNewer( false );
    mojo.execute();

    assertUnpacked( item, false );
}
private String parseSoliditySource(String includedFile) throws MojoExecutionException {
    try {
        byte[] contract = Files.readAllBytes(Paths.get(soliditySourceFiles.getDirectory(), includedFile));
        CompilerResult result = SolidityCompiler.getInstance(getLog()).compileSrc(
                contract,
                SolidityCompiler.Options.ABI,
                SolidityCompiler.Options.BIN,
                SolidityCompiler.Options.INTERFACE,
                SolidityCompiler.Options.METADATA
        );
        if (result.isFailed()) {
            throw new MojoExecutionException("Could not compile solidity files\n" + result.errors);
        }
        getLog().debug("\t\tResult:\t" + result.output);
        getLog().debug("\t\tError: \t" + result.errors);
        return result.output;
    } catch (IOException ioException) {
        throw new MojoExecutionException("Could not compile files", ioException);
    }
}
public void testUnpackOverWriteReleases()
    throws IOException, MojoExecutionException, InterruptedException
{
    stubFactory.setCreateFiles( true );
    Artifact release = stubFactory.getReleaseArtifact();
    release.getFile().setLastModified( System.currentTimeMillis() - 2000 );

    ArtifactItem item = new ArtifactItem( release );

    ArrayList list = new ArrayList( 1 );
    list.add( item );
    mojo.setArtifactItems( list );
    mojo.setOverWriteIfNewer( false );
    mojo.setOverWriteReleases( true );
    mojo.execute();

    assertUnpacked( item, true );
}
public void testUnpackOverWriteSnapshot()
    throws IOException, MojoExecutionException, InterruptedException
{
    stubFactory.setCreateFiles( true );
    Artifact artifact = stubFactory.getSnapshotArtifact();
    artifact.getFile().setLastModified( System.currentTimeMillis() - 2000 );

    ArtifactItem item = new ArtifactItem( artifact );

    ArrayList list = new ArrayList( 1 );
    list.add( item );
    mojo.setArtifactItems( list );
    mojo.setOverWriteIfNewer( false );
    mojo.setOverWriteReleases( false );
    mojo.setOverWriteSnapshots( true );
    mojo.execute();

    assertUnpacked( item, true );
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    if (artifact.endsWith("jar") || artifact.endsWith("war")) {
        getLog().debug("Setting fixed timestamp for " + artifact);
        try {
            final Path tmpFolderPath = createTmpFolderPath();
            if (tmpFolderPath.toFile().exists()) {
                deleteTmpFolder(tmpFolderPath);
            }
            final Path tmpFolder = Files.createDirectory(tmpFolderPath);
            backupArtifact();
            // unpack the archive into the temp folder, then repack it from the collected entry names
            Set<String> keys = pakArchiefBestandUit(tmpFolder);
            pakArchiefBestandOpnieuwIn(tmpFolder, keys);
            Files.deleteIfExists(FileSystems.getDefault().getPath(artifact + ".bak"));
            deleteTmpFolder(tmpFolder);
        } catch (IOException e) {
            throw new MojoExecutionException("Cannot create temp folder", e);
        }
    } else {
        getLog().debug("Artifact is not a jar or war file");
    }
}
/**
 * Loads the javadoc from the project, creates a {@link Swagger} object and
 * writes the Swagger definition.
 */
@Override
public void execute() throws MojoExecutionException {
    LocalDateTime start = LocalDateTime.now();
    log = getLog();
    List<Tag> tags = new ArrayList<>();
    List<Definition> definitions = new ArrayList<>();
    try {
        ClassLoader classLoader = new ClassLoader(project);
        JavaFileLoader loader = new JavaFileLoader(log);
        List<JavaFile> javaFiles = loader.getJavaFiles(project);

        Parser parser = new Parser(log, classLoader, tags, definitions);
        Swagger swagger = parser.parse(javaFiles);
        setSwaggerVersion(swagger);

        SwaggerWriter swaggerWriter = new SwaggerWriter(log);
        String basePath = project.getBuild().getOutputDirectory();
        swaggerWriter.createSwaggerDefinition(swagger, basePath, tags, definitions);
        log.info("Swagger file created");

        LocalDateTime end = LocalDateTime.now();
        Duration elapsed = Duration.between(start, end);
        log.info("total time elapsed: " + elapsed.toMillis() + " milliseconds");
    } catch (Exception e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
private MavenProject loadProject ( File file, final MavenSession session ) throws Exception
{
    file = file.getCanonicalFile ();

    for ( final MavenProject project : session.getProjects () )
    {
        final File projectFile = project.getFile ().getCanonicalFile ();
        if ( projectFile.equals ( file ) )
        {
            return project;
        }
    }

    throw new MojoExecutionException ( file, "Unreferenced project found",
            String.format ( "Project at '%s' is not in the list of active projects. This plugin can only "
                    + "work on projects that were loaded by Maven. You need to include the project in your build.", file ) );
}
public void execute() throws MojoExecutionException {
    if (!isExecutionRoot()) {
        getLog().debug("Not in execution root. Do not execute.");
        return;
    }
    try {
        getLog().info("Installing git hooks");
        doExecute();
        getLog().info("Installed git hooks");
    } catch (Exception e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
FileFilter createFileFilter() throws MojoExecutionException {
    try {
        URL urlSource = URLUtil.parseURL(sourceDirectory);
        return new MatchPatternsFileFilter.Builder()
                .addIncludes(includes)
                .addExcludes(excludes)
                .addDefaultExcludes()
                .withSourceDirectory(URLUtil.getFileFromURL(urlSource).getCanonicalPath())
                .withCaseSensitive(false)
                .build();
    } catch (IOException e) {
        throw new MojoExecutionException("could not create file filter", e);
    }
}
@Override
public synchronized void execute () throws MojoExecutionException
{
    fillFromProperties ( "additionalProperties", this.additionalProperties );

    this.changeManager = new ChangeManager ( getLog () );

    try
    {
        getLog ().info ( "Overwriting qualifier properties: " + this.qualifierProperties );
        getLog ().info ( "Overwriting properties: " + this.additionalProperties );
        getLog ().info ( "Force update parents: " + this.forceUpdateParentQualifiers );

        if ( this.dryRun )
        {
            getLog ().info ( "This is a dry run" );
        }

        final Collection<MavenProject> projects = this.helper.expandProjects ( getReactorProjects (), getLog (), this.session );

        getLog ().info ( String.format ( "Processing %s modules", projects.size () ) );

        for ( final MavenProject project : projects )
        {
            getLog ().debug ( String.format ( " -> %s", project ) );
            process ( projects, project );
        }

        if ( !this.dryRun )
        {
            this.changeManager.applyAll ();
        }
    }
    catch ( final Exception e )
    {
        throw new MojoExecutionException ( "Failed to set qualifier", e );
    }
}
List<String> getChartDirectories(String path) throws MojoExecutionException {
    try (Stream<Path> files = Files.walk(Paths.get(path))) {
        return files.filter(p -> p.getFileName().toString().equalsIgnoreCase("chart.yaml"))
                .map(p -> p.getParent().toString())
                .collect(Collectors.toList());
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to scan chart directory at " + path, e);
    }
}
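// A minimal JUnit sketch of how getChartDirectories could be exercised against a
// throwaway chart layout. "PackageMojo" and its no-arg constructor are assumptions
// made for illustration only; they are not taken from the snippet above.
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class ChartDirectoriesTest {

    @Test
    public void findsDirectoriesContainingChartYaml() throws Exception {
        // build a throwaway tree: root/app/Chart.yaml and root/lib/sub/Chart.yaml
        Path root = Files.createTempDirectory("charts");
        Path app = Files.createDirectories(root.resolve("app"));
        Path sub = Files.createDirectories(root.resolve("lib").resolve("sub"));
        Files.createFile(app.resolve("Chart.yaml"));
        Files.createFile(sub.resolve("Chart.yaml"));

        PackageMojo mojo = new PackageMojo(); // hypothetical mojo class exposing getChartDirectories
        List<String> dirs = mojo.getChartDirectories(root.toString());

        // only the two directories that directly contain a Chart.yaml should be returned
        assertEquals(2, dirs.size());
        assertTrue(dirs.contains(app.toString()));
        assertTrue(dirs.contains(sub.toString()));
    }
}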
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    Generator generator = new Generator();
    try {
        generator.parse(input);
    } catch (Exception e) {
        e.printStackTrace();
        throw new MojoExecutionException("Error during GCM parse stage", e);
    }

    final String[] gcVersion = {""};
    project.getPluginArtifacts().forEach(artifact -> {
        if (artifact.getGroupId().equals("com.datathings") && artifact.getArtifactId().equals("greycat-mavenplugin")) {
            gcVersion[0] = artifact.getVersion();
        }
    });

    final List<File> cps = new ArrayList<File>();
    if (project != null) {
        for (Artifact a : project.getArtifacts()) {
            File file = a.getFile();
            if (file != null && file.isFile()) {
                cps.add(file);
            }
        }
    }

    generator.generate(packageName, pluginName, targetGen, targetGenJS, generateJava, generateJS, gcVersion[0], project.getVersion(), cps);
    project.addCompileSourceRoot(targetGen.getAbsolutePath());
}
public void execute() throws MojoExecutionException {
    this.setPerformInitOnly(true);
    this.setNoPrompt(true);
    this.setCleanFirst(false);
    this.validateIsSourcePathPopulated();
    super.execute();
}
private void processAppPng(File iconFile) throws MojoExecutionException {
    try {
        File stagedIconFile = new File(stageDirectory, APP_PNG);
        if (iconFile.exists()) {
            FileUtils.copyFile(iconFile, stagedIconFile);
        }
    } catch (IOException e) {
        throw new MojoExecutionException("Unable to copy app.png", e);
    }
}
@Override
public void controlAppiumServer() throws MojoExecutionException {
    getLog().info(" ");
    getLog().info("-------------------------------------------------------");
    getLog().info(" S T O P P I N G   A P P I U M   S E R V E R");
    getLog().info("-------------------------------------------------------");
    getLog().info(" ");
    try {
        int appiumProcessPID = Integer.parseInt(readFileToString(new File(projectBuildDirectory, APPIUM_PID), UTF_8));
        PidProcess appiumProcess = Processes.newPidProcess(appiumProcessPID);
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            WindowsProcess appiumWindowsProcess = (WindowsProcess) appiumProcess;
            appiumWindowsProcess.setIncludeChildren(true);
            appiumProcess = appiumWindowsProcess;
        }
        if (!appiumProcess.isAlive()) {
            throw new MojoExecutionException("Could not find a process running on " + appiumProcessPID);
        }
        ProcessUtil.destroyGracefullyOrForcefullyAndWait(appiumProcess, shutdownTimeout, TimeUnit.SECONDS, forceShutdownTimeout, TimeUnit.SECONDS);
        if (appiumProcess.isAlive()) {
            throw new MojoExecutionException("Unable to stop Appium server...");
        }
    } catch (InterruptedException | TimeoutException | IOException ex) {
        throw new MojoExecutionException("Unable to stop Appium server...", ex);
    }
}
private Stream<Path> findModelRoots() throws MojoExecutionException {
    try {
        return Files.find(codeGenResources.toPath(), 10, this::isModelFile)
                .map(Path::getParent)
                .sorted(this::modelSharersLast);
    } catch (IOException e) {
        throw new MojoExecutionException("Failed to find '" + MODEL_FILE + "' files in " + codeGenResources, e);
    }
}
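// Files.find above takes a BiPredicate<Path, BasicFileAttributes>, supplied here as
// this::isModelFile, which is not shown. A plausible shape for that predicate is the
// sketch below; it is an assumption based only on the call site and on MODEL_FILE
// being the model file name, not the project's actual code.
private boolean isModelFile(Path path, BasicFileAttributes attrs) {
    // match regular files whose simple name equals the expected model file name
    return attrs.isRegularFile() && path.getFileName().toString().equals(MODEL_FILE);
}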
@Override
public RuntimeHandler getRuntimeHandler(final AbstractWebAppMojo mojo) throws MojoExecutionException {
    final JavaVersion javaVersion = mojo.getJavaVersion();
    final ContainerSetting containerSetting = mojo.getContainerSettings();

    // Neither <javaVersion> nor <containerSettings> is specified
    if (javaVersion == null && (containerSetting == null || containerSetting.isEmpty())) {
        return new NullRuntimeHandlerImpl();
    }

    // Both <javaVersion> and <containerSettings> are specified
    if (javaVersion != null && containerSetting != null && !containerSetting.isEmpty()) {
        throw new MojoExecutionException(RUNTIME_CONFIG_CONFLICT);
    }

    if (javaVersion != null) {
        return new JavaRuntimeHandlerImpl(mojo);
    }

    final DockerImageType imageType = WebAppUtils.getDockerImageType(containerSetting);
    switch (imageType) {
        case PUBLIC_DOCKER_HUB:
            return new PublicDockerHubRuntimeHandlerImpl(mojo);
        case PRIVATE_DOCKER_HUB:
            return new PrivateDockerHubRuntimeHandlerImpl(mojo);
        case PRIVATE_REGISTRY:
            return new PrivateRegistryRuntimeHandlerImpl(mojo);
        case NONE:
            throw new MojoExecutionException(IMAGE_NAME_MISSING);
    }
    throw new MojoExecutionException(NO_RUNTIME_HANDLER);
}
@Override
public void executeInternal() throws MojoExecutionException, MojoFailureException {
    if (getService() == null) {
        getRuntime().clean();
    } else {
        getRuntime().clean(getService());
    }
}
/**
 * Initializes the plugin configuration with the following ascending priority:
 * - properties file (gemnasium.properties)
 * - plugin configuration (within pom.xml)
 * - env variables
 * @param baseDir The maven project baseDir.
 * @param baseUrl The base URL of the Gemnasium instance (for Gemnasium Enterprise usage).
 * @param apiKey Your Gemnasium API key
 * @param projectBranch Current branch
 * @param projectSlug The project identifier on Gemnasium.
 * @param projectRevision Current revision
 * @param ignoredScopes Comma separated list of Maven dependency scopes to ignore.
 * @throws MojoExecutionException if the properties configuration can't be loaded.
 */
public Config(File baseDir, String baseUrl, String apiKey, String projectBranch, String projectSlug,
        String projectRevision, String ignoredScopes) throws MojoExecutionException {
    this.baseDir = baseDir;

    Properties configProperties;
    try {
        configProperties = loadConfigProperties();
    } catch (Exception e) {
        throw new MojoExecutionException("Can't load configuration file.", e);
    }

    this.baseUrl = getFirstNotEmpty(System.getenv().get("GEMNASIUM_BASE_URL"), baseUrl,
            configProperties.getProperty("baseUrl"));

    // Set default baseUrl if none provided
    if (this.baseUrl == null || this.baseUrl.isEmpty()) {
        this.baseUrl = DEFAULT_BASE_URL;
    }

    this.apiKey = getFirstNotEmpty(System.getenv().get("GEMNASIUM_API_KEY"), apiKey,
            configProperties.getProperty("apiKey"));
    this.projectBranch = getFirstNotEmpty(System.getenv().get("GEMNASIUM_PROJECT_BRANCH"), projectBranch,
            configProperties.getProperty("projectBranch"));
    this.projectSlug = getFirstNotEmpty(System.getenv().get("GEMNASIUM_PROJECT_SLUG"), projectSlug,
            configProperties.getProperty("projectSlug"));
    this.projectRevision = getFirstNotEmpty(System.getenv().get("GEMNASIUM_PROJECT_REVISION"), projectRevision,
            configProperties.getProperty("projectRevision"));
    this.ignoredScopes = getFirstNotEmpty(System.getenv().get("GEMNASIUM_IGNORED_SCOPES"), ignoredScopes,
            configProperties.getProperty("ignoredScopes"));
}
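// The constructor above resolves each setting through a getFirstNotEmpty helper that
// is not shown. A minimal sketch of what such a helper could look like, assuming a
// hypothetical varargs method that returns the first non-null, non-blank candidate
// (env variable, then plugin parameter, then properties file, matching the call order):
private static String getFirstNotEmpty(String... candidates) {
    for (String candidate : candidates) {
        if (candidate != null && !candidate.trim().isEmpty()) {
            return candidate;
        }
    }
    // nothing usable was supplied; callers such as baseUrl fall back to a default
    return null;
}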
protected void saveExtensionMetaData(Extension jsonObject) throws MojoExecutionException {
    File targetFile = new File(metadataDestination);
    if (!targetFile.getParentFile().exists() && !targetFile.getParentFile().mkdirs()) {
        throw new MojoExecutionException("Cannot create directory " + targetFile.getParentFile());
    }
    try {
        Json.mapper().writerWithDefaultPrettyPrinter().writeValue(targetFile, jsonObject);
        getLog().info("Created file " + targetFile.getAbsolutePath());
    } catch (IOException e) {
        throw new MojoExecutionException("Cannot write to file: " + metadataDestination, e);
    }
}
private InputStream getInputStream(JarEntry jarEntry, JarFile jarFile, ManifestTransformer manifestTransformer)
        throws IOException, MojoExecutionException {
    InputStream in = jarFile.getInputStream(jarEntry);
    if (manifestTransformer.canTransform(jarEntry)) {
        in = manifestTransformer.transform(in);
    }
    return in;
}
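// Both extractJar and getInputStream above depend on a ManifestTransformer whose
// definition is not shown. Inferred purely from those call sites, a plausible shape is
// the sketch below; the project's real interface may use different names or exceptions.
import java.io.IOException;
import java.io.InputStream;
import java.util.jar.JarEntry;

import org.apache.maven.plugin.MojoExecutionException;

interface ManifestTransformer {
    // true if this entry (typically META-INF/MANIFEST.MF) should be rewritten
    boolean canTransform(JarEntry entry);

    // return a replacement entry, e.g. with an adjusted name or timestamp
    JarEntry transform(JarEntry entry);

    // wrap or replace the entry's byte stream with the transformed content
    InputStream transform(InputStream in) throws IOException, MojoExecutionException;
}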
public boolean isArtifactIncluded( ArtifactItem item )
    throws ArtifactFilterException
{
    Artifact artifact = item.getArtifact();

    boolean overWrite = false;
    boolean result = false;
    if ( ( artifact.isSnapshot() && this.overWriteSnapshots )
        || ( !artifact.isSnapshot() && this.overWriteReleases ) )
    {
        overWrite = true;
    }

    handler.setArtifact( artifact );

    try
    {
        if ( overWrite || ( !handler.isMarkerSet() || ( overWriteIfNewer && handler.isMarkerOlder( artifact ) ) ) )
        {
            result = true;
        }
    }
    catch ( MojoExecutionException e )
    {
        throw new ArtifactFilterException( e.getMessage(), e );
    }

    return result;
}
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    try {
        logger.info("Checking WeDeploy status:");
        WeDeployStatusDTO status = weDeployStatus.get(true);
        logger.info("Auth service: " + status.auth);
        logger.info("Data service: " + status.data);
        logger.info("Email service: " + status.email);
    } catch (WeDeployClientException e) {
        throw new MojoExecutionException("Failed to check WeDeploy status", e);
    }
}