FELIX-1200: Move features-maven-plugin into Karaf
git-svn-id: https://svn.apache.org/repos/asf/felix/trunk@785091 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/AddFeaturesToRepoMojo.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/AddFeaturesToRepoMojo.java
new file mode 100644
index 0000000..a245fcb
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/AddFeaturesToRepoMojo.java
@@ -0,0 +1,327 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.Hashtable;
+import java.util.Set;
+import java.util.HashSet;
+import java.io.File;
+import java.io.IOException;
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.io.BufferedOutputStream;
+import java.io.FileOutputStream;
+import java.io.FileInputStream;
+import java.net.URI;
+import java.net.URL;
+
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
+import org.w3c.dom.*;
+
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.MojoFailureException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.xml.sax.SAXException;
+
+/**
+ * Generates the features XML file
+ *
+ * @version $Revision: 1.1 $
+ * @goal add-features-to-repo
+ * @phase compile
+ * @execute phase="compile"
+ * @requiresDependencyResolution runtime
+ * @inheritByDefault true
+ * @description Add the features to the repository
+ */
+public class AddFeaturesToRepoMojo extends MojoSupport {
+
+    /**
+     * The URIs of the feature descriptor files to read features from.
+     *
+     * @parameter
+     */
+    private List<String> descriptors;
+
+    /**
+     * The names of the features whose bundles should be copied into the
+     * target repository (their transitive feature dependencies are included
+     * as well).
+     *
+     * @parameter
+     */
+    private List<String> features;
+
+    /**
+     * The target directory the bundle artifacts are copied into.
+     *
+     * @parameter
+     */
+    private File repository;
+
+    /**
+     * Resolves the configured features (and their transitive feature
+     * dependencies) and copies every referenced bundle from the local Maven
+     * repository into the target repository directory.
+     *
+     * @throws MojoExecutionException on a malformed bundle URL or any
+     *         failure while reading descriptors or copying bundles
+     */
+    public void execute() throws MojoExecutionException, MojoFailureException {
+        try {
+            // Load all features from the configured descriptor URIs, keyed by name.
+            Map<String, Feature> featuresMap = new HashMap<String, Feature>();
+            for (String uri : descriptors) {
+                Repository repo = new Repository(URI.create(translateFromMaven(uri)));
+                for (Feature f : repo.getFeatures()) {
+                    featuresMap.put(f.getName(), f);
+                }
+            }
+            // Compute the transitive closure of the requested features.
+            Set<String> transitiveFeatures = new HashSet<String>();
+            addFeatures(features, transitiveFeatures, featuresMap);
+            // Gather every bundle referenced by those features.
+            Set<String> bundles = new HashSet<String>();
+            for (String feature : transitiveFeatures) {
+                bundles.addAll(featuresMap.get(feature).getBundles());
+            }
+            System.out.println("Base repo: " + localRepo.getUrl());
+            for (String bundle : bundles) {
+                if (!bundle.startsWith("mvn:")) {
+                    throw new MojoExecutionException("Bundle url is not a maven url: " + bundle);
+                }
+                // mvn URL layout: mvn:groupId/artifactId[/version[/type[/classifier]]]
+                String[] parts = bundle.substring("mvn:".length()).split("/");
+                String groupId = parts[0];
+                String artifactId = parts[1];
+                String version = null;
+                String classifier = null;
+                String type = "jar";
+                if (parts.length > 2) {
+                    version = parts[2];
+                    if (parts.length > 3) {
+                        // FIX: type comes before classifier in the mvn URL
+                        // scheme; previous code had the two swapped, which
+                        // disagreed with translateFromMaven() below.
+                        type = parts[3];
+                        if (parts.length > 4) {
+                            classifier = parts[4];
+                        }
+                    }
+                }
+                String dir = groupId.replace('.', '/') + "/" + artifactId + "/" + version + "/";
+                String name = artifactId + "-" + version + (classifier != null ? "-" + classifier : "") + "." + type;
+                System.out.println("Copy: " + dir + name);
+                // Close the stream once copied so we do not leak a handle
+                // per bundle processed.
+                InputStream is = new URL(getLocalRepoUrl() + "/" + dir + name).openStream();
+                try {
+                    copy(is, repository, name, dir, new byte[8192]);
+                } finally {
+                    is.close();
+                }
+            }
+        } catch (MojoExecutionException e) {
+            throw e;
+        } catch (MojoFailureException e) {
+            throw e;
+        } catch (Exception e) {
+            throw new MojoExecutionException("Error populating repository", e);
+        }
+    }
+
+    /*
+     * Converts a mvn: URI into a URL pointing at the artifact inside the
+     * local Maven repository; on Windows, also fixes up file: URIs
+     * (backslashes, spaces, missing authority). Any other URI is returned
+     * unchanged.
+     */
+    private String translateFromMaven(String uri) {
+        if (uri.startsWith("mvn:")) {
+            // mvn URL layout: mvn:groupId/artifactId[/version[/type[/classifier]]]
+            String[] parts = uri.substring("mvn:".length()).split("/");
+            String groupId = parts[0];
+            String artifactId = parts[1];
+            String version = null;
+            String classifier = null;
+            String type = "jar";
+            if (parts.length > 2) {
+                version = parts[2];
+                if (parts.length > 3) {
+                    type = parts[3];
+                    if (parts.length > 4) {
+                        classifier = parts[4];
+                    }
+                }
+            }
+            // standard Maven repository directory layout
+            String dir = groupId.replace('.', '/') + "/" + artifactId + "/" + version + "/";
+            String name = artifactId + "-" + version + (classifier != null ? "-" + classifier : "") + "." + type;
+
+            return getLocalRepoUrl() + "/" + dir + name;
+        }
+        if (System.getProperty("os.name").startsWith("Windows") && uri.startsWith("file:")) {
+            // normalize Windows paths: forward slashes, %20 for spaces
+            String baseDir = uri.substring(5).replace('\\', '/').replaceAll(" ", "%20");
+            String result = baseDir;
+            if (baseDir.indexOf(":") > 0) {
+                // a drive letter means an absolute path: needs file:///
+                result = "file:///" + baseDir;
+            }
+            return result;
+        }
+        return uri;
+    }
+
+    /*
+     * URL of the local Maven repository. On Windows the base directory is
+     * normalized (forward slashes, %20-escaped spaces) so the result forms
+     * a valid URL; elsewhere the repository's own URL is used as-is.
+     */
+    private String getLocalRepoUrl() {
+        if (!System.getProperty("os.name").startsWith("Windows")) {
+            return localRepo.getUrl();
+        }
+        String baseDir = localRepo.getBasedir().replace('\\', '/').replaceAll(" ", "%20");
+        return localRepo.getProtocol() + ":///" + baseDir;
+    }
+
+    /**
+     * Adds the given feature names and, recursively, their feature
+     * dependencies to the transitive set.
+     *
+     * @throws IllegalArgumentException if a requested feature is not present
+     *         in any of the loaded descriptors (previously a bare NPE)
+     */
+    private void addFeatures(List<String> features, Set<String> transitiveFeatures, Map<String, Feature> featuresMap) {
+        for (String feature : features) {
+            Feature f = featuresMap.get(feature);
+            if (f == null) {
+                throw new IllegalArgumentException("Unable to find the feature '" + feature + "'");
+            }
+            // Only recurse into features not seen before; this also guards
+            // against infinite recursion when feature dependencies form a
+            // cycle.
+            if (transitiveFeatures.add(feature)) {
+                addFeatures(f.getDependencies(), transitiveFeatures, featuresMap);
+            }
+        }
+    }
+
+    /**
+     * Copies the stream's contents into the file destDir/destName under dir,
+     * creating the target directory if needed. The input stream is NOT
+     * closed here -- it remains owned by the caller.
+     *
+     * @throws IOException if the target directory cannot be created, is not
+     *         a directory, or the copy fails
+     */
+    public static void copy(
+        InputStream is, File dir, String destName, String destDir, byte[] buffer)
+        throws IOException
+    {
+        if (destDir == null)
+        {
+            destDir = "";
+        }
+
+        // Make sure the target directory exists and
+        // that is actually a directory.
+        File targetDir = new File(dir, destDir);
+        if (!targetDir.exists())
+        {
+            if (!targetDir.mkdirs())
+            {
+                throw new IOException("Unable to create target directory: "
+                    + targetDir);
+            }
+        }
+        else if (!targetDir.isDirectory())
+        {
+            throw new IOException("Target is not a directory: "
+                + targetDir);
+        }
+
+        BufferedOutputStream bos = new BufferedOutputStream(
+            new FileOutputStream(new File(targetDir, destName)));
+        try
+        {
+            int count = 0;
+            while ((count = is.read(buffer)) > 0)
+            {
+                bos.write(buffer, 0, count);
+            }
+        }
+        finally
+        {
+            // FIX: close in finally so a failed read/write no longer leaks
+            // the output file handle
+            bos.close();
+        }
+    }
+
+    /**
+     * A single feature: a named collection of bundles, configuration
+     * property sets, and dependencies on other features.
+     */
+    public static class Feature {
+
+        private final String name;
+        private final List<String> featureDependencies = new ArrayList<String>();
+        private final List<String> bundleUrls = new ArrayList<String>();
+        private final Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>();
+
+        public Feature(String name) {
+            this.name = name;
+        }
+
+        public void addDependency(String dependency) {
+            featureDependencies.add(dependency);
+        }
+
+        public void addBundle(String bundle) {
+            bundleUrls.add(bundle);
+        }
+
+        public void addConfig(String name, Map<String,String> properties) {
+            configurations.put(name, properties);
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public List<String> getDependencies() {
+            return featureDependencies;
+        }
+
+        public List<String> getBundles() {
+            return bundleUrls;
+        }
+
+        public Map<String, Map<String, String>> getConfigurations() {
+            return configurations;
+        }
+    }
+
+ public static class Repository {
+
+ private URI uri;
+ private List<Feature> features;
+
+ public Repository(URI uri) {
+ this.uri = uri;
+ }
+
+ public URI getURI() {
+ return uri;
+ }
+
+ public Feature[] getFeatures() throws Exception {
+ if (features == null) {
+ load();
+ }
+ return features.toArray(new Feature[features.size()]);
+ }
+
+ public void load() throws IOException {
+ try {
+ features = new ArrayList<Feature>();
+ DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+ Document doc = factory.newDocumentBuilder().parse(uri.toURL().openStream());
+ NodeList nodes = doc.getDocumentElement().getChildNodes();
+ for (int i = 0; i < nodes.getLength(); i++) {
+ org.w3c.dom.Node node = nodes.item(i);
+ if (!(node instanceof Element) || !"feature".equals(node.getNodeName())) {
+ continue;
+ }
+ Element e = (Element) nodes.item(i);
+ String name = e.getAttribute("name");
+ Feature f = new Feature(name);
+ NodeList featureNodes = e.getElementsByTagName("feature");
+ for (int j = 0; j < featureNodes.getLength(); j++) {
+ Element b = (Element) featureNodes.item(j);
+ f.addDependency(b.getTextContent());
+ }
+ NodeList configNodes = e.getElementsByTagName("config");
+ for (int j = 0; j < configNodes.getLength(); j++) {
+ Element c = (Element) configNodes.item(j);
+ String cfgName = c.getAttribute("name");
+ String data = c.getTextContent();
+ Properties properties = new Properties();
+ properties.load(new ByteArrayInputStream(data.getBytes()));
+ Map<String, String> hashtable = new Hashtable<String, String>();
+ for (Object key : properties.keySet()) {
+ String n = key.toString();
+ hashtable.put(n, properties.getProperty(n));
+ }
+ f.addConfig(cfgName, hashtable);
+ }
+ NodeList bundleNodes = e.getElementsByTagName("bundle");
+ for (int j = 0; j < bundleNodes.getLength(); j++) {
+ Element b = (Element) bundleNodes.item(j);
+ f.addBundle(b.getTextContent());
+ }
+ features.add(f);
+ }
+ } catch (SAXException e) {
+ throw (IOException) new IOException().initCause(e);
+ } catch (ParserConfigurationException e) {
+ throw (IOException) new IOException().initCause(e);
+ }
+ }
+
+ }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesFileMojo.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesFileMojo.java
new file mode 100644
index 0000000..07004f2
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesFileMojo.java
@@ -0,0 +1,479 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipException;
+import java.util.zip.ZipFile;
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
+import org.apache.maven.artifact.metadata.ResolutionGroup;
+import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
+import org.apache.maven.artifact.resolver.ArtifactResolutionException;
+import org.apache.maven.artifact.versioning.ArtifactVersion;
+import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
+import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.MojoFailureException;
+import org.osgi.impl.bundle.obr.resource.Manifest;
+
+/**
+ * Generates the features XML file
+ *
+ * @version $Revision: 1.1 $
+ * @goal generate-features-file
+ * @phase compile
+ * @execute phase="compile"
+ * @requiresDependencyResolution runtime
+ * @inheritByDefault true
+ * @description Generates the features XML file
+ */
+@SuppressWarnings("unchecked")
+public class GenerateFeaturesFileMojo extends MojoSupport {
+    // Path separator constant; note most of this class still uses the "/"
+    // literal directly.
+    protected static final String SEPARATOR = "/";
+
+    /**
+     * The file to generate
+     *
+     * @parameter default-value="${project.build.directory}/classes/feature.xml"
+     */
+    private File outputFile;
+
+    /**
+     * The name of the feature, which defaults to the artifact ID if it's not
+     * specified
+     *
+     * @parameter default-value="${project.artifactId}"
+     */
+    private String featureName;
+
+    /**
+     * The artifact type for attaching the generated file to the project
+     *
+     * @parameter default-value="xml"
+     */
+    private String attachmentArtifactType = "xml";
+
+    /**
+     * The artifact classifier for attaching the generated file to the project
+     *
+     * @parameter default-value="features"
+     */
+    private String attachmentArtifactClassifier = "features";
+
+    /**
+     * Should we generate a {@code <feature>} for the current project?
+     *
+     * @parameter default-value="false"
+     */
+    private boolean includeProject = false;
+
+    /**
+     * Should we generate a {@code <feature>} for each of the current
+     * project's {@code <dependency>} elements?
+     *
+     * @parameter default-value="true"
+     */
+    private boolean includeDependencies = true;
+
+    /**
+     * The kernel version for which to generate the bundle
+     *
+     * @parameter
+     */
+    private String kernelVersion;
+
+    /**
+     * A properties file containing bundle translations
+     *
+     * @parameter
+     */
+    private File translation;
+
+    /*
+     * Bundle translation rules, keyed by "groupId/artifactId"; each value
+     * maps a version range to the replacement bundle spec. The overridden
+     * get() lazily creates an empty map for unknown keys, so callers never
+     * see null.
+     */
+    private Map<String, Map<VersionRange, String>> translations = new HashMap<String, Map<VersionRange,String>>() {
+        @Override
+        public Map<VersionRange, String> get(Object key) {
+            if (super.get(key) == null) {
+                super.put(key.toString(), new HashMap<VersionRange, String>());
+            }
+            return super.get(key);
+        }
+    };
+
+    /*
+     * Artifacts for which a <feature> element has already been written;
+     * later occurrences are referenced as a <feature> instead of re-listing
+     * their bundles.
+     */
+    private Set<Artifact> features = new HashSet<Artifact>();
+
+    /*
+     * Bundles shipped with the kernel (see prepare()); these are excluded
+     * from <feature/> generation.
+     */
+    private Set<Artifact> provided = new HashSet<Artifact>();
+
+    /*
+     * Bundles already included in the feature currently being written.
+     */
+    private Set<Artifact> currentFeature = new HashSet<Artifact>();
+
+    /*
+     * Artifacts for which no suitable bundle could be found; reported at the
+     * end of the run.
+     */
+    private Set<Artifact> missingBundles = new TreeSet<Artifact>();
+
+ public void execute() throws MojoExecutionException, MojoFailureException {
+ OutputStream out = null;
+ try {
+ prepare();
+ getLog().info(String.format("-- Start generating %s --", outputFile.getAbsolutePath()));
+ outputFile.getParentFile().mkdirs();
+ out = new FileOutputStream(outputFile);
+
+ PrintStream printer = new PrintStream(out);
+ populateProperties(printer);
+ getLog().info(String.format("-- Done generating %s --", outputFile.getAbsolutePath()));
+
+ // now lets attach it
+ projectHelper.attachArtifact(project, attachmentArtifactType, attachmentArtifactClassifier, outputFile);
+ } catch (Exception e) {
+ throw new MojoExecutionException("Unable to create dependencies file: " + e, e);
+ } finally {
+ if (out != null) {
+ try {
+ out.close();
+ } catch (IOException e) {
+ getLog().info("Failed to close: " + outputFile + ". Reason: " + e, e);
+ }
+ }
+ }
+ }
+
+    /**
+     * Writes the complete features document to the given stream: the XML
+     * prolog, a root {@code <features>} element, and feature entries for the
+     * project itself and/or its dependencies, depending on configuration.
+     */
+    protected void populateProperties(PrintStream out) throws ArtifactResolutionException, ArtifactNotFoundException, IOException {
+        out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
+        out.println("<features>");
+        // a <feature> for the project artifact itself, when requested
+        if (includeProject) {
+            writeCurrentProjectFeature(out);
+        }
+        // one <feature> per project dependency, when requested
+        if (includeDependencies) {
+            writeProjectDependencyFeatures(out);
+        }
+        out.println("</features>");
+    }
+
+    /**
+     * Loads the optional bundle translation rules and resolves the list of
+     * bundles provided by the kernel (those are later excluded from feature
+     * generation).
+     */
+    private void prepare() throws ArtifactResolutionException, ArtifactNotFoundException, IOException, InvalidVersionSpecificationException {
+        if (translation != null) {
+            InputStream stream = null;
+            try {
+                stream = new BufferedInputStream(new FileInputStream(translation));
+                Properties file = new Properties();
+                file.load(stream);
+                ArrayList<String> stringNames = getStringNames(file);
+                for (String key : stringNames) {
+                    // key layout: groupId/artifactId/versionRange
+                    String[] elements = key.split("/");
+                    translations.get(String.format("%s/%s", elements[0], elements[1]))
+                        .put(VersionRange.createFromVersionSpec(elements[2]), file.getProperty(key));
+                }
+                // FIX: report the number of rules read, not the number of
+                // groupId/artifactId keys (several rules may share one key)
+                getLog().info("Loaded " + stringNames.size() + " bundle name translation rules from " + translation.getAbsolutePath());
+            } finally {
+                if (stream != null) {
+                    stream.close();
+                }
+            }
+        }
+
+        Artifact kernel = factory.createArtifact("org.apache.felix.karaf",
+                                                 "apache-felix-karaf",
+                                                 kernelVersion, Artifact.SCOPE_PROVIDED, "pom");
+        resolver.resolve(kernel, remoteRepos, localRepo);
+        // FIX: stale branding -- the plugin was moved from ServiceMix Kernel
+        // to Karaf (FELIX-1200)
+        getLog().info("-- List of bundles provided by Apache Felix Karaf " + kernelVersion + " --");
+        for (Artifact artifact : getDependencies(kernel)) {
+            getLog().info(" " + artifact);
+            provided.add(artifact);
+        }
+        getLog().info("-- <end of list> --");
+    }
+
+    /*
+     * Equivalent of JDK 6's Properties.stringPropertyNames(), kept local so
+     * the plugin still compiles on JDK 5: collects every key whose key and
+     * value are both plain strings.
+     */
+    private ArrayList<String> getStringNames(Properties file) {
+        ArrayList<String> names = new ArrayList<String>();
+        for (Enumeration<?> keys = file.propertyNames(); keys.hasMoreElements();) {
+            Object candidate = keys.nextElement();
+            if (candidate instanceof String && file.get(candidate) instanceof String) {
+                names.add((String) candidate);
+            }
+        }
+        return names;
+    }
+
+    /**
+     * Writes one <feature> element per (non-provided) project dependency,
+     * then reports any artifacts for which no bundle could be found.
+     */
+    private void writeProjectDependencyFeatures(PrintStream out) {
+        // FIX: work on a copy -- removeAll() on the set returned by
+        // getDependencyArtifacts() would mutate the Maven project's own
+        // dependency state.
+        Set<Artifact> dependencies = new HashSet<Artifact>((Set<Artifact>) project.getDependencyArtifacts());
+        dependencies.removeAll(provided);
+        for (Artifact artifact : dependencies) {
+            getLog().info(" Generating feature " + artifact.getArtifactId() + " from " + artifact);
+            out.println(" <feature name='" + artifact.getArtifactId() + "'>");
+            currentFeature.clear();
+            writeBundle(out, artifact);
+            features.add(artifact);
+            out.println(" </feature>");
+        }
+        if (missingBundles.size() > 0) {
+            getLog().info("-- Some bundles were missing --");
+            for (Artifact artifact : missingBundles) {
+                getLog().info(String.format(" %s", artifact));
+            }
+        }
+    }
+
+    /**
+     * Writes the {@code <bundle>} entries for an artifact: a translation
+     * replacement if one matches, otherwise its non-optional, non-test
+     * dependencies (recursively) followed by the artifact itself -- or a
+     * ServiceMix wrapper bundle when the artifact is not already an OSGi
+     * bundle. Provided and already-emitted artifacts are skipped.
+     */
+    private void writeBundle(PrintStream out, Artifact artifact) {
+        // a matching translation rule redirects the whole write
+        Artifact replacement = getReplacement(artifact);
+        if (replacement != null) {
+            writeBundle(out, replacement);
+            return;
+        }
+        if (isProvided(artifact)) {
+            getLog().debug(String.format("Skipping '%s' -- bundle will be provided at runtime", artifact));
+            return;
+        }
+        if (features.contains(artifact)) {
+            // if we already created a feature for this one, just add that instead of the bundle
+            out.println(String.format(" <feature>%s</feature>", artifact.getArtifactId()));
+            return;
+        }
+        // first write the dependencies
+        for (Artifact dependency : getDependencies(artifact)) {
+            if (dependency.isOptional() || Artifact.SCOPE_TEST.equals(dependency.getScope())) {
+                // omit optional dependencies
+                getLog().debug(String.format("Omitting optional and/or test scoped dependency '%s' for '%s'",
+                    dependency, artifact));
+                continue;
+            }
+            getLog().debug(String.format("Adding '%s' as a dependency for '%s'", dependency, artifact));
+            writeBundle(out, dependency);
+        }
+        // skip the bundle if it was already added to this feature previously
+        if (!currentFeature.add(artifact)) {
+            getLog().debug(String.format("Artifact '%s' was already added to the current feature", artifact));
+            return;
+        }
+        // and then write the bundle itself
+        if (isBundle(artifact)) {
+            getLog().info(String.format(" adding bundle %s", artifact));
+            writeBundle(out, artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion());
+        } else {
+            // not a bundle: try to find a servicemix wrapper bundle for it
+            Artifact wrapper = findServicemixBundle(artifact);
+            if (wrapper != null) {
+                getLog().info(String.format(" adding bundle %s (for %s)", wrapper, artifact));
+                writeBundle(out, wrapper.getGroupId(), wrapper.getArtifactId(), wrapper.getBaseVersion());
+            } else {
+                getLog().error(String.format(" unable to find suitable bundle for artifact '%s'", artifact));
+                missingBundles.add(artifact);
+            }
+        }
+    }
+
+    /**
+     * Looks up a translation-rule replacement for the given artifact: rules
+     * are keyed by "groupId/artifactId" and matched against the artifact's
+     * selected version by version range.
+     *
+     * @return the replacement artifact, or null when no rule matches
+     */
+    private Artifact getReplacement(Artifact artifact) {
+        String key = String.format("%s/%s", artifact.getGroupId(), artifact.getArtifactId());
+        String bundle = null;
+        for (VersionRange range : translations.get(key).keySet()) {
+            try {
+                if (range.containsVersion(artifact.getSelectedVersion())) {
+                    bundle = translations.get(key).get(range);
+                    break;
+                }
+            } catch (OverConstrainedVersionException e) {
+                // version can't be determined -- treat this rule as no match
+                bundle = null;
+            }
+        }
+        if (bundle != null) {
+            // rule values are "groupId/artifactId/version"
+            String[] split = bundle.split("/");
+            return factory.createArtifact(split[0], split[1], split[2], Artifact.SCOPE_PROVIDED, artifact.getArtifactHandler().getPackaging());
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Tries to locate an org.apache.servicemix.bundles wrapper bundle for a
+     * plain JAR artifact. If no direct wrapper resolves, falls back to the
+     * artifactId prefix before the first '-' (e.g. mina-core -> mina) to
+     * catch wrappers that cover multiple JARs.
+     *
+     * @return the resolved wrapper artifact, or null when none is available
+     */
+    private Artifact findServicemixBundle(Artifact artifact) {
+        // version-less probe used only to list the available wrapper versions
+        Artifact noVersionWrapper = factory.createArtifact("org.apache.servicemix.bundles",
+            "org.apache.servicemix.bundles." + artifact.getArtifactId(),
+            "",
+            artifact.getScope(), artifact.getType());
+        try {
+            List versions = artifactMetadataSource.retrieveAvailableVersions(noVersionWrapper, localRepo, remoteRepos);
+            Artifact wrapper = factory.createArtifact("org.apache.servicemix.bundles",
+                "org.apache.servicemix.bundles." + artifact.getArtifactId(),
+                getBestVersionForArtifact(artifact, versions),
+                artifact.getScope(), artifact.getType());
+            // let's check if the servicemix bundle for this artifact exists
+            resolver.resolve(wrapper, remoteRepos, localRepo);
+            for (Artifact dependency : getDependencies(wrapper)) {
+                //some of these wrapper bundles provide for multiple JAR files, no need to include any of them after adding the wrapper
+                getLog().debug(String.format("'%s' also provides '%s'", wrapper, dependency));
+                currentFeature.add(dependency);
+            }
+            return wrapper;
+        } catch (ArtifactResolutionException e) {
+            getLog().debug("Couldn't find a ServiceMix bundle for " + artifact, e);
+        } catch (ArtifactNotFoundException e) {
+            getLog().debug("Couldn't find a ServiceMix bundle for " + artifact, e);
+        } catch (ArtifactMetadataRetrievalException e) {
+            getLog().debug("Couldn't find a ServiceMix bundle for " + artifact, e);
+        }
+        if (artifact.getArtifactId().contains("-")) {
+            //let's try to see if we can't find a bundle wrapping multiple artifacts (e.g. mina -> mina-core, mina-codec, ...)
+            // (recursion terminates: the fallback artifactId contains no '-')
+            return findServicemixBundle(factory.createArtifact(artifact.getGroupId(), artifact.getArtifactId().split("-")[0],
+                artifact.getVersion(), artifact.getScope(), artifact.getType()));
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Picks the best available wrapper-bundle version for the artifact:
+     * first a version-prefix match, then a major.minor match, preferring
+     * newer versions in both passes.
+     *
+     * @throws ArtifactMetadataRetrievalException when no version is suitable
+     */
+    protected String getBestVersionForArtifact(Artifact artifact, List<ArtifactVersion> versions) throws ArtifactMetadataRetrievalException {
+        if (versions.isEmpty()) {
+            throw new ArtifactMetadataRetrievalException("No wrapper bundle available for " + artifact);
+        }
+        // newest first
+        Collections.sort(versions, Collections.reverseOrder());
+        String wanted = artifact.getVersion();
+        // pass 1: candidate version starts with the artifact's version
+        for (ArtifactVersion candidate : versions) {
+            if (candidate.toString().startsWith(wanted)) {
+                return candidate.toString();
+            }
+        }
+        // pass 2: candidate shares the artifact's major.minor version
+        for (ArtifactVersion candidate : versions) {
+            String[] parts = candidate.toString().split("\\.");
+            if (parts.length >= 2 && wanted.startsWith(parts[0] + "." + parts[1])) {
+                return candidate.toString();
+            }
+        }
+        throw new ArtifactMetadataRetrievalException("No suitable version found for " + artifact + " wrapper bundle");
+    }
+
+    /*
+     * A bundle counts as provided when any kernel artifact shares its
+     * groupId and artifactId (versions are deliberately ignored).
+     */
+    private boolean isProvided(Artifact bundle) {
+        for (Artifact candidate : provided) {
+            boolean sameArtifactId = bundle.getArtifactId().equals(candidate.getArtifactId());
+            boolean sameGroupId = bundle.getGroupId().equals(candidate.getGroupId());
+            if (sameArtifactId && sameGroupId) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Determines whether the artifact is an OSGi bundle, either by its
+     * "bundle" packaging or by a bundle symbolic name in its manifest.
+     * Defaults to false when the artifact cannot be inspected.
+     */
+    private boolean isBundle(Artifact artifact) {
+        if (artifact.getArtifactHandler().getPackaging().equals("bundle")) {
+            return true;
+        }
+        ZipFile file = null;
+        try {
+            resolver.resolve(artifact, remoteRepos, localRepo);
+            file = new ZipFile(artifact.getFile());
+            ZipEntry entry = file.getEntry("META-INF/MANIFEST.MF");
+            Manifest manifest = new Manifest(file.getInputStream(entry));
+            if (manifest.getBsn() != null) {
+                getLog().debug(String.format("MANIFEST.MF for '%s' contains Bundle-Name '%s'",
+                    artifact, manifest.getBsn().getName()));
+                return true;
+            }
+        } catch (Exception e) {
+            // covers ZipException/IOException and resolution failures alike;
+            // the three previous catch blocks all did the same thing
+            getLog().warn("Unable to determine if " + artifact + " is a bundle; defaulting to false", e);
+        } finally {
+            // FIX: close the zip -- it was previously leaked on every call
+            if (file != null) {
+                try {
+                    file.close();
+                } catch (IOException e) {
+                    // the archive was only opened for reading; nothing to do
+                }
+            }
+        }
+        return false;
+    }
+
+    /*
+     * Resolves the artifacts declared in the given artifact's POM; returns
+     * an empty list (after logging a warning) when the metadata cannot be
+     * retrieved.
+     */
+    private List<Artifact> getDependencies(Artifact artifact) {
+        List<Artifact> result = new ArrayList<Artifact>();
+        try {
+            ResolutionGroup pom = artifactMetadataSource.retrieve(artifact, localRepo, remoteRepos);
+            if (pom != null) {
+                result.addAll(pom.getArtifacts());
+            }
+        } catch (ArtifactMetadataRetrievalException e) {
+            getLog().warn("Unable to retrieve metadata for " + artifact + ", not including dependencies for it");
+        } catch (InvalidArtifactRTException e) {
+            getLog().warn("Unable to retrieve metadata for " + artifact + ", not including dependencies for it");
+        }
+        return result;
+    }
+
+
+    /**
+     * Writes a <feature> element for the current project itself: the
+     * project's own bundle followed by one bundle per valid declared
+     * dependency.
+     */
+    private void writeCurrentProjectFeature(PrintStream out) {
+        out.println(" <feature name='" + featureName + "'>");
+
+        writeBundle(out, project.getGroupId(), project.getArtifactId(), project.getVersion());
+        out.println();
+
+        // typed for-each instead of the raw Iterator previously used here
+        for (Object o : project.getDependencies()) {
+            Dependency dependency = (Dependency) o;
+
+            if (isValidDependency(dependency)) {
+                out.print(" ");
+                writeBundle(out, dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion());
+            }
+        }
+
+        out.println(" </feature>");
+    }
+
+    /**
+     * Hook for filtering which declared dependencies end up in the current
+     * project's feature; currently accepts everything.
+     *
+     * @return true to include the dependency in the generated feature
+     */
+    protected boolean isValidDependency(Dependency dependency) {
+        // TODO filter out only compile time dependencies which are OSGi
+        // bundles?
+        return true;
+    }
+
+    /**
+     * Writes a single {@code <bundle>} element containing the mvn: URL for
+     * the given Maven coordinates.
+     */
+    protected void writeBundle(PrintStream out, String groupId, String artifactId, String version) {
+        StringBuilder bundle = new StringBuilder(" <bundle>mvn:");
+        bundle.append(groupId).append(SEPARATOR);
+        bundle.append(artifactId).append(SEPARATOR);
+        bundle.append(version);
+        bundle.append("</bundle>");
+        out.println(bundle.toString());
+    }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesXmlMojo.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesXmlMojo.java
new file mode 100644
index 0000000..85d4647
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GenerateFeaturesXmlMojo.java
@@ -0,0 +1,578 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.Stack;
+import java.util.zip.ZipException;
+import java.util.zip.ZipFile;
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
+import org.apache.maven.artifact.metadata.ResolutionGroup;
+import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
+import org.apache.maven.artifact.resolver.ArtifactResolutionException;
+import org.apache.maven.artifact.resolver.DefaultArtifactCollector;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.MojoFailureException;
+import org.apache.maven.shared.dependency.tree.DependencyNode;
+import org.apache.maven.shared.dependency.tree.DependencyTreeBuilder;
+import org.apache.maven.shared.dependency.tree.DependencyTreeBuilderException;
+import org.apache.maven.shared.dependency.tree.traversal.DependencyNodeVisitor;
+import org.osgi.impl.bundle.obr.resource.Manifest;
+import org.osgi.impl.bundle.obr.resource.ManifestEntry;
+import org.osgi.impl.bundle.obr.resource.VersionRange;
+
+/**
+ * Generates the features XML file
+ *
+ * @version $Revision: 1.1 $
+ * @goal generate-features-xml
+ * @phase compile
+ * @execute phase="compile"
+ * @requiresDependencyResolution runtime
+ * @inheritByDefault true
+ * @description Generates the features XML file
+ */
+@SuppressWarnings("unchecked")
+public class GenerateFeaturesXmlMojo extends MojoSupport {
+ protected static final String SEPARATOR = "/";
+
+ /**
+ * The dependency tree builder to use.
+ *
+ * @component
+ * @required
+ * @readonly
+ */
+ private DependencyTreeBuilder dependencyTreeBuilder;
+
+ /**
+ * The file to generate
+ *
+ * @parameter default-value="${project.build.directory}/classes/feature.xml"
+ */
+ private File outputFile;
+
+ /**
+ * The artifact type for attaching the generated file to the project
+ *
+ * @parameter default-value="xml"
+ */
+ private String attachmentArtifactType = "xml";
+
+ /**
+ * The artifact classifier for attaching the generated file to the project
+ *
+ * @parameter default-value="features"
+ */
+ private String attachmentArtifactClassifier = "features";
+
+ /**
+ * The kernel version for which to generate the bundle
+ *
+ * @parameter
+ */
+ private String kernelVersion;
+
+ /*
+ * A list of packages exported by the kernel
+ */
+ private Map<String, VersionRange> kernelExports = new HashMap<String, VersionRange>();
+
+ /**
+ * A file containing the list of bundles
+ *
+ * @parameter
+ */
+ private File bundles;
+
+ /*
+ * A set of known bundles
+ */
+ private Set<String> knownBundles = new HashSet<String>();
+
+ /*
+ * A list of exports by the bundles
+ */
+ private Map<String, Map<VersionRange, Artifact>> bundleExports = new HashMap<String, Map<VersionRange, Artifact>>();
+
+ /*
+ * The set of system exports
+ */
+ private List<String> systemExports = new LinkedList<String>();
+
+ /*
+ * These bundles are the features that will be built
+ */
+ private Map<Artifact, Feature> features = new HashMap<Artifact, Feature>();
+
+ /**
+  * Mojo entry point. Builds the export indexes (system packages, kernel
+  * bundles, known bundles, discovered bundles) in that order, then writes
+  * the features XML to {@code outputFile} and attaches it to the project
+  * under the configured type/classifier.
+  *
+  * @throws MojoExecutionException wrapping any underlying failure
+  */
+ public void execute() throws MojoExecutionException, MojoFailureException {
+ PrintStream out = null;
+ try {
+ out = new PrintStream(new FileOutputStream(outputFile));
+ readSystemPackages();
+ readKernelBundles();
+ readBundles();
+ discoverBundles();
+ writeFeatures(out);
+ // now lets attach it
+ projectHelper.attachArtifact(project, attachmentArtifactType, attachmentArtifactClassifier, outputFile);
+ } catch (Exception e) {
+ getLog().error(e);
+ throw new MojoExecutionException("Unable to create features.xml file: " + e, e);
+ } finally {
+ // the stream is closed even when a step above throws
+ if (out != null) {
+ out.close();
+ }
+ }
+ }
+
+ /*
+ * Read all the system provided packages from the <code>config.properties</code> file
+ */
+ private void readSystemPackages() throws IOException {
+ Properties properties = new Properties();
+ // getResourceAsStream() returns null when the resource is missing; the
+ // original code would then fail with an unhelpful NullPointerException
+ // inside Properties.load(), and never closed the stream either.
+ InputStream is = getClass().getClassLoader().getResourceAsStream("config.properties");
+ if (is == null) {
+ throw new IOException("Unable to find config.properties on the plugin classpath");
+ }
+ try {
+ properties.load(is);
+ } finally {
+ is.close();
+ }
+ // both the JRE packages and the OSGi framework packages count as provided
+ readSystemPackages(properties, "jre-1.5");
+ readSystemPackages(properties, "osgi");
+ }
+
+
+ /*
+  * Add the semicolon-separated package list stored under the given property
+  * key to the set of system exports. Missing keys are tolerated (warned)
+  * instead of triggering a NullPointerException on split().
+  */
+ private void readSystemPackages(Properties properties, String key) {
+ // getProperty avoids the unchecked cast of get() and only returns Strings
+ String packages = properties.getProperty(key);
+ if (packages == null) {
+ getLog().warn("No system packages found in config.properties for key " + key);
+ return;
+ }
+ for (String pkg : packages.split(";")) {
+ systemExports.add(pkg.trim());
+ }
+ }
+
+ /*
+ * Download a Kernel distro and check the list of bundles provided by the Kernel
+ */
+ /*
+  * Step 1: determine which bundles the kernel already provides, so their
+  * exports can satisfy imports without adding bundles to the features.
+  * Two modes: walk the project's dependency tree for 'provided'-scope
+  * artifacts (preferred), or — deprecated — resolve a fixed kernel POM
+  * named by the 'kernelVersion' parameter.
+  */
+ private void readKernelBundles() throws ArtifactResolutionException, ArtifactNotFoundException, MojoExecutionException,
+ ZipException, IOException, DependencyTreeBuilderException {
+ final Collection<Artifact> kernelArtifacts;
+ if (kernelVersion == null) {
+ getLog().info("Step 1: Building list of provided bundle exports");
+ kernelArtifacts = new HashSet<Artifact>();
+ // all-inclusive filter: scope selection happens in the visitor below
+ DependencyNode tree = dependencyTreeBuilder.buildDependencyTree(project, localRepo, factory, artifactMetadataSource, new ArtifactFilter() {
+
+ public boolean include(Artifact artifact) {
+ return true;
+ }
+
+ }, new DefaultArtifactCollector());
+ tree.accept(new DependencyNodeVisitor() {
+ public boolean endVisit(DependencyNode node) {
+ // we want the next sibling too
+ return true;
+ }
+ public boolean visit(DependencyNode node) {
+ if (node.getState() != DependencyNode.OMITTED_FOR_CONFLICT) {
+ Artifact artifact = node.getArtifact();
+ // only non-POM artifacts with 'provided' scope count as kernel bundles
+ if (Artifact.SCOPE_PROVIDED.equals(artifact.getScope()) && !artifact.getType().equals("pom")) {
+ kernelArtifacts.add(artifact);
+ }
+ }
+ // we want the children too
+ return true;
+ }
+ });
+ } else {
+ getLog().info("Step 1 : Building list of kernel exports");
+ getLog().warn("Use of 'kernelVersion' is deprecated -- use a dependency with scope 'provided' instead");
+ Artifact kernel = factory.createArtifact("org.apache.servicemix.kernel", "apache-servicemix-kernel", kernelVersion, Artifact.SCOPE_PROVIDED, "pom");
+ resolver.resolve(kernel, remoteRepos, localRepo);
+ kernelArtifacts = getDependencies(kernel);
+ }
+ // record every kernel bundle's exports in kernelExports/bundleExports
+ for (Artifact artifact : kernelArtifacts) {
+ registerKernelBundle(artifact);
+ }
+ getLog().info("...done!");
+ }
+
+ /*
+  * Record a kernel-provided bundle: its exported packages go into
+  * kernelExports (so matching imports are dropped later), and the bundle
+  * is also registered as a regular known bundle.
+  */
+ private void registerKernelBundle(Artifact artifact) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException,
+ IOException {
+ Manifest manifest = getManifest(artifact);
+ if (manifest.getExports() != null) {
+ for (ManifestEntry entry : (List<ManifestEntry>)manifest.getExports()) {
+ kernelExports.put(entry.getName(), entry.getVersion());
+ getLog().debug(" adding kernel export " + entry.getName() + " (" + entry.getVersion() + ")");
+ }
+ }
+ registerBundle(artifact);
+ }
+
+ /*
+ * Read the list of bundles we can use to satisfy links
+ */
+ private void readBundles() throws IOException, ArtifactResolutionException, ArtifactNotFoundException {
+ BufferedReader reader = null;
+ try {
+ if (bundles != null) {
+ getLog().info("Step 2 : Building a list of exports for bundles in " + bundles.getAbsolutePath());
+ reader = new BufferedReader(new FileReader(bundles));
+ String line = reader.readLine();
+ while (line != null) {
+ // expected line format: groupId/artifactId/version/type ('#' = comment)
+ if (line.contains("/") && !line.startsWith("#")) {
+ String[] elements = line.split("/");
+ if (elements.length < 4) {
+ // guard: the original code threw ArrayIndexOutOfBoundsException here
+ getLog().warn("Ignoring malformed bundle line: " + line);
+ } else {
+ Artifact artifact = factory.createArtifact(elements[0], elements[1], elements[2], Artifact.SCOPE_PROVIDED,
+ elements[3]);
+ registerBundle(artifact);
+ }
+ }
+ line = reader.readLine();
+ }
+ } else {
+ getLog().info("Step 2 : No Bundle file supplied for building list of exports");
+ }
+ } finally {
+ // always release the file handle, even on resolution failures
+ if (reader != null) {
+ reader.close();
+ }
+ }
+ getLog().info("...done!");
+ }
+
+ /*
+ * Auto-discover bundles currently in the dependencies
+ */
+ /*
+  * Step 3: scan transitive Maven dependencies for OSGi bundles and register
+  * them as candidates for satisfying imports. Direct dependencies are
+  * skipped (they become features in step 4) and 'provided' ones were
+  * already handled in step 1.
+  */
+ private void discoverBundles() throws ArtifactResolutionException, ArtifactNotFoundException, ZipException, IOException {
+ getLog().info("Step 3 : Discovering bundles in Maven dependencies");
+ for (Artifact dependency : (Set<Artifact>) project.getArtifacts()) {
+ // we will generate a feature for this afterwards
+ if (project.getDependencyArtifacts().contains(dependency)) {
+ continue;
+ }
+ // this is a provided bundle, has been handled in step 1
+ if (dependency.getScope().equals(Artifact.SCOPE_PROVIDED)) {
+ continue;
+ }
+ if (isDiscoverableBundle(dependency)) {
+ getLog().info(" Discovered " + dependency);
+ registerBundle(dependency);
+ }
+ }
+ getLog().info("...done!");
+ }
+
+ /*
+ * Write all project dependencies as feature
+ */
+ /*
+  * Step 4: emit the features XML — one feature per direct dependency that
+  * is neither 'provided' nor a POM. Each written feature is registered so
+  * later features can reference it instead of repeating its bundles.
+  */
+ private void writeFeatures(PrintStream out) throws ArtifactResolutionException, ArtifactNotFoundException,
+ ZipException, IOException {
+ getLog().info("Step 4 : Generating " + outputFile.getAbsolutePath());
+ out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
+ out.println("<features>");
+ Set<Artifact> dependencies = (Set<Artifact>)project.getDependencyArtifacts();
+ for (Artifact artifact : dependencies) {
+ if (!artifact.getScope().equals(Artifact.SCOPE_PROVIDED) && !artifact.getType().equals("pom")) {
+ getLog().info(" Generating feature " + artifact.getArtifactId() + " from " + artifact);
+ Feature feature = getFeature(artifact);
+ feature.write(out);
+ registerFeature(artifact, feature);
+ }
+ }
+ out.println("</features>");
+ getLog().info("...done!");
+ }
+
+ /*
+ * Get the feature for an artifact
+ */
+ private Feature getFeature(Artifact artifact) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException, IOException {
+ Feature feature = new Feature(artifact);
+ addRequirements(artifact, feature);
+ return feature;
+ }
+
+ /*
+ * Only auto-discover an OSGi bundle
+ * - if it is not already known as a feature itself
+ * - if it is not another version of an already known bundle
+ */
+ /*
+  * Only auto-discover an OSGi bundle
+  * - if it is not already known as a feature itself
+  * - if it is not another version of an already known bundle
+  */
+ private boolean isDiscoverableBundle(Artifact artifact) {
+ if (isBundle(artifact) && !isFeature(artifact) && !artifact.getScope().equals(Artifact.SCOPE_PROVIDED)) {
+ // knownBundles entries use the groupId/artifactId/version layout of
+ // toString(Artifact); only group+artifact are compared here, so any
+ // already-known version of the same bundle blocks discovery
+ for (String known : knownBundles) {
+ String[] elements = known.split("/");
+ if (artifact.getGroupId().equals(elements[0]) &&
+ artifact.getArtifactId().equals(elements[1])) {
+ getLog().debug(String.format(" Avoid auto-discovery for %s because of existing bundle %s",
+ toString(artifact), known));
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+
+ /*
+ * Check if the given artifact is a bundle
+ */
+ /*
+  * Check if the given artifact is an OSGi bundle: either already known,
+  * packaged as 'bundle', or carrying a bundle symbolic name in its
+  * MANIFEST.MF. Any failure while reading the manifest is treated as
+  * "not a bundle" (logged at debug level only).
+  */
+ private boolean isBundle(Artifact artifact) {
+ if (knownBundles.contains(toString(artifact)) || artifact.getArtifactHandler().getPackaging().equals("bundle")) {
+ return true;
+ } else {
+ try {
+ Manifest manifest = getManifest(artifact);
+ // a Bundle-SymbolicName entry is the definitive OSGi marker
+ if (manifest.getBsn() != null) {
+ getLog().debug(String.format("MANIFEST.MF for '%s' contains Bundle-Name '%s'",
+ artifact, manifest.getBsn().getName()));
+ return true;
+ }
+ } catch (ZipException e) {
+ getLog().debug("Unable to determine if " + artifact + " is a bundle; defaulting to false", e);
+ } catch (IOException e) {
+ getLog().debug("Unable to determine if " + artifact + " is a bundle; defaulting to false", e);
+ } catch (Exception e) {
+ getLog().debug("Unable to determine if " + artifact + " is a bundle; defaulting to false", e);
+ }
+ }
+ return false;
+ }
+
+ /*
+ * Add requirements for an artifact to a feature
+ */
+ /*
+  * Resolve each unsatisfied import of the artifact against the registered
+  * bundle exports and push the matching bundles onto the feature,
+  * recursing into each newly-added bundle's own requirements.
+  */
+ private void addRequirements(Artifact artifact, Feature feature) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException, IOException {
+ Manifest manifest = getManifest(artifact);
+ Collection<ManifestEntry> remaining = getRemainingImports(manifest);
+ Artifact previous = null;
+ for (ManifestEntry entry : remaining) {
+ Artifact add = null;
+ Map<VersionRange, Artifact> versions = bundleExports.get(entry.getName());
+ if (versions != null) {
+ for (VersionRange range : versions.keySet()) {
+ // NOTE(review): 'add' is overwritten unconditionally on every
+ // iteration, so the range.compareTo() check below is dead code and
+ // the LAST iterated exporter always wins regardless of version
+ // match — presumably the intent was to prefer an exact version
+ // match; confirm before changing selection behavior.
+ add = versions.get(range);
+ if (range.compareTo(entry.getVersion()) == 0) {
+ add = versions.get(range);
+ }
+ }
+ }
+ if (add == null) {
+ if (isOptional(entry)) {
+ // debug logging for optional dependency...
+ getLog().debug(String.format(" Unable to find suitable bundle for optional dependency %s (%s)",
+ entry.getName(), entry.getVersion()));
+ } else {
+ // ...but a warning for a mandatory dependency
+ getLog().warn(
+ String.format(" Unable to find suitable bundle for dependency %s (%s) (required by %s)",
+ entry.getName(), entry.getVersion(), artifact.getArtifactId()));
+ }
+ } else {
+ // recurse only when the bundle is new to this feature and is not
+ // itself published as a feature
+ if (!add.equals(previous) && feature.push(add) && !isFeature(add)) {
+ //and get requirements for the bundle we just added
+ getLog().debug(" Getting requirements for " + add);
+ addRequirements(add, feature);
+ }
+ }
+ previous = add;
+ }
+ }
+
+ /*
+ * Check if a given bundle is itself being generated as a feature
+ */
+ /*
+  * Check if a given bundle is itself being generated as a feature
+  * (i.e. it was registered by registerFeature in step 4).
+  */
+ private boolean isFeature(Artifact artifact) {
+ return features.containsKey(artifact);
+ }
+
+ /*
+ * Check a manifest entry and check if the resolution for the import has been marked as optional
+ */
+ /*
+  * Check whether a manifest import entry carries the
+  * 'resolution:=optional' directive.
+  */
+ private boolean isOptional(ManifestEntry entry) {
+ if (entry.getAttributes() == null) {
+ return false;
+ }
+ Object resolution = entry.getAttributes().get("resolution:");
+ // "optional".equals(...) is null-safe and matches the original semantics
+ return "optional".equals(resolution);
+ }
+
+ /*
+ * Register a bundle, enlisting all packages it provides
+ */
+ /*
+  * Register a bundle, enlisting all packages it provides: each exported
+  * package name maps to a (version-range -> exporting artifact) table in
+  * bundleExports, and the bundle joins the knownBundles set.
+  */
+ private void registerBundle(Artifact artifact) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException,
+ IOException {
+ getLog().debug("Registering bundle " + artifact);
+ knownBundles.add(toString(artifact));
+ Manifest manifest = getManifest(artifact);
+ for (ManifestEntry entry : getManifestEntries(manifest.getExports())) {
+ Map<VersionRange, Artifact> versions = bundleExports.get(entry.getName());
+ if (versions == null) {
+ versions = new HashMap<VersionRange, Artifact>();
+ }
+ // later registrations of the same package+range overwrite earlier ones
+ versions.put(entry.getVersion(), artifact);
+ getLog().debug(String.format(" %s exported by bundle %s", entry.getName(), artifact));
+ bundleExports.put(entry.getName(), versions);
+ }
+ }
+
+ /*
+ * Register a feature and also register the bundle for the feature
+ */
+ /*
+  * Register a feature and also register the bundle for the feature, so a
+  * later feature can include this one instead of repeating its bundles.
+  */
+ private void registerFeature(Artifact artifact, Feature feature) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException,
+ IOException {
+ features.put(artifact, feature);
+ registerBundle(artifact);
+ }
+
+ /*
+ * Determine the list of imports to be resolved
+ */
+ /*
+  * Determine the imports that still need to be satisfied by other bundles:
+  * start from all manifest imports and drop those covered by the bundle's
+  * own exports, by the kernel's exports, or by the system bundle.
+  */
+ private Collection<ManifestEntry> getRemainingImports(Manifest manifest) {
+ Collection<ManifestEntry> imports = getManifestEntries(manifest.getImports());
+ Collection<ManifestEntry> result = new LinkedList<ManifestEntry>(imports);
+ for (ManifestEntry candidate : imports) {
+ // matching is by package name only, same as the original three passes
+ boolean satisfied = systemExports.contains(candidate.getName())
+ || kernelExports.containsKey(candidate.getName());
+ if (!satisfied) {
+ for (ManifestEntry export : getManifestEntries(manifest.getExports())) {
+ if (candidate.getName().equals(export.getName())) {
+ satisfied = true;
+ }
+ }
+ }
+ if (satisfied) {
+ result.remove(candidate);
+ }
+ }
+ return result;
+ }
+
+ /*
+  * Normalize a possibly-null manifest entry list into a usable collection.
+  */
+ private Collection<ManifestEntry> getManifestEntries(List imports) {
+ return (imports == null) ? new LinkedList<ManifestEntry>() : (Collection<ManifestEntry>) imports;
+ }
+
+ /*
+  * Load the OSGi manifest for an artifact, resolving it from the remote
+  * repositories only when it is not already in the local repository.
+  */
+ private Manifest getManifest(Artifact artifact) throws ArtifactResolutionException, ArtifactNotFoundException, ZipException,
+ IOException {
+ // pathOf() returns a path relative to the repository root, so it must be
+ // resolved against the repository basedir (the original relative File
+ // almost never existed, forcing a resolver round-trip every time)
+ File localFile = new File(localRepo.getBasedir(), localRepo.pathOf(artifact));
+ ZipFile file;
+ if (localFile.exists()) {
+ //avoid going over to the repository if the file is already on the disk
+ file = new ZipFile(localFile);
+ } else {
+ resolver.resolve(artifact, remoteRepos, localRepo);
+ file = new ZipFile(artifact.getFile());
+ }
+ try {
+ java.util.zip.ZipEntry entry = file.getEntry("META-INF/MANIFEST.MF");
+ if (entry == null) {
+ // fail with a clear message instead of the original NullPointerException
+ throw new IOException("No META-INF/MANIFEST.MF found in " + artifact);
+ }
+ // Manifest appears to parse the stream in its constructor, so closing
+ // the zip afterwards is safe — NOTE(review): confirm against the OBR
+ // Manifest implementation
+ return new Manifest(file.getInputStream(entry));
+ } finally {
+ // the original leaked one open ZipFile per manifest lookup
+ file.close();
+ }
+ }
+
+ /*
+  * Fetch the direct dependencies of an artifact from its POM metadata.
+  * Failures are tolerated: the artifact simply contributes no
+  * dependencies (a warning is logged).
+  */
+ private List<Artifact> getDependencies(Artifact artifact) {
+ List<Artifact> list = new ArrayList<Artifact>();
+ try {
+ ResolutionGroup pom = artifactMetadataSource.retrieve(artifact, localRepo, remoteRepos);
+ if (pom != null) {
+ list.addAll(pom.getArtifacts());
+ }
+ } catch (ArtifactMetadataRetrievalException e) {
+ getLog().warn("Unable to retrieve metadata for " + artifact + ", not including dependencies for it");
+ } catch (InvalidArtifactRTException e) {
+ getLog().warn("Unable to retrieve metadata for " + artifact + ", not including dependencies for it");
+ }
+ return list;
+ }
+
+ /*
+  * Canonical groupId/artifactId/version key for an artifact — the same
+  * layout used by the known-bundles file and the knownBundles set.
+  */
+ public static String toString(Artifact artifact) {
+ return artifact.getGroupId() + SEPARATOR + artifact.getArtifactId() + SEPARATOR + artifact.getVersion();
+ }
+
+ private class Feature {
+
+ private Stack<Artifact> artifacts = new Stack<Artifact>();
+ private final Artifact artifact;
+
+ private Feature(Artifact artifact) {
+ super();
+ this.artifact = artifact;
+ artifacts.push(artifact);
+ }
+
+ public boolean push(Artifact item) {
+ if (artifacts.contains(item)) {
+ artifacts.remove(item);
+ artifacts.push(item);
+ return false;
+ }
+ if (!artifacts.contains(item)) {
+ artifacts.push(item);
+ return true;
+ }
+ return false;
+ }
+
+ public void write(PrintStream out) {
+ out.println(" <feature name='" + artifact.getArtifactId() + "' version='"
+ + artifact.getBaseVersion() + "'>");
+
+ Stack<Artifact> resulting = new Stack<Artifact>();
+ resulting.addAll(artifacts);
+
+ // remove dependencies for included features
+ for (Artifact next : artifacts) {
+ if (isFeature(next)) {
+ resulting.removeAll(features.get(next).getDependencies());
+ }
+ }
+
+ while (!resulting.isEmpty()) {
+ Artifact next = resulting.pop();
+ if (isFeature(next)) {
+ out.println(" <feature version='"
+ + next.getBaseVersion() + "'>" + String.format("%s</feature>", next.getArtifactId()));
+ } else {
+ out.println(String.format(" <bundle>mvn:%s/%s/%s</bundle>",
+ next.getGroupId(), next.getArtifactId(), next.getBaseVersion()));
+ }
+ }
+ out.println(" </feature>");
+ }
+
+ public List<Artifact> getDependencies() {
+ List<Artifact> dependencies = new LinkedList<Artifact>(artifacts);
+ dependencies.remove(artifact);
+ return dependencies;
+ }
+ }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GraphArtifactCollector.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GraphArtifactCollector.java
new file mode 100644
index 0000000..05608d9
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/GraphArtifactCollector.java
@@ -0,0 +1,435 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
+import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
+import org.apache.maven.artifact.metadata.ResolutionGroup;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.resolver.ArtifactCollector;
+import org.apache.maven.artifact.resolver.ArtifactResolutionException;
+import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
+import org.apache.maven.artifact.resolver.CyclicDependencyException;
+import org.apache.maven.artifact.resolver.ResolutionListener;
+import org.apache.maven.artifact.resolver.ResolutionNode;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+import org.apache.maven.artifact.versioning.ArtifactVersion;
+import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * @version $Revision: 1.1 $
+ */
+public class GraphArtifactCollector implements ArtifactCollector {
+ /**
+  * Collect without dependency management: delegates to the full overload
+  * with an empty managed-versions map.
+  */
+ public ArtifactResolutionResult collect(
+ Set artifacts,
+ Artifact originatingArtifact,
+ ArtifactRepository localRepository,
+ List remoteRepositories,
+ ArtifactMetadataSource source,
+ ArtifactFilter filter,
+ List listeners) throws ArtifactResolutionException {
+ return collect(artifacts, originatingArtifact, Collections.EMPTY_MAP,
+ localRepository, remoteRepositories, source, filter, listeners);
+ }
+
+ /**
+  * Build the full resolution graph rooted at the originating artifact,
+  * then flatten it into the set of active, filter-accepted nodes.
+  * resolvedArtifacts maps a node key to the list of nodes seen for that
+  * key (used for conflict/version mediation during recursion).
+  */
+ public ArtifactResolutionResult collect(
+ Set artifacts,
+ Artifact originatingArtifact,
+ Map managedVersions,
+ ArtifactRepository localRepository,
+ List remoteRepositories,
+ ArtifactMetadataSource source,
+ ArtifactFilter filter,
+ List listeners) throws ArtifactResolutionException {
+ Map resolvedArtifacts = new HashMap();
+
+ ResolutionNode root = new ResolutionNode(originatingArtifact, remoteRepositories);
+ root.addDependencies(artifacts, remoteRepositories, filter);
+ recurse(root, resolvedArtifacts, managedVersions, localRepository,
+ remoteRepositories, source, filter, listeners);
+
+ Set set = new HashSet();
+ for (Iterator i = resolvedArtifacts.values().iterator(); i.hasNext();) {
+ List nodes = (List) i.next();
+ for (Iterator j = nodes.iterator(); j.hasNext();) {
+ ResolutionNode node = (ResolutionNode) j.next();
+ Artifact artifact = node.getArtifact();
+ if (!node.equals(root) && node.isActive() && node.filterTrail(filter)
+ // If it was optional and not a direct dependency,
+ // we don't add it or its children, just allow the
+ // update of the version and scope
+ && (node.isChildOfRootNode() || !artifact.isOptional())) {
+ artifact.setDependencyTrail(node.getDependencyTrail());
+ set.add(node);
+ }
+ }
+ }
+
+ ArtifactResolutionResult result = new ArtifactResolutionResult();
+ result.setArtifactResolutionNodes(set);
+ return result;
+ }
+
+ /*
+  * Depth-first walk of the dependency graph. For each node: apply managed
+  * versions, mediate against previously-seen nodes with the same key, and
+  * — if the node stays active — retrieve its POM metadata and recurse into
+  * its children. Listener events mirror Maven's own collector.
+  */
+ private void recurse(
+ ResolutionNode node,
+ Map resolvedArtifacts,
+ Map managedVersions,
+ ArtifactRepository localRepository,
+ List remoteRepositories,
+ ArtifactMetadataSource source,
+ ArtifactFilter filter,
+ List listeners) throws CyclicDependencyException, ArtifactResolutionException,
+ OverConstrainedVersionException {
+ fireEvent(ResolutionListener.TEST_ARTIFACT, listeners, node);
+
+ // TODO: use as a conflict resolver
+ Object key = node.getKey();
+ if (managedVersions.containsKey(key)) {
+ // dependencyManagement overrides version and scope of the node
+ Artifact artifact = (Artifact) managedVersions.get(key);
+ fireEvent(ResolutionListener.MANAGE_ARTIFACT, listeners, node, artifact);
+ if (artifact.getVersion() != null) {
+ node.getArtifact().setVersion(artifact.getVersion());
+ }
+ if (artifact.getScope() != null) {
+ node.getArtifact().setScope(artifact.getScope());
+ }
+ }
+
+ List previousNodes = (List) resolvedArtifacts.get(key);
+ if (previousNodes != null) {
+ // mediation may disable this node and substitute a previous one
+ node = checkPreviousNodes(node, listeners, previousNodes);
+ }
+ else {
+ previousNodes = new ArrayList();
+ resolvedArtifacts.put(key, previousNodes);
+ }
+ previousNodes.add(node);
+
+ if (node.isActive()) {
+ fireEvent(ResolutionListener.INCLUDE_ARTIFACT, listeners, node);
+ }
+
+ // don't pull in the transitive deps of a system-scoped dependency.
+ if (node.isActive() && !Artifact.SCOPE_SYSTEM.equals(node.getArtifact().getScope())) {
+ fireEvent(ResolutionListener.PROCESS_CHILDREN, listeners, node);
+ for (Iterator i = node.getChildrenIterator(); i.hasNext();) {
+ ResolutionNode child = (ResolutionNode) i.next();
+ // We leave in optional ones, but don't pick up its dependencies
+ if (!child.isResolved()
+ && (!child.getArtifact().isOptional() || child.isChildOfRootNode())) {
+ Artifact artifact = child.getArtifact();
+ try {
+ if (artifact.getVersion() == null) {
+ // set the recommended version
+ // TODO: maybe its better to just pass the range
+ // through to retrieval and use a transformation?
+ ArtifactVersion version;
+ version = getArtifactVersion(localRepository, remoteRepositories, source, artifact);
+
+ artifact.selectVersion(version.toString());
+ fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE,
+ listeners, child);
+ }
+
+ ResolutionGroup rGroup = source.retrieve(artifact,
+ localRepository, remoteRepositories);
+
+ // TODO might be better to have source.retreive() throw
+ // a specific exception for this situation
+ // and catch here rather than have it return null
+ if (rGroup == null) {
+ // relocated dependency artifact is declared
+ // excluded, no need to add and recurse further
+ continue;
+ }
+
+ child.addDependencies(rGroup.getArtifacts(),
+ rGroup.getResolutionRepositories(), filter);
+ }
+ catch (CyclicDependencyException e) {
+ // would like to throw this, but we have crappy stuff in
+ // the repo
+
+ fireEvent(ResolutionListener.OMIT_FOR_CYCLE, listeners,
+ new ResolutionNode(e.getArtifact(), remoteRepositories, child));
+ }
+ catch (ArtifactMetadataRetrievalException e) {
+ artifact.setDependencyTrail(node.getDependencyTrail());
+ throw new ArtifactResolutionException(
+ "Unable to get dependency information: "
+ + e.getMessage(), artifact, e);
+ }
+
+ recurse(child, resolvedArtifacts, managedVersions,
+ localRepository, remoteRepositories, source,
+ filter, listeners);
+ }
+ }
+ fireEvent(ResolutionListener.FINISH_PROCESSING_CHILDREN, listeners,
+ node);
+ }
+ }
+
+ /*
+  * Pick a concrete version for an artifact declared with a version range:
+  * use the already-selected version when known, otherwise fetch the
+  * available versions (caching them on the artifact) and match the range.
+  *
+  * @throws OverConstrainedVersionException when no available version
+  *         satisfies the range
+  */
+ private ArtifactVersion getArtifactVersion(
+ ArtifactRepository localRepository,
+ List remoteRepositories,
+ ArtifactMetadataSource source,
+ Artifact artifact) throws OverConstrainedVersionException,
+ ArtifactMetadataRetrievalException {
+ ArtifactVersion version;
+ if (!artifact.isSelectedVersionKnown()) {
+ List versions = artifact.getAvailableVersions();
+ if (versions == null) {
+ versions = source.retrieveAvailableVersions(
+ artifact, localRepository,
+ remoteRepositories);
+ artifact.setAvailableVersions(versions);
+ }
+
+ VersionRange versionRange = artifact.getVersionRange();
+
+ version = versionRange.matchVersion(versions);
+
+ if (version == null) {
+ if (versions.isEmpty()) {
+ throw new OverConstrainedVersionException(
+ "No versions are present in the repository for the artifact with a range "
+ + versionRange, artifact, remoteRepositories);
+ }
+ else {
+ throw new OverConstrainedVersionException(
+ "Couldn't find a version in "
+ + versions
+ + " to match range "
+ + versionRange,
+ artifact, remoteRepositories);
+ }
+ }
+ }
+ else {
+ version = artifact.getSelectedVersion();
+ }
+ return version;
+ }
+
+ /*
+  * Mediate the current node against every previously-seen active node for
+  * the same artifact key: reconcile version ranges, update scopes, and
+  * disable whichever node is farther from the root (nearest-wins).
+  * Returns the node that remains active for this key.
+  */
+ private ResolutionNode checkPreviousNodes(
+ ResolutionNode node,
+ List listeners,
+ List previousNodes) throws OverConstrainedVersionException {
+ for (Iterator i = previousNodes.iterator(); i.hasNext();) {
+ ResolutionNode previous = (ResolutionNode) i.next();
+ if (previous.isActive()) {
+ // Version mediation
+ VersionRange previousRange = previous.getArtifact().getVersionRange();
+ VersionRange currentRange = node.getArtifact().getVersionRange();
+ // TODO: why do we force the version on it? what if they
+ // don't match?
+ if (previousRange == null) {
+ // version was already resolved
+ node.getArtifact().setVersion(previous.getArtifact().getVersion());
+ }
+ else if (currentRange == null) {
+ // version was already resolved
+ previous.getArtifact().setVersion(node.getArtifact().getVersion());
+ }
+ else {
+ // TODO: shouldn't need to double up on this work, only
+ // done for simplicity of handling recommended
+ // version but the restriction is identical
+ VersionRange newRange = previousRange.restrict(currentRange);
+ // TODO: ick. this forces the OCE that should have come
+ // from the previous call. It is still correct
+ if (newRange.isSelectedVersionKnown(previous.getArtifact())) {
+ fireEvent(ResolutionListener.RESTRICT_RANGE,
+ listeners, node, previous.getArtifact(),
+ newRange);
+ }
+ previous.getArtifact().setVersionRange(newRange);
+ node.getArtifact().setVersionRange(
+ currentRange.restrict(previousRange));
+
+ // Select an appropriate available version from the (now
+ // restricted) range
+ // Note this version was selected before to get the
+ // appropriate POM
+ // But it was reset by the call to setVersionRange on
+ // restricting the version
+ ResolutionNode[] resetNodes = {previous, node};
+ for (int j = 0; j < 2; j++) {
+ Artifact resetArtifact = resetNodes[j]
+ .getArtifact();
+ if (resetArtifact.getVersion() == null
+ && resetArtifact.getVersionRange() != null
+ && resetArtifact.getAvailableVersions() != null) {
+
+ resetArtifact
+ .selectVersion(resetArtifact
+ .getVersionRange()
+ .matchVersion(
+ resetArtifact
+ .getAvailableVersions())
+ .toString());
+ fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE,
+ listeners, resetNodes[j]);
+ }
+ }
+ }
+
+ // Conflict Resolution
+ // TODO: use as conflict resolver(s), chain
+
+ // TODO: should this be part of mediation?
+ // previous one is more dominant
+ if (previous.getDepth() <= node.getDepth()) {
+ checkScopeUpdate(node, previous, listeners);
+ }
+ else {
+ checkScopeUpdate(previous, node, listeners);
+ }
+
+ if (previous.getDepth() <= node.getDepth()) {
+ // previous was nearer
+ fireEvent(ResolutionListener.OMIT_FOR_NEARER,
+ listeners, node, previous.getArtifact());
+ node.disable();
+ node = previous;
+ }
+ else {
+ fireEvent(ResolutionListener.OMIT_FOR_NEARER,
+ listeners, previous, node.getArtifact());
+ previous.disable();
+ }
+ }
+ }
+ return node;
+ }
+
+ /*
+  * Widen the nearer node's scope when the farther declaration demands it
+  * (runtime over test/provided, compile over anything non-compile) —
+  * unless the nearer declaration comes from the current POM (depth < 2),
+  * which always wins.
+  */
+ private void checkScopeUpdate(ResolutionNode farthest,
+ ResolutionNode nearest, List listeners) {
+ boolean updateScope = false;
+ Artifact farthestArtifact = farthest.getArtifact();
+ Artifact nearestArtifact = nearest.getArtifact();
+
+ if (Artifact.SCOPE_RUNTIME.equals(farthestArtifact.getScope())
+ && (Artifact.SCOPE_TEST.equals(nearestArtifact.getScope()) || Artifact.SCOPE_PROVIDED
+ .equals(nearestArtifact.getScope()))) {
+ updateScope = true;
+ }
+
+ if (Artifact.SCOPE_COMPILE.equals(farthestArtifact.getScope())
+ && !Artifact.SCOPE_COMPILE.equals(nearestArtifact.getScope())) {
+ updateScope = true;
+ }
+
+ // current POM rules all
+ if (nearest.getDepth() < 2 && updateScope) {
+ updateScope = false;
+
+ fireEvent(ResolutionListener.UPDATE_SCOPE_CURRENT_POM, listeners,
+ nearest, farthestArtifact);
+ }
+
+ if (updateScope) {
+ fireEvent(ResolutionListener.UPDATE_SCOPE, listeners, nearest,
+ farthestArtifact);
+
+ // previously we cloned the artifact, but it is more effecient to
+ // just update the scope
+ // if problems are later discovered that the original object needs
+ // its original scope value, cloning may
+ // again be appropriate
+ nearestArtifact.setScope(farthestArtifact.getScope());
+ }
+ }
+
+ /* Convenience overload: fire an event with no replacement artifact. */
+ private void fireEvent(int event, List listeners, ResolutionNode node) {
+ fireEvent(event, listeners, node, null);
+ }
+
+ /* Convenience overload: fire an event with no restricted version range. */
+ private void fireEvent(int event, List listeners, ResolutionNode node,
+ Artifact replacement) {
+ fireEvent(event, listeners, node, replacement, null);
+ }
+
+ /*
+  * Dispatch a ResolutionListener event to every registered listener,
+  * mapping the event constant to the matching callback. 'replacement' and
+  * 'newRange' are only meaningful for the events that use them.
+  *
+  * @throws IllegalStateException for an unknown event constant
+  */
+ private void fireEvent(int event, List listeners, ResolutionNode node,
+ Artifact replacement, VersionRange newRange) {
+ for (Iterator i = listeners.iterator(); i.hasNext();) {
+ ResolutionListener listener = (ResolutionListener) i.next();
+
+ switch (event) {
+ case ResolutionListener.TEST_ARTIFACT:
+ listener.testArtifact(node.getArtifact());
+ break;
+ case ResolutionListener.PROCESS_CHILDREN:
+ listener.startProcessChildren(node.getArtifact());
+ break;
+ case ResolutionListener.FINISH_PROCESSING_CHILDREN:
+ listener.endProcessChildren(node.getArtifact());
+ break;
+ case ResolutionListener.INCLUDE_ARTIFACT:
+ listener.includeArtifact(node.getArtifact());
+ break;
+ case ResolutionListener.OMIT_FOR_NEARER:
+ // only report the omission when the versions actually differ
+ String version = node.getArtifact().getVersion();
+ String replacementVersion = replacement.getVersion();
+ if (version != null ? !version.equals(replacementVersion)
+ : replacementVersion != null) {
+ listener.omitForNearer(node.getArtifact(), replacement);
+ }
+ break;
+ case ResolutionListener.OMIT_FOR_CYCLE:
+ listener.omitForCycle(node.getArtifact());
+ break;
+ case ResolutionListener.UPDATE_SCOPE:
+ listener
+ .updateScope(node.getArtifact(), replacement.getScope());
+ break;
+ case ResolutionListener.UPDATE_SCOPE_CURRENT_POM:
+ listener.updateScopeCurrentPom(node.getArtifact(), replacement
+ .getScope());
+ break;
+ case ResolutionListener.MANAGE_ARTIFACT:
+ listener.manageArtifact(node.getArtifact(), replacement);
+ break;
+ case ResolutionListener.SELECT_VERSION_FROM_RANGE:
+ listener.selectVersionFromRange(node.getArtifact());
+ break;
+ case ResolutionListener.RESTRICT_RANGE:
+ if (node.getArtifact().getVersionRange().hasRestrictions()
+ || replacement.getVersionRange().hasRestrictions()) {
+ listener.restrictRange(node.getArtifact(), replacement,
+ newRange);
+ }
+ break;
+ default:
+ throw new IllegalStateException("Unknown event: " + event);
+ }
+ }
+ }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/MojoSupport.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/MojoSupport.java
new file mode 100644
index 0000000..d309b58
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/MojoSupport.java
@@ -0,0 +1,299 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.resolver.ArtifactCollector;
+import org.apache.maven.artifact.resolver.ArtifactResolutionException;
+import org.apache.maven.artifact.resolver.ArtifactResolver;
+import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.DependencyManagement;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.MavenProjectHelper;
+import org.apache.maven.project.ProjectBuildingException;
+
+/**
+ * Shared base class for the features-maven-plugin mojos.  Holds the Maven
+ * components injected by Plexus (project, repositories, resolver, artifact
+ * factory) together with helper methods for resolving the project's
+ * dependency graph and for walking/pruning the {@code Node} graph recorded
+ * by {@code ResolutionListenerImpl}.
+ *
+ * @version $Revision: 1.1 $
+ */
+public abstract class MojoSupport extends AbstractMojo {
+
+    /**
+     * Maven ProjectHelper
+     *
+     * @component
+     */
+    protected MavenProjectHelper projectHelper;
+
+    /**
+     * The maven project.
+     *
+     * @parameter expression="${project}"
+     * @required
+     * @readonly
+     */
+    protected MavenProject project;
+
+    /**
+     * Directory that resources are copied to during the build.
+     *
+     * @parameter expression="${project.build.directory}/${project.artifactId}-${project.version}-installer"
+     * @required
+     */
+    protected File workDirectory;
+
+    /**
+     * @component
+     */
+    protected MavenProjectBuilder projectBuilder;
+
+    /**
+     * @parameter default-value="${localRepository}"
+     */
+    protected ArtifactRepository localRepo;
+
+    /**
+     * @parameter default-value="${project.remoteArtifactRepositories}"
+     */
+    protected List remoteRepos;
+
+    /**
+     * @component
+     */
+    protected ArtifactMetadataSource artifactMetadataSource;
+
+    /**
+     * @component
+     */
+    protected ArtifactResolver resolver;
+
+    // Collector that walks the dependency tree and reports events to the
+    // ResolutionListenerImpl; instantiated directly rather than injected.
+    protected ArtifactCollector collector = new GraphArtifactCollector();
+
+    /**
+     * @component
+     */
+    protected ArtifactFactory factory;
+
+    /** @return the maven project this mojo executes against */
+    protected MavenProject getProject() {
+        return project;
+    }
+
+    /** @return the working directory resources are copied into */
+    protected File getWorkDirectory() {
+        return workDirectory;
+    }
+
+    /** @return the Maven project helper component */
+    public MavenProjectHelper getProjectHelper() {
+        return projectHelper;
+    }
+
+    /**
+     * Detaches the node for the given artifact from all of its parents,
+     * effectively removing the branch rooted at that artifact from the
+     * dependency graph.  Does nothing if the artifact is unknown to the
+     * listener.
+     */
+    protected void removeBranch(ResolutionListenerImpl listener,
+            Artifact artifact) {
+        Node n = listener.getNode(artifact);
+        if (n != null) {
+            for (Iterator it = n.getParents().iterator(); it.hasNext();) {
+                Node parent = (Node) it.next();
+                parent.getChildren().remove(n);
+            }
+        }
+    }
+
+    /**
+     * Clears the children of the node for the given artifact, keeping the
+     * node itself in the graph.
+     * NOTE(review): unlike removeBranch this does not null-check the node,
+     * so an artifact unknown to the listener causes a NullPointerException.
+     */
+    protected void removeChildren(ResolutionListenerImpl listener,
+            Artifact artifact) {
+        Node n = listener.getNode(artifact);
+        n.getChildren().clear();
+    }
+
+    /**
+     * Recursively accumulates into {@code s} the artifact of {@code n} and
+     * those of all its transitive children; the membership check on the
+     * artifact also acts as the visited guard against cycles.
+     *
+     * @return the same set {@code s}, for call chaining
+     */
+    protected Set getArtifacts(Node n, Set s) {
+        if (!s.contains(n.getArtifact())) {
+            s.add(n.getArtifact());
+            for (Iterator iter = n.getChildren().iterator(); iter.hasNext();) {
+                Node c = (Node) iter.next();
+                getArtifacts(c, s);
+            }
+        }
+        return s;
+    }
+
+    /**
+     * Adds {@code n} and every node below it to the excludes set.  Note the
+     * set holds Node instances (not Artifacts) and, having no visited check,
+     * this assumes the branch is acyclic.
+     */
+    protected void excludeBranch(Node n, Set excludes) {
+        excludes.add(n);
+        for (Iterator iter = n.getChildren().iterator(); iter.hasNext();) {
+            Node c = (Node) iter.next();
+            excludeBranch(c, excludes);
+        }
+    }
+
+    /**
+     * Logs every artifact reachable from {@code rootNode}.
+     * NOTE(review): logs at info level although the only caller in this
+     * class guards the call with isDebugEnabled(); getLog().debug() would
+     * be more consistent.
+     */
+    protected void print(Node rootNode) {
+        for (Iterator iter = getArtifacts(rootNode, new HashSet()).iterator(); iter.hasNext();) {
+            Artifact a = (Artifact) iter.next();
+            getLog().info(" " + a);
+        }
+    }
+
+    /**
+     * Returns the subset of {@code includes} that is also present in the
+     * resolved dependency graph, matching on groupId/artifactId/type only.
+     * When the versions differ a warning is logged and the version from
+     * {@code includes} wins.
+     */
+    protected Set retainArtifacts(Set includes, ResolutionListenerImpl listener) {
+        Set finalIncludes = new HashSet();
+        Set filteredArtifacts = getArtifacts(listener.getRootNode(),
+                new HashSet());
+        for (Iterator iter = includes.iterator(); iter.hasNext();) {
+            Artifact artifact = (Artifact) iter.next();
+            for (Iterator iter2 = filteredArtifacts.iterator(); iter2.hasNext();) {
+                Artifact filteredArtifact = (Artifact) iter2.next();
+                if (filteredArtifact.getArtifactId().equals(
+                        artifact.getArtifactId())
+                        && filteredArtifact.getType()
+                                .equals(artifact.getType())
+                        && filteredArtifact.getGroupId().equals(
+                                artifact.getGroupId())) {
+                    if (!filteredArtifact.getVersion().equals(
+                            artifact.getVersion())) {
+                        getLog()
+                                .warn(
+                                        "Resolved artifact "
+                                                + artifact
+                                                + " has a different version from that in dependency management "
+                                                + filteredArtifact
+                                                + ", overriding dependency management");
+                    }
+                    finalIncludes.add(artifact);
+                }
+            }
+
+        }
+
+        return finalIncludes;
+    }
+
+    /**
+     * Collects the full dependency graph of the current project (honouring
+     * its dependencyManagement section) and returns the listener that
+     * recorded it.  Resolution errors are logged rather than rethrown, so
+     * callers may receive a listener holding only a partial graph; in that
+     * case getRootNode() may be null -- TODO confirm callers handle this.
+     */
+    protected ResolutionListenerImpl resolveProject() {
+        Map managedVersions = null;
+        try {
+            managedVersions = createManagedVersionMap(project.getId(), project
+                    .getDependencyManagement());
+        } catch (ProjectBuildingException e) {
+            getLog().error(
+                    "An error occurred while resolving project dependencies.",
+                    e);
+        }
+        ResolutionListenerImpl listener = new ResolutionListenerImpl();
+        listener.setLog(getLog());
+        try {
+            collector.collect(project.getDependencyArtifacts(), project
+                    .getArtifact(), managedVersions, localRepo, remoteRepos,
+                    artifactMetadataSource, null, Collections
+                    .singletonList(listener));
+        } catch (ArtifactResolutionException e) {
+            getLog().error(
+                    "An error occurred while resolving project dependencies.",
+                    e);
+        }
+        if (getLog().isDebugEnabled()) {
+            getLog().debug("Dependency graph");
+            getLog().debug("================");
+            print(listener.getRootNode());
+            getLog().debug("================");
+        }
+        return listener;
+    }
+
+    /**
+     * Builds a map of dependency management key -> Artifact from the
+     * project's dependencyManagement section, or an (immutable) empty map
+     * when there is none.
+     *
+     * @throws ProjectBuildingException if a managed version specification
+     *         cannot be parsed into a VersionRange
+     */
+    protected Map createManagedVersionMap(String projectId,
+            DependencyManagement dependencyManagement) throws ProjectBuildingException {
+        Map map;
+        if (dependencyManagement != null
+                && dependencyManagement.getDependencies() != null) {
+            map = new HashMap();
+            for (Iterator i = dependencyManagement.getDependencies().iterator(); i
+                    .hasNext();) {
+                Dependency d = (Dependency) i.next();
+
+                try {
+                    VersionRange versionRange = VersionRange
+                            .createFromVersionSpec(d.getVersion());
+                    Artifact artifact = factory.createDependencyArtifact(d
+                            .getGroupId(), d.getArtifactId(), versionRange, d
+                            .getType(), d.getClassifier(), d.getScope());
+                    map.put(d.getManagementKey(), artifact);
+                } catch (InvalidVersionSpecificationException e) {
+                    throw new ProjectBuildingException(projectId,
+                            "Unable to parse version '" + d.getVersion()
+                                    + "' for dependency '"
+                                    + d.getManagementKey() + "': "
+                                    + e.getMessage(), e);
+                }
+            }
+        } else {
+            map = Collections.EMPTY_MAP;
+        }
+        return map;
+    }
+
+    /**
+     * Set up a classloader for the execution of the main class.
+     * The classpath contains the project's main and test output directories
+     * plus the file of every resolved dependency artifact, with this
+     * plugin's own classloader as parent.
+     * NOTE(review): File.toURL() is deprecated because it does not escape
+     * special characters; File.toURI().toURL() is the usual replacement.
+     *
+     * @return a URLClassLoader over the project's classes and dependencies
+     * @throws MojoExecutionException if a path cannot be turned into a URL
+     */
+    protected URLClassLoader getClassLoader() throws MojoExecutionException {
+        try {
+            Set urls = new HashSet();
+
+            URL mainClasses = new File(project.getBuild().getOutputDirectory())
+                    .toURL();
+            getLog().debug("Adding to classpath : " + mainClasses);
+            urls.add(mainClasses);
+
+            URL testClasses = new File(project.getBuild()
+                    .getTestOutputDirectory()).toURL();
+            getLog().debug("Adding to classpath : " + testClasses);
+            urls.add(testClasses);
+
+            Set dependencies = project.getArtifacts();
+            Iterator iter = dependencies.iterator();
+            while (iter.hasNext()) {
+                Artifact classPathElement = (Artifact) iter.next();
+                getLog().debug(
+                        "Adding artifact: " + classPathElement.getFile()
+                                + " to classpath");
+                urls.add(classPathElement.getFile().toURL());
+            }
+            URLClassLoader appClassloader = new URLClassLoader((URL[]) urls
+                    .toArray(new URL[urls.size()]), this.getClass().getClassLoader());
+            return appClassloader;
+        } catch (MalformedURLException e) {
+            throw new MojoExecutionException(
+                    "Error during setting up classpath", e);
+        }
+    }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/Node.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/Node.java
new file mode 100644
index 0000000..18fe8bc
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/Node.java
@@ -0,0 +1,56 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.util.Set;
+import java.util.HashSet;
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * A node in the resolved dependency graph: one artifact together with the
+ * sets of parent and child nodes it is linked to.  A plain mutable bean,
+ * created and wired up by {@code ResolutionListenerImpl} as resolution
+ * events arrive.
+ *
+ * @version $Revision: 1.1 $
+*/
+public class Node {
+    // Nodes for the artifacts this artifact depends on.
+    private Set children = new HashSet();
+    // Nodes that depend on this artifact (the graph is not a strict tree).
+    private Set parents = new HashSet();
+    // The artifact this node represents; may be replaced during conflict
+    // resolution (see ResolutionListenerImpl.omitForNearer).
+    private Artifact artifact;
+
+    /** @return the mutable set of child nodes */
+    public Set getChildren() {
+        return children;
+    }
+
+    /** @return the artifact this node currently represents */
+    public Artifact getArtifact() {
+        return artifact;
+    }
+
+    /** @return the mutable set of parent nodes */
+    public Set getParents() {
+        return parents;
+    }
+
+    public void setChildren(Set children) {
+        this.children = children;
+    }
+
+    public void setParents(Set parents) {
+        this.parents = parents;
+    }
+
+    public void setArtifact(Artifact artifact) {
+        this.artifact = artifact;
+    }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/ResolutionListenerImpl.java b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/ResolutionListenerImpl.java
new file mode 100644
index 0000000..c43dff2
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/java/org/apache/felix/karaf/tooling/features/ResolutionListenerImpl.java
@@ -0,0 +1,161 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.felix.karaf.tooling.features;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Stack;
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.resolver.ResolutionListener;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.plugin.logging.Log;
+
+/**
+ * ResolutionListener that records Maven's dependency-resolution events into
+ * a graph of {@link Node}s keyed by dependency conflict id, so callers
+ * (see MojoSupport) can later walk or prune the resolved tree.
+ * NOTE: the {@code assert} statements below are only active when the JVM
+ * runs with -ea.
+ *
+ * @author Edwin Punzalan
+ * @version $Revision: 1.1 $
+ */
+public class ResolutionListenerImpl implements ResolutionListener {
+    // Stack of nodes whose children are currently being processed.
+    private Stack parents = new Stack();
+    // dependency conflict id -> Node for every artifact seen so far.
+    private Map artifacts = new HashMap();
+    // First node processed with an empty parent stack, i.e. the project root.
+    private Node rootNode;
+    private Log log;
+
+    public void setLog(Log log) {
+        this.log = log;
+    }
+
+    public Log getLog() {
+        return log;
+    }
+
+    public void testArtifact(Artifact artifact) {
+        // getLog().debug("testArtifact: " + artifact);
+        // intentionally blank
+    }
+
+    /**
+     * Pushes the artifact's node onto the parent stack; the first node
+     * processed this way (empty stack) becomes the root of the graph.
+     */
+    public void startProcessChildren(Artifact artifact) {
+        // getLog().debug("startProcessChildren: " + artifact);
+        Node node = (Node) artifacts.get(artifact.getDependencyConflictId());
+        if (parents.isEmpty()) {
+            rootNode = node;
+        }
+        parents.push(node);
+    }
+
+    /** Pops the parent stack, checking (under -ea) it matches the artifact. */
+    public void endProcessChildren(Artifact artifact) {
+        // getLog().debug("endProcessChildren: " + artifact);
+        Node check = (Node) parents.pop();
+        assert artifact.equals(check.getArtifact());
+    }
+
+    /**
+     * Conflict resolved in favour of {@code kept}: the existing node for
+     * this conflict id has its artifact replaced in place.
+     */
+    public void omitForNearer(Artifact omitted, Artifact kept) {
+        // getLog().debug("omitForNearer: omitted=" + omitted + ", kept=" +
+        // kept);
+        assert omitted.getDependencyConflictId().equals(
+                kept.getDependencyConflictId());
+        Node node = (Node) artifacts.get(omitted.getDependencyConflictId());
+        assert node != null;
+        node.setArtifact(kept);
+    }
+
+    public void omitForCycle(Artifact artifact) {
+        // getLog().debug("omitForCycle: " + artifact);
+        // intentionally blank
+    }
+
+    /**
+     * Creates (or reuses) the node for this artifact's conflict id and,
+     * when inside a startProcessChildren scope, links it to the current
+     * parent in both directions.
+     */
+    public void includeArtifact(Artifact artifact) {
+        // getLog().debug("includeArtifact: " + artifact);
+        Node node = (Node) artifacts.get(artifact.getDependencyConflictId());
+        if (node == null) {
+            node = new Node();
+            artifacts.put(artifact.getDependencyConflictId(), node);
+        }
+        node.setArtifact(artifact);
+        if (!parents.isEmpty()) {
+            Node parent = (Node) parents.peek();
+            parent.getChildren().add(node);
+            node.getParents().add(parent);
+        }
+        if (rootNode != null) {
+            // print(rootNode, "");
+        }
+    }
+
+    /**
+     * Debug helper: recursively walks the subtree below {@code node}.
+     * The actual logging line is commented out, so this currently only
+     * traverses without output.
+     */
+    protected void print(Node node, String string) {
+        // getLog().debug(string + rootNode.getArtifact());
+        for (Iterator iter = node.getChildren().iterator(); iter.hasNext();) {
+            Node n = (Node) iter.next();
+            print(n, string + "  ");
+        }
+    }
+
+    /** Propagates a scope change onto the recorded node's artifact. */
+    public void updateScope(Artifact artifact, String scope) {
+        // getLog().debug("updateScope: " + artifact);
+        Node node = (Node) artifacts.get(artifact.getDependencyConflictId());
+
+        node.getArtifact().setScope(scope);
+    }
+
+    /**
+     * Applies dependency management to an already-recorded artifact:
+     * version and scope from {@code replacement} override the node's
+     * artifact when set.  Unknown artifacts are ignored.
+     */
+    public void manageArtifact(Artifact artifact, Artifact replacement) {
+        // getLog().debug("manageArtifact: artifact=" + artifact + ",
+        // replacement=" + replacement);
+        Node node = (Node) artifacts.get(artifact.getDependencyConflictId());
+        if (node != null) {
+            if (replacement.getVersion() != null) {
+                node.getArtifact().setVersion(replacement.getVersion());
+            }
+            if (replacement.getScope() != null) {
+                node.getArtifact().setScope(replacement.getScope());
+            }
+        }
+    }
+
+    public void updateScopeCurrentPom(Artifact artifact, String key) {
+
+        getLog().debug("updateScopeCurrentPom: " + artifact);
+        // intentionally blank
+    }
+
+    public void selectVersionFromRange(Artifact artifact) {
+
+        getLog().debug("selectVersionFromRange: " + artifact);
+        // intentionally blank
+    }
+
+    public void restrictRange(Artifact artifact, Artifact artifact1,
+            VersionRange versionRange) {
+
+        getLog().debug("restrictRange: " + artifact);
+        // intentionally blank
+    }
+
+    /** @return the node recorded for this artifact's conflict id, or null */
+    public Node getNode(Artifact artifact) {
+        return (Node) artifacts.get(artifact.getDependencyConflictId());
+    }
+
+    /** @return all recorded Node values */
+    public Collection getArtifacts() {
+        return artifacts.values();
+    }
+
+    /** @return the root of the graph, or null before resolution started */
+    public Node getRootNode() {
+        return rootNode;
+    }
+}
diff --git a/karaf/tooling/features-maven-plugin/src/main/resources/config.properties b/karaf/tooling/features-maven-plugin/src/main/resources/config.properties
new file mode 100644
index 0000000..96597f2
--- /dev/null
+++ b/karaf/tooling/features-maven-plugin/src/main/resources/config.properties
@@ -0,0 +1,127 @@
+# Package lists bundled as a plugin resource.
+# NOTE(review): presumably consumed as OSGi system-package definitions when
+# resolving features (cf. the Felix/Karaf framework config.properties) --
+# confirm against the plugin code that loads this resource.
+
+# Packages exported by a Java 5 JRE, plus sun.misc/sun.reflect.
+jre-1.5= \
+    javax.accessibility; \
+    javax.activity; \
+    javax.crypto; \
+    javax.crypto.interfaces; \
+    javax.crypto.spec; \
+    javax.imageio; \
+    javax.imageio.event; \
+    javax.imageio.metadata; \
+    javax.imageio.plugins.bmp; \
+    javax.imageio.plugins.jpeg; \
+    javax.imageio.spi; \
+    javax.imageio.stream; \
+    javax.management; \
+    javax.management.loading; \
+    javax.management.modelmbean; \
+    javax.management.monitor; \
+    javax.management.openmbean; \
+    javax.management.relation; \
+    javax.management.remote; \
+    javax.management.remote.rmi; \
+    javax.management.timer; \
+    javax.naming; \
+    javax.naming.directory; \
+    javax.naming.event; \
+    javax.naming.ldap; \
+    javax.naming.spi; \
+    javax.net; \
+    javax.net.ssl; \
+    javax.print; \
+    javax.print.attribute; \
+    javax.print.attribute.standard; \
+    javax.print.event; \
+    javax.rmi; \
+    javax.rmi.CORBA; \
+    javax.rmi.ssl; \
+    javax.security.auth; \
+    javax.security.auth.callback; \
+    javax.security.auth.kerberos; \
+    javax.security.auth.login; \
+    javax.security.auth.spi; \
+    javax.security.auth.x500; \
+    javax.security.cert; \
+    javax.security.sasl; \
+    javax.sound.midi; \
+    javax.sound.midi.spi; \
+    javax.sound.sampled; \
+    javax.sound.sampled.spi; \
+    javax.sql; \
+    javax.sql.rowset; \
+    javax.sql.rowset.serial; \
+    javax.sql.rowset.spi; \
+    javax.swing; \
+    javax.swing.border; \
+    javax.swing.colorchooser; \
+    javax.swing.event; \
+    javax.swing.filechooser; \
+    javax.swing.plaf; \
+    javax.swing.plaf.basic; \
+    javax.swing.plaf.metal; \
+    javax.swing.plaf.multi; \
+    javax.swing.plaf.synth; \
+    javax.swing.table; \
+    javax.swing.text; \
+    javax.swing.text.html; \
+    javax.swing.text.html.parser; \
+    javax.swing.text.rtf; \
+    javax.swing.tree; \
+    javax.swing.undo; \
+    javax.xml; \
+    javax.xml.datatype; \
+    javax.xml.namespace; \
+    javax.xml.parsers; \
+    javax.xml.transform; \
+    javax.xml.transform.dom; \
+    javax.xml.transform.sax; \
+    javax.xml.transform.stream; \
+    javax.xml.validation; \
+    javax.xml.xpath; \
+    org.ietf.jgss; \
+    org.omg.CORBA; \
+    org.omg.CORBA_2_3; \
+    org.omg.CORBA_2_3.portable; \
+    org.omg.CORBA.DynAnyPackage; \
+    org.omg.CORBA.ORBPackage; \
+    org.omg.CORBA.portable; \
+    org.omg.CORBA.TypeCodePackage; \
+    org.omg.CosNaming; \
+    org.omg.CosNaming.NamingContextExtPackage; \
+    org.omg.CosNaming.NamingContextPackage; \
+    org.omg.Dynamic; \
+    org.omg.DynamicAny; \
+    org.omg.DynamicAny.DynAnyFactoryPackage; \
+    org.omg.DynamicAny.DynAnyPackage; \
+    org.omg.IOP; \
+    org.omg.IOP.CodecFactoryPackage; \
+    org.omg.IOP.CodecPackage; \
+    org.omg.Messaging; \
+    org.omg.PortableInterceptor; \
+    org.omg.PortableInterceptor.ORBInitInfoPackage; \
+    org.omg.PortableServer; \
+    org.omg.PortableServer.CurrentPackage; \
+    org.omg.PortableServer.POAManagerPackage; \
+    org.omg.PortableServer.POAPackage; \
+    org.omg.PortableServer.portable; \
+    org.omg.PortableServer.ServantLocatorPackage; \
+    org.omg.SendingContext; \
+    org.omg.stub.java.rmi; \
+    org.omg.stub.javax.management.remote.rmi; \
+    org.w3c.dom; \
+    org.w3c.dom.bootstrap; \
+    org.w3c.dom.css; \
+    org.w3c.dom.events; \
+    org.w3c.dom.html; \
+    org.w3c.dom.ls; \
+    org.w3c.dom.ranges; \
+    org.w3c.dom.stylesheets; \
+    org.w3c.dom.traversal; \
+    org.w3c.dom.views; \
+    org.xml.sax; \
+    org.xml.sax.ext; \
+    org.xml.sax.helpers; \
+    sun.misc; \
+    sun.reflect
+
+# Core OSGi framework package.
+osgi= \
+    org.osgi.framework;