Given a maven project with a parent POM - including mostly plugin and dependency versions - how can I generate a POM which takes the information from the parent, places it into the child and removes the reference to the parent?
Ideally this would be done with the maven-assembly-plugin.
Update: I need this done automatically, since doing it manually is boring and tedious.
Update 2: I'm preparing the source code for an external party and want to deliver only one project, not the whole family.
I had a similar issue a while back. You can avoid Maven substituting the properties and resolving paths etc. by defining a new Maven plugin to do the following:
Resolve each parent using the standard artifact factory
Read each pom file (without resolving it) using the MavenXpp3Reader
Merge the unresolved projects
Write the merged project to a file
Here is some test code I used to prove the process for myself; you'd obviously need to wrap this up in a Maven plugin and bind it to some phase of your process. The resolved pom is output to the output directory (i.e. target) with the name resolved-pom.xml by default. These two properties can be overridden with the usual Maven plugin configuration approach by setting the "outputDir" and/or "pomfileName" properties.
package name.seller.rich;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Stack;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.inheritance.ModelInheritanceAssembler;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
/**
* @goal output-project
* @phase process-resources
* @requiresProject true
*/
public class OutputResolvedProjectMojo extends AbstractMojo {
/**
* Used to overlay the parent models on the project's model.
*
* @parameter expression=
* "${component.org.apache.maven.project.inheritance.ModelInheritanceAssembler}"
* @required
* @readonly
*/
private ModelInheritanceAssembler modelInheritanceAssembler;
/**
* Used to look up Artifacts in the remote repository.
*
* @parameter expression=
* "${component.org.apache.maven.artifact.factory.ArtifactFactory}"
* @required
* @readonly
*/
protected org.apache.maven.artifact.factory.ArtifactFactory factory;
/**
* Used to look up Artifacts in the remote repository.
*
* @parameter expression=
* "${component.org.apache.maven.artifact.resolver.ArtifactResolver}"
* @required
* @readonly
*/
protected org.apache.maven.artifact.resolver.ArtifactResolver artifactResolver;
/**
* List of Remote Repositories used by the resolver
*
* @parameter expression="${project.remoteArtifactRepositories}"
* @readonly
* @required
*/
protected java.util.List remoteRepos;
/**
* Location of the local repository.
*
* @parameter expression="${localRepository}"
* @readonly
* @required
*/
protected org.apache.maven.artifact.repository.ArtifactRepository local;
/**
* @parameter expression="${project}"
* @required
* @readonly
*/
private MavenProject mavenProject;
/**
* The directory to output the resolved project to.
*
* @parameter expression="${project.build.directory}"
*/
private File outputDir;
/**
* The file name to output the resolved pom to.
*
* @parameter expression="resolved-pom.xml"
*/
private String pomfileName;
public void execute() throws MojoExecutionException, MojoFailureException {
MavenProject parentProject = mavenProject.getParent();
// get the unresolved project by reading the file
MavenProject bareProject = readBareProject(mavenProject.getFile());
Stack hierarchy = new Stack();
hierarchy.push(bareProject);
try {
while (parentProject != null) {
try {
// get Maven to resolve the parent artifact (download if
// needed)
Artifact pomArtifact = this.factory.createArtifact(
parentProject.getGroupId(), parentProject
.getArtifactId(), parentProject
.getVersion(), "", "pom");
artifactResolver.resolve(pomArtifact, this.remoteRepos,
this.local);
// get the file from the local repository and read the bare
// project
File parentPomFile = pomArtifact.getFile();
parentProject = readBareProject(parentPomFile);
hierarchy.push(parentProject);
parentProject = parentProject.getParent();
} catch (ArtifactResolutionException e) {
getLog().error("can't resolve parent pom", e);
// bail out rather than retrying the same unresolved parent forever
break;
} catch (ArtifactNotFoundException e) {
getLog().error("can't resolve parent pom", e);
break;
}
}
// merge each model starting with the oldest ancestors
MavenProject currentParent = (MavenProject) hierarchy.pop();
MavenProject currentProject = null;
while (hierarchy.size() != 0) {
currentProject = (MavenProject) hierarchy.pop();
modelInheritanceAssembler.assembleModelInheritance(
currentProject.getModel(), currentParent.getModel());
currentParent = currentProject;
}
// spit the merged model to the output file.
Writer writer = getWriter(outputDir, pomfileName);
if (writer != null) {
currentProject.writeModel(writer);
writer.close();
}
} catch (IOException e) {
getLog().error("can't write resolved pom", e);
}
}
/**
* Creates and returns a writer for outputting the project to a pom file.
*
* @param logDir
* the directory to output the file to.
* @param logFileName
* name of the log file
* @return the writer.
* @throws IOException
* if the writer cannot be created.
*/
private Writer getWriter(final File logDir, final String logFileName)
throws IOException {
if (!logDir.exists()) {
logDir.mkdirs();
}
File pomLog = new File(logDir, logFileName);
if (!pomLog.exists()) {
pomLog.createNewFile();
}
return new FileWriter(pomLog);
}
/**
* Read the mavenProject without resolving any inherited settings.
*
* @return the MavenProject for the project's POM
* @throws MojoExecutionException
* if the POM can't be parsed.
*/
MavenProject readBareProject(final File file) {
MavenXpp3Reader reader = new MavenXpp3Reader();
Model model = null;
try {
model = reader.read(new FileReader(file));
} catch (IOException e) {
getLog().error("can't read pom file", e);
} catch (XmlPullParserException e) {
getLog().error("can't read pom file", e);
}
return new MavenProject(model);
}
}
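To wire this up once the mojo is packaged as a plugin, a configuration along these lines should work (the coordinates here are hypothetical; use whatever groupId/artifactId you give your plugin):
<plugin>
  <groupId>name.seller.rich</groupId>
  <artifactId>resolved-pom-maven-plugin</artifactId>
  <version>1.0-SNAPSHOT</version>
  <executions>
    <execution>
      <goals>
        <goal>output-project</goal>
      </goals>
    </execution>
  </executions>
  <configuration>
    <!-- override the default resolved-pom.xml file name -->
    <pomfileName>flattened-pom.xml</pomfileName>
  </configuration>
</plugin>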
Would mvn help:effective-pom do what you need? You can send the output to a file with -Doutput=new-pom.xml.
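For example:
mvn help:effective-pom -Doutput=new-pom.xml
Note that the effective POM also inlines all the Super POM defaults, so it is rather more verbose than a hand-merged POM.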
If you don't want a parent, you can try the BOM (Bill of Materials) pattern and import that pom in your dependencyManagement section.
Since you want no references to anything else, you will need to build an app to parse the XML from the "parents" and then write the information to the target pom file in the dependencies section. I don't think there is any plugin to do what you want, since it seems to go against the very core of how Maven is supposed to help you. The whole point of Maven is that you can include all your dependencies using inheritance or by importing them with the import scope.
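For illustration, importing a BOM in the child POM looks like this (the coordinates are placeholders):
<dependencyManagement>
  <dependencies>
    <dependency>
      <groupId>com.example</groupId>
      <artifactId>example-bom</artifactId>
      <version>1.0</version>
      <type>pom</type>
      <scope>import</scope>
    </dependency>
  </dependencies>
</dependencyManagement>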
Another option, but I think you've excluded it, is to maintain your releasable POM file separately. You may find this easier if you reference LATEST or SNAPSHOT versions and then use the release plugin to resolve them to released versions.
In our project we use some technical Python scripts (via subprocess) to extract data from Hive, run msck repair table, etc. (I know we should switch to beeline :p). Unfortunately, after the log4j issue, every output started to include something like this:
WARN JNDI lookup class is not available because this JRE does not support JNDI. JNDI string lookups will not be available, continuing configuration. Ignoring java.lang.ClassNotFoundException: org.apache.logging.log4j.core.lookup.JndiLookup
So far our infra team is not allowing us to introduce any changes to the log4j properties.
As we have multiple technical scripts in many places, we would like to find a simple solution for the time being (till we can fix it at the infra level).
Tried:
using hive -s
setting hive.root.logger to console (I think somehow log4j does not understand this is a warning)
Does anyone know how we can fix this (preferably at runtime)?
Thanks!
One way to fix this is to patch your log4j-core with a nerfed version of JndiLookup rather than removing the class entirely with zip -d .... A simple version of that class could look something like this:
package org.apache.logging.log4j.core.lookup;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.plugins.Plugin;
@Plugin(name = "jndi", category = StrLookup.CATEGORY)
public class JndiLookup extends AbstractLookup {
@Override
public String lookup(final LogEvent event, final String key) {
return null;
}
}
Here's a gist with a bash script that automates the compile and replace process:
https://gist.github.com/xyu/348e9cecf25e09bef997a58e1901481b
I looked at what changes were introduced to JndiLookup.java from version 2.0 through 2.14.1 (just before the 2.15 release). No significant changes. It appears regardless of what log4j2 version your application is using you can craft a JndiLookup.java mock class that simply calls LOGGER.warn and nothing else when JndiLookup.lookup is called. You would then replace JndiLookup.class instead of removing it from the jar. The class will load, but do nothing of meaning besides give you a log message you can monitor in your application logs, if doing so adds value.
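For example, a warn-only mock could look something like this; a minimal sketch assuming the 2.x lookup API that appears in the diff below:
package org.apache.logging.log4j.core.lookup;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.status.StatusLogger;

@Plugin(name = "jndi", category = StrLookup.CATEGORY)
public class JndiLookup extends AbstractLookup {

    private static final Logger LOGGER = StatusLogger.getLogger();

    @Override
    public String lookup(final LogEvent event, final String key) {
        // Sketch: never perform a JNDI lookup, just leave a trace in the status log.
        LOGGER.warn("JNDI lookup attempted for key [{}]; lookups are disabled.", key);
        return null;
    }
}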
$ git diff rel/2.0 rel/2.14.1 log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/JndiLookup.java
diff --git a/log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/JndiLookup.java b/log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/JndiLookup.java
index 77749e015..30e65ad24 100644
--- a/log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/JndiLookup.java
+++ b/log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/JndiLookup.java
@@ -16,53 +16,47 @@
*/
package org.apache.logging.log4j.core.lookup;
-import javax.naming.Context;
-import javax.naming.InitialContext;
+import java.util.Objects;
+
import javax.naming.NamingException;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.Marker;
+import org.apache.logging.log4j.MarkerManager;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.plugins.Plugin;
-import org.apache.logging.log4j.core.util.Closer;
+import org.apache.logging.log4j.core.net.JndiManager;
+import org.apache.logging.log4j.status.StatusLogger;
/**
* Looks up keys from JNDI resources.
*/
-@Plugin(name = "jndi", category = "Lookup")
-public class JndiLookup implements StrLookup {
+@Plugin(name = "jndi", category = StrLookup.CATEGORY)
+public class JndiLookup extends AbstractLookup {
- /** JNDI resourcce path prefix used in a J2EE container */
- static final String CONTAINER_JNDI_RESOURCE_PATH_PREFIX = "java:comp/env/";
+ private static final Logger LOGGER = StatusLogger.getLogger();
+ private static final Marker LOOKUP = MarkerManager.getMarker("LOOKUP");
- /**
- * Looks up the value of the JNDI resource.
- * @param key the JNDI resource name to be looked up, may be null
- * @return The value of the JNDI resource.
- */
- @Override
- public String lookup(final String key) {
- return lookup(null, key);
- }
+ /** JNDI resource path prefix used in a J2EE container */
+ static final String CONTAINER_JNDI_RESOURCE_PATH_PREFIX = "java:comp/env/";
/**
* Looks up the value of the JNDI resource.
* @param event The current LogEvent (is ignored by this StrLookup).
* @param key the JNDI resource name to be looked up, may be null
- * @return The value of the JNDI resource.
+ * @return The String value of the JNDI resource.
*/
@Override
public String lookup(final LogEvent event, final String key) {
if (key == null) {
return null;
}
-
- Context ctx = null;
- try {
- ctx = new InitialContext();
- return (String) ctx.lookup(convertJndiName(key));
+ final String jndiName = convertJndiName(key);
+ try (final JndiManager jndiManager = JndiManager.getDefaultManager()) {
+ return Objects.toString(jndiManager.lookup(jndiName), null);
} catch (final NamingException e) {
+ LOGGER.warn(LOOKUP, "Error looking up JNDI resource [{}].", jndiName, e);
return null;
- } finally {
- Closer.closeSilently(ctx);
}
}
@@ -73,11 +67,10 @@ public class JndiLookup implements StrLookup {
* #param jndiName The name of the resource.
* #return The fully qualified name to look up.
*/
- private String convertJndiName(String jndiName) {
+ private String convertJndiName(final String jndiName) {
if (!jndiName.startsWith(CONTAINER_JNDI_RESOURCE_PATH_PREFIX) && jndiName.indexOf(':') == -1) {
- jndiName = CONTAINER_JNDI_RESOURCE_PATH_PREFIX + jndiName;
+ return CONTAINER_JNDI_RESOURCE_PATH_PREFIX + jndiName;
}
-
return jndiName;
}
}
I am using a custom annotation in my groovy script:
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

@Retention(RetentionPolicy.RUNTIME)
public @interface ScriptManifest {
/**
* Sets the script's publicity mode.
* @return ScriptMode
*/
ScriptMode mode();
/**
* Sets the type of script we are using.
* @return ScriptType
*/
Class<? extends ScriptContext>[] type();
/**
* Sets the name of the script.
* @return name
*/
String script_name();
/**
* Sets the author of the script.
* @return author
*/
String author();
}
When creating the test script, IntelliJ shows the error "Expected 'ScriptMode.PUBLIC' to be an inline constant", and the same for TestContext.class. The script runs fine; I'm just not sure why it is flagged as if it were an error.
@ScriptManifest(mode = ScriptMode.PUBLIC, type = TestContext.class, script_name = "Test", author = "Jake")
class Test extends Script {
@Override
void prepare() {
println "no" + getName()
}
}
Fixed the issue by doing the following: I was trying to compile it in my Maven project with javac. I added a new Groovy module and compiled it with groovyc, and it works fine now.
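If you'd rather keep the script in the Maven project itself, a Groovy compiler plugin can do the same job. A sketch using GMavenPlus (the version is an assumption; adjust it and add a matching Groovy dependency):
<plugin>
  <groupId>org.codehaus.gmavenplus</groupId>
  <artifactId>gmavenplus-plugin</artifactId>
  <version>1.13.1</version>
  <executions>
    <execution>
      <goals>
        <goal>addSources</goal>
        <goal>addTestSources</goal>
        <goal>compile</goal>
        <goal>compileTests</goal>
      </goals>
    </execution>
  </executions>
</plugin>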
In the docs
http://symfony.com/doc/master/cookbook/profiler/storage.html
you can still find information about profiler storage.
I just checked the code and could not find any clues on how to set a custom storage.
I also find no documentation stating this, except some @legacy notes in the original source at 2.8.
Is there a reason why this was removed?
I was using Redis to store this data with a lifetime of about 1 hour.
Now I need to run a manual cleanup to wipe all files in that directory.
Any clues or hints on helping me with this issue are appreciated ^^
Chris
Thanks to the tip from Matteo I was able to solve this quite flexibly.
The Symfony team removed this because it was hard-coded into the profiler subsystem.
Instead of them fixing this by adding a class parameter, I had to solve it myself. :)
OK, here is the code, if somebody needs this too.
First of all we need the original classes from Symfony 2.7 (at least, I reused them, as I only need the Redis option; I use it because I can compress the data using igbinary).
Next you need to implement a compiler pass.
namespace AcmeBundle\DependencyInjection\CompilerPass;
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\ContainerBuilder;
class ProfilerCompilerPass implements CompilerPassInterface
{
/**
* You can modify the container here before it is dumped to PHP code.
*
* @param ContainerBuilder $container
*/
public function process(ContainerBuilder $container)
{
$definition = $container->getDefinition('profiler');
$definition->addArgument('%acmebundle.profiler.defaultEnabled%');
$definition->addArgument('%acmebundle.profiler.class%');
$definition->addArgument('%acmebundle.profiler.dsn%');
$definition->addArgument('%acmebundle.profiler.username%');
$definition->addArgument('%acmebundle.profiler.password%');
$definition->addArgument('%acmebundle.profiler.ttl%');
$definition->setClass('AcmeBundle\Profiler\Profiler');
}
}
This needs to be registered inside your bundle class's build() method:
public function build(ContainerBuilder $container)
{
...
$container->addCompilerPass(new ProfilerCompilerPass());
}
After this we need to add the configuration for the new profiler storage in the DependencyInjection folder.
namespace AcmeBundle\DependencyInjection;
use Symfony\Component\Config\Definition\Builder\ArrayNodeDefinition;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
use Symfony\Component\Config\Definition\ConfigurationInterface;
/**
* This is the class that validates and merges configuration from your app/config files
*
* To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/extension.html#cookbook-bundles-extension-config-class}
* @author Chris
*/
class Configuration implements ConfigurationInterface
{
/**
* {@inheritdoc}
*/
public function getConfigTreeBuilder()
{
$treeBuilder = new TreeBuilder();
$rootNode = $treeBuilder->root('library');
$rootNode
->children()
->arrayNode('profiler')
->addDefaultsIfNotSet()
->children()
->booleanNode('defaultStorage')
->defaultTrue()
->end()
->scalarNode('class')
->defaultValue('')
->end()
->scalarNode('dsn')
->defaultValue('')
->end()
->scalarNode('username')
->defaultValue('')
->end()
->scalarNode('password')
->defaultValue('')
->end()
->scalarNode('ttl')
->defaultValue('3600')
->end()
->end()
->end();
return $treeBuilder;
}
}
Now set the default values in the dependency injection extension:
<?php
namespace AcmeBundle\DependencyInjection;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\HttpKernel\DependencyInjection\Extension;
use Symfony\Component\DependencyInjection\Loader;
use Symfony\Component\DependencyInjection\Extension\PrependExtensionInterface;
/**
* This is the class that loads and manages your bundle configuration
*
* To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/extension.html}
* @author Chris
*/
class AcmeExtension extends Extension
{
/**
* {@inheritdoc}
*/
public function load(array $configs, ContainerBuilder $container)
{
$configuration = new Configuration();
$config = $this->processConfiguration($configuration, $configs);
...
$container->setParameter('acmebundle.profiler.defaultEnabled',$config['profiler']['defaultStorage']);
$container->setParameter('acmebundle.profiler.class',$config['profiler']['class']);
$container->setParameter('acmebundle.profiler.dsn',$config['profiler']['dsn']);
$container->setParameter('acmebundle.profiler.username',$config['profiler']['username']);
$container->setParameter('acmebundle.profiler.password',$config['profiler']['password']);
$container->setParameter('acmebundle.profiler.ttl',$config['profiler']['ttl']);
...
}
...
}
As a last step you need to build a small wrapper class for adding the new profiler handler.
I have chosen to keep the implementation simple:
<?php
namespace AcmeBundle\Profiler;
use Psr\Log\LoggerInterface;
use \Symfony\Component\HttpKernel\Profiler\Profiler as ProfilerSrc;
use Symfony\Component\HttpKernel\Profiler\ProfilerStorageInterface;
/**
* Profiler.
*/
class Profiler extends ProfilerSrc
{
public function __construct(ProfilerStorageInterface $storage, LoggerInterface $logger, $defaultEnabled=true,$class=null,$dsn=null,$username=null,$password=null,$ttl=3600)
{
if($defaultEnabled!==true)
{
$storage = new $class($dsn,$username,$password,$ttl);
}
parent::__construct($storage , $logger);
}
}
I have also added an interface to define the constructor of the storage implementations.
<?php
namespace AcmeBundle\Profiler;
use Symfony\Component\HttpKernel\Profiler\ProfilerStorageInterface as ProfilerStorageSource;
interface ProfilerStorageInterface extends ProfilerStorageSource
{
/**
* ProfilerStorageInterface constructor.
*
* @param $dsn
* @param $username
* @param $password
* @param $ttl
*/
}
All you need to do now is define some options in your config_dev.yml file.
acmebundle:
    profiler:
        defaultEnabled: false
        class: CLASSNAME INCLUDING NAMESPACE
        dsn: redis://localhost/1
        username:
        password:
        ttl: 3600
With defaultEnabled: true you can re-enable the original handler.
The rest is, I believe, self-explanatory.
username + password are from the original feature set.
(ttl == lifetime)
I hope this helps somebody else as well :)
It is marked as deprecated since 2.8, with the removal coming in 3.0. I can't find any motivation for it in the PR. The docs are not yet updated, as you mention.
The only suggestion is a comment in this issue:
If you want to use your own implementation of a profiler storage,
then just override the profiler.storage service.
Hope this helps.
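For example, such an override might look like this in your services configuration (a sketch; RedisProfilerStorage is a hypothetical class implementing the profiler storage interface):
# app/config/services.yml (sketch)
services:
    profiler.storage:
        class: AppBundle\Profiler\RedisProfilerStorage
        arguments: ['redis://localhost/1']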
How do I check from within a Mojo if an artifact already exists in the local repository?
I'm installing large binaries into the local Maven repository and I need to know if they already exist before attempting to download them.
Solved with the help of http://docs.codehaus.org/display/MAVENUSER/Mojo+Developer+Cookbook
/**
* The local maven repository.
*
* @parameter expression="${localRepository}"
* @required
* @readonly
*/
@SuppressWarnings("UWF_UNWRITTEN_FIELD")
private ArtifactRepository localRepository;
/**
* @parameter default-value="${project.remoteArtifactRepositories}"
* @required
* @readonly
*/
private List<?> remoteRepositories;
/**
* Resolves Artifacts in the local repository.
*
* @component
*/
private ArtifactResolver artifactResolver;
/**
* @component
*/
private ArtifactFactory artifactFactory;
[...]
Artifact artifact = artifactFactory.createArtifactWithClassifier(groupId, artifactId, version, packagingType, classifier);
boolean artifactExists;
try
{
// Downloads the remote artifact, if necessary
artifactResolver.resolve(artifact, remoteRepositories, localRepository);
artifactExists = true;
}
catch (ArtifactResolutionException e)
{
throw new MojoExecutionException("", e);
}
catch (ArtifactNotFoundException e)
{
artifactExists = false;
}
if (artifactExists)
System.out.println("Artifact found at: " + artifact.getFile());
If you want to check if a remote artifact exists without downloading it, you can use the Aether library to do the following (based on http://dev.eclipse.org/mhonarc/lists/aether-users/msg00127.html):
MavenDefaultLayout defaultLayout = new MavenDefaultLayout();
RemoteRepository centralRepository = new RemoteRepository.Builder("central", "default", "http://repo1.maven.org/maven2/").build();
URI centralUri = URI.create(centralRepository.getUrl());
URI artifactUri = centralUri.resolve(defaultLayout.getPath(artifact));
HttpURLConnection connection = (HttpURLConnection) artifactUri.toURL().openConnection();
connection.setRequestMethod("HEAD");
connection.connect();
boolean artifactExists = connection.getResponseCode() != 404;
With the following dependency: org.eclipse.aether:aether-util:0.9.0.M2, and the following imports:
import java.net.HttpURLConnection;
import java.net.URI;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.util.repository.layout.MavenDefaultLayout;
Since the accepted answer no longer points to valid URLs, and because I know a better way, I am posting a new answer.
There's the wagon-maven-plugin, which has an exist goal. The documentation is a bit inaccurate, but you can use that.
Code-wise, you can have a look at the DefaultWagonDownload class' exists method:
/**
*
* @param wagon - a Wagon instance
* @param resource - Remote resource to check
* @throws WagonException
*/
public boolean exists( Wagon wagon, String resource )
throws WagonException
{
return wagon.resourceExists( resource );
}
If you expect your artifacts to be present in a remote Maven repository, I'd suggest you simply use the copy mojo of the maven-dependency-plugin.
It will use the normal Maven resolution mechanism for retrieving artifacts, so it will not download something that is already in the local repository.
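For illustration, a copy configuration for a single artifact might look like this (coordinates, type, and phase are placeholders for your setup):
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-dependency-plugin</artifactId>
  <executions>
    <execution>
      <id>copy-large-binary</id>
      <phase>generate-resources</phase>
      <goals>
        <goal>copy</goal>
      </goals>
      <configuration>
        <artifactItems>
          <artifactItem>
            <groupId>com.example</groupId>
            <artifactId>big-binary</artifactId>
            <version>1.0</version>
            <type>zip</type>
          </artifactItem>
        </artifactItems>
        <outputDirectory>${project.build.directory}/binaries</outputDirectory>
      </configuration>
    </execution>
  </executions>
</plugin>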
In a plugin, when using Maven 2 (not sure about Maven 3), you can use the mojo-executor to call a mojo from within your code easily.
I am using Apache's Velocity templating engine, and I would like to create a custom Directive. That is, I want to be able to write "#doMyThing()" and have it invoke some java code I wrote in order to generate the text.
I know that I can register a custom directive by adding a line
userdirective=my.package.here.MyDirectiveName
to my velocity.properties file. And I know that I can write such a class by extending the Directive class. What I don't know is how to do so -- I'm looking for some sort of documentation for the author of a new Directive. For instance, I'd like to know whether my getType() method should return "BLOCK" or "LINE", and what my setLocation() method should do.
Is there any documentation out there that is better than just "Use the source, Luke"?
On the Velocity wiki, there's a presentation and sample code from a talk I gave called "Hacking Velocity". It includes an example of a custom directive.
I was also trying to come up with a custom directive. I couldn't find any documentation at all, so I looked at some user-created directives: IfNullDirective (a nice and easy one), MergeDirective, as well as Velocity's built-in directives.
Here is my simple block directive that returns compressed content (complete project with some directive installation instructions is located here):
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import org.apache.velocity.context.InternalContextAdapter;
import org.apache.velocity.exception.MethodInvocationException;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.apache.velocity.exception.TemplateInitException;
import org.apache.velocity.runtime.RuntimeServices;
import org.apache.velocity.runtime.directive.Directive;
import org.apache.velocity.runtime.parser.node.Node;
import org.apache.velocity.runtime.log.Log;
import com.googlecode.htmlcompressor.compressor.HtmlCompressor;
/**
* Velocity directive that compresses HTML content within a #compressHtml ... #end block.
*/
public class HtmlCompressorDirective extends Directive {
private static final HtmlCompressor htmlCompressor = new HtmlCompressor();
private Log log;
public String getName() {
return "compressHtml";
}
public int getType() {
return BLOCK;
}
@Override
public void init(RuntimeServices rs, InternalContextAdapter context, Node node) throws TemplateInitException {
super.init(rs, context, node);
log = rs.getLog();
//set compressor properties
htmlCompressor.setEnabled(rs.getBoolean("userdirective.compressHtml.enabled", true));
htmlCompressor.setRemoveComments(rs.getBoolean("userdirective.compressHtml.removeComments", true));
}
public boolean render(InternalContextAdapter context, Writer writer, Node node)
throws IOException, ResourceNotFoundException, ParseErrorException, MethodInvocationException {
//render content to a variable
StringWriter content = new StringWriter();
node.jjtGetChild(0).render(context, content);
//compress
try {
writer.write(htmlCompressor.compress(content.toString()));
} catch (Exception e) {
writer.write(content.toString());
String msg = "Failed to compress content: "+content.toString();
log.error(msg, e);
throw new RuntimeException(msg, e);
}
return true;
}
}
Block directives always accept a body and must end with #end when used in a template. e.g. #foreach( $i in $foo ) this has a body! #end
Line directives do not have a body or an #end. e.g. #parse( 'foo.vtl' )
You don't need to bother with setLocation() at all. The parser uses that.
Any other specifics I can help with?
Also, have you considered using a "tool" approach? Even if you don't use VelocityTools to automatically make your tool available and whatnot, you can just create a tool class that does what you want, put it in the context and either have a method you call to generate content or else just have its toString() method generate the content. e.g. $tool.doMyThing() or just $myThing
Directives are best for when you need to mess with Velocity internals (access to InternalContextAdapter or actual Nodes).
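To illustrate the tool approach (all names here are made up for the example), the tool itself can be trivial:
public class MyThingTool {
    /** Generates the text a #doMyThing() directive would have produced. */
    public String doMyThing() {
        return "generated content";
    }

    /** Lets templates render the tool itself as $myThing. */
    @Override
    public String toString() {
        return doMyThing();
    }
}
Put an instance in the context with context.put("tool", new MyThingTool()) and the template can call $tool.doMyThing(), or just reference $tool.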
Prior to Velocity 1.6 I had a #blockset($v)#end directive to be able to deal with a multiline #set($v), but this function is now handled by the #define directive.
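For reference, the #define replacement looks like this; the block is evaluated lazily each time the reference is used:
#define( $multiLine )
first line
second line
#end
$multiLine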
Custom block directives are a pain with modern IDEs because they don't parse the structure correctly: they assume the #end associated with #userBlockDirective is an extra one and paint the whole file red. They should be avoided if possible.
I copied something similar from the Velocity source code and created a "blockset" (multiline) directive.
import org.apache.velocity.runtime.directive.Directive;
import org.apache.velocity.runtime.RuntimeServices;
import org.apache.velocity.runtime.parser.node.Node;
import org.apache.velocity.context.InternalContextAdapter;
import org.apache.velocity.exception.MethodInvocationException;
import org.apache.velocity.exception.ResourceNotFoundException;
import org.apache.velocity.exception.ParseErrorException;
import org.apache.velocity.exception.TemplateInitException;
import java.io.Writer;
import java.io.IOException;
import java.io.StringWriter;
public class BlockSetDirective extends Directive {
private String blockKey;
/**
* Return name of this directive.
*/
public String getName() {
return "blockset";
}
/**
* Return type of this directive.
*/
public int getType() {
return BLOCK;
}
/**
* simple init - get the blockKey
*/
public void init( RuntimeServices rs, InternalContextAdapter context,
Node node )
throws TemplateInitException {
super.init( rs, context, node );
/*
* first token is the name of the block. I don't even check the format,
* just assume it looks like this: $block_name. Should check if it has
* a '$' or not like macros.
*/
blockKey = node.jjtGetChild( 0 ).getFirstToken().image.substring( 1 );
}
/**
* Renders node to internal string writer and stores in the context at the
* specified context variable
*/
public boolean render( InternalContextAdapter context, Writer writer,
Node node )
throws IOException, MethodInvocationException,
ResourceNotFoundException, ParseErrorException {
StringWriter sw = new StringWriter(256);
boolean b = node.jjtGetChild( 1 ).render( context, sw );
context.put( blockKey, sw.toString() );
return b;
}
}