Uncategorized · 2016. 8. 23. 18:15


Have the class implement EnvironmentAware and then override setEnvironment().

The problem is one of timing: implementing EnvironmentAware ensures setEnvironment() is called early in the bean lifecycle, before the Environment is needed.



@Configuration
@PropertySource("classpath:myProperties.properties")
public class MyConfiguration implements EnvironmentAware {
 
    private Environment environment;
 
    @Override
    public void setEnvironment(final Environment environment) {
        this.environment = environment;
    }
 
    public void myMethod() {
        final String myPropertyValue = environment.getProperty("myProperty");
        // ...
    }
 
}



public class SystemLogApplication implements EnvironmentAware {
 
    // @Autowired does not work on a static field; EnvironmentAware is used instead,
    // so the Environment is set before it is needed.
    private static Environment env;
 
    @Override
    public void setEnvironment(Environment environment) {
        SystemLogApplication.env = environment;
    }
}
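
With the Environment captured this way, other code can read configuration through a small static accessor. A minimal sketch (the helper method and the db.url key are illustrative, not from the original post):

public static String getProperty(String key) {
    // env is populated by setEnvironment() while the context is being refreshed
    return env != null ? env.getProperty(key) : null;
}

// usage elsewhere: String url = SystemLogApplication.getProperty("db.url");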


Uncategorized · 2016. 8. 23. 18:11


<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD Config 3.0//EN" "http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
    <settings>
        <setting name="callSettersOnNulls" value="true" />
        <setting name="jdbcTypeForNull" value="NULL" />
    </settings>
    <plugins>
        <plugin interceptor="kr.go.safepeople.config.interceptor.MybatisSqlLogInterceptor" />
    </plugins>
</configuration>
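
For this configuration to take effect in a Spring application, the mybatis-spring SqlSessionFactoryBean must point at the file. A minimal sketch, assuming the XML above is on the classpath as mybatis-config.xml and a dataSource bean already exists (the file name and bean names are illustrative):

import javax.sql.DataSource;

import org.mybatis.spring.SqlSessionFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;

@Configuration
public class MyBatisConfig {

    @Bean
    public SqlSessionFactoryBean sqlSessionFactory(DataSource dataSource) {
        SqlSessionFactoryBean factory = new SqlSessionFactoryBean();
        factory.setDataSource(dataSource);
        // loads the <settings> and <plugins> above, including MybatisSqlLogInterceptor
        factory.setConfigLocation(new ClassPathResource("mybatis-config.xml"));
        return factory;
    }
}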



 
import java.lang.reflect.Field;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import java.util.Properties;
 
import org.apache.ibatis.executor.statement.StatementHandler;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.plugin.Intercepts;
import org.apache.ibatis.plugin.Invocation;
import org.apache.ibatis.plugin.Plugin;
import org.apache.ibatis.plugin.Signature;
import org.apache.ibatis.session.ResultHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
 
@Intercepts({
     @Signature(type=StatementHandler.class, method="update", args={Statement.class})
    ,@Signature(type=StatementHandler.class, method="query", args={Statement.class, ResultHandler.class})
})
public class MybatisSqlLogInterceptor implements Interceptor {
 
    private Logger logger = LoggerFactory.getLogger(this.getClass());
    
    @Override
    public Object intercept(Invocation invocation) throws Throwable {
        StatementHandler handler = (StatementHandler)invocation.getTarget();
        BoundSql boundSql = handler.getBoundSql();
        String sql = boundSql.getSql();
        Object param = handler.getParameterHandler().getParameterObject();
        
        if(param == null){
            sql = sql.replaceFirst("\\?", "''");
        }else{
            if(param instanceof Integer || param instanceof Long || param instanceof Float || param instanceof Double){
                sql = sql.replaceFirst("\\?", param.toString());
            }else if(param instanceof String){
                sql = sql.replaceFirst("\\?", "'" + param + "'");
            }else if(param instanceof Map){        
                List<ParameterMapping> paramMapping = boundSql.getParameterMappings();    
                
                for(ParameterMapping mapping : paramMapping){
                    String propValue = mapping.getProperty();        
                    Object value = ((Map) param).get(propValue);    
                    if(value != null){
                        if(value instanceof String){
                            sql = sql.replaceFirst("\\?", "'" + value + "'");
                        }else{
                            sql = sql.replaceFirst("\\?", value.toString());
                        }
                    }else{
                        sql = sql.replaceFirst("\\?", "'null'");
                    }
                    
                }
            }else{
                List<ParameterMapping> paramMapping = boundSql.getParameterMappings();
                Class<?> paramClass = param.getClass();
                for(ParameterMapping mapping : paramMapping){
                    String propValue = mapping.getProperty();            
                    Field field = paramClass.getDeclaredField(propValue);    
                    field.setAccessible(true);                    
                    Class<?> javaType = mapping.getJavaType();            
                    if(String.class == javaType){
                        sql = sql.replaceFirst("\\?", "'" + field.get(param) + "'");
                    }else{
                        sql = sql.replaceFirst("\\?", field.get(param).toString());
                    }
                }
            }
        }
         
        logger.debug("=====================================================================");
        logger.debug("SQL : "+sql);
        logger.debug("=====================================================================");
 
        return invocation.proceed(); 
    }
 
    @Override
    public Object plugin(Object target) {
        return Plugin.wrap(target, this);
    }
 
    @Override
    public void setProperties(Properties properties) {
        // no properties are needed for this interceptor
    }
 
}
 


Uncategorized · 2016. 8. 23. 13:03

In an earlier post, I blogged about the REST capabilities we added to Spring @MVC version 3.0. Later, Alef wrote about using the introduced functionality to add an Atom view to the Pet Clinic application. In this post, I would like to introduce the client-side capabilities we added in Milestone 2.

RestTemplate

The RestTemplate is the central Spring class for client-side HTTP access. Conceptually, it is very similar to the JdbcTemplate, JmsTemplate, and the various other templates found in the Spring Framework and other portfolio projects. This means, for instance, that the RestTemplate is thread-safe once constructed, and that you can use callbacks to customize its operations.

RestTemplate Methods

The main entry points of the template are named after the six main HTTP methods:

HTTP method   RestTemplate method
DELETE        delete(String, String...)
GET           getForObject(String, Class, String...)
HEAD          headForHeaders(String, String...)
OPTIONS       optionsForAllow(String, String...)
POST          postForLocation(String, Object, String...)
PUT           put(String, Object, String...)

The names of these methods clearly indicate which HTTP method they invoke, while the second part of the name indicates what is returned. For instance, getForObject() will perform a GET, convert the HTTP response into an object type of your choice, and return that object. postForLocation() will do a POST, converting the given object into an HTTP request, and return the HTTP Location response header where the newly created object can be found. As you can see, these methods try to enforce REST best practices.
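
For example, postForLocation() can be used like this (the URL and the Booking class are illustrative, not from the original post):

// POST a new booking and return the Location header of the created resource
URI location = restTemplate.postForLocation(
        "http://example.com/hotels/{hotel}/bookings", new Booking("2009-06-01"), "42");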

URI Templates

Each of these methods takes a URI as first argument. That URI can be a URI template, and variables can be used to expand the template to a normal URI. The template variables can be passed in two forms: as a String variable arguments array, or as a Map<String, String>. The string varargs variant expands the given template variables in order, so that


String result = restTemplate.getForObject("http://example.com/hotels/{hotel}/bookings/{booking}", String.class, "42", "21");

will perform a GET on http://example.com/hotels/42/bookings/21. The map variant expands the template based on variable name, and is therefore more useful when using many variables, or when a single variable is used multiple times. For example:


Map<String, String> vars = new HashMap<String, String>();
vars.put("hotel", "42");
vars.put("booking", "21");
String result = restTemplate.getForObject("http://example.com/hotels/{hotel}/bookings/{booking}", String.class, vars);

will also perform a GET on http://example.com/hotels/42/bookings/21.

HttpMessageConverters

Objects passed to and returned from the methods getForObject(), postForLocation(), and put() are converted to HTTP requests and from HTTP responses by HttpMessageConverters. Converters for the main mime types and Java types are registered by default, but you can also write your own converter and plug it into the RestTemplate. In the example below, I will show you how that’s done.
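
Besides the XML configuration shown later in this post, converters can also be registered programmatically. A minimal sketch, assuming a Spring 3.0 GA-style API where the converter list is exposed as a java.util.List (earlier milestones used a setter taking an array):

RestTemplate restTemplate = new RestTemplate();
// add the custom converter written later in this post alongside the defaults
restTemplate.getMessageConverters().add(new BufferedImageHttpMessageConverter());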

Using the RestTemplate to retrieve photos from Flickr

Rather than going through the various methods of the RestTemplate, I will show you how to use it for retrieving pictures from Flickr, Yahoo!'s online photo-sharing application. This sample application searches Flickr for photos that match a given search term. It then shows these pictures using a simple Swing UI. To run the application yourself, you will need to create a Flickr account and apply for an API key.

Searching for photos

Flickr exposes various APIs to manipulate its vast library of photos. The flickr.photos.search method allows you to search for photos by issuing a GET request on http://www.flickr.com/services/rest?method=flickr.photos.search&api+key=xxx&tags=penguins, where you enter your API key and the thing to search for (penguins in this case). As a result, you get back an XML document describing the photos that conform to your query. Something like:


<photos page="2" pages="89" perpage="10" total="881">
    <photo id="2636" owner="47058503995@N01" secret="a123456" server="2" title="test_04" ispublic="1" isfriend="0" isfamily="0" />
    <photo id="2635" owner="47058503995@N01" secret="b123456" server="2" title="test_03" ispublic="0" isfriend="1" isfamily="1" />
    <photo id="2633" owner="47058503995@N01" secret="c123456" server="2" title="test_01" ispublic="1" isfriend="0" isfamily="0" />
    <photo id="2610" owner="12037949754@N01" secret="d123456" server="2" title="00_tall" ispublic="1" isfriend="0" isfamily="0" />
</photos>

Using the RestTemplate, retrieving such a document is quite trivial:


final String photoSearchUrl = "http://www.flickr.com/services/rest?method=flickr.photos.search&api+key={api-key}&tags={tag}&per_page=10";
Source photos = restTemplate.getForObject(photoSearchUrl, Source.class, apiKey, searchTerm);

where apiKey and searchTerm are two Strings given on the command line. This method uses the SourceHttpMessageConverter to convert the HTTP XML response into a javax.xml.transform.Source. (Note that the SourceHttpMessageConverter was introduced shortly after we released Spring 3.0 M2, so you will have to get a recent snapshot (or the upcoming M3) to use it. The sample project available below is set up to retrieve these via Maven.)

Retrieving the photos

Next, we’re going to use an XPath expression to retrieve all the photo elements of the document. For this, we are going to use the XPathTemplate from Spring Web Services. We are going to evaluate the //photo expression, returning all photo elements occurring anywhere in the document. The NodeMapper is a callback interface, whose mapNode() method will be invoked for each photo element in the document. In this case, we retrieve the server, id, and secret attributes of this element, and use those to fill a Map. Finally, we use the RestTemplate again to retrieve the photo as a java.awt.image.BufferedImage. Thus, when the XPath evaluation is done, the resulting imageList will contain an image for each photo in the XML document.


List<BufferedImage> imageList = xpathTemplate.evaluate("//photo", photos, new NodeMapper() {
    public Object mapNode(Node node, int i) throws DOMException {
        Element photo = (Element) node;
        Map<String, String> variables = new HashMap<String, String>(3);
        variables.put("server", photo.getAttribute("server"));
        variables.put("id", photo.getAttribute("id"));
        variables.put("secret", photo.getAttribute("secret"));
        String photoUrl = "http://static.flickr.com/{server}/{id}_{secret}_m.jpg";
        return restTemplate.getForObject(photoUrl, BufferedImage.class, variables);
    }
});

For instance, given the XML document given above, the imageList will contain 4 images. The URL for the first image retrieved will be http://static.flickr.com/2/2636_a123456_m.jpg, the second is http://static.flickr.com/2/2635_b123456_m.jpg, etc.

Converting the images

There is one more thing that needs to be done in order for the code to work: we will need to write a HttpMessageConverter that is able to read from the HTTP response and create a BufferedImage from that. Doing so with the Java Image I/O API is fairly simple; we just need to implement the read() method defined in the HttpMessageConverter interface. Overall, our simple converter looks like this:


public class BufferedImageHttpMessageConverter implements HttpMessageConverter<BufferedImage> {

    public List<MediaType> getSupportedMediaTypes() {
        return Collections.singletonList(new MediaType("image", "jpeg"));
    }

    public boolean supports(Class<? extends BufferedImage> clazz) {
        return BufferedImage.class.equals(clazz);
    }

    public BufferedImage read(Class<BufferedImage> clazz, HttpInputMessage inputMessage) throws IOException {
        return ImageIO.read(inputMessage.getBody());
    }

    public void write(BufferedImage image, HttpOutputMessage message) throws IOException {
        throw new UnsupportedOperationException("Not implemented");
    }
}

Note that we didn’t implement write() because we are not uploading images, just downloading them. Now we just have to plug this converter into the RestTemplate. We do that in the Spring application context:


<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd">

    <bean id="flickrClient" class="com.springsource.samples.resttemplate.FlickrClient">
        <constructor-arg ref="restTemplate"/>
        <constructor-arg ref="xpathTemplate"/>
    </bean>

    <bean id="restTemplate" class="org.springframework.web.client.RestTemplate">
        <property name="messageConverters">
            <list>
                <bean class="org.springframework.http.converter.xml.SourceHttpMessageConverter"/>
                <bean class="com.springsource.samples.resttemplate.BufferedImageHttpMessageConverter"/>
            </list>
        </property>
    </bean>

    <bean id="xpathTemplate" class="org.springframework.xml.xpath.Jaxp13XPathTemplate"/>

</beans>

Showing the photos

The final stage is to show the photos in a simple GUI. For this, we use Swing:


JFrame frame = new JFrame(searchTerm + " photos");
frame.setLayout(new GridLayout(2, imageList.size() / 2));
for (BufferedImage image : imageList) {
    frame.add(new JLabel(new ImageIcon(image)));
}
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.pack();
frame.setVisible(true);

which gives us the following:

(Screenshot: a grid of penguin photos returned by the search.)

Overall, I hope this post showed you how simple it can be to use the RestTemplate to interact with HTTP servers. In just under 30 lines of Java code, we created a GUI that shows pictures of everybody’s favorite bird: the penguin! Check out the RestTemplate and let us know what you think!

Downloads

A Maven project containing the code above can be downloaded here. Note that the project is based on a nightly snapshot build of Spring. The upcoming Milestone 3 of Spring will contain the necessary classes as well.

Uncategorized · 2016. 8. 23. 11:23

Java unlimited runtime class and resource redefinition.

The main purpose of this project is to avoid the infamous change -> restart + wait -> check development lifecycle. Save & Reload during development should be standard, and many other languages (including C#) already contain this feature.

This project is still in a beta version.

Easy to start

Download and install the latest DCEVM Java patch + agent jar and launch your application server with the options -XXaltjvm=dcevm -javaagent:hotswap-agent.jar to get a basic setup. Optionally add hotswap-agent.properties to your application to configure plugins and agent behaviour.

Plugins

Each application framework (Spring, Hibernate, Logback, ...) needs a special reloading mechanism to stay up-to-date after class redefinition (e.g. a Hibernate configuration reload after a new entity class is introduced). Hotswap agent works as a plugin system and ships preconfigured with all major framework plugins. It is easy to write a custom plugin, even as part of your application.

Contribute

This project is very complex due to the large number of supported frameworks and their various versions. Community contribution is essential to keep it alive. You can start by creating a plugin inside your application or by writing an example/integration test. There is always a need for documentation improvement :-). Thank you for any help!

Quick start:

Install

  1. download latest release of DCEVM Java patch and launch the installer (e.g. java -jar installer-light.jar). Currently you need to select correct installer for Java major version (7/8).
  2. select java installation directory on your disc and press "Install DCEVM as altjvm" button. Java 1.7+ versions are supported.
  3. download the latest release of the Hotswap agent jar, unpack hotswap-agent.jar, and put it anywhere on your disc. For example: C:\java\hotswap-agent.jar

Run your application

  1. add the following Java command line attributes: -XXaltjvm=dcevm -javaagent:PATH_TO_AGENT\hotswap-agent.jar (you need to replace PATH_TO_AGENT with the actual directory). For example: java -XXaltjvm=dcevm -javaagent:c:\java\hotswap-agent.jar YourApp. See the IntelliJ IDEA and Netbeans forum threads for IDE specific setup guides.
  2. (optional) create a file named "hotswap-agent.properties" inside your resources directory, see available properties and default values: https://github.com/HotswapProjects/HotswapAgent/blob/master/hotswap-agent-core/src/main/resources/hotswap-agent.properties
  3. start the application in debug mode, check that the agent and plugins are initialized correctly:

    HOTSWAP AGENT: 9:49:29.548 INFO (org.hotswap.agent.HotswapAgent) - Loading Hotswap agent - unlimited runtime class redefinition.
    HOTSWAP AGENT: 9:49:29.725 INFO (org.hotswap.agent.config.PluginRegistry) - Discovered plugins: [org.hotswap.agent.plugin.hotswapper.HotswapperPlugin, org.hotswap.agent.plugin.jvm.AnonymousClassPatchPlugin, org.hotswap.agent.plugin.hibernate.HibernatePlugin, org.hotswap.agent.plugin.spring.SpringPlugin, org.hotswap.agent.plugin.jetty.JettyPlugin, org.hotswap.agent.plugin.tomcat.TomcatPlugin, org.hotswap.agent.plugin.zk.ZkPlugin, org.hotswap.agent.plugin.logback.LogbackPlugin]
    ...
    HOTSWAP AGENT: 9:49:38.700 INFO (org.hotswap.agent.plugin.spring.SpringPlugin) - Spring plugin initialized - Spring core version '3.2.3.RELEASE'
    
  4. save a changed resource and/or use the HotSwap feature of your IDE to reload changes

What is available?

  • Enhanced Java Hotswap - change method body, add/rename a method, field, ... The only unsupported operation is hierarchy change (change the superclass or remove an interface).
    • You can use standard Java Hotswap from IDE in debug mode to reload changed class
    • or set autoHotswap property -XXaltjvm=dcevm -javaagent:PATH_TO_AGENT\hotswap-agent.jar=autoHotswap=true to reload changed classes after compilation. This setup allows even reload on production system without restart.
  • Automatic configuration - all local classes and resources known to the running Java application are automatically discovered and watched for reload (all files on local filesystem, not inside JAR file).
  • Extra classpath - Need to change a runtime class inside a dependent JAR? Use the extraClasspath property to add any directory as a classpath to watch for class files.
  • Reload resource after a change - resources from webapp directory are usually reloaded by application server. But what about other resources like src/main/resources? Use watchResources property to add any directory to watch for a resource change.
  • Framework support - through plugin system, many frameworks are supported. New plugins can be easily added.
  • Fast - until the plugin is initialized, it does not consume any resources or slow down the application (see Runtime overhead for more information)

Should you have any problems or questions, ask at HotswapAgent forum.

This project is similar to JRebel. Main differences are:

  • HotswapAgent (DCEVM) supports Java8!
  • HotswapAgent does not need any additional configuration for basic project setup.
  • JRebel is currently more mature and contains more plugins.
  • JRebel is neither open source nor free.
  • JRebel modifies bytecode of all classes on reload. You need special IDE plugin to fix debugging.
  • HotswapAgent extraClasspath is similar to JRebel configuration
  • HotswapAgent adds watchResources configuration

Examples

See HotswapAgentExamples GitHub project. The purpose of an example application is:

  • complex automated integration tests (check various configurations before a release, see the run-tests.sh script)
  • to check "real world" plugin usage during plugin development (i.e. inside a container)
  • to provide a working solution for typical application setups
  • a sandbox to simulate issues for existing or new setups

Feel free to fork/branch and create an application for your setup (functional, but as simple as possible). General setups will be merged into the master.

IDE support

None needed :) Really, all changes are transparent and all you need to do is to download patch+agent and setup your application / application server. Because we use standard java hotswap behaviour, your IDE will work as expected. However, we work on IDE plugins to help with download & configuration.

Configuration

The basic configuration is set to reload classes and resources from the classpath known to the running application (classloader). If you need a different configuration, add a hotswap-agent.properties file to the classpath root (e.g. src/main/resources/hotswap-agent.properties).

Detailed documentation of available properties and default values can be found in the agent properties file.
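
A small hotswap-agent.properties sketch (the paths and the plugin name are illustrative):

# watch compiled classes of a dependent module in addition to the application classpath
extraClasspath=c:/java/extra-classes
# also reload resources changed directly in the source tree
watchResources=src/main/resources
# skip plugins you do not need
disablePlugin=Hibernate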

Hotswap agent command line options

Full syntax of command line options is:

-javaagent:[yourpath/]hotswap-agent.jar=[option1]=[value1],[option2]=[value2]

Hotswap agent accepts following options:

  • autoHotswap=true - watch all .class files for change and automatically Hotswap the class in the running application (instead of running Hotswap from your IDE debugging session)
  • disablePlugin=[pluginName] - disable a plugin. Note that this will completely forbid the plugin to load (as opposed to the disablePlugin option in hotswap-agent.properties, which only disables the plugin for a classloader). You can repeat this option for every plugin to disable; a combined example follows this list.
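
For example, combining both options (the plugin name is illustrative):

java -XXaltjvm=dcevm -javaagent:c:\java\hotswap-agent.jar=autoHotswap=true,disablePlugin=Hibernate YourApp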

How does it work?

DCEVM

Hotswap agent does the work of reloading resources and framework configuration (Spring, Hibernate, ...), but it depends on the standard Java hotswap mechanism to actually reload classes. Standard Java hotswap allows only method body changes, which makes it practically unusable. DCEVM is a JRE patch which allows almost any structural class change on hotswap (with the exception of hierarchy changes). Although hotswap agent works even with standard Java, we recommend using DCEVM (and all tutorials use DCEVM as the target JVM).

Hotswap agent

Hotswap agent is a plugin container with a plugin manager, a plugin registry, and several agent services (e.g. to watch for class/resource changes). It helps with common tasks and classloading issues. It scans the classpath for classes annotated with the @Plugin annotation, injects agent services and registers reloading hooks. Runtime bytecode modification is provided by the Javassist library.

Plugins

Plugins administered by Hotswap agent are usually targeted towards a specific framework. For example, the Spring plugin uses agent services to:

  • Modify root Spring classes to get Spring contexts and registered scan path
  • Watch for any resource change on a scan path
  • Watch for a hotswap of a class file within a scan path package
  • Reload bean definition after a change
  • ... and many other

Packaged plugins:

  • Hibernate (4x) - Reload Hibernate configuration after entity create/change.
  • Spring (3x) - Reload Spring configuration after class definition/change.
  • Jetty - add extra classpath to the app classloader. All versions supporting WebAppContext.getExtraClasspath should be supported.
  • ZK (5x-7x) - ZK Framework (http://www.zkoss.org/). Changes library property default values to disable caches; maintains the Label cache and bean resolver cache.
  • Logback - Logback configuration reload
  • Hotswapper - Watch for any class file change and reload (hotswap) it on the fly via Java Platform Debugger Architecture (JPDA)
  • AnonymousClassPatch - Swap anonymous inner class names to avoid incompatible changes.
  • ELResolver 2.2 (JuelEL, Apache Commons EL, Oracle EL 3.0) - clear the ELResolver cache on class change. Supports hotswap for #{...} expressions.
  • Seam (2.2, 2.3) - flush JBoss reference cache. Support for properties file change (messages[])
  • JBossModules - add extra class path to JBoss's module class loader.
  • JSF (mojarra 2.1, 2.2) - support for application resource bundle files change (properties files).
  • OsgiEquinox - Hotswap support for Eclipse plugin or Eclipse platform development.
  • RESTEasy (2.x, 3.x) - reload @Path annotated classes on class create/change
  • CDI/Weld - reload bean class definition after class create(managed beans)/change. Proxy bean redefinition after proxied class change. EAR support.
  • WebObjects - Clear key value coding, component, action and validation caches after class change.

Detailed documentation of each plugin can be found in the plugin project's main README.md file.

Runtime overhead

It really depends on how many frameworks you use and which caches are disabled. Example measurements for a large, real-world enterprise application based on Spring + Hibernate, run on Jetty:

Setup                            Startup time
Run (plain Java)                 23s
Debug (plain Java)               24s
Debug (plain DCEVM)              28s
Agent - disabled all plugins     31s
Agent - all plugins              35s

How to write a plugin

You can write a plugin directly as part of your application. Set pluginPackages=your.plugin.package inside your hotswap-agent.properties configuration to discover @Plugin annotated classes. You will also need the agent JAR dependency to compile, but be careful NOT to add the JAR to your application; it must be loaded only as a javaagent. Maven dependency:

    <dependency>
        <groupId>org.hotswap.agent</groupId>
        <artifactId>HotswapAgent</artifactId>
        <version>${project.version}</version>
        <scope>provided</scope>
    </dependency>

(Note that the JAR is not yet in the central Maven repository - you need to build it from source first.)

See ExamplePlugin (part of TestApplication) to go through a commented simple plugin. Read agent readme to understand agent concepts. Check existing plugins source code for more examples.
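
A rough skeleton of such a plugin is sketched below. The annotation names (@Plugin, @OnClassLoadEvent) are recalled from the HotswapAgent API, and the package, plugin name, and regexp are illustrative; compare against the bundled ExamplePlugin for the exact signatures:

import org.hotswap.agent.annotation.OnClassLoadEvent;
import org.hotswap.agent.annotation.Plugin;
import org.hotswap.agent.javassist.CtClass;

@Plugin(name = "MyAppPlugin", testedVersions = "1.0")
public class MyAppPlugin {

    // invoked when a class matching the regexp is loaded or hotswapped
    @OnClassLoadEvent(classNameRegexp = "com.example.myapp\\..*")
    public static void onClassReload(CtClass ctClass) {
        System.out.println("HOTSWAP: reloaded " + ctClass.getName());
    }
}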

Creating Release

Launch the run-tests.sh script in the main directory. Currently you have to set the JAVA_HOME location manually. At least Java 7 and Java 8 with DCEVM should be checked before a release. All automatic tests are set to fail the whole script in case of any single test failure.

Go to the directory representing the repository root. If your DCEVM is named dcevm:

mvn release:prepare
mvn release:perform

If your DCEVM is named differently, e.g. server:

mvn release:prepare -Darguments="-Ddcevm=server"
mvn release:perform -Darguments="-Ddcevm=server"

Plugin specific settings

OsgiEquinox / Eclipse RCP

OsgiEquinox / Eclipse plugin provides hotswap support for Eclipse plugin or Eclipse platform development (Do not confuse it with common development in Eclipse!).

The following options should be set up in eclipse.ini for the debuggee Eclipse instance:

 # use application classloader for the framework
-Dosgi.frameworkParentClassloader=app
 # development classpath that is added to each plugin classpath
-Dosgi.dev=[extra_classpath]
 # use dcevm as JVM
-XXaltjvm=dcevm
 # enable hotswapagent
-javaagent:PATH_TO_AGENT/hotswap-agent.jar
 # enable remote debugging on port 8000
-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000

extra_classpath points to a directory with compiled classes. When a new class is compiled, it is sent by the remote debugger to HotswapAgent, which stores the class file in the extra_classpath directory.

It is also necessary to set up the following hotswap-agent.properties:

extraClasspath=[extra_classpath]
osgiEquinox.debugMode=true

Then connect the IDE debugger (Eclipse, NetBeans, or IDEA) to port 8000, and happy hotswapping!

Uncategorized · 2016. 8. 17. 17:14

19. Data access with JDBC

19.1 Introduction to Spring Framework JDBC

The value-add provided by the Spring Framework JDBC abstraction is perhaps best shown by the sequence of actions outlined in the table below. The table shows what actions Spring will take care of and which actions are the responsibility of you, the application developer.

Table 19.1. Spring JDBC - who does what?

Action                                                     Spring   You
Define connection parameters.                                       X
Open the connection.                                       X
Specify the SQL statement.                                          X
Declare parameters and provide parameter values.                    X
Prepare and execute the statement.                         X
Set up the loop to iterate through the results (if any).   X
Do the work for each iteration.                                     X
Process any exception.                                     X
Handle transactions.                                       X
Close the connection, statement and resultset.             X
The Spring Framework takes care of all the low-level details that can make JDBC such a tedious API to develop with.

19.1.1 Choosing an approach for JDBC database access

You can choose among several approaches to form the basis for your JDBC database access. In addition to three flavors of the JdbcTemplate, a new SimpleJdbcInsert and SimpleJdbcCall approach optimizes database metadata, and the RDBMS Object style takes a more object-oriented approach similar to that of JDO Query design. Once you start using one of these approaches, you can still mix and match to include a feature from a different approach. All approaches require a JDBC 2.0-compliant driver, and some advanced features require a JDBC 3.0 driver.

  • JdbcTemplate is the classic Spring JDBC approach and the most popular. This "lowest level" approach and all others use a JdbcTemplate under the covers.
  • NamedParameterJdbcTemplate wraps a JdbcTemplate to provide named parameters instead of the traditional JDBC "?" placeholders. This approach provides better documentation and ease of use when you have multiple parameters for an SQL statement.
  • SimpleJdbcInsert and SimpleJdbcCall optimize database metadata to limit the amount of necessary configuration. This approach simplifies coding so that you only need to provide the name of the table or procedure and provide a map of parameters matching the column names. This only works if the database provides adequate metadata. If the database doesn’t provide this metadata, you will have to provide explicit configuration of the parameters. A minimal example follows this list.
  • RDBMS Objects including MappingSqlQuery, SqlUpdate and StoredProcedure requires you to create reusable and thread-safe objects during initialization of your data access layer. This approach is modeled after JDO Query wherein you define your query string, declare parameters, and compile the query. Once you do that, execute methods can be called multiple times with various parameter values passed in.
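
A minimal SimpleJdbcInsert sketch, assuming a t_actor table with an auto-generated id column (the table and column names are illustrative):

SimpleJdbcInsert insertActor = new SimpleJdbcInsert(dataSource)
        .withTableName("t_actor")
        .usingGeneratedKeyColumns("id");

Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("first_name", "Leonor");
parameters.put("last_name", "Watling");

// column names are resolved from database metadata, so no SQL has to be written
Number newId = insertActor.executeAndReturnKey(parameters);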

19.1.2 Package hierarchy

The Spring Framework’s JDBC abstraction framework consists of four different packages, namely core, datasource, object, and support.

The org.springframework.jdbc.core package contains the JdbcTemplate class and its various callback interfaces, plus a variety of related classes. A subpackage named org.springframework.jdbc.core.simple contains the SimpleJdbcInsert and SimpleJdbcCall classes. Another subpackage named org.springframework.jdbc.core.namedparam contains the NamedParameterJdbcTemplate class and the related support classes. See Section 19.2, “Using the JDBC core classes to control basic JDBC processing and error handling”, Section 19.4, “JDBC batch operations”, and Section 19.5, “Simplifying JDBC operations with the SimpleJdbc classes”.

The org.springframework.jdbc.datasource package contains a utility class for easy DataSource access, and various simple DataSource implementations that can be used for testing and running unmodified JDBC code outside of a Java EE container. A subpackage named org.springframework.jdbc.datasource.embedded provides support for creating embedded databases using Java database engines such as HSQL, H2, and Derby. See Section 19.3, “Controlling database connections” and Section 19.8, “Embedded database support”.

The org.springframework.jdbc.object package contains classes that represent RDBMS queries, updates, and stored procedures as thread-safe, reusable objects. See Section 19.6, “Modeling JDBC operations as Java objects”. This approach is modeled by JDO, although objects returned by queries are naturally disconnected from the database. This higher level of JDBC abstraction depends on the lower-level abstraction in the org.springframework.jdbc.core package.

The org.springframework.jdbc.support package provides SQLException translation functionality and some utility classes. Exceptions thrown during JDBC processing are translated to exceptions defined in the org.springframework.dao package. This means that code using the Spring JDBC abstraction layer does not need to implement JDBC or RDBMS-specific error handling. All translated exceptions are unchecked, which gives you the option of catching the exceptions from which you can recover while allowing other exceptions to be propagated to the caller. See Section 19.2.3, “SQLExceptionTranslator”.
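
Because the translated exceptions are unchecked, calling code can catch only the cases it can recover from. A brief sketch (the SQL and the recovery action are illustrative):

try {
    jdbcTemplate.update("insert into t_actor (first_name, last_name) values (?, ?)", "Leonor", "Watling");
} catch (DuplicateKeyException ex) {
    // a specific DataAccessException subclass we choose to recover from,
    // e.g. by updating the existing row instead
}
// any other DataAccessException propagates to the caller unchecked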

19.2 Using the JDBC core classes to control basic JDBC processing and error handling

19.2.1 JdbcTemplate

The JdbcTemplate class is the central class in the JDBC core package. It handles the creation and release of resources, which helps you avoid common errors such as forgetting to close the connection. It performs the basic tasks of the core JDBC workflow such as statement creation and execution, leaving application code to provide SQL and extract results. The JdbcTemplate class executes SQL queries, update statements and stored procedure calls, performs iteration over ResultSets and extraction of returned parameter values. It also catches JDBC exceptions and translates them to the generic, more informative, exception hierarchy defined in the org.springframework.dao package.

When you use the JdbcTemplate for your code, you only need to implement callback interfaces, giving them a clearly defined contract. The PreparedStatementCreator callback interface creates a prepared statement given a Connection provided by this class, providing SQL and any necessary parameters. The same is true for the CallableStatementCreator interface, which creates callable statements. The RowCallbackHandler interface extracts values from each row of a ResultSet.

The JdbcTemplate can be used within a DAO implementation through direct instantiation with a DataSource reference, or be configured in a Spring IoC container and given to DAOs as a bean reference.

[Note]

The DataSource should always be configured as a bean in the Spring IoC container. In the first case the bean is given to the service directly; in the second case it is given to the prepared template.

All SQL issued by this class is logged at the DEBUG level under the category corresponding to the fully qualified class name of the template instance (typically JdbcTemplate, but it may be different if you are using a custom subclass of the JdbcTemplate class).

Examples of JdbcTemplate class usage

This section provides some examples of JdbcTemplate class usage. These examples are not an exhaustive list of all of the functionality exposed by the JdbcTemplate; see the attendant javadocs for that.

Querying (SELECT)

Here is a simple query for getting the number of rows in a relation:

int rowCount = this.jdbcTemplate.queryForObject("select count(*) from t_actor", Integer.class);

A simple query using a bind variable:

int countOfActorsNamedJoe = this.jdbcTemplate.queryForObject(
        "select count(*) from t_actor where first_name = ?", Integer.class, "Joe");

Querying for a String:

String lastName = this.jdbcTemplate.queryForObject(
        "select last_name from t_actor where id = ?",
        new Object[]{1212L}, String.class);

Querying and populating a single domain object:

Actor actor = this.jdbcTemplate.queryForObject(
        "select first_name, last_name from t_actor where id = ?",
        new Object[]{1212L},
        new RowMapper<Actor>() {
            public Actor mapRow(ResultSet rs, int rowNum) throws SQLException {
                Actor actor = new Actor();
                actor.setFirstName(rs.getString("first_name"));
                actor.setLastName(rs.getString("last_name"));
                return actor;
            }
        });

Querying and populating a number of domain objects:

List<Actor> actors = this.jdbcTemplate.query(
        "select first_name, last_name from t_actor",
        new RowMapper<Actor>() {
            public Actor mapRow(ResultSet rs, int rowNum) throws SQLException {
                Actor actor = new Actor();
                actor.setFirstName(rs.getString("first_name"));
                actor.setLastName(rs.getString("last_name"));
                return actor;
            }
        });

If the last two snippets of code actually existed in the same application, it would make sense to remove the duplication present in the two RowMapper anonymous inner classes, and extract them out into a single class (typically a static nested class) that can then be referenced by DAO methods as needed. For example, it may be better to write the last code snippet as follows:

public List<Actor> findAllActors() {
    return this.jdbcTemplate.query( "select first_name, last_name from t_actor", new ActorMapper());
}

private static final class ActorMapper implements RowMapper<Actor> {

    public Actor mapRow(ResultSet rs, int rowNum) throws SQLException {
        Actor actor = new Actor();
        actor.setFirstName(rs.getString("first_name"));
        actor.setLastName(rs.getString("last_name"));
        return actor;
    }
}
Updating (INSERT/UPDATE/DELETE) with jdbcTemplate

You use the update(..) method to perform insert, update and delete operations. Parameter values are usually provided as var args or alternatively as an object array.

this.jdbcTemplate.update(
        "insert into t_actor (first_name, last_name) values (?, ?)",
        "Leonor", "Watling");
this.jdbcTemplate.update(
        "update t_actor set last_name = ? where id = ?",
        "Banjo", 5276L);
this.jdbcTemplate.update(
        "delete from actor where id = ?",
        Long.valueOf(actorId));
Other jdbcTemplate operations

You can use the execute(..) method to execute any arbitrary SQL, and as such the method is often used for DDL statements. It is heavily overloaded with variants taking callback interfaces, binding variable arrays, and so on.

this.jdbcTemplate.execute("create table mytable (id integer, name varchar(100))");

The following example invokes a simple stored procedure. More sophisticated stored procedure support is covered later.

this.jdbcTemplate.update(
        "call SUPPORT.REFRESH_ACTORS_SUMMARY(?)",
        Long.valueOf(unionId));

JdbcTemplate best practices

Instances of the JdbcTemplate class are threadsafe once configured. This is important because it means that you can configure a single instance of a JdbcTemplate and then safely inject this shared reference into multiple DAOs (or repositories). The JdbcTemplate is stateful, in that it maintains a reference to a DataSource, but this state is not conversational state.

A common practice when using the JdbcTemplate class (and the associated NamedParameterJdbcTemplate classes) is to configure a DataSource in your Spring configuration file, and then dependency-inject that shared DataSource bean into your DAO classes; the JdbcTemplate is created in the setter for the DataSource. This leads to DAOs that look in part like the following:

public class JdbcCorporateEventDao implements CorporateEventDao {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    // JDBC-backed implementations of the methods on the CorporateEventDao follow...
}

The corresponding configuration might look like this.

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:context="http://www.springframework.org/schema/context"
    xsi:schemaLocation="
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/context
        http://www.springframework.org/schema/context/spring-context.xsd">

    <bean id="corporateEventDao" class="com.example.JdbcCorporateEventDao">
        <property name="dataSource" ref="dataSource"/>
    </bean>

    <bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
        <property name="driverClassName" value="${jdbc.driverClassName}"/>
        <property name="url" value="${jdbc.url}"/>
        <property name="username" value="${jdbc.username}"/>
        <property name="password" value="${jdbc.password}"/>
    </bean>

    <context:property-placeholder location="jdbc.properties"/>

</beans>

An alternative to explicit configuration is to use component-scanning and annotation support for dependency injection. In this case you annotate the class with@Repository (which makes it a candidate for component-scanning) and annotate the DataSource setter method with @Autowired.

@Repository
public class JdbcCorporateEventDao implements CorporateEventDao {

    private JdbcTemplate jdbcTemplate;

    @Autowired
    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    // JDBC-backed implementations of the methods on the CorporateEventDao follow...
}

The corresponding XML configuration file would look like the following:

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:context="http://www.springframework.org/schema/context"
    xsi:schemaLocation="
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/context
        http://www.springframework.org/schema/context/spring-context.xsd">

    <!-- Scans within the base package of the application for @Component classes to configure as beans -->
    <context:component-scan base-package="org.springframework.docs.test" />

    <bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
        <property name="driverClassName" value="${jdbc.driverClassName}"/>
        <property name="url" value="${jdbc.url}"/>
        <property name="username" value="${jdbc.username}"/>
        <property name="password" value="${jdbc.password}"/>
    </bean>

    <context:property-placeholder location="jdbc.properties"/>

</beans>

If you are using Spring’s JdbcDaoSupport class, and your various JDBC-backed DAO classes extend from it, then your sub-class inherits a setDataSource(..) method from the JdbcDaoSupport class. You can choose whether to inherit from this class. The JdbcDaoSupport class is provided as a convenience only.

Regardless of which of the above template initialization styles you choose to use (or not), it is seldom necessary to create a new instance of a JdbcTemplate class each time you want to execute SQL. Once configured, a JdbcTemplate instance is threadsafe. You may want multiple JdbcTemplate instances if your application accesses multiple databases, which requires multiple DataSources, and subsequently multiple differently configured JdbcTemplates.
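
A brief sketch of a DAO that talks to two databases through two differently configured templates (the class and method names are illustrative):

public class ReportingDao {

    private JdbcTemplate ordersTemplate;
    private JdbcTemplate archiveTemplate;

    // each template is backed by its own DataSource
    public void setOrdersDataSource(DataSource ordersDataSource) {
        this.ordersTemplate = new JdbcTemplate(ordersDataSource);
    }

    public void setArchiveDataSource(DataSource archiveDataSource) {
        this.archiveTemplate = new JdbcTemplate(archiveDataSource);
    }
}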

19.2.2 NamedParameterJdbcTemplate

The NamedParameterJdbcTemplate class adds support for programming JDBC statements using named parameters, as opposed to programming JDBC statements using only classic placeholder ('?') arguments. The NamedParameterJdbcTemplate class wraps a JdbcTemplate, and delegates to the wrapped JdbcTemplate to do much of its work. This section describes only those areas of the NamedParameterJdbcTemplate class that differ from the JdbcTemplate itself; namely, programming JDBC statements using named parameters.

// some JDBC-backed DAO class...
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

public void setDataSource(DataSource dataSource) {
    this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}

public int countOfActorsByFirstName(String firstName) {

    String sql = "select count(*) from T_ACTOR where first_name = :first_name";

    SqlParameterSource namedParameters = new MapSqlParameterSource("first_name", firstName);

    return this.namedParameterJdbcTemplate.queryForObject(sql, namedParameters, Integer.class);
}

Notice the use of the named parameter notation in the value assigned to the sql variable, and the corresponding value that is plugged into the namedParameters variable (of type MapSqlParameterSource).

Alternatively, you can pass along named parameters and their corresponding values to a NamedParameterJdbcTemplate instance by using the Map-based style. The remaining methods exposed by the NamedParameterJdbcOperations and implemented by the NamedParameterJdbcTemplate class follow a similar pattern and are not covered here.

The following example shows the use of the Map-based style.

// some JDBC-backed DAO class...
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

public void setDataSource(DataSource dataSource) {
    this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}

public int countOfActorsByFirstName(String firstName) {

    String sql = "select count(*) from T_ACTOR where first_name = :first_name";

    Map<String, String> namedParameters = Collections.singletonMap("first_name", firstName);

    return this.namedParameterJdbcTemplate.queryForObject(sql, namedParameters,  Integer.class);
}

One nice feature related to the NamedParameterJdbcTemplate (and existing in the same Java package) is the SqlParameterSource interface. You have already seen an example of an implementation of this interface in one of the previous code snippets (the MapSqlParameterSource class). An SqlParameterSource is a source of named parameter values for a NamedParameterJdbcTemplate. The MapSqlParameterSource class is a very simple implementation that is simply an adapter around a java.util.Map, where the keys are the parameter names and the values are the parameter values.

Another SqlParameterSource implementation is the BeanPropertySqlParameterSource class. This class wraps an arbitrary JavaBean (that is, an instance of a class that adheres to the JavaBean conventions), and uses the properties of the wrapped JavaBean as the source of named parameter values.

public class Actor {

    private Long id;
    private String firstName;
    private String lastName;

    public String getFirstName() {
        return this.firstName;
    }

    public String getLastName() {
        return this.lastName;
    }

    public Long getId() {
        return this.id;
    }

    // setters omitted...

}
// some JDBC-backed DAO class...
private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

public void setDataSource(DataSource dataSource) {
    this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}

public int countOfActors(Actor exampleActor) {

    // notice how the named parameters match the properties of the above 'Actor' class
    String sql = "select count(*) from T_ACTOR where first_name = :firstName and last_name = :lastName";

    SqlParameterSource namedParameters = new BeanPropertySqlParameterSource(exampleActor);

    return this.namedParameterJdbcTemplate.queryForObject(sql, namedParameters, Integer.class);
}

Remember that the NamedParameterJdbcTemplate class wraps a classic JdbcTemplate template; if you need access to the wrapped JdbcTemplate instance to access functionality only present in the JdbcTemplate class, you can use the getJdbcOperations() method to access the wrapped JdbcTemplate through the JdbcOperations interface.
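
For example (the DDL statement is illustrative):

// drop down to the classic JdbcTemplate API for an operation without named parameters
this.namedParameterJdbcTemplate.getJdbcOperations()
        .execute("create table mytable (id integer, name varchar(100))");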

See also the section called “JdbcTemplate best practices” for guidelines on using the NamedParameterJdbcTemplate class in the context of an application.

19.2.3 SQLExceptionTranslator

SQLExceptionTranslator is an interface to be implemented by classes that can translate between SQLExceptions and Spring’s own org.springframework.dao.DataAccessException, which is agnostic in regard to data access strategy. Implementations can be generic (for example, using SQLState codes for JDBC) or proprietary (for example, using Oracle error codes) for greater precision.

SQLErrorCodeSQLExceptionTranslator is the implementation of SQLExceptionTranslator that is used by default. This implementation uses specific vendor codes. It is more precise than the SQLState implementation. The error code translations are based on codes held in a JavaBean type class called SQLErrorCodes. This class is created and populated by an SQLErrorCodesFactory which, as the name suggests, is a factory for creating SQLErrorCodes based on the contents of a configuration file named sql-error-codes.xml. This file is populated with vendor codes and based on the DatabaseProductName taken from the DatabaseMetaData. The codes for the actual database you are using are used.

The SQLErrorCodeSQLExceptionTranslator applies matching rules in the following sequence:

[Note]

The SQLErrorCodesFactory is used by default to define Error codes and custom exception translations. They are looked up in a file named sql-error-codes.xml from the classpath and the matching SQLErrorCodes instance is located based on the database name from the database metadata of the database in use.

  • Any custom translation implemented by a subclass. Normally the provided concrete SQLErrorCodeSQLExceptionTranslator is used so this rule does not apply. It only applies if you have actually provided a subclass implementation.
  • Any custom implementation of the SQLExceptionTranslator interface that is provided as the customSqlExceptionTranslator property of the SQLErrorCodes class.
  • The list of instances of the CustomSQLErrorCodesTranslation class, provided for the customTranslations property of the SQLErrorCodes class, are searched for a match.
  • Error code matching is applied.
  • Use the fallback translator. SQLExceptionSubclassTranslator is the default fallback translator. If this translation is not available then the next fallback translator is the SQLStateSQLExceptionTranslator.

You can extend SQLErrorCodeSQLExceptionTranslator:

public class CustomSQLErrorCodesTranslator extends SQLErrorCodeSQLExceptionTranslator {

    protected DataAccessException customTranslate(String task, String sql, SQLException sqlex) {
        if (sqlex.getErrorCode() == -12345) {
            return new DeadlockLoserDataAccessException(task, sqlex);
        }
        return null;
    }
}

In this example, the specific error code -12345 is translated and other errors are left to be translated by the default translator implementation. To use this custom translator, it is necessary to pass it to the JdbcTemplate through the method setExceptionTranslator and to use this JdbcTemplate for all of the data access processing where this translator is needed. Here is an example of how this custom translator can be used:

private JdbcTemplate jdbcTemplate;

public void setDataSource(DataSource dataSource) {

    // create a JdbcTemplate and set data source
    this.jdbcTemplate = new JdbcTemplate();
    this.jdbcTemplate.setDataSource(dataSource);

    // create a custom translator and set the DataSource for the default translation lookup
    CustomSQLErrorCodesTranslator tr = new CustomSQLErrorCodesTranslator();
    tr.setDataSource(dataSource);
    this.jdbcTemplate.setExceptionTranslator(tr);

}

public void updateShippingCharge(long orderId, long pct) {
    // use the prepared JdbcTemplate for this update
    this.jdbcTemplate.update("update orders" +
        " set shipping_charge = shipping_charge * ? / 100" +
        " where id = ?", pct, orderId);
}

The custom translator is passed a data source in order to look up the error codes in sql-error-codes.xml.

19.2.4 Executing statements

Executing an SQL statement requires very little code. You need a DataSource and a JdbcTemplate, including the convenience methods that are provided with the JdbcTemplate. The following example shows what you need to include for a minimal but fully functional class that creates a new table:

import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class ExecuteAStatement {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public void doExecute() {
        this.jdbcTemplate.execute("create table mytable (id integer, name varchar(100))");
    }
}

19.2.5 Running queries

Some query methods return a single value. To retrieve a count or a specific value from one row, use queryForObject(..). The latter converts the returned JDBC Type to the Java class that is passed in as an argument. If the type conversion is invalid, then an InvalidDataAccessApiUsageException is thrown. Here is an example that contains two query methods, one for an int and one that queries for a String.

import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class RunAQuery {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public int getCount() {
        return this.jdbcTemplate.queryForObject("select count(*) from mytable", Integer.class);
    }

    public String getName() {
        return this.jdbcTemplate.queryForObject("select name from mytable", String.class);
    }
}

In addition to the single result query methods, several methods return a list with an entry for each row that the query returned. The most generic method is queryForList(..), which returns a List where each entry is a Map, with each entry in the map representing the column value for that row. If you add a method to the above example to retrieve a list of all the rows, it would look like this:

private JdbcTemplate jdbcTemplate;

public void setDataSource(DataSource dataSource) {
    this.jdbcTemplate = new JdbcTemplate(dataSource);
}

public List<Map<String, Object>> getList() {
    return this.jdbcTemplate.queryForList("select * from mytable");
}

The list returned would look something like this:

[{name=Bob, id=1}, {name=Mary, id=2}]

19.2.6 Updating the database

The following example shows a column updated for a certain primary key. In this example, an SQL statement has placeholders for row parameters. The parameter values can be passed in as varargs or alternatively as an array of objects. Thus primitives should be wrapped in the primitive wrapper classes explicitly or using auto-boxing.

import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

public class ExecuteAnUpdate {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public void setName(int id, String name) {
        this.jdbcTemplate.update("update mytable set name = ? where id = ?", name, id);
    }
}

19.2.7 Retrieving auto-generated keys

An update() convenience method supports the retrieval of primary keys generated by the database. This support is part of the JDBC 3.0 standard; see Chapter 13.6 of the specification for details. The method takes a PreparedStatementCreator as its first argument, and this is the way the required insert statement is specified. The other argument is a KeyHolder, which contains the generated key on successful return from the update. There is not a standard single way to create an appropriate PreparedStatement (which explains why the method signature is the way it is). The following example works on Oracle but may not work on other platforms:

final String INSERT_SQL = "insert into my_test (name) values(?)";
final String name = "Rob";

KeyHolder keyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(
    new PreparedStatementCreator() {
        public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
            PreparedStatement ps = connection.prepareStatement(INSERT_SQL, new String[] {"id"});
            ps.setString(1, name);
            return ps;
        }
    },
    keyHolder);

// keyHolder.getKey() now contains the generated key

19.3 Controlling database connections

19.3.1 DataSource

Spring obtains a connection to the database through a DataSource. A DataSource is part of the JDBC specification and is a generalized connection factory. It allows a container or a framework to hide connection pooling and transaction management issues from the application code. As a developer, you need not know details about how to connect to the database; that is the responsibility of the administrator that sets up the datasource. You most likely fill both roles as you develop and test code, but you do not necessarily have to know how the production data source is configured.

When using Spring’s JDBC layer, you obtain a data source from JNDI or you configure your own with a connection pool implementation provided by a third party. Popular implementations are Apache Jakarta Commons DBCP and C3P0. Implementations in the Spring distribution are meant only for testing purposes and do not provide pooling.

This section uses Spring’s DriverManagerDataSource implementation, and several additional implementations are covered later.

[Note]

The DriverManagerDataSource class should only be used for testing purposes since it does not provide pooling and will perform poorly when multiple requests for a connection are made.

You obtain a connection with DriverManagerDataSource as you typically obtain a JDBC connection. Specify the fully qualified classname of the JDBC driver so that the DriverManager can load the driver class. Next, provide a URL that varies between JDBC drivers. (Consult the documentation for your driver for the correct value.) Then provide a username and a password to connect to the database. Here is an example of how to configure a DriverManagerDataSource in Java code:

DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
dataSource.setUrl("jdbc:hsqldb:hsql://localhost:");
dataSource.setUsername("sa");
dataSource.setPassword("");

Here is the corresponding XML configuration:

<bean id="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource">
    <property name="driverClassName" value="${jdbc.driverClassName}"/>
    <property name="url" value="${jdbc.url}"/>
    <property name="username" value="${jdbc.username}"/>
    <property name="password" value="${jdbc.password}"/>
</bean>

<context:property-placeholder location="jdbc.properties"/>

The following examples show the basic connectivity and configuration for DBCP and C3P0. To learn about more options that help control the pooling features, see the product documentation for the respective connection pooling implementations.

DBCP configuration:

<bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
    <property name="driverClassName" value="${jdbc.driverClassName}"/>
    <property name="url" value="${jdbc.url}"/>
    <property name="username" value="${jdbc.username}"/>
    <property name="password" value="${jdbc.password}"/>
</bean>

<context:property-placeholder location="jdbc.properties"/>

C3P0 configuration:

<bean id="dataSource" class="com.mchange.v2.c3p0.ComboPooledDataSource" destroy-method="close">
    <property name="driverClass" value="${jdbc.driverClassName}"/>
    <property name="jdbcUrl" value="${jdbc.url}"/>
    <property name="user" value="${jdbc.username}"/>
    <property name="password" value="${jdbc.password}"/>
</bean>

<context:property-placeholder location="jdbc.properties"/>

19.3.2 DataSourceUtils

The DataSourceUtils class is a convenient and powerful helper class that provides static methods to obtain connections from JNDI and close connections if necessary. It supports thread-bound connections with, for example, DataSourceTransactionManager.
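
The following is a rough sketch of the typical usage pattern for code that needs a raw JDBC Connection while still respecting Spring-managed, thread-bound connections. The surrounding class and method names are illustrative only; the DataSourceUtils calls are the relevant part.

import java.sql.Connection;
import javax.sql.DataSource;
import org.springframework.jdbc.datasource.DataSourceUtils;

public class RawJdbcSnippet {

    public void doWork(DataSource dataSource) {
        // Returns the connection bound to the current transaction, if there is one;
        // otherwise obtains a new connection from the DataSource.
        Connection con = DataSourceUtils.getConnection(dataSource);
        try {
            // ... work with the connection using plain JDBC
        }
        finally {
            // Only physically closes the connection if it is not bound to a transaction.
            DataSourceUtils.releaseConnection(con, dataSource);
        }
    }
}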

19.3.3 SmartDataSource

The SmartDataSource interface should be implemented by classes that can provide a connection to a relational database. It extends the DataSource interface to allow classes using it to query whether the connection should be closed after a given operation. This usage is efficient when you know that you will reuse a connection.

19.3.4 AbstractDataSource

AbstractDataSource is an abstract base class for Spring’s DataSource implementations that implements code that is common to all DataSource implementations. You extend the AbstractDataSource class if you are writing your own DataSource implementation.

19.3.5 SingleConnectionDataSource

The SingleConnectionDataSource class is an implementation of the SmartDataSource interface that wraps a single Connection that is not closed after each use. Obviously, this is not multi-threading capable.

If any client code calls close in the assumption of a pooled connection, as when using persistence tools, set the suppressClose property to true. This setting returns a close-suppressing proxy wrapping the physical connection. Be aware that you will not be able to cast this to a native Oracle Connection or the like anymore.

This is primarily a test class. For example, it enables easy testing of code outside an application server, in conjunction with a simple JNDI environment. In contrast to DriverManagerDataSource, it reuses the same connection all the time, avoiding excessive creation of physical connections.
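
As a rough sketch, a test might configure a close-suppressing single connection like this; the URL and credentials are placeholders, reusing the HSQL values from the earlier example:

SingleConnectionDataSource dataSource = new SingleConnectionDataSource(
        "jdbc:hsqldb:hsql://localhost:", "sa", "", true); // true enables close suppression

// ... exercise the code under test against this dataSource ...

dataSource.destroy(); // physically closes the underlying connection when the test is done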

19.3.6 DriverManagerDataSource

The DriverManagerDataSource class is an implementation of the standard DataSource interface that configures a plain JDBC driver through bean properties, and returns a new Connection every time.

This implementation is useful for test and stand-alone environments outside of a Java EE container, either as a DataSource bean in a Spring IoC container, or in conjunction with a simple JNDI environment. Pool-assuming Connection.close() calls will simply close the connection, so any DataSource-aware persistence code should work. However, using JavaBean-style connection pools such as commons-dbcp is so easy, even in a test environment, that it is almost always preferable to use such a connection pool over DriverManagerDataSource.

19.3.7 TransactionAwareDataSourceProxy

TransactionAwareDataSourceProxy is a proxy for a target DataSource, which wraps that target DataSource to add awareness of Spring-managed transactions. In this respect, it is similar to a transactional JNDI DataSource as provided by a Java EE server.

[Note]

It is rarely desirable to use this class, except when existing code must be called and passed a standard JDBC DataSource interface implementation. In this case, it’s possible to keep that code usable and, at the same time, have it participate in Spring-managed transactions. It is generally preferable to write your own new code using the higher level abstractions for resource management, such as JdbcTemplate or DataSourceUtils.

(See the TransactionAwareDataSourceProxy javadocs for more details.)
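
As an illustration only, wrapping an existing pool programmatically might look like the following sketch; targetDataSource stands for whatever pooled DataSource the existing code should ultimately use:

// targetDataSource is assumed to be the pooled DataSource configured elsewhere
DataSource dataSource = new TransactionAwareDataSourceProxy(targetDataSource);

// Hand this proxy to the existing code; its getConnection()/close() calls will then
// participate in Spring-managed transactions whenever one is active.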

19.3.8 DataSourceTransactionManager

The DataSourceTransactionManager class is a PlatformTransactionManager implementation for single JDBC datasources. It binds a JDBC connection from the specified data source to the currently executing thread, potentially allowing for one thread connection per data source.

Application code is required to retrieve the JDBC connection through DataSourceUtils.getConnection(DataSource) instead of Java EE’s standard DataSource.getConnection. It throws unchecked org.springframework.dao exceptions instead of checked SQLExceptions. All framework classes like JdbcTemplate use this strategy implicitly. If not used with this transaction manager, the lookup strategy behaves exactly like the common one - it can thus be used in any case.

The DataSourceTransactionManager class supports custom isolation levels, and timeouts that get applied as appropriate JDBC statement query timeouts. To support the latter, application code must either use JdbcTemplate or call the DataSourceUtils.applyTransactionTimeout(..) method for each created statement.

This implementation can be used instead of JtaTransactionManager in the single resource case, as it does not require the container to support JTA. Switching between both is just a matter of configuration, if you stick to the required connection lookup pattern. JTA does not support custom isolation levels!
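
For reference, a minimal programmatic setup is sketched below; dataSource stands for the single JDBC DataSource the transactions should be bound to. The resulting transaction manager can then be used with Spring’s declarative or programmatic transaction demarcation.

// dataSource is the single JDBC DataSource whose connections should be transactional
PlatformTransactionManager txManager = new DataSourceTransactionManager(dataSource);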

19.3.9 NativeJdbcExtractor

Sometimes you need to access vendor specific JDBC methods that differ from the standard JDBC API. This can be problematic if you are running in an application server or with a DataSource that wraps the Connection, Statement, and ResultSet objects with its own wrapper objects. To gain access to the native objects you can configure your JdbcTemplate or OracleLobHandler with a NativeJdbcExtractor.

The NativeJdbcExtractor comes in a variety of flavors to match your execution environment:

  • SimpleNativeJdbcExtractor
  • C3P0NativeJdbcExtractor
  • CommonsDbcpNativeJdbcExtractor
  • JBossNativeJdbcExtractor
  • WebLogicNativeJdbcExtractor
  • WebSphereNativeJdbcExtractor
  • XAPoolNativeJdbcExtractor

Usually the SimpleNativeJdbcExtractor is sufficient for unwrapping a Connection object in most environments. See the javadocs for more details.
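
A brief sketch of wiring an extractor into a template, assuming a JdbcTemplate that exposes the setNativeJdbcExtractor property and that the SimpleNativeJdbcExtractor is adequate for the environment in question:

JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
// Callbacks executed by this template can now work with the unwrapped, vendor-specific
// Connection, Statement and ResultSet objects where the extractor applies.
jdbcTemplate.setNativeJdbcExtractor(new SimpleNativeJdbcExtractor());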

19.4 JDBC batch operations

Most JDBC drivers provide improved performance if you batch multiple calls to the same prepared statement. By grouping updates into batches you limit the number of round trips to the database.

19.4.1 Basic batch operations with the JdbcTemplate

You accomplish JdbcTemplate batch processing by implementing two methods of a special interface, BatchPreparedStatementSetter, and passing that in as the second parameter in your batchUpdate method call. Use the getBatchSize method to provide the size of the current batch. Use the setValues method to set the values for the parameters of the prepared statement. This method will be called the number of times that you specified in the getBatchSize call. The following example updates the actor table based on entries in a list. The entire list is used as the batch in this example:

public class JdbcActorDao implements ActorDao {
    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public int[] batchUpdate(final List<Actor> actors) {
        int[] updateCounts = jdbcTemplate.batchUpdate("update t_actor set first_name = ?, " +
                "last_name = ? where id = ?",
            new BatchPreparedStatementSetter() {
                public void setValues(PreparedStatement ps, int i) throws SQLException {
                        ps.setString(1, actors.get(i).getFirstName());
                        ps.setString(2, actors.get(i).getLastName());
                        ps.setLong(3, actors.get(i).getId().longValue());
                    }

                    public int getBatchSize() {
                        return actors.size();
                    }
                });
        return updateCounts;
    }

    // ... additional methods
}

If you are processing a stream of updates or reading from a file, then you might have a preferred batch size, but the last batch might not have that number of entries. In this case you can use the InterruptibleBatchPreparedStatementSetter interface, which allows you to interrupt a batch once the input source is exhausted. The isBatchExhausted method allows you to signal the end of the batch.
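
A rough sketch of this interface, assuming the same actor update as above and a hypothetical BufferedReader named reader whose lines hold "firstName,lastName,id". Note that setValues is invoked before isBatchExhausted for each position, so the setter records exhaustion as soon as the input runs dry:

jdbcTemplate.batchUpdate(
        "update t_actor set first_name = ?, last_name = ? where id = ?",
        new InterruptibleBatchPreparedStatementSetter() {

            private boolean exhausted;

            public void setValues(PreparedStatement ps, int i) throws SQLException {
                String line;
                try {
                    line = reader.readLine();
                }
                catch (IOException ex) {
                    throw new SQLException("Could not read next batch entry", ex);
                }
                if (line == null) {
                    exhausted = true; // nothing set; this position is not added to the batch
                    return;
                }
                String[] fields = line.split(",");
                ps.setString(1, fields[0]);
                ps.setString(2, fields[1]);
                ps.setLong(3, Long.parseLong(fields[2]));
            }

            public boolean isBatchExhausted(int i) {
                return exhausted;
            }

            public int getBatchSize() {
                // Upper bound only; isBatchExhausted signals the actual end of the input.
                return Integer.MAX_VALUE;
            }
        });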

19.4.2 Batch operations with a List of objects

Both the JdbcTemplate and the NamedParameterJdbcTemplate provide an alternate way of providing the batch update. Instead of implementing a special batch interface, you provide all parameter values in the call as a list. The framework loops over these values and uses an internal prepared statement setter. The API varies depending on whether you use named parameters. For the named parameters you provide an array of SqlParameterSource, one entry for each member of the batch. You can use the SqlParameterSourceUtils.createBatch method to create this array, passing in either an array of JavaBeans or an array of Maps containing the parameter values.

This example shows a batch update using named parameters:

public class JdbcActorDao implements ActorDao {
    private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
    }

    public int[] batchUpdate(final List<Actor> actors) {
        SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(actors.toArray());
        int[] updateCounts = namedParameterJdbcTemplate.batchUpdate(
                "update t_actor set first_name = :firstName, last_name = :lastName where id = :id",
                batch);
        return updateCounts;
    }

    // ... additional methods
}

For an SQL statement using the classic "?" placeholders, you pass in a list containing an object array with the update values. This object array must have one entry for each placeholder in the SQL statement, and they must be in the same order as they are defined in the SQL statement.

The same example using classic JDBC "?" placeholders:

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public int[] batchUpdate(final List<Actor> actors) {
        List<Object[]> batch = new ArrayList<Object[]>();
        for (Actor actor : actors) {
            Object[] values = new Object[] {
                    actor.getFirstName(),
                    actor.getLastName(),
                    actor.getId()};
            batch.add(values);
        }
        int[] updateCounts = jdbcTemplate.batchUpdate(
                "update t_actor set first_name = ?, last_name = ? where id = ?",
                batch);
        return updateCounts;
    }

    // ... additional methods

}

All of the above batch update methods return an int array containing the number of affected rows for each batch entry. This count is reported by the JDBC driver. If the count is not available, the JDBC driver returns a -2 value.

19.4.3 Batch operations with multiple batches

The last example of a batch update deals with batches that are so large that you want to break them up into several smaller batches. You can of course do this with the methods mentioned above by making multiple calls to the batchUpdate method, but there is now a more convenient method. This method takes, in addition to the SQL statement, a Collection of objects containing the parameters, the number of updates to make for each batch, and a ParameterizedPreparedStatementSetter to set the values for the parameters of the prepared statement. The framework loops over the provided values and breaks the update calls into batches of the size specified.

This example shows a batch update using a batch size of 100:

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public int[][] batchUpdate(final Collection<Actor> actors) {
        int[][] updateCounts = jdbcTemplate.batchUpdate(
                "update t_actor set first_name = ?, last_name = ? where id = ?",
                actors,
                100,
                new ParameterizedPreparedStatementSetter<Actor>() {
                    public void setValues(PreparedStatement ps, Actor argument) throws SQLException {
                        ps.setString(1, argument.getFirstName());
                        ps.setString(2, argument.getLastName());
                        ps.setLong(3, argument.getId().longValue());
                    }
                });
        return updateCounts;
    }

    // ... additional methods

}

The batch update method used in this call returns an array of int arrays containing an array entry for each batch with an array of the number of affected rows for each update. The top level array’s length indicates the number of batches executed and the second level array’s length indicates the number of updates in that batch. The number of updates in each batch should be the batch size provided for all batches except for the last one that might be less, depending on the total number of update objects provided. The update count for each update statement is the one reported by the JDBC driver. If the count is not available, the JDBC driver returns a -2 value.

19.5 Simplifying JDBC operations with the SimpleJdbc classes

The SimpleJdbcInsert and SimpleJdbcCall classes provide a simplified configuration by taking advantage of database metadata that can be retrieved through the JDBC driver. This means there is less to configure up front, although you can override or turn off the metadata processing if you prefer to provide all the details in your code.

19.5.1 Inserting data using SimpleJdbcInsert

Let’s start by looking at the SimpleJdbcInsert class with the minimal amount of configuration options. You should instantiate the SimpleJdbcInsert in the data access layer’s initialization method. For this example, the initializing method is the setDataSource method. You do not need to subclass the SimpleJdbcInsert class; simply create a new instance and set the table name using the withTableName method. Configuration methods for this class follow the "fluid" style that returns the instance of the SimpleJdbcInsert, which allows you to chain all configuration methods. This example uses only one configuration method; you will see examples of multiple ones later.

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcInsert insertActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.insertActor = new SimpleJdbcInsert(dataSource).withTableName("t_actor");
    }

    public void add(Actor actor) {
        Map<String, Object> parameters = new HashMap<String, Object>(3);
        parameters.put("id", actor.getId());
        parameters.put("first_name", actor.getFirstName());
        parameters.put("last_name", actor.getLastName());
        insertActor.execute(parameters);
    }

    // ... additional methods
}

The execute method used here takes a plain java.util.Map as its only parameter. The important thing to note here is that the keys used for the Map must match the column names of the table as defined in the database. This is because we read the metadata in order to construct the actual insert statement.

19.5.2 Retrieving auto-generated keys using SimpleJdbcInsert

This example uses the same insert as the preceding, but instead of passing in the id it retrieves the auto-generated key and sets it on the new Actor object. When you create the SimpleJdbcInsert, in addition to specifying the table name, you specify the name of the generated key column with the usingGeneratedKeyColumns method.

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcInsert insertActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.insertActor = new SimpleJdbcInsert(dataSource)
                .withTableName("t_actor")
                .usingGeneratedKeyColumns("id");
    }

    public void add(Actor actor) {
        Map<String, Object> parameters = new HashMap<String, Object>(2);
        parameters.put("first_name", actor.getFirstName());
        parameters.put("last_name", actor.getLastName());
        Number newId = insertActor.executeAndReturnKey(parameters);
        actor.setId(newId.longValue());
    }

    // ... additional methods
}

The main difference when executing the insert by this second approach is that you do not add the id to the Map and you call the executeAndReturnKey method. This returns a java.lang.Number object with which you can create an instance of the numerical type that is used in our domain class. You cannot rely on all databases to return a specific Java class here; java.lang.Number is the base class that you can rely on. If you have multiple auto-generated columns, or the generated values are non-numeric, then you can use a KeyHolder that is returned from the executeAndReturnKeyHolder method.
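
A brief sketch of the KeyHolder variant mentioned above; the second generated column name used here is purely hypothetical:

SimpleJdbcInsert insertActor = new SimpleJdbcInsert(dataSource)
        .withTableName("t_actor")
        .usingGeneratedKeyColumns("id", "created_at"); // "created_at" is a hypothetical second generated column

Map<String, Object> parameters = new HashMap<String, Object>(2);
parameters.put("first_name", actor.getFirstName());
parameters.put("last_name", actor.getLastName());

KeyHolder keyHolder = insertActor.executeAndReturnKeyHolder(parameters);
Map<String, Object> keys = keyHolder.getKeys(); // one entry per generated column
actor.setId(((Number) keys.get("id")).longValue());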

19.5.3 Specifying columns for a SimpleJdbcInsert

You can limit the columns for an insert by specifying a list of column names with the usingColumns method:

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcInsert insertActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.insertActor = new SimpleJdbcInsert(dataSource)
                .withTableName("t_actor")
                .usingColumns("first_name", "last_name")
                .usingGeneratedKeyColumns("id");
    }

    public void add(Actor actor) {
        Map<String, Object> parameters = new HashMap<String, Object>(2);
        parameters.put("first_name", actor.getFirstName());
        parameters.put("last_name", actor.getLastName());
        Number newId = insertActor.executeAndReturnKey(parameters);
        actor.setId(newId.longValue());
    }

    // ... additional methods

}

The execution of the insert is the same as if you had relied on the metadata to determine which columns to use.

19.5.4 Using SqlParameterSource to provide parameter values

Using a Map to provide parameter values works fine, but it’s not the most convenient class to use. Spring provides a couple of implementations of the SqlParameterSource interface that can be used instead. The first one is BeanPropertySqlParameterSource, which is a very convenient class if you have a JavaBean-compliant class that contains your values. It will use the corresponding getter method to extract the parameter values. Here is an example:

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcInsert insertActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.insertActor = new SimpleJdbcInsert(dataSource)
                .withTableName("t_actor")
                .usingGeneratedKeyColumns("id");
    }

    public void add(Actor actor) {
        SqlParameterSource parameters = new BeanPropertySqlParameterSource(actor);
        Number newId = insertActor.executeAndReturnKey(parameters);
        actor.setId(newId.longValue());
    }

    // ... additional methods

}

Another option is the MapSqlParameterSource that resembles a Map but provides a more convenient addValue method that can be chained.

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcInsert insertActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.insertActor = new SimpleJdbcInsert(dataSource)
                .withTableName("t_actor")
                .usingGeneratedKeyColumns("id");
    }

    public void add(Actor actor) {
        SqlParameterSource parameters = new MapSqlParameterSource()
                .addValue("first_name", actor.getFirstName())
                .addValue("last_name", actor.getLastName());
        Number newId = insertActor.executeAndReturnKey(parameters);
        actor.setId(newId.longValue());
    }

    // ... additional methods

}

As you can see, the configuration is the same; only the executing code has to change to use these alternative input classes.

19.5.5 Calling a stored procedure with SimpleJdbcCall

The SimpleJdbcCall class leverages metadata in the database to look up names of in and out parameters, so that you do not have to declare them explicitly. You can declare parameters if you prefer to do that, or if you have parameters such as ARRAY or STRUCT that do not have an automatic mapping to a Java class. The first example shows a simple procedure that returns only scalar values in VARCHAR and DATE format from a MySQL database. The example procedure reads a specified actor entry and returns first_name, last_name, and birth_date columns in the form of out parameters.

CREATE PROCEDURE read_actor (
    IN in_id INTEGER,
    OUT out_first_name VARCHAR(100),
    OUT out_last_name VARCHAR(100),
    OUT out_birth_date DATE)
BEGIN
    SELECT first_name, last_name, birth_date
    INTO out_first_name, out_last_name, out_birth_date
    FROM t_actor where id = in_id;
END;

The in_id parameter contains the id of the actor you are looking up. The out parameters return the data read from the table.

The SimpleJdbcCall is declared in a similar manner to the SimpleJdbcInsert. You should instantiate and configure the class in the initialization method of your data access layer. Compared to the StoredProcedure class, you don’t have to create a subclass and you don’t have to declare parameters that can be looked up in the database metadata. Following is an example of a SimpleJdbcCall configuration using the above stored procedure. The only configuration option, in addition to the DataSource, is the name of the stored procedure.

public class JdbcActorDao implements ActorDao {

    private JdbcTemplate jdbcTemplate;
    private SimpleJdbcCall procReadActor;

    public void setDataSource(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
        this.procReadActor = new SimpleJdbcCall(dataSource)
                .withProcedureName("read_actor");
    }

    public Actor readActor(Long id) {
        SqlParameterSource in = new MapSqlParameterSource()
                .addValue("in_id", id);
        Map out = procReadActor.execute(in);
        Actor actor = new Actor();
        actor.setId(id);
        actor.setFirstName((String) out.get("out_first_name"));
        actor.setLastName((String) out.get("out_last_name"));
        actor.setBirthDate((Date) out.get("out_birth_date"));
        return actor;
    }

    // ... additional methods

}

The code you write for the execution of the call involves creating an SqlParameterSource containing the IN parameter. It’s important to match the name provided for the input value with that of the parameter name declared in the stored procedure. The case does not have to match because you use metadata to determine how database objects should be referred to in a stored procedure. What is specified in the source for the stored procedure is not necessarily the way it is stored in the database. Some databases transform names to all upper case while others use lower case or use the case as specified.

The execute method takes the IN parameters and returns a Map containing any out parameters keyed by the name as specified in the stored procedure. In this case they are out_first_name, out_last_name and out_birth_date.

The last part of the execute method creates an Actor instance to use to return the data retrieved. Again, it is important to use the names of the out parameters as they are declared in the stored procedure. Also, the case in the names of the out parameters stored in the results map matches that of the out parameter names in the database, which could vary between databases. To make your code more portable you should do a case-insensitive lookup or instruct Spring to use a LinkedCaseInsensitiveMap. To do the latter, you create your own JdbcTemplate and set the setResultsMapCaseInsensitive property to true. Then you pass this customized JdbcTemplate instance into the constructor of your SimpleJdbcCall. Here is an example of this configuration:

public class JdbcActorDao implements ActorDao {

    private SimpleJdbcCall procReadActor;

    public void setDataSource(DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setResultsMapCaseInsensitive(true);
        this.procReadActor = new SimpleJdbcCall(jdbcTemplate)
                .withProcedureName("read_actor");
    }

    // ... additional methods

}

By taking this action, you avoid conflicts in the case used for the names of your returned out parameters.

19.5.6 Explicitly declaring parameters to use for a SimpleJdbcCall

You have seen how the parameters are deduced based on metadata, but you can declare them explicitly if you wish. You do this by creating and configuring the SimpleJdbcCall with the declareParameters method, which takes a variable number of SqlParameter objects as input. See the next section for details on how to define an SqlParameter.

[Note]

Explicit declarations are necessary if the database you use is not a Spring-supported database. Currently Spring supports metadata lookup of stored procedure calls for the following databases: Apache Derby, DB2, MySQL, Microsoft SQL Server, Oracle, and Sybase. We also support metadata lookup of stored functions for MySQL, Microsoft SQL Server, and Oracle.

You can opt to declare one, some, or all the parameters explicitly. The parameter metadata is still used where you do not declare parameters explicitly. To bypass all processing of metadata lookups for potential parameters and only use the declared parameters, you call the method withoutProcedureColumnMetaDataAccess as part of the declaration. Suppose that you have two or more different call signatures declared for a database function. In this case you call useInParameterNames to specify the list of IN parameter names to include for a given signature.

The following example shows a fully declared procedure call, using the information from the preceding example.

public class JdbcActorDao implements ActorDao {

    private SimpleJdbcCall procReadActor;

    public void setDataSource(DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setResultsMapCaseInsensitive(true);
        this.procReadActor = new SimpleJdbcCall(jdbcTemplate)
                .withProcedureName("read_actor")
                .withoutProcedureColumnMetaDataAccess()
                .useInParameterNames("in_id")
                .declareParameters(
                        new SqlParameter("in_id", Types.NUMERIC),
                        new SqlOutParameter("out_first_name", Types.VARCHAR),
                        new SqlOutParameter("out_last_name", Types.VARCHAR),
                        new SqlOutParameter("out_birth_date", Types.DATE)
                );
    }

    // ... additional methods
}

The execution and end results of the two examples are the same; this one specifies all details explicitly rather than relying on metadata.

19.5.7 How to define SqlParameters

To define a parameter for the SimpleJdbc classes and also for the RDBMS operations classes, covered in Section 19.6, “Modeling JDBC operations as Java objects”, you use an SqlParameter or one of its subclasses. You typically specify the parameter name and SQL type in the constructor. The SQL type is specified using the java.sql.Types constants. We have already seen declarations like:

new SqlParameter("in_id", Types.NUMERIC),
    new SqlOutParameter("out_first_name", Types.VARCHAR),

The first line with the SqlParameter declares an IN parameter. IN parameters can be used for both stored procedure calls and for queries using the SqlQuery and its subclasses covered in the following section.

The second line with the SqlOutParameter declares an out parameter to be used in a stored procedure call. There is also an SqlInOutParameter for InOut parameters, parameters that provide an IN value to the procedure and that also return a value.

[Note]

Only parameters declared as SqlParameter and SqlInOutParameter will be used to provide input values. This is different from the StoredProcedure class, which for backwards compatibility reasons allows input values to be provided for parameters declared as SqlOutParameter.

For IN parameters, in addition to the name and the SQL type, you can specify a scale for numeric data or a type name for custom database types. For out parameters, you can provide a RowMapper to handle mapping of rows returned from a REF cursor. Another option is to specify an SqlReturnType that provides an opportunity to define customized handling of the return values.
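
A few illustrative declarations of these variants, written as they would appear inside a declareParameters(..) call; the parameter names, the custom type name, and the use of an Oracle REF cursor are assumptions made for the sake of the example:

new SqlParameter("in_amount", Types.NUMERIC, 2),           // numeric IN parameter with a scale of 2
new SqlParameter("in_item", Types.STRUCT, "ITEM_TYPE"),     // IN parameter with a custom database type name
new SqlOutParameter("out_actors", OracleTypes.CURSOR,
        BeanPropertyRowMapper.newInstance(Actor.class)),    // out parameter mapped row by row from a REF cursor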

19.5.8 Calling a stored function using SimpleJdbcCall

You call a stored function in almost the same way as you call a stored procedure, except that you provide a function name rather than a procedure name. You use the withFunctionName method as part of the configuration to indicate that we want to make a call to a function, and the corresponding string for a function call is generated. A specialized execute call, executeFunction, is used to execute the function and it returns the function return value as an object of a specified type, which means you do not have to retrieve the return value from the results map. A similar convenience method named executeObject is also available for stored procedures that only have one out parameter. The following example is based on a stored function named get_actor_name that returns an actor’s full name. Here is the MySQL source for this function:

CREATE FUNCTION get_actor_name (in_id INTEGER)
RETURNS VARCHAR(200) READS SQL DATA
BEGIN
    DECLARE out_name VARCHAR(200);
    SELECT concat(first_name, ' ', last_name)
        INTO out_name
        FROM t_actor where id = in_id;
    RETURN out_name;
END;

To call this function we again create a SimpleJdbcCall in the initialization method.

public class JdbcActorDao implements ActorDao {

    private SimpleJdbcCall funcGetActorName;

    public void setDataSource(DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setResultsMapCaseInsensitive(true);
        this.funcGetActorName = new SimpleJdbcCall(jdbcTemplate)
                .withFunctionName("get_actor_name");
    }

    public String getActorName(Long id) {
        SqlParameterSource in = new MapSqlParameterSource()
                .addValue("in_id", id);
        String name = funcGetActorName.executeFunction(String.class, in);
        return name;
    }

    // ... additional methods

}

The executeFunction method used here returns a String containing the return value from the function call.
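
As a sketch of the executeObject convenience mentioned above, a call to a hypothetical procedure get_actor_last_name with a single out parameter could look like this:

SimpleJdbcCall procGetLastName = new SimpleJdbcCall(jdbcTemplate)
        .withProcedureName("get_actor_last_name"); // hypothetical procedure with one out parameter

String lastName = procGetLastName.executeObject(String.class,
        new MapSqlParameterSource().addValue("in_id", id));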

19.5.9 Returning ResultSet/REF Cursor from a SimpleJdbcCall

Calling a stored procedure or function that returns a result set is a bit tricky. Some databases return result sets during the JDBC results processing while others require an explicitly registered out parameter of a specific type. Both approaches need additional processing to loop over the result set and process the returned rows. With the SimpleJdbcCall you use the returningResultSet method and declare a RowMapper implementation to be used for a specific parameter. In the case where the result set is returned during the results processing, there are no names defined, so the returned results will have to match the order in which you declare the RowMapper implementations. The name specified is still used to store the processed list of results in the results map that is returned from the execute statement.

The next example uses a stored procedure that takes no IN parameters and returns all rows from the t_actor table. Here is the MySQL source for this procedure:

CREATE PROCEDURE read_all_actors()
BEGIN
 SELECT a.id, a.first_name, a.last_name, a.birth_date FROM t_actor a;
END;

To call this procedure you declare the RowMapper. Because the class you want to map to follows the JavaBean rules, you can use a BeanPropertyRowMapper that is created by passing in the required class to map to in the newInstance method.

public class JdbcActorDao implements ActorDao {

    private SimpleJdbcCall procReadAllActors;

    public void setDataSource(DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setResultsMapCaseInsensitive(true);
        this.procReadAllActors = new SimpleJdbcCall(jdbcTemplate)
                .withProcedureName("read_all_actors")
                .returningResultSet("actors",
                BeanPropertyRowMapper.newInstance(Actor.class));
    }

    public List getActorsList() {
        Map m = procReadAllActors.execute(new HashMap<String, Object>(0));
        return (List) m.get("actors");
    }

    // ... additional methods

}

The execute call passes in an empty Map because this call does not take any parameters. The list of Actors is then retrieved from the results map and returned to the caller.

19.6 Modeling JDBC operations as Java objects

The org.springframework.jdbc.object package contains classes that allow you to access the database in a more object-oriented manner. As an example, you can execute queries and get the results back as a list containing business objects with the relational column data mapped to the properties of the business object. You can also execute stored procedures and run update, delete, and insert statements.

[Note]

Many Spring developers believe that the various RDBMS operation classes described below (with the exception of the StoredProcedure class) can often be replaced with straight JdbcTemplate calls. Often it is simpler to write a DAO method that simply calls a method on a JdbcTemplate directly (as opposed to encapsulating a query as a full-blown class).

However, if you are getting measurable value from using the RDBMS operation classes, continue using these classes.

19.6.1 SqlQuery

SqlQuery is a reusable, threadsafe class that encapsulates an SQL query. Subclasses must implement the newRowMapper(..) method to provide a RowMapper instance that can create one object per row obtained from iterating over the ResultSet that is created during the execution of the query. The SqlQuery class is rarely used directly because the MappingSqlQuery subclass provides a much more convenient implementation for mapping rows to Java classes. Other implementations that extend SqlQuery are MappingSqlQueryWithParameters and UpdatableSqlQuery.

19.6.2 MappingSqlQuery

MappingSqlQuery is a reusable query in which concrete subclasses must implement the abstract mapRow(..) method to convert each row of the supplied ResultSet into an object of the type specified. The following example shows a custom query that maps the data from the t_actor relation to an instance of the Actor class.

public class ActorMappingQuery extends MappingSqlQuery<Actor> {

    public ActorMappingQuery(DataSource ds) {
        super(ds, "select id, first_name, last_name from t_actor where id = ?");
        super.declareParameter(new SqlParameter("id", Types.INTEGER));
        compile();
    }

    @Override
    protected Actor mapRow(ResultSet rs, int rowNumber) throws SQLException {
        Actor actor = new Actor();
        actor.setId(rs.getLong("id"));
        actor.setFirstName(rs.getString("first_name"));
        actor.setLastName(rs.getString("last_name"));
        return actor;
    }

}

The class extends MappingSqlQuery parameterized with the Actor type. The constructor for this custom query takes the DataSource as the only parameter. In this constructor you call the constructor on the superclass with the DataSource and the SQL that should be executed to retrieve the rows for this query. This SQL will be used to create a PreparedStatement so it may contain placeholders for any parameters to be passed in during execution. You must declare each parameter using the declareParameter method passing in an SqlParameter. The SqlParameter takes a name and the JDBC type as defined in java.sql.Types. After you define all parameters, you call the compile() method so the statement can be prepared and later executed. This class is thread-safe after it is compiled, so as long as these instances are created when the DAO is initialized they can be kept as instance variables and be reused.

private ActorMappingQuery actorMappingQuery;

@Autowired
public void setDataSource(DataSource dataSource) {
    this.actorMappingQuery = new ActorMappingQuery(dataSource);
}

public Actor getActor(Long id) {
    return actorMappingQuery.findObject(id);
}

The method in this example retrieves the actor with the id that is passed in as the only parameter. Since we only want one object returned we simply call the convenience method findObject with the id as parameter. If we had instead a query that returned a list of objects and took additional parameters then we would use one of the execute methods that takes an array of parameter values passed in as varargs.

public List<Actor> searchForActors(int age, String namePattern) {
    List<Actor> actors = actorSearchMappingQuery.execute(age, namePattern);
    return actors;
}

19.6.3 SqlUpdate

The SqlUpdate class encapsulates an SQL update. Like a query, an update object is reusable, and like all RdbmsOperation classes, an update can have parameters and is defined in SQL. This class provides a number of update(..) methods analogous to the execute(..) methods of query objects. The SqlUpdate class is concrete. It can be subclassed, for example, to add a custom update method, as in the following snippet where it’s simply called execute. However, you don’t have to subclass the SqlUpdate class since it can easily be parameterized by setting SQL and declaring parameters.

import java.sql.Types;

import javax.sql.DataSource;

import org.springframework.jdbc.core.SqlParameter;
import org.springframework.jdbc.object.SqlUpdate;

public class UpdateCreditRating extends SqlUpdate {

    public UpdateCreditRating(DataSource ds) {
        setDataSource(ds);
        setSql("update customer set credit_rating = ? where id = ?");
        declareParameter(new SqlParameter("creditRating", Types.NUMERIC));
        declareParameter(new SqlParameter("id", Types.NUMERIC));
        compile();
    }

    /**
     * @param id for the Customer to be updated
     * @param rating the new value for credit rating
     * @return number of rows updated
     */
    public int execute(int id, int rating) {
        return update(rating, id);
    }
}
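
As noted above, subclassing is optional. Under the same assumptions (a customer table with a credit_rating column), the update can also be configured inline; this is only a sketch:

SqlUpdate updateCreditRating = new SqlUpdate(dataSource,
        "update customer set credit_rating = ? where id = ?");
updateCreditRating.declareParameter(new SqlParameter("creditRating", Types.NUMERIC));
updateCreditRating.declareParameter(new SqlParameter("id", Types.NUMERIC));
updateCreditRating.compile();

int rowsUpdated = updateCreditRating.update(85, 1); // new rating, then the customer id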

19.6.4 StoredProcedure

The StoredProcedure class is a superclass for object abstractions of RDBMS stored procedures. This class is abstract, and its various execute(..) methods have protected access, preventing use other than through a subclass that offers tighter typing.

The inherited sql property will be the name of the stored procedure in the RDBMS.

To define a parameter for the StoredProcedure class, you use an SqlParameter or one of its subclasses. You must specify the parameter name and SQL type in the constructor like in the following code snippet. The SQL type is specified using the java.sql.Types constants.

new SqlParameter("in_id", Types.NUMERIC),
    new SqlOutParameter("out_first_name", Types.VARCHAR),

The first line with the SqlParameter declares an IN parameter. IN parameters can be used for both stored procedure calls and for queries using the SqlQuery and its subclasses covered earlier in this chapter.

The second line with the SqlOutParameter declares an out parameter to be used in the stored procedure call. There is also an SqlInOutParameter for InOut parameters, parameters that provide an in value to the procedure and that also return a value.

For IN parameters, in addition to the name and the SQL type, you can specify a scale for numeric data or a type name for custom database types. For out parameters you can provide a RowMapper to handle mapping of rows returned from a REF cursor. Another option is to specify an SqlReturnType that enables you to define customized handling of the return values.

Here is an example of a simple DAO that uses a StoredProcedure to call a function, sysdate(), which comes with any Oracle database. To use the stored procedure functionality you have to create a class that extends StoredProcedure. In this example, the StoredProcedure class is an inner class, but if you need to reuse the StoredProcedure you declare it as a top-level class. This example has no input parameters, but an output parameter is declared as a date type using the class SqlOutParameter. The execute() method executes the procedure and extracts the returned date from the results Map. The results Map has an entry for each declared output parameter, in this case only one, using the parameter name as the key.

import java.sql.Types;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.SqlOutParameter;
import org.springframework.jdbc.object.StoredProcedure;

public class StoredProcedureDao {

    private GetSysdateProcedure getSysdate;

    @Autowired
    public void init(DataSource dataSource) {
        this.getSysdate = new GetSysdateProcedure(dataSource);
    }

    public Date getSysdate() {
        return getSysdate.execute();
    }

    private class GetSysdateProcedure extends StoredProcedure {

        private static final String SQL = "sysdate";

        public GetSysdateProcedure(DataSource dataSource) {
            setDataSource(dataSource);
            setFunction(true);
            setSql(SQL);
            declareParameter(new SqlOutParameter("date", Types.DATE));
            compile();
        }

        public Date execute() {
            // the 'sysdate' sproc has no input parameters, so an empty Map is supplied...
            Map<String, Object> results = execute(new HashMap<String, Object>());
            Date sysdate = (Date) results.get("date");
            return sysdate;
        }
    }

}

The following example of a StoredProcedure has two output parameters (in this case, Oracle REF cursors).

import oracle.jdbc.OracleTypes;
import org.springframework.jdbc.core.SqlOutParameter;
import org.springframework.jdbc.object.StoredProcedure;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

public class TitlesAndGenresStoredProcedure extends StoredProcedure {

    private static final String SPROC_NAME = "AllTitlesAndGenres";

    public TitlesAndGenresStoredProcedure(DataSource dataSource) {
        super(dataSource, SPROC_NAME);
        declareParameter(new SqlOutParameter("titles", OracleTypes.CURSOR, new TitleMapper()));
        declareParameter(new SqlOutParameter("genres", OracleTypes.CURSOR, new GenreMapper()));
        compile();
    }

    public Map<String, Object> execute() {
        // again, this sproc has no input parameters, so an empty Map is supplied
        return super.execute(new HashMap<String, Object>());
    }
}

Notice how the overloaded variants of the declareParameter(..) method that have been used in the TitlesAndGenresStoredProcedure constructor are passed RowMapper implementation instances; this is a very convenient and powerful way to reuse existing functionality. The code for the two RowMapper implementations is provided below.

The TitleMapper class maps a ResultSet to a Title domain object for each row in the supplied ResultSet:

import org.springframework.jdbc.core.RowMapper;

import java.sql.ResultSet;
import java.sql.SQLException;

import com.foo.domain.Title;

public final class TitleMapper implements RowMapper<Title> {

    public Title mapRow(ResultSet rs, int rowNum) throws SQLException {
        Title title = new Title();
        title.setId(rs.getLong("id"));
        title.setName(rs.getString("name"));
        return title;
    }
}

The GenreMapper class maps a ResultSet to a Genre domain object for each row in the supplied ResultSet.

import org.springframework.jdbc.core.RowMapper;

import java.sql.ResultSet;
import java.sql.SQLException;

import com.foo.domain.Genre;

public final class GenreMapper implements RowMapper<Genre> {

    public Genre mapRow(ResultSet rs, int rowNum) throws SQLException {
        return new Genre(rs.getString("name"));
    }
}

To pass parameters to a stored procedure that has one or more input parameters in its definition in the RDBMS, you can code a strongly typed execute(..) method that would delegate to the superclass' untyped execute(Map parameters) method (which has protected access); for example:

import oracle.jdbc.OracleTypes;
import org.springframework.jdbc.core.SqlOutParameter;
import org.springframework.jdbc.core.SqlParameter;
import org.springframework.jdbc.object.StoredProcedure;

import javax.sql.DataSource;

import java.sql.Types;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

public class TitlesAfterDateStoredProcedure extends StoredProcedure {

    private static final String SPROC_NAME = "TitlesAfterDate";
    private static final String CUTOFF_DATE_PARAM = "cutoffDate";

    public TitlesAfterDateStoredProcedure(DataSource dataSource) {
        super(dataSource, SPROC_NAME);
        declareParameter(new SqlParameter(CUTOFF_DATE_PARAM, Types.DATE));
        declareParameter(new SqlOutParameter("titles", OracleTypes.CURSOR, new TitleMapper()));
        compile();
    }

    public Map<String, Object> execute(Date cutoffDate) {
        Map<String, Object> inputs = new HashMap<String, Object>();
        inputs.put(CUTOFF_DATE_PARAM, cutoffDate);
        return super.execute(inputs);
    }
}

19.7 Common problems with parameter and data value handling

Common problems with parameters and data values exist in the different approaches provided by the Spring Framework JDBC.

19.7.1 Providing SQL type information for parameters

Usually Spring determines the SQL type of the parameters based on the type of parameter passed in. It is possible to explicitly provide the SQL type to be used when setting parameter values. This is sometimes necessary to correctly set NULL values.

You can provide SQL type information in several ways:

  • Many update and query methods of the JdbcTemplate take an additional parameter in the form of an int array. This array is used to indicate the SQL type of the corresponding parameter using constant values from the java.sql.Types class. Provide one entry for each parameter.
  • You can use the SqlParameterValue class to wrap the parameter value that needs this additional information. Create a new instance for each value and pass in the SQL type and parameter value in the constructor. You can also provide an optional scale parameter for numeric values.
  • For methods working with named parameters, use the SqlParameterSource classes BeanPropertySqlParameterSource or MapSqlParameterSource. They both have methods for registering the SQL type for any of the named parameter values. All three options are shown in the sketch below.
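
The following sketch shows all three options against the t_actor table used earlier. The values are illustrative, and setting last_name to NULL is only meant to show why the explicit VARCHAR type matters:

// 1. An int array of java.sql.Types constants matching the argument array:
jdbcTemplate.update(
        "update t_actor set last_name = ? where id = ?",
        new Object[] { null, 1L },
        new int[] { Types.VARCHAR, Types.NUMERIC });

// 2. Wrapping individual values in SqlParameterValue:
jdbcTemplate.update(
        "update t_actor set last_name = ? where id = ?",
        new SqlParameterValue(Types.VARCHAR, null),
        new SqlParameterValue(Types.NUMERIC, 1L));

// 3. Registering the SQL type on a named parameter source:
MapSqlParameterSource in = new MapSqlParameterSource()
        .addValue("lastName", null, Types.VARCHAR)
        .addValue("id", 1L, Types.NUMERIC);
namedParameterJdbcTemplate.update(
        "update t_actor set last_name = :lastName where id = :id", in);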

19.7.2 Handling BLOB and CLOB objects

You can store images, other binary data, and large chunks of text in the database. These large objects are called BLOBs (Binary Large OBject) for binary data and CLOBs (Character Large OBject) for character data. In Spring you can handle these large objects by using the JdbcTemplate directly and also when using the higher abstractions provided by RDBMS Objects and the SimpleJdbc classes. All of these approaches use an implementation of the LobHandler interface for the actual management of the LOB (Large OBject) data. The LobHandler provides access to a LobCreator class, through the getLobCreator method, used for creating new LOB objects to be inserted.

The LobCreator/LobHandler provides the following support for LOB input and output:

  • BLOB

    • byte[] — getBlobAsBytes and setBlobAsBytes
    • InputStream — getBlobAsBinaryStream and setBlobAsBinaryStream
  • CLOB

    • String — getClobAsString and setClobAsString
    • InputStream — getClobAsAsciiStream and setClobAsAsciiStream
    • Reader — getClobAsCharacterStream and setClobAsCharacterStream

The next example shows how to create and insert a BLOB. Later you will see how to read it back from the database.

This example uses a JdbcTemplate and an implementation of the AbstractLobCreatingPreparedStatementCallback. It implements one method, setValues. This method provides a LobCreator that you use to set the values for the LOB columns in your SQL insert statement.

For this example we assume that there is a variable, lobHandler, that already is set to an instance of a DefaultLobHandler. You typically set this value through dependency injection.

final File blobIn = new File("spring2004.jpg");
final InputStream blobIs = new FileInputStream(blobIn);
final File clobIn = new File("large.txt");
final InputStream clobIs = new FileInputStream(clobIn);
final InputStreamReader clobReader = new InputStreamReader(clobIs);
jdbcTemplate.execute(
    "INSERT INTO lob_table (id, a_clob, a_blob) VALUES (?, ?, ?)",
    new AbstractLobCreatingPreparedStatementCallback(lobHandler) {  // (1)
        protected void setValues(PreparedStatement ps, LobCreator lobCreator) throws SQLException {
            ps.setLong(1, 1L);
            lobCreator.setClobAsCharacterStream(ps, 2, clobReader, (int)clobIn.length());  // (2)
            lobCreator.setBlobAsBinaryStream(ps, 3, blobIs, (int)blobIn.length());  // (3)
        }
    }
);
blobIs.close();
clobReader.close();

(1) Pass in the lobHandler that in this example is a plain DefaultLobHandler.

(2) Using the method setClobAsCharacterStream, pass in the contents of the CLOB.

(3) Using the method setBlobAsBinaryStream, pass in the contents of the BLOB.

[Note]

If you invoke the setBlobAsBinaryStream, setClobAsAsciiStream, or setClobAsCharacterStream method on the LobCreator returned from DefaultLobHandler.getLobCreator(), you can optionally specify a negative value for the contentLength argument. If the specified content length is negative, the DefaultLobHandler will use the JDBC 4.0 variants of the set-stream methods without a length parameter; otherwise, it will pass the specified length on to the driver.

Consult the documentation for the JDBC driver in use to verify support for streaming a LOB without providing the content length.

Now it’s time to read the LOB data from the database. Again, you use a JdbcTemplate with the same instance variable lobHandler and a reference to a DefaultLobHandler.

List<Map<String, Object>> l = jdbcTemplate.query("select id, a_clob, a_blob from lob_table",
    new RowMapper<Map<String, Object>>() {
        public Map<String, Object> mapRow(ResultSet rs, int i) throws SQLException {
            Map<String, Object> results = new HashMap<String, Object>();
            String clobText = lobHandler.getClobAsString(rs, "a_clob");  // (1)
            results.put("CLOB", clobText);
            byte[] blobBytes = lobHandler.getBlobAsBytes(rs, "a_blob");  // (2)
            results.put("BLOB", blobBytes);
            return results;
        }
    });

(1) Using the method getClobAsString, retrieve the contents of the CLOB.

(2) Using the method getBlobAsBytes, retrieve the contents of the BLOB.

19.7.3 Passing in lists of values for IN clause

The SQL standard allows for selecting rows based on an expression that includes a variable list of values. A typical example would be select * from T_ACTOR where id in (1, 2, 3). This variable list is not directly supported for prepared statements by the JDBC standard; you cannot declare a variable number of placeholders. You need a number of variations with the desired number of placeholders prepared, or you need to generate the SQL string dynamically once you know how many placeholders are required. The named parameter support provided in the NamedParameterJdbcTemplate and JdbcTemplate takes the latter approach. Pass in the values as a java.util.List of primitive objects. This list will be used to insert the required placeholders and pass in the values during the statement execution.

[Note]

Be careful when passing in many values. The JDBC standard does not guarantee that you can use more than 100 values for an in expression list. Various databases exceed this number, but they usually have a hard limit for how many values are allowed. Oracle’s limit is 1000.

In addition to the primitive values in the value list, you can create a java.util.List of object arrays. This list would support multiple expressions defined for the in clause such as select * from T_ACTOR where (id, last_name) in ((1, 'Johnson'), (2, 'Harrop')). This of course requires that your database supports this syntax.
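
A minimal sketch using the named parameter approach, with the actor table from the earlier examples and illustrative id values:

List<Long> ids = Arrays.asList(1L, 2L, 3L);
SqlParameterSource parameters = new MapSqlParameterSource("ids", ids);
List<Actor> actors = namedParameterJdbcTemplate.query(
        "select * from T_ACTOR where id in (:ids)",
        parameters,
        BeanPropertyRowMapper.newInstance(Actor.class));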

19.7.4 Handling complex types for stored procedure calls

When you call stored procedures you can sometimes use complex types specific to the database. To accommodate these types, Spring provides an SqlReturnType for handling them when they are returned from the stored procedure call and an SqlTypeValue when they are passed in as a parameter to the stored procedure.

Here is an example of returning the value of an Oracle STRUCT object of the user declared type ITEM_TYPE. The SqlReturnType interface has a single method named getTypeValue that must be implemented. This interface is used as part of the declaration of an SqlOutParameter.

final TestItem testItem = new TestItem(123L, "A test item",
        new SimpleDateFormat("yyyy-M-d").parse("2010-12-31"));

declareParameter(new SqlOutParameter("item", OracleTypes.STRUCT, "ITEM_TYPE",
    new SqlReturnType() {
        public Object getTypeValue(CallableStatement cs, int colIndx, int sqlType, String typeName) throws SQLException {
            STRUCT struct = (STRUCT) cs.getObject(colIndx);
            Object[] attr = struct.getAttributes();
            TestItem item = new TestItem();
            item.setId(((Number) attr[0]).longValue());
            item.setDescription((String) attr[1]);
            item.setExpirationDate((java.util.Date) attr[2]);
            return item;
        }
    }));

You use the SqlTypeValue to pass in the value of a Java object like TestItem into a stored procedure. The SqlTypeValue interface has a single method named createTypeValue that you must implement. The active connection is passed in, and you can use it to create database-specific objects such as StructDescriptors, as shown in the following example, or ArrayDescriptors.

final TestItem testItem = new TestItem(123L, "A test item",
        new SimpleDateFormat("yyyy-M-d").parse("2010-12-31"));

SqlTypeValue value = new AbstractSqlTypeValue() {
    protected Object createTypeValue(Connection conn, int sqlType, String typeName) throws SQLException {
        StructDescriptor itemDescriptor = new StructDescriptor(typeName, conn);
        Struct item = new STRUCT(itemDescriptor, conn,
        new Object[] {
            testItem.getId(),
            testItem.getDescription(),
            new java.sql.Date(testItem.getExpirationDate().getTime())
        });
        return item;
    }
};

This SqlTypeValue can now be added to the Map containing the input parameters for the execute call of the stored procedure.
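
For instance, a hedged sketch of passing the value from within a StoredProcedure subclass; the parameter name item and the ITEM_TYPE type name are taken from the examples above, while the surrounding procedure declaration is assumed:

// inside a StoredProcedure subclass whose procedure takes an ITEM_TYPE argument
declareParameter(new SqlParameter("item", OracleTypes.STRUCT, "ITEM_TYPE"));

Map<String, Object> in = new HashMap<String, Object>();
in.put("item", value); // the SqlTypeValue created above
Map<String, Object> out = execute(in);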

Another use for the SqlTypeValue is passing in an array of values to an Oracle stored procedure. Oracle has its own internal ARRAY class that must be used in this case, and you can use the SqlTypeValue to create an instance of the Oracle ARRAY and populate it with values from a Java array.

final Long[] ids = new Long[] {1L, 2L};

SqlTypeValue value = new AbstractSqlTypeValue() {
    protected Object createTypeValue(Connection conn, int sqlType, String typeName) throws SQLException {
        ArrayDescriptor arrayDescriptor = new ArrayDescriptor(typeName, conn);
        ARRAY idArray = new ARRAY(arrayDescriptor, conn, ids);
        return idArray;
    }
};

19.8 Embedded database support

The org.springframework.jdbc.datasource.embedded package provides support for embedded Java database engines. Support for HSQL, H2, and Derby is provided natively. You can also use an extensible API to plug in new embedded database types and DataSource implementations.

19.8.1 Why use an embedded database?

An embedded database is useful during the development phase of a project because of its lightweight nature. Benefits include ease of configuration, quick startup time, testability, and the ability to rapidly evolve SQL during development.

19.8.2 Creating an embedded database using Spring XML

If you want to expose an embedded database instance as a bean in a Spring ApplicationContext, use the embedded-database tag in the spring-jdbc namespace:

<jdbc:embedded-database id="dataSource" generate-name="true">
    <jdbc:script location="classpath:schema.sql"/>
    <jdbc:script location="classpath:test-data.sql"/>
</jdbc:embedded-database>

The preceding configuration creates an embedded HSQL database populated with SQL from schema.sql and test-data.sql resources in the root of the classpath. In addition, as a best practice, the embedded database will be assigned a uniquely generated name. The embedded database is made available to the Spring container as a bean of type javax.sql.DataSource which can then be injected into data access objects as needed.

19.8.3 Creating an embedded database programmatically

The EmbeddedDatabaseBuilder class provides a fluent API for constructing an embedded database programmatically. Use this when you need to create an embedded database in a standalone environment or in a standalone integration test like in the following example.

EmbeddedDatabase db = new EmbeddedDatabaseBuilder()
    .generateUniqueName(true)
    .setType(H2)
    .setScriptEncoding("UTF-8")
    .ignoreFailedDrops(true)
    .addScript("schema.sql")
    .addScripts("user_data.sql", "country_data.sql")
    .build();

// perform actions against the db (EmbeddedDatabase extends javax.sql.DataSource)

db.shutdown();

Consult the Javadoc for EmbeddedDatabaseBuilder for further details on all supported options.

The EmbeddedDatabaseBuilder can also be used to create an embedded database using Java Config like in the following example.

@Configuration
public class DataSourceConfig {

    @Bean
    public DataSource dataSource() {
        return new EmbeddedDatabaseBuilder()
            .generateUniqueName(true)
            .setType(H2)
            .setScriptEncoding("UTF-8")
            .ignoreFailedDrops(true)
            .addScript("schema.sql")
            .addScripts("user_data.sql", "country_data.sql")
            .build();
    }
}

19.8.4 Selecting the embedded database type

Using HSQL

Spring supports HSQL 1.8.0 and above. HSQL is the default embedded database if no type is specified explicitly. To specify HSQL explicitly, set the type attribute of the embedded-database tag to HSQL. If you are using the builder API, call the setType(EmbeddedDatabaseType) method with EmbeddedDatabaseType.HSQL.

Using H2

Spring supports the H2 database as well. To enable H2, set the type attribute of the embedded-database tag to H2. If you are using the builder API, call the setType(EmbeddedDatabaseType) method with EmbeddedDatabaseType.H2.

Using Derby

Spring also supports Apache Derby 10.5 and above. To enable Derby, set the type attribute of the embedded-database tag to DERBY. If you are using the builder API, call the setType(EmbeddedDatabaseType) method with EmbeddedDatabaseType.DERBY.

19.8.5 Testing data access logic with an embedded database

Embedded databases provide a lightweight way to test data access code. The following is a data access integration test template that uses an embedded database. Using a template like this can be useful for one-offs when the embedded database does not need to be reused across test classes. However, if you wish to create an embedded database that is shared within a test suite, consider using the Spring TestContext Framework and configuring the embedded database as a bean in the Spring ApplicationContext as described in Section 19.8.2, “Creating an embedded database using Spring XML” and Section 19.8.3, “Creating an embedded database programmatically”.

public class DataAccessIntegrationTestTemplate {

    private EmbeddedDatabase db;

    @Before
    public void setUp() {
        // creates an HSQL in-memory database populated from default scripts
        // classpath:schema.sql and classpath:data.sql
        db = new EmbeddedDatabaseBuilder()
                .generateUniqueName(true)
                .addDefaultScripts()
                .build();
    }

    @Test
    public void testDataAccess() {
        JdbcTemplate template = new JdbcTemplate(db);
        template.query( /* ... */ );
    }

    @After
    public void tearDown() {
        db.shutdown();
    }

}

19.8.6 Generating unique names for embedded databases

Development teams often encounter errors with embedded databases if their test suite inadvertently attempts to recreate additional instances of the same database. This can happen quite easily if an XML configuration file or @Configuration class is responsible for creating an embedded database and the corresponding configuration is then reused across multiple testing scenarios within the same test suite (i.e., within the same JVM process), for example, integration tests against embedded databases whose ApplicationContext configuration only differs with regard to which bean definition profiles are active.

The root cause of such errors is the fact that Spring’s EmbeddedDatabaseFactory (used internally by both the <jdbc:embedded-database> XML namespace element and the EmbeddedDatabaseBuilder for Java Config) will set the name of the embedded database to "testdb" if not otherwise specified. For the case of <jdbc:embedded-database>, the embedded database is typically assigned a name equal to the bean’s id (i.e., often something like "dataSource"). Thus, subsequent attempts to create an embedded database will not result in a new database. Instead, the same JDBC connection URL will be reused, and attempts to create a new embedded database will actually point to an existing embedded database created from the same configuration.

To address this common issue Spring Framework 4.2 provides support for generating unique names for embedded databases. To enable the use of generated names, use one of the following options.

  • EmbeddedDatabaseFactory.setGenerateUniqueDatabaseName()
  • EmbeddedDatabaseBuilder.generateUniqueName()
  • <jdbc:embedded-database generate-name="true" ... >

19.8.7 Extending the embedded database support

Spring JDBC embedded database support can be extended in two ways:

  • Implement EmbeddedDatabaseConfigurer to support a new embedded database type (a rough sketch appears at the end of this section).
  • Implement DataSourceFactory to support a new DataSource implementation, such as a connection pool to manage embedded database connections.

You are encouraged to contribute back extensions to the Spring community at jira.spring.io.
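
For illustration only, a rough sketch of what an EmbeddedDatabaseConfigurer for an additional engine might look like. The engine is hypothetical; H2's driver class and URL are used purely so the sketch compiles, and a real implementation would reference its own engine's driver, URL, and shutdown command:

public class MyEngineEmbeddedDatabaseConfigurer implements EmbeddedDatabaseConfigurer {

    public void configureConnectionProperties(ConnectionProperties properties, String databaseName) {
        properties.setDriverClass(org.h2.Driver.class); // stand-in: use the new engine's driver class
        properties.setUrl("jdbc:h2:mem:" + databaseName); // stand-in: use the new engine's in-memory URL
        properties.setUsername("sa");
        properties.setPassword("");
    }

    public void shutdown(DataSource dataSource, String databaseName) {
        // issue the engine-specific shutdown statement here, if the engine requires one
    }
}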

19.9 Initializing a DataSource

The org.springframework.jdbc.datasource.init package provides support for initializing an existing DataSource. The embedded database support provides one option for creating and initializing a DataSource for an application, but sometimes you need to initialize an instance running on a server somewhere.

19.9.1 Initializing a database using Spring XML

If you want to initialize a database and you can provide a reference to a DataSource bean, use the initialize-database tag in the spring-jdbc namespace:

<jdbc:initialize-database data-source="dataSource">
    <jdbc:script location="classpath:com/foo/sql/db-schema.sql"/>
    <jdbc:script location="classpath:com/foo/sql/db-test-data.sql"/>
</jdbc:initialize-database>

The example above executes the two scripts specified against the database: the first script creates a schema, and the second populates tables with a test data set. The script locations can also be patterns with wildcards in the usual ant style used for resources in Spring (e.g. classpath*:/com/foo/**/sql/*-data.sql). If a pattern is used, the scripts are executed in lexical order of their URL or filename.

The default behavior of the database initializer is to unconditionally execute the scripts provided. This will not always be what you want, for instance, if you are executing the scripts against a database that already has test data in it. The likelihood of accidentally deleting data is reduced by following the common pattern (as shown above) of creating the tables first and then inserting the data — the first step will fail if the tables already exist.

However, to gain more control over the creation and deletion of existing data, the XML namespace provides a few additional options. The first is a flag to switch the initialization on and off. This can be set according to the environment (e.g. to pull a boolean value from system properties or an environment bean), for example:

<jdbc:initialize-database data-source="dataSource"
    enabled="#{systemProperties.INITIALIZE_DATABASE}">
    <jdbc:script location="..."/>
</jdbc:initialize-database>

The second option to control what happens with existing data is to be more tolerant of failures. To this end you can control the ability of the initializer to ignore certain errors in the SQL it executes from the scripts, for example:

<jdbc:initialize-database data-source="dataSource" ignore-failures="DROPS">
    <jdbc:script location="..."/>
</jdbc:initialize-database>

In this example we are saying we expect that sometimes the scripts will be executed against an empty database, and there are some DROP statements in the scripts which would therefore fail. So failed SQL DROP statements will be ignored, but other failures will cause an exception. This is useful if your SQL dialect doesn’t support DROP ... IF EXISTS (or similar) but you want to unconditionally remove all test data before re-creating it. In that case the first script is usually a set of DROP statements, followed by a set of CREATE statements.

The ignore-failures option can be set to NONE (the default), DROPS (ignore failed drops), or ALL (ignore all failures).

Each statement should be separated by ; or a new line if the ; character is not present at all in the script. You can control that globally or script by script, for example:

<jdbc:initialize-database data-source="dataSource" separator="@@">
    <jdbc:script location="classpath:com/foo/sql/db-schema.sql" separator=";"/>
    <jdbc:script location="classpath:com/foo/sql/db-test-data-1.sql"/>
    <jdbc:script location="classpath:com/foo/sql/db-test-data-2.sql"/>
</jdbc:initialize-database>

In this example, the two test-data scripts use @@ as the statement separator and only db-schema.sql uses ;. This configuration specifies that the default separator is @@ and overrides that default for the db-schema script.

If you need more control than you get from the XML namespace, you can simply use the DataSourceInitializer directly and define it as a component in your application.
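
For example, a minimal Java-config sketch of defining the initializer directly; the bean method name and the reuse of the script locations from the XML example above are assumptions of this sketch:

@Bean
public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
    ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
    populator.addScript(new ClassPathResource("com/foo/sql/db-schema.sql"));
    populator.addScript(new ClassPathResource("com/foo/sql/db-test-data.sql"));

    DataSourceInitializer initializer = new DataSourceInitializer();
    initializer.setDataSource(dataSource);
    initializer.setDatabasePopulator(populator);
    return initializer;
}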

Initialization of other components that depend on the database

A large class of applications can just use the database initializer with no further complications: those that do not use the database until after the Spring context has started. If your application is not one of those then you might need to read the rest of this section.

The database initializer depends on a DataSource instance and executes the scripts provided in its initialization callback (analogous to an init-method in an XML bean definition, a @PostConstruct method in a component, or the afterPropertiesSet() method in a component that implements InitializingBean). If other beans depend on the same data source and also use the data source in an initialization callback, then there might be a problem because the data has not yet been initialized. A common example of this is a cache that initializes eagerly and loads data from the database on application startup.

To get around this issue you have two options: change your cache initialization strategy to a later phase, or ensure that the database initializer is initialized first.

The first option might be easy if the application is in your control, and not otherwise. Some suggestions for how to implement this include:

  • Make the cache initialize lazily on first usage, which improves application startup time.
  • Have your cache or a separate component that initializes the cache implement Lifecycle or SmartLifecycle. When the application context starts up, a SmartLifecycle can be automatically started if its autoStartup flag is set, and a Lifecycle can be started manually by calling ConfigurableApplicationContext.start() on the enclosing context.
  • Use a Spring ApplicationEvent or similar custom observer mechanism to trigger the cache initialization. ContextRefreshedEvent is always published by the context when it is ready for use (after all beans have been initialized), so that is often a useful hook (this is how the SmartLifecycle works by default); see the sketch after this list.
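
A hedged sketch of the event-based approach; the ProductCache component and its loadFromDatabase() method are placeholders, not part of the original text:

@Component
public class CacheWarmer implements ApplicationListener<ContextRefreshedEvent> {

    @Autowired
    private ProductCache cache; // hypothetical cache component

    public void onApplicationEvent(ContextRefreshedEvent event) {
        // runs only after all beans, including the database initializer, have been initialized
        cache.loadFromDatabase();
    }
}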

The second option can also be easy. Some suggestions on how to implement this include:

  • Rely on the default behavior of the Spring BeanFactory, which is that beans are initialized in registration order. You can easily arrange that by adopting the common practice of a set of <import/> elements in XML configuration that order your application modules, and ensure that the database and database initialization are listed first.
  • Separate the DataSource and the business components that use it, and control their startup order by putting them in separate ApplicationContext instances (e.g. the parent context contains the DataSource, and the child context contains the business components). This structure is common in Spring web applications but can be more generally applied; see the sketch below.
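
A minimal sketch of that parent/child arrangement; the configuration file names are placeholders:

ApplicationContext parent = new ClassPathXmlApplicationContext("datasource-config.xml");
ApplicationContext child = new ClassPathXmlApplicationContext(
        new String[] {"business-config.xml"}, parent); // the child can see the parent's DataSource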
 


Posted by 1010
카테고리 없음2016. 8. 17. 14:18
반응형
# ===================================================================
# COMMON SPRING BOOT PROPERTIES
#
# This sample file is provided as a guideline. Do NOT copy it in its
# entirety to your own application.               ^^^
# ===================================================================


# ----------------------------------------
# CORE PROPERTIES
# ----------------------------------------

# BANNER
banner.charset=UTF-8 # Banner file encoding.
banner.location=classpath:banner.txt # Banner file location.
banner.image.location=classpath:banner.gif # Banner image file location (jpg/png can also be used).
banner.image.width= # Width of the banner image in chars (default 76)
banner.image.height= # Height of the banner image in chars (default based on image height)
banner.image.margin= # Left hand image margin in chars (default 2)
banner.image.invert= # If images should be inverted for dark terminal themes (default false)

# LOGGING
logging.config= # Location of the logging configuration file. For instance `classpath:logback.xml` for Logback
logging.exception-conversion-word=%wEx # Conversion word used when logging exceptions.
logging.file= # Log file name. For instance `myapp.log`
logging.level.*= # Log levels severity mapping. For instance `logging.level.org.springframework=DEBUG`
logging.path= # Location of the log file. For instance `/var/log`
logging.pattern.console= # Appender pattern for output to the console. Only supported with the default logback setup.
logging.pattern.file= # Appender pattern for output to the file. Only supported with the default logback setup.
logging.pattern.level= # Appender pattern for log level (default %5p). Only supported with the default logback setup.
logging.register-shutdown-hook=false # Register a shutdown hook for the logging system when it is initialized.

# AOP
spring.aop.auto=true # Add @EnableAspectJAutoProxy.
spring.aop.proxy-target-class=false # Whether subclass-based (CGLIB) proxies are to be created (true) as opposed to standard Java interface-based proxies (false).

# IDENTITY (ContextIdApplicationContextInitializer)
spring.application.index= # Application index.
spring.application.name= # Application name.

# ADMIN (SpringApplicationAdminJmxAutoConfiguration)
spring.application.admin.enabled=false # Enable admin features for the application.
spring.application.admin.jmx-name=org.springframework.boot:type=Admin,name=SpringApplication # JMX name of the application admin MBean.

# AUTO-CONFIGURATION
spring.autoconfigure.exclude= # Auto-configuration classes to exclude.

# SPRING CORE
spring.beaninfo.ignore=true # Skip search of BeanInfo classes.

# SPRING CACHE (CacheProperties)
spring.cache.cache-names= # Comma-separated list of cache names to create if supported by the underlying cache manager.
spring.cache.caffeine.spec= # The spec to use to create caches. Check CaffeineSpec for more details on the spec format.
spring.cache.couchbase.expiration=0 # Entry expiration in milliseconds. By default the entries never expire.
spring.cache.ehcache.config= # The location of the configuration file to use to initialize EhCache.
spring.cache.guava.spec= # The spec to use to create caches. Check CacheBuilderSpec for more details on the spec format.
spring.cache.hazelcast.config= # The location of the configuration file to use to initialize Hazelcast.
spring.cache.infinispan.config= # The location of the configuration file to use to initialize Infinispan.
spring.cache.jcache.config= # The location of the configuration file to use to initialize the cache manager.
spring.cache.jcache.provider= # Fully qualified name of the CachingProvider implementation to use to retrieve the JSR-107 compliant cache manager. Only needed if more than one JSR-107 implementation is available on the classpath.
spring.cache.type= # Cache type, auto-detected according to the environment by default.

# SPRING CONFIG - using environment property only (ConfigFileApplicationListener)
spring.config.location= # Config file locations.
spring.config.name=application # Config file name.

# HAZELCAST (HazelcastProperties)
spring.hazelcast.config= # The location of the configuration file to use to initialize Hazelcast.

# PROJECT INFORMATION (ProjectInfoProperties)
spring.info.build.location=classpath:META-INF/build-info.properties # Location of the generated build-info.properties file.
spring.info.git.location=classpath:git.properties # Location of the generated git.properties file.

# JMX
spring.jmx.default-domain= # JMX domain name.
spring.jmx.enabled=true # Expose management beans to the JMX domain.
spring.jmx.server=mbeanServer # MBeanServer bean name.

# Email (MailProperties)
spring.mail.default-encoding=UTF-8 # Default MimeMessage encoding.
spring.mail.host= # SMTP server host. For instance `smtp.example.com`
spring.mail.jndi-name= # Session JNDI name. When set, takes precedence to others mail settings.
spring.mail.password= # Login password of the SMTP server.
spring.mail.port= # SMTP server port.
spring.mail.properties.*= # Additional JavaMail session properties.
spring.mail.protocol=smtp # Protocol used by the SMTP server.
spring.mail.test-connection=false # Test that the mail server is available on startup.
spring.mail.username= # Login user of the SMTP server.

# APPLICATION SETTINGS (SpringApplication)
spring.main.banner-mode=console # Mode used to display the banner when the application runs.
spring.main.sources= # Sources (class name, package name or XML resource location) to include in the ApplicationContext.
spring.main.web-environment= # Run the application in a web environment (auto-detected by default).

# FILE ENCODING (FileEncodingApplicationListener)
spring.mandatory-file-encoding= # Expected character encoding the application must use.

# INTERNATIONALIZATION (MessageSourceAutoConfiguration)
spring.messages.always-use-message-format=false # Set whether to always apply the MessageFormat rules, parsing even messages without arguments.
spring.messages.basename=messages # Comma-separated list of basenames, each following the ResourceBundle convention.
spring.messages.cache-seconds=-1 # Loaded resource bundle files cache expiration, in seconds. When set to -1, bundles are cached forever.
spring.messages.encoding=UTF-8 # Message bundles encoding.
spring.messages.fallback-to-system-locale=true # Set whether to fall back to the system Locale if no files for a specific Locale have been found.

# OUTPUT
spring.output.ansi.enabled=detect # Configure the ANSI output (can be "detect", "always", "never").

# PID FILE (ApplicationPidFileWriter)
spring.pid.fail-on-write-error= # Fail if ApplicationPidFileWriter is used but it cannot write the PID file.
spring.pid.file= # Location of the PID file to write (if ApplicationPidFileWriter is used).

# PROFILES
spring.profiles.active= # Comma-separated list of active profiles.
spring.profiles.include= # Unconditionally activate the specified comma separated profiles.

# SENDGRID (SendGridAutoConfiguration)
spring.sendgrid.api-key= # SendGrid api key (alternative to username/password)
spring.sendgrid.username= # SendGrid account username
spring.sendgrid.password= # SendGrid account password
spring.sendgrid.proxy.host= # SendGrid proxy host
spring.sendgrid.proxy.port= # SendGrid proxy port


# ----------------------------------------
# WEB PROPERTIES
# ----------------------------------------

# EMBEDDED SERVER CONFIGURATION (ServerProperties)
server.address= # Network address to which the server should bind to.
server.compression.enabled=false # If response compression is enabled.
server.compression.excluded-user-agents= # List of user-agents to exclude from compression.
server.compression.mime-types= # Comma-separated list of MIME types that should be compressed. For instance `text/html,text/css,application/json`
server.compression.min-response-size= # Minimum response size that is required for compression to be performed. For instance 2048
server.connection-timeout= # Time in milliseconds that connectors will wait for another HTTP request before closing the connection. When not set, the connector's container-specific default will be used. Use a value of -1 to indicate no (i.e. infinite) timeout.
server.context-parameters.*= # Servlet context init parameters. For instance `server.context-parameters.a=alpha`
server.context-path= # Context path of the application.
server.display-name=application # Display name of the application.
server.max-http-header-size=0 # Maximum size in bytes of the HTTP message header.
server.max-http-post-size=0 # Maximum size in bytes of the HTTP post content.
server.error.include-stacktrace=never # When to include a "stacktrace" attribute.
server.error.path=/error # Path of the error controller.
server.error.whitelabel.enabled=true # Enable the default error page displayed in browsers in case of a server error.
server.jetty.acceptors= # Number of acceptor threads to use.
server.jetty.selectors= # Number of selector threads to use.
server.jsp-servlet.class-name=org.apache.jasper.servlet.JspServlet # The class name of the JSP servlet.
server.jsp-servlet.init-parameters.*= # Init parameters used to configure the JSP servlet
server.jsp-servlet.registered=true # Whether or not the JSP servlet is registered
server.port=8080 # Server HTTP port.
server.server-header= # Value to use for the Server response header (no header is sent if empty)
server.servlet-path=/ # Path of the main dispatcher servlet.
server.use-forward-headers= # If X-Forwarded-* headers should be applied to the HttpRequest.
server.session.cookie.comment= # Comment for the session cookie.
server.session.cookie.domain= # Domain for the session cookie.
server.session.cookie.http-only= # "HttpOnly" flag for the session cookie.
server.session.cookie.max-age= # Maximum age of the session cookie in seconds.
server.session.cookie.name= # Session cookie name.
server.session.cookie.path= # Path of the session cookie.
server.session.cookie.secure= # "Secure" flag for the session cookie.
server.session.persistent=false # Persist session data between restarts.
server.session.store-dir= # Directory used to store session data.
server.session.timeout= # Session timeout in seconds.
server.session.tracking-modes= # Session tracking modes (one or more of the following: "cookie", "url", "ssl").
server.ssl.ciphers= # Supported SSL ciphers.
server.ssl.client-auth= # Whether client authentication is wanted ("want") or needed ("need"). Requires a trust store.
server.ssl.enabled= # Enable SSL support.
server.ssl.enabled-protocols= # Enabled SSL protocols.
server.ssl.key-alias= # Alias that identifies the key in the key store.
server.ssl.key-password= # Password used to access the key in the key store.
server.ssl.key-store= # Path to the key store that holds the SSL certificate (typically a jks file).
server.ssl.key-store-password= # Password used to access the key store.
server.ssl.key-store-provider= # Provider for the key store.
server.ssl.key-store-type= # Type of the key store.
server.ssl.protocol=TLS # SSL protocol to use.
server.ssl.trust-store= # Trust store that holds SSL certificates.
server.ssl.trust-store-password= # Password used to access the trust store.
server.ssl.trust-store-provider= # Provider for the trust store.
server.ssl.trust-store-type= # Type of the trust store.
server.tomcat.accesslog.directory=logs # Directory in which log files are created. Can be relative to the tomcat base dir or absolute.
server.tomcat.accesslog.enabled=false # Enable access log.
server.tomcat.accesslog.pattern=common # Format pattern for access logs.
server.tomcat.accesslog.prefix=access_log # Log file name prefix.
server.tomcat.accesslog.rename-on-rotate=false # Defer inclusion of the date stamp in the file name until rotate time.
server.tomcat.accesslog.suffix=.log # Log file name suffix.
server.tomcat.background-processor-delay=30 # Delay in seconds between the invocation of backgroundProcess methods.
server.tomcat.basedir= # Tomcat base directory. If not specified a temporary directory will be used.
server.tomcat.internal-proxies=10\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|\\
        192\\.168\\.\\d{1,3}\\.\\d{1,3}|\\
        169\\.254\\.\\d{1,3}\\.\\d{1,3}|\\
        127\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|\\
        172\\.1[6-9]{1}\\.\\d{1,3}\\.\\d{1,3}|\\
        172\\.2[0-9]{1}\\.\\d{1,3}\\.\\d{1,3}|\\
        172\\.3[0-1]{1}\\.\\d{1,3}\\.\\d{1,3} # regular expression matching trusted IP addresses.
server.tomcat.max-threads=0 # Maximum amount of worker threads.
server.tomcat.min-spare-threads=0 # Minimum amount of worker threads.
server.tomcat.port-header=X-Forwarded-Port # Name of the HTTP header used to override the original port value.
server.tomcat.protocol-header= # Header that holds the incoming protocol, usually named "X-Forwarded-Proto".
server.tomcat.protocol-header-https-value=https # Value of the protocol header that indicates that the incoming request uses SSL.
server.tomcat.redirect-context-root= # Whether requests to the context root should be redirected by appending a / to the path.
server.tomcat.remote-ip-header= # Name of the http header from which the remote ip is extracted. For instance `X-FORWARDED-FOR`
server.tomcat.uri-encoding=UTF-8 # Character encoding to use to decode the URI.
server.undertow.accesslog.dir= # Undertow access log directory.
server.undertow.accesslog.enabled=false # Enable access log.
server.undertow.accesslog.pattern=common # Format pattern for access logs.
server.undertow.buffer-size= # Size of each buffer in bytes.
server.undertow.buffers-per-region= # Number of buffer per region.
server.undertow.direct-buffers= # Allocate buffers outside the Java heap.
server.undertow.io-threads= # Number of I/O threads to create for the worker.
server.undertow.worker-threads= # Number of worker threads.

# FREEMARKER (FreeMarkerAutoConfiguration)
spring.freemarker.allow-request-override=false # Set whether HttpServletRequest attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.freemarker.allow-session-override=false # Set whether HttpSession attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.freemarker.cache=false # Enable template caching.
spring.freemarker.charset=UTF-8 # Template encoding.
spring.freemarker.check-template-location=true # Check that the templates location exists.
spring.freemarker.content-type=text/html # Content-Type value.
spring.freemarker.enabled=true # Enable MVC view resolution for this technology.
spring.freemarker.expose-request-attributes=false # Set whether all request attributes should be added to the model prior to merging with the template.
spring.freemarker.expose-session-attributes=false # Set whether all HttpSession attributes should be added to the model prior to merging with the template.
spring.freemarker.expose-spring-macro-helpers=true # Set whether to expose a RequestContext for use by Spring's macro library, under the name "springMacroRequestContext".
spring.freemarker.prefer-file-system-access=true # Prefer file system access for template loading. File system access enables hot detection of template changes.
spring.freemarker.prefix= # Prefix that gets prepended to view names when building a URL.
spring.freemarker.request-context-attribute= # Name of the RequestContext attribute for all views.
spring.freemarker.settings.*= # Well-known FreeMarker keys which will be passed to FreeMarker's Configuration.
spring.freemarker.suffix= # Suffix that gets appended to view names when building a URL.
spring.freemarker.template-loader-path=classpath:/templates/ # Comma-separated list of template paths.
spring.freemarker.view-names= # White list of view names that can be resolved.

# GROOVY TEMPLATES (GroovyTemplateAutoConfiguration)
spring.groovy.template.allow-request-override=false # Set whether HttpServletRequest attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.groovy.template.allow-session-override=false # Set whether HttpSession attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.groovy.template.cache= # Enable template caching.
spring.groovy.template.charset=UTF-8 # Template encoding.
spring.groovy.template.check-template-location=true # Check that the templates location exists.
spring.groovy.template.configuration.*= # See GroovyMarkupConfigurer
spring.groovy.template.content-type=text/html # Content-Type value.
spring.groovy.template.enabled=true # Enable MVC view resolution for this technology.
spring.groovy.template.expose-request-attributes=false # Set whether all request attributes should be added to the model prior to merging with the template.
spring.groovy.template.expose-session-attributes=false # Set whether all HttpSession attributes should be added to the model prior to merging with the template.
spring.groovy.template.expose-spring-macro-helpers=true # Set whether to expose a RequestContext for use by Spring's macro library, under the name "springMacroRequestContext".
spring.groovy.template.prefix= # Prefix that gets prepended to view names when building a URL.
spring.groovy.template.request-context-attribute= # Name of the RequestContext attribute for all views.
spring.groovy.template.resource-loader-path=classpath:/templates/ # Template path.
spring.groovy.template.suffix=.tpl # Suffix that gets appended to view names when building a URL.
spring.groovy.template.view-names= # White list of view names that can be resolved.

# SPRING HATEOAS (HateoasProperties)
spring.hateoas.use-hal-as-default-json-media-type=true # Specify if application/hal+json responses should be sent to requests that accept application/json.

# HTTP message conversion
spring.http.converters.preferred-json-mapper=jackson # Preferred JSON mapper to use for HTTP message conversion. Set to "gson" to force the use of Gson when both it and Jackson are on the classpath.

# HTTP encoding (HttpEncodingProperties)
spring.http.encoding.charset=UTF-8 # Charset of HTTP requests and responses. Added to the "Content-Type" header if not set explicitly.
spring.http.encoding.enabled=true # Enable http encoding support.
spring.http.encoding.force= # Force the encoding to the configured charset on HTTP requests and responses.
spring.http.encoding.force-request= # Force the encoding to the configured charset on HTTP requests. Defaults to true when "force" has not been specified.
spring.http.encoding.force-response= # Force the encoding to the configured charset on HTTP responses.

# MULTIPART (MultipartProperties)
spring.http.multipart.enabled=true # Enable support of multi-part uploads.
spring.http.multipart.file-size-threshold=0 # Threshold after which files will be written to disk. Values can use the suffixed "MB" or "KB" to indicate a Megabyte or Kilobyte size.
spring.http.multipart.location= # Intermediate location of uploaded files.
spring.http.multipart.max-file-size=1Mb # Max file size. Values can use the suffixed "MB" or "KB" to indicate a Megabyte or Kilobyte size.
spring.http.multipart.max-request-size=10Mb # Max request size. Values can use the suffixed "MB" or "KB" to indicate a Megabyte or Kilobyte size.

# JACKSON (JacksonProperties)
spring.jackson.date-format= # Date format string or a fully-qualified date format class name. For instance `yyyy-MM-dd HH:mm:ss`.
spring.jackson.default-property-inclusion= # Controls the inclusion of properties during serialization.
spring.jackson.deserialization.*= # Jackson on/off features that affect the way Java objects are deserialized.
spring.jackson.generator.*= # Jackson on/off features for generators.
spring.jackson.joda-date-time-format= # Joda date time format string. If not configured, "date-format" will be used as a fallback if it is configured with a format string.
spring.jackson.locale= # Locale used for formatting.
spring.jackson.mapper.*= # Jackson general purpose on/off features.
spring.jackson.parser.*= # Jackson on/off features for parsers.
spring.jackson.property-naming-strategy= # One of the constants on Jackson's PropertyNamingStrategy. Can also be a fully-qualified class name of a PropertyNamingStrategy subclass.
spring.jackson.serialization.*= # Jackson on/off features that affect the way Java objects are serialized.
spring.jackson.serialization-inclusion= # Controls the inclusion of properties during serialization. Configured with one of the values in Jackson's JsonInclude.Include enumeration.
spring.jackson.time-zone= # Time zone used when formatting dates. For instance `America/Los_Angeles`

# JERSEY (JerseyProperties)
spring.jersey.application-path= # Path that serves as the base URI for the application. Overrides the value of "@ApplicationPath" if specified.
spring.jersey.filter.order=0 # Jersey filter chain order.
spring.jersey.init.*= # Init parameters to pass to Jersey via the servlet or filter.
spring.jersey.servlet.load-on-startup=-1 # Load on startup priority of the Jersey servlet.
spring.jersey.type=servlet # Jersey integration type. Can be either "servlet" or "filter".

# SPRING MOBILE DEVICE VIEWS (DeviceDelegatingViewResolverAutoConfiguration)
spring.mobile.devicedelegatingviewresolver.enable-fallback=false # Enable support for fallback resolution.
spring.mobile.devicedelegatingviewresolver.enabled=false # Enable device view resolver.
spring.mobile.devicedelegatingviewresolver.mobile-prefix=mobile/ # Prefix that gets prepended to view names for mobile devices.
spring.mobile.devicedelegatingviewresolver.mobile-suffix= # Suffix that gets appended to view names for mobile devices.
spring.mobile.devicedelegatingviewresolver.normal-prefix= # Prefix that gets prepended to view names for normal devices.
spring.mobile.devicedelegatingviewresolver.normal-suffix= # Suffix that gets appended to view names for normal devices.
spring.mobile.devicedelegatingviewresolver.tablet-prefix=tablet/ # Prefix that gets prepended to view names for tablet devices.
spring.mobile.devicedelegatingviewresolver.tablet-suffix= # Suffix that gets appended to view names for tablet devices.

# SPRING MOBILE SITE PREFERENCE (SitePreferenceAutoConfiguration)
spring.mobile.sitepreference.enabled=true # Enable SitePreferenceHandler.

# MUSTACHE TEMPLATES (MustacheAutoConfiguration)
spring.mustache.allow-request-override= # Set whether HttpServletRequest attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.mustache.allow-session-override= # Set whether HttpSession attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.mustache.cache= # Enable template caching.
spring.mustache.charset= # Template encoding.
spring.mustache.check-template-location= # Check that the templates location exists.
spring.mustache.content-type= # Content-Type value.
spring.mustache.enabled= # Enable MVC view resolution for this technology.
spring.mustache.expose-request-attributes= # Set whether all request attributes should be added to the model prior to merging with the template.
spring.mustache.expose-session-attributes= # Set whether all HttpSession attributes should be added to the model prior to merging with the template.
spring.mustache.expose-spring-macro-helpers= # Set whether to expose a RequestContext for use by Spring's macro library, under the name "springMacroRequestContext".
spring.mustache.prefix=classpath:/templates/ # Prefix to apply to template names.
spring.mustache.request-context-attribute= # Name of the RequestContext attribute for all views.
spring.mustache.suffix=.html # Suffix to apply to template names.
spring.mustache.view-names= # White list of view names that can be resolved.

# SPRING MVC (WebMvcProperties)
spring.mvc.async.request-timeout= # Amount of time (in milliseconds) before asynchronous request handling times out.
spring.mvc.date-format= # Date format to use. For instance `dd/MM/yyyy`.
spring.mvc.dispatch-trace-request=false # Dispatch TRACE requests to the FrameworkServlet doService method.
spring.mvc.dispatch-options-request=true # Dispatch OPTIONS requests to the FrameworkServlet doService method.
spring.mvc.favicon.enabled=true # Enable resolution of favicon.ico.
spring.mvc.ignore-default-model-on-redirect=true # If the content of the "default" model should be ignored during redirect scenarios.
spring.mvc.locale= # Locale to use. By default, this locale is overridden by the "Accept-Language" header.
spring.mvc.locale-resolver=accept-header # Define how the locale should be resolved.
spring.mvc.log-resolved-exception=false # Enable warn logging of exceptions resolved by a "HandlerExceptionResolver".
spring.mvc.media-types.*= # Maps file extensions to media types for content negotiation.
spring.mvc.message-codes-resolver-format= # Formatting strategy for message codes. For instance `PREFIX_ERROR_CODE`.
spring.mvc.servlet.load-on-startup=-1 # Load on startup priority of the Spring Web Services servlet.
spring.mvc.static-path-pattern=/** # Path pattern used for static resources.
spring.mvc.throw-exception-if-no-handler-found=false # If a "NoHandlerFoundException" should be thrown if no Handler was found to process a request.
spring.mvc.view.prefix= # Spring MVC view prefix.
spring.mvc.view.suffix= # Spring MVC view suffix.

# SPRING RESOURCES HANDLING (ResourceProperties)
spring.resources.add-mappings=true # Enable default resource handling.
spring.resources.cache-period= # Cache period for the resources served by the resource handler, in seconds.
spring.resources.chain.cache=true # Enable caching in the Resource chain.
spring.resources.chain.enabled= # Enable the Spring Resource Handling chain. Disabled by default unless at least one strategy has been enabled.
spring.resources.chain.gzipped=false # Enable resolution of already gzipped resources.
spring.resources.chain.html-application-cache=false # Enable HTML5 application cache manifest rewriting.
spring.resources.chain.strategy.content.enabled=false # Enable the content Version Strategy.
spring.resources.chain.strategy.content.paths=/** # Comma-separated list of patterns to apply to the Version Strategy.
spring.resources.chain.strategy.fixed.enabled=false # Enable the fixed Version Strategy.
spring.resources.chain.strategy.fixed.paths=/** # Comma-separated list of patterns to apply to the Version Strategy.
spring.resources.chain.strategy.fixed.version= # Version string to use for the Version Strategy.
spring.resources.static-locations=classpath:/META-INF/resources/,classpath:/resources/,classpath:/static/,classpath:/public/ # Locations of static resources.

# SPRING SESSION (SessionProperties)
spring.session.hazelcast.map-name=spring:session:sessions # Name of the map used to store sessions.
spring.session.jdbc.initializer.enabled=true # Create the required session tables on startup if necessary.
spring.session.jdbc.schema=classpath:org/springframework/session/jdbc/schema-@@platform@@.sql # Path to the SQL file to use to initialize the database schema.
spring.session.jdbc.table-name=SPRING_SESSION # Name of database table used to store sessions.
spring.session.mongo.collection-name=sessions # Collection name used to store sessions.
spring.session.redis.flush-mode= # Flush mode for the Redis sessions.
spring.session.redis.namespace= # Namespace for keys used to store sessions.
spring.session.store-type= # Session store type.

# SPRING SOCIAL (SocialWebAutoConfiguration)
spring.social.auto-connection-views=false # Enable the connection status view for supported providers.

# SPRING SOCIAL FACEBOOK (FacebookAutoConfiguration)
spring.social.facebook.app-id= # your application's Facebook App ID
spring.social.facebook.app-secret= # your application's Facebook App Secret

# SPRING SOCIAL LINKEDIN (LinkedInAutoConfiguration)
spring.social.linkedin.app-id= # your application's LinkedIn App ID
spring.social.linkedin.app-secret= # your application's LinkedIn App Secret

# SPRING SOCIAL TWITTER (TwitterAutoConfiguration)
spring.social.twitter.app-id= # your application's Twitter App ID
spring.social.twitter.app-secret= # your application's Twitter App Secret

# THYMELEAF (ThymeleafAutoConfiguration)
spring.thymeleaf.cache=true # Enable template caching.
spring.thymeleaf.check-template-location=true # Check that the templates location exists.
spring.thymeleaf.content-type=text/html # Content-Type value.
spring.thymeleaf.enabled=true # Enable MVC Thymeleaf view resolution.
spring.thymeleaf.encoding=UTF-8 # Template encoding.
spring.thymeleaf.excluded-view-names= # Comma-separated list of view names that should be excluded from resolution.
spring.thymeleaf.mode=HTML5 # Template mode to be applied to templates. See also StandardTemplateModeHandlers.
spring.thymeleaf.prefix=classpath:/templates/ # Prefix that gets prepended to view names when building a URL.
spring.thymeleaf.suffix=.html # Suffix that gets appended to view names when building a URL.
spring.thymeleaf.template-resolver-order= # Order of the template resolver in the chain.
spring.thymeleaf.view-names= # Comma-separated list of view names that can be resolved.

# VELOCITY TEMPLATES (VelocityAutoConfiguration)
spring.velocity.allow-request-override=false # Set whether HttpServletRequest attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.velocity.allow-session-override=false # Set whether HttpSession attributes are allowed to override (hide) controller generated model attributes of the same name.
spring.velocity.cache= # Enable template caching.
spring.velocity.charset=UTF-8 # Template encoding.
spring.velocity.check-template-location=true # Check that the templates location exists.
spring.velocity.content-type=text/html # Content-Type value.
spring.velocity.date-tool-attribute= # Name of the DateTool helper object to expose in the Velocity context of the view.
spring.velocity.enabled=true # Enable MVC view resolution for this technology.
spring.velocity.expose-request-attributes=false # Set whether all request attributes should be added to the model prior to merging with the template.
spring.velocity.expose-session-attributes=false # Set whether all HttpSession attributes should be added to the model prior to merging with the template.
spring.velocity.expose-spring-macro-helpers=true # Set whether to expose a RequestContext for use by Spring's macro library, under the name "springMacroRequestContext".
spring.velocity.number-tool-attribute= # Name of the NumberTool helper object to expose in the Velocity context of the view.
spring.velocity.prefer-file-system-access=true # Prefer file system access for template loading. File system access enables hot detection of template changes.
spring.velocity.prefix= # Prefix that gets prepended to view names when building a URL.
spring.velocity.properties.*= # Additional velocity properties.
spring.velocity.request-context-attribute= # Name of the RequestContext attribute for all views.
spring.velocity.resource-loader-path=classpath:/templates/ # Template path.
spring.velocity.suffix=.vm # Suffix that gets appended to view names when building a URL.
spring.velocity.toolbox-config-location= # Velocity Toolbox config location. For instance `/WEB-INF/toolbox.xml`
spring.velocity.view-names= # White list of view names that can be resolved.

# SPRING WEB SERVICES (WebServicesProperties)
spring.webservices.path=/services # Path that serves as the base URI for the services.
spring.webservices.servlet.init= # Servlet init parameters to pass to Spring Web Services.
spring.webservices.servlet.load-on-startup=-1 # Load on startup priority of the Spring Web Services servlet.



# ----------------------------------------
# SECURITY PROPERTIES
# ----------------------------------------
# SECURITY (SecurityProperties)
security.basic.authorize-mode=role # Security authorize mode to apply.
security.basic.enabled=true # Enable basic authentication.
security.basic.path=/** # Comma-separated list of paths to secure.
security.basic.realm=Spring # HTTP basic realm name.
security.enable-csrf=false # Enable Cross Site Request Forgery support.
security.filter-order=0 # Security filter chain order.
security.filter-dispatcher-types=ASYNC, FORWARD, INCLUDE, REQUEST # Security filter chain dispatcher types.
security.headers.cache=true # Enable cache control HTTP headers.
security.headers.content-type=true # Enable "X-Content-Type-Options" header.
security.headers.frame=true # Enable "X-Frame-Options" header.
security.headers.hsts= # HTTP Strict Transport Security (HSTS) mode (none, domain, all).
security.headers.xss=true # Enable cross site scripting (XSS) protection.
security.ignored= # Comma-separated list of paths to exclude from the default secured paths.
security.require-ssl=false # Enable secure channel for all requests.
security.sessions=stateless # Session creation policy (always, never, if_required, stateless).
security.user.name=user # Default user name.
security.user.password= # Password for the default user name. A random password is logged on startup by default.
security.user.role=USER # Granted roles for the default user name.

# SECURITY OAUTH2 CLIENT (OAuth2ClientProperties)
security.oauth2.client.client-id= # OAuth2 client id.
security.oauth2.client.client-secret= # OAuth2 client secret. A random secret is generated by default

# SECURITY OAUTH2 RESOURCES (ResourceServerProperties)
security.oauth2.resource.id= # Identifier of the resource.
security.oauth2.resource.jwt.key-uri= # The URI of the JWT token. Can be set if the value is not available and the key is public.
security.oauth2.resource.jwt.key-value= # The verification key of the JWT token. Can either be a symmetric secret or PEM-encoded RSA public key.
security.oauth2.resource.prefer-token-info=true # Use the token info, can be set to false to use the user info.
security.oauth2.resource.service-id=resource #
security.oauth2.resource.token-info-uri= # URI of the token decoding endpoint.
security.oauth2.resource.token-type= # The token type to send when using the userInfoUri.
security.oauth2.resource.user-info-uri= # URI of the user endpoint.

# SECURITY OAUTH2 SSO (OAuth2SsoProperties)
security.oauth2.sso.filter-order= # Filter order to apply if not providing an explicit WebSecurityConfigurerAdapter
security.oauth2.sso.login-path=/login # Path to the login page, i.e. the one that triggers the redirect to the OAuth2 Authorization Server


# ----------------------------------------
# DATA PROPERTIES
# ----------------------------------------

# FLYWAY (FlywayProperties)
flyway.baseline-description= #
flyway.baseline-version=1 # version to start migration
flyway.baseline-on-migrate= #
flyway.check-location=false # Check that migration scripts location exists.
flyway.clean-on-validation-error= #
flyway.enabled=true # Enable flyway.
flyway.encoding= #
flyway.ignore-failed-future-migration= #
flyway.init-sqls= # SQL statements to execute to initialize a connection immediately after obtaining it.
flyway.locations=classpath:db/migration # locations of migrations scripts
flyway.out-of-order= #
flyway.password= # JDBC password if you want Flyway to create its own DataSource
flyway.placeholder-prefix= #
flyway.placeholder-replacement= #
flyway.placeholder-suffix= #
flyway.placeholders.*= #
flyway.schemas= # schemas to update
flyway.sql-migration-prefix=V #
flyway.sql-migration-separator= #
flyway.sql-migration-suffix=.sql #
flyway.table= #
flyway.url= # JDBC url of the database to migrate. If not set, the primary configured data source is used.
flyway.user= # Login user of the database to migrate.
flyway.validate-on-migrate= #

# LIQUIBASE (LiquibaseProperties)
liquibase.change-log=classpath:/db/changelog/db.changelog-master.yaml # Change log configuration path.
liquibase.check-change-log-location=true # Check the change log location exists.
liquibase.contexts= # Comma-separated list of runtime contexts to use.
liquibase.default-schema= # Default database schema.
liquibase.drop-first=false # Drop the database schema first.
liquibase.enabled=true # Enable liquibase support.
liquibase.labels= # Comma-separated list of runtime labels to use.
liquibase.parameters.*= # Change log parameters.
liquibase.password= # Login password of the database to migrate.
liquibase.rollback-file= # File to which rollback SQL will be written when an update is performed.
liquibase.url= # JDBC url of the database to migrate. If not set, the primary configured data source is used.
liquibase.user= # Login user of the database to migrate.

# COUCHBASE (CouchbaseProperties)
spring.couchbase.bootstrap-hosts= # Couchbase nodes (host or IP address) to bootstrap from.
spring.couchbase.bucket.name=default # Name of the bucket to connect to.
spring.couchbase.bucket.password=  # Password of the bucket.
spring.couchbase.env.endpoints.key-value=1 # Number of sockets per node against the Key/value service.
spring.couchbase.env.endpoints.query=1 # Number of sockets per node against the Query (N1QL) service.
spring.couchbase.env.endpoints.view=1 # Number of sockets per node against the view service.
spring.couchbase.env.ssl.enabled= # Enable SSL support. Enabled automatically if a "keyStore" is provided unless specified otherwise.
spring.couchbase.env.ssl.key-store= # Path to the JVM key store that holds the certificates.
spring.couchbase.env.ssl.key-store-password= # Password used to access the key store.
spring.couchbase.env.timeouts.connect=5000 # Bucket connections timeout in milliseconds.
spring.couchbase.env.timeouts.key-value=2500 # Blocking operations performed on a specific key timeout in milliseconds.
spring.couchbase.env.timeouts.query=7500 # N1QL query operations timeout in milliseconds.
spring.couchbase.env.timeouts.socket-connect=1000 # Socket connect connections timeout in milliseconds.
spring.couchbase.env.timeouts.view=7500 # Regular and geospatial view operations timeout in milliseconds.

# DAO (PersistenceExceptionTranslationAutoConfiguration)
spring.dao.exceptiontranslation.enabled=true # Enable the PersistenceExceptionTranslationPostProcessor.

# CASSANDRA (CassandraProperties)
spring.data.cassandra.cluster-name= # Name of the Cassandra cluster.
spring.data.cassandra.compression= # Compression supported by the Cassandra binary protocol.
spring.data.cassandra.connect-timeout-millis= # Socket option: connection time out.
spring.data.cassandra.consistency-level= # Queries consistency level.
spring.data.cassandra.contact-points=localhost # Comma-separated list of cluster node addresses.
spring.data.cassandra.fetch-size= # Queries default fetch size.
spring.data.cassandra.keyspace-name= # Keyspace name to use.
spring.data.cassandra.load-balancing-policy= # Class name of the load balancing policy.
spring.data.cassandra.port= # Port of the Cassandra server.
spring.data.cassandra.password= # Login password of the server.
spring.data.cassandra.read-timeout-millis= # Socket option: read time out.
spring.data.cassandra.reconnection-policy= # Reconnection policy class.
spring.data.cassandra.retry-policy= # Class name of the retry policy.
spring.data.cassandra.serial-consistency-level= # Queries serial consistency level.
spring.data.cassandra.schema-action=none # Schema action to take at startup.
spring.data.cassandra.ssl=false # Enable SSL support.
spring.data.cassandra.username= # Login user of the server.

# DATA COUCHBASE (CouchbaseDataProperties)
spring.data.couchbase.auto-index=false # Automatically create views and indexes.
spring.data.couchbase.consistency=read-your-own-writes # Consistency to apply by default on generated queries.
spring.data.couchbase.repositories.enabled=true # Enable Couchbase repositories.

# ELASTICSEARCH (ElasticsearchProperties)
spring.data.elasticsearch.cluster-name=elasticsearch # Elasticsearch cluster name.
spring.data.elasticsearch.cluster-nodes= # Comma-separated list of cluster node addresses. If not specified, starts a client node.
spring.data.elasticsearch.properties.*= # Additional properties used to configure the client.
spring.data.elasticsearch.repositories.enabled=true # Enable Elasticsearch repositories.

# MONGODB (MongoProperties)
spring.data.mongodb.authentication-database= # Authentication database name.
spring.data.mongodb.database=test # Database name.
spring.data.mongodb.field-naming-strategy= # Fully qualified name of the FieldNamingStrategy to use.
spring.data.mongodb.grid-fs-database= # GridFS database name.
spring.data.mongodb.host=localhost # Mongo server host.
spring.data.mongodb.password= # Login password of the mongo server.
spring.data.mongodb.port=27017 # Mongo server port.
spring.data.mongodb.repositories.enabled=true # Enable Mongo repositories.
spring.data.mongodb.uri=mongodb://localhost/test # Mongo database URI. When set, host and port are ignored.
spring.data.mongodb.username= # Login user of the mongo server.

# DATA REDIS
spring.data.redis.repositories.enabled=true # Enable Redis repositories.

# NEO4J (Neo4jProperties)
spring.data.neo4j.compiler= # Compiler to use.
spring.data.neo4j.embedded.enabled=true # Enable embedded mode if the embedded driver is available.
spring.data.neo4j.password= # Login password of the server.
spring.data.neo4j.repositories.enabled=true # Enable Neo4j repositories.
spring.data.neo4j.session.scope=singleton # Scope (lifetime) of the session.
spring.data.neo4j.uri= # URI used by the driver. Auto-detected by default.
spring.data.neo4j.username= # Login user of the server.

# DATA REST (RepositoryRestProperties)
spring.data.rest.base-path= # Base path to be used by Spring Data REST to expose repository resources.
spring.data.rest.default-page-size= # Default size of pages.
spring.data.rest.enable-enum-translation= # Enable enum value translation via the Spring Data REST default resource bundle.
spring.data.rest.limit-param-name= # Name of the URL query string parameter that indicates how many results to return at once.
spring.data.rest.max-page-size= # Maximum size of pages.
spring.data.rest.page-param-name= # Name of the URL query string parameter that indicates what page to return.
spring.data.rest.return-body-on-create= # Return a response body after creating an entity.
spring.data.rest.return-body-on-update= # Return a response body after updating an entity.
spring.data.rest.sort-param-name= # Name of the URL query string parameter that indicates what direction to sort results.

# SOLR (SolrProperties)
spring.data.solr.host=http://127.0.0.1:8983/solr # Solr host. Ignored if "zk-host" is set.
spring.data.solr.repositories.enabled=true # Enable Solr repositories.
spring.data.solr.zk-host= # ZooKeeper host address in the form HOST:PORT.

# DATASOURCE (DataSourceAutoConfiguration & DataSourceProperties)
spring.datasource.continue-on-error=false # Do not stop if an error occurs while initializing the database.
spring.datasource.data= # Data (DML) script resource reference.
spring.datasource.data-username= # User of the database to execute DML scripts (if different).
spring.datasource.data-password= # Password of the database to execute DML scripts (if different).
spring.datasource.dbcp.*= # Commons DBCP specific settings
spring.datasource.dbcp2.*= # Commons DBCP2 specific settings
spring.datasource.driver-class-name= # Fully qualified name of the JDBC driver. Auto-detected based on the URL by default.
spring.datasource.hikari.*= # Hikari specific settings
spring.datasource.initialize=true # Populate the database using 'data.sql'.
spring.datasource.jmx-enabled=false # Enable JMX support (if provided by the underlying pool).
spring.datasource.jndi-name= # JNDI location of the datasource. Class, url, username & password are ignored when set.
spring.datasource.name=testdb # Name of the datasource.
spring.datasource.password= # Login password of the database.
spring.datasource.platform=all # Platform to use in the schema resource (schema-${platform}.sql).
spring.datasource.schema= # Schema (DDL) script resource reference.
spring.datasource.schema-username= # User of the database to execute DDL scripts (if different).
spring.datasource.schema-password= # Password of the database to execute DDL scripts (if different).
spring.datasource.separator=; # Statement separator in SQL initialization scripts.
spring.datasource.sql-script-encoding= # SQL scripts encoding.
spring.datasource.tomcat.*= # Tomcat datasource specific settings
spring.datasource.type= # Fully qualified name of the connection pool implementation to use. By default, it is auto-detected from the classpath.
spring.datasource.url= # JDBC url of the database.
spring.datasource.username= # Login user of the database.
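
As a quick illustration, a minimal datasource setup using the keys above might look like the following; the URL, credentials, driver and pool type are placeholder values for a local MySQL instance, not defaults.

spring.datasource.url=jdbc:mysql://localhost:3306/sampledb
spring.datasource.username=sample
spring.datasource.password=sample
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.type=org.apache.tomcat.jdbc.pool.DataSource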

# JEST (Elasticsearch HTTP client) (JestProperties)
spring.elasticsearch.jest.connection-timeout=3000 # Connection timeout in milliseconds.
spring.elasticsearch.jest.password= # Login password.
spring.elasticsearch.jest.proxy.host= # Proxy host the HTTP client should use.
spring.elasticsearch.jest.proxy.port= # Proxy port the HTTP client should use.
spring.elasticsearch.jest.read-timeout=3000 # Read timeout in milliseconds.
spring.elasticsearch.jest.uris=http://localhost:9200 # Comma-separated list of the Elasticsearch instances to use.
spring.elasticsearch.jest.username= # Login user.

# H2 Web Console (H2ConsoleProperties)
spring.h2.console.enabled=false # Enable the console.
spring.h2.console.path=/h2-console # Path at which the console will be available.
spring.h2.console.settings.trace=false # Enable trace output.
spring.h2.console.settings.web-allow-others=false # Enable remote access.

# JOOQ (JooqAutoConfiguration)
spring.jooq.sql-dialect= # SQLDialect JOOQ used when communicating with the configured datasource. For instance `POSTGRES`

# JPA (JpaBaseConfiguration, HibernateJpaAutoConfiguration)
spring.data.jpa.repositories.enabled=true # Enable JPA repositories.
spring.jpa.database= # Target database to operate on, auto-detected by default. Can be alternatively set using the "databasePlatform" property.
spring.jpa.database-platform= # Name of the target database to operate on, auto-detected by default. Can be alternatively set using the "Database" enum.
spring.jpa.generate-ddl=false # Initialize the schema on startup.
spring.jpa.hibernate.ddl-auto= # DDL mode. This is actually a shortcut for the "hibernate.hbm2ddl.auto" property. Default to "create-drop" when using an embedded database, "none" otherwise.
spring.jpa.hibernate.naming.implicit-strategy= # Hibernate 5 implicit naming strategy fully qualified name.
spring.jpa.hibernate.naming.physical-strategy= # Hibernate 5 physical naming strategy fully qualified name.
spring.jpa.hibernate.naming.strategy= # Hibernate 4 naming strategy fully qualified name. Not supported with Hibernate 5.
spring.jpa.hibernate.use-new-id-generator-mappings= # Use Hibernate's newer IdentifierGenerator for AUTO, TABLE and SEQUENCE.
spring.jpa.open-in-view=true # Register OpenEntityManagerInViewInterceptor. Binds a JPA EntityManager to the thread for the entire processing of the request.
spring.jpa.properties.*= # Additional native properties to set on the JPA provider.
spring.jpa.show-sql=false # Enable logging of SQL statements.
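
For example, a common development-time JPA configuration built from the keys above could look like this; the "update" DDL mode and the MySQL dialect are illustrative choices only, not recommended defaults.

spring.jpa.hibernate.ddl-auto=update
spring.jpa.show-sql=true
spring.jpa.properties.hibernate.format_sql=true
spring.jpa.database-platform=org.hibernate.dialect.MySQL5Dialect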

# JTA (JtaAutoConfiguration)
spring.jta.enabled=true # Enable JTA support.
spring.jta.log-dir= # Transaction logs directory.
spring.jta.transaction-manager-id= # Transaction manager unique identifier.

# ATOMIKOS (AtomikosProperties)
spring.jta.atomikos.connectionfactory.borrow-connection-timeout=30 # Timeout, in seconds, for borrowing connections from the pool.
spring.jta.atomikos.connectionfactory.ignore-session-transacted-flag=true # Whether or not to ignore the transacted flag when creating session.
spring.jta.atomikos.connectionfactory.local-transaction-mode=false # Whether or not local transactions are desired.
spring.jta.atomikos.connectionfactory.maintenance-interval=60 # The time, in seconds, between runs of the pool's maintenance thread.
spring.jta.atomikos.connectionfactory.max-idle-time=60 # The time, in seconds, after which connections are cleaned up from the pool.
spring.jta.atomikos.connectionfactory.max-lifetime=0 # The time, in seconds, that a connection can be pooled for before being destroyed. 0 denotes no limit.
spring.jta.atomikos.connectionfactory.max-pool-size=1 # The maximum size of the pool.
spring.jta.atomikos.connectionfactory.min-pool-size=1 # The minimum size of the pool.
spring.jta.atomikos.connectionfactory.reap-timeout=0 # The reap timeout, in seconds, for borrowed connections. 0 denotes no limit.
spring.jta.atomikos.connectionfactory.unique-resource-name=jmsConnectionFactory # The unique name used to identify the resource during recovery.
spring.jta.atomikos.datasource.borrow-connection-timeout=30 # Timeout, in seconds, for borrowing connections from the pool.
spring.jta.atomikos.datasource.default-isolation-level= # Default isolation level of connections provided by the pool.
spring.jta.atomikos.datasource.login-timeout= # Timeout, in seconds, for establishing a database connection.
spring.jta.atomikos.datasource.maintenance-interval=60 # The time, in seconds, between runs of the pool's maintenance thread.
spring.jta.atomikos.datasource.max-idle-time=60 # The time, in seconds, after which connections are cleaned up from the pool.
spring.jta.atomikos.datasource.max-lifetime=0 # The time, in seconds, that a connection can be pooled for before being destroyed. 0 denotes no limit.
spring.jta.atomikos.datasource.max-pool-size=1 # The maximum size of the pool.
spring.jta.atomikos.datasource.min-pool-size=1 # The minimum size of the pool.
spring.jta.atomikos.datasource.reap-timeout=0 # The reap timeout, in seconds, for borrowed connections. 0 denotes no limit.
spring.jta.atomikos.datasource.test-query= # SQL query or statement used to validate a connection before returning it.
spring.jta.atomikos.datasource.unique-resource-name=dataSource # The unique name used to identify the resource during recovery.
spring.jta.atomikos.properties.checkpoint-interval=500 # Interval between checkpoints.
spring.jta.atomikos.properties.console-file-count=1 # Number of debug logs files that can be created.
spring.jta.atomikos.properties.console-file-limit=-1 # How many bytes can be stored at most in debug logs files.
spring.jta.atomikos.properties.console-file-name=tm.out # Debug logs file name.
spring.jta.atomikos.properties.console-log-level= # Console log level.
spring.jta.atomikos.properties.default-jta-timeout=10000 # Default timeout for JTA transactions.
spring.jta.atomikos.properties.enable-logging=true # Enable disk logging.
spring.jta.atomikos.properties.force-shutdown-on-vm-exit=false # Specify if a VM shutdown should trigger forced shutdown of the transaction core.
spring.jta.atomikos.properties.log-base-dir= # Directory in which the log files should be stored.
spring.jta.atomikos.properties.log-base-name=tmlog # Transactions log file base name.
spring.jta.atomikos.properties.max-actives=50 # Maximum number of active transactions.
spring.jta.atomikos.properties.max-timeout=300000 # Maximum timeout (in milliseconds) that can be allowed for transactions.
spring.jta.atomikos.properties.output-dir= # Directory in which to store the debug log files.
spring.jta.atomikos.properties.serial-jta-transactions=true # Specify if sub-transactions should be joined when possible.
spring.jta.atomikos.properties.service= # Transaction manager implementation that should be started.
spring.jta.atomikos.properties.threaded-two-phase-commit=true # Use different (and concurrent) threads for two-phase commit on the participating resources.
spring.jta.atomikos.properties.transaction-manager-unique-name= # Transaction manager's unique name.

# BITRONIX
spring.jta.bitronix.connectionfactory.acquire-increment=1 # Number of connections to create when growing the pool.
spring.jta.bitronix.connectionfactory.acquisition-interval=1 # Time, in seconds, to wait before trying to acquire a connection again after an invalid connection was acquired.
spring.jta.bitronix.connectionfactory.acquisition-timeout=30 # Timeout, in seconds, for acquiring connections from the pool.
spring.jta.bitronix.connectionfactory.allow-local-transactions=true # Whether or not the transaction manager should allow mixing XA and non-XA transactions.
spring.jta.bitronix.connectionfactory.apply-transaction-timeout=false # Whether or not the transaction timeout should be set on the XAResource when it is enlisted.
spring.jta.bitronix.connectionfactory.automatic-enlisting-enabled=true # Whether or not resources should be enlisted and delisted automatically.
spring.jta.bitronix.connectionfactory.cache-producers-consumers=true # Whether or not produces and consumers should be cached.
spring.jta.bitronix.connectionfactory.defer-connection-release=true # Whether or not the provider can run many transactions on the same connection and supports transaction interleaving.
spring.jta.bitronix.connectionfactory.ignore-recovery-failures=false # Whether or not recovery failures should be ignored.
spring.jta.bitronix.connectionfactory.max-idle-time=60 # The time, in seconds, after which connections are cleaned up from the pool.
spring.jta.bitronix.connectionfactory.max-pool-size=10 # The maximum size of the pool. 0 denotes no limit.
spring.jta.bitronix.connectionfactory.min-pool-size=0 # The minimum size of the pool.
spring.jta.bitronix.connectionfactory.password= # The password to use to connect to the JMS provider.
spring.jta.bitronix.connectionfactory.share-transaction-connections=false #  Whether or not connections in the ACCESSIBLE state can be shared within the context of a transaction.
spring.jta.bitronix.connectionfactory.test-connections=true # Whether or not connections should be tested when acquired from the pool.
spring.jta.bitronix.connectionfactory.two-pc-ordering-position=1 # The position that this resource should take during two-phase commit (always first is Integer.MIN_VALUE, always last is Integer.MAX_VALUE).
spring.jta.bitronix.connectionfactory.unique-name=jmsConnectionFactory # The unique name used to identify the resource during recovery.
spring.jta.bitronix.connectionfactory.use-tm-join=true # Whether or not TMJOIN should be used when starting XAResources.
spring.jta.bitronix.connectionfactory.user= # The user to use to connect to the JMS provider.
spring.jta.bitronix.datasource.acquire-increment=1 # Number of connections to create when growing the pool.
spring.jta.bitronix.datasource.acquisition-interval=1 # Time, in seconds, to wait before trying to acquire a connection again after an invalid connection was acquired.
spring.jta.bitronix.datasource.acquisition-timeout=30 # Timeout, in seconds, for acquiring connections from the pool.
spring.jta.bitronix.datasource.allow-local-transactions=true # Whether or not the transaction manager should allow mixing XA and non-XA transactions.
spring.jta.bitronix.datasource.apply-transaction-timeout=false # Whether or not the transaction timeout should be set on the XAResource when it is enlisted.
spring.jta.bitronix.datasource.automatic-enlisting-enabled=true # Whether or not resources should be enlisted and delisted automatically.
spring.jta.bitronix.datasource.cursor-holdability= # The default cursor holdability for connections.
spring.jta.bitronix.datasource.defer-connection-release=true # Whether or not the database can run many transactions on the same connection and supports transaction interleaving.
spring.jta.bitronix.datasource.enable-jdbc4-connection-test= # Whether or not Connection.isValid() is called when acquiring a connection from the pool.
spring.jta.bitronix.datasource.ignore-recovery-failures=false # Whether or not recovery failures should be ignored.
spring.jta.bitronix.datasource.isolation-level= # The default isolation level for connections.
spring.jta.bitronix.datasource.local-auto-commit= # The default auto-commit mode for local transactions.
spring.jta.bitronix.datasource.login-timeout= # Timeout, in seconds, for establishing a database connection.
spring.jta.bitronix.datasource.max-idle-time=60 # The time, in seconds, after which connections are cleaned up from the pool.
spring.jta.bitronix.datasource.max-pool-size=10 # The maximum size of the pool. 0 denotes no limit.
spring.jta.bitronix.datasource.min-pool-size=0 # The minimum size of the pool.
spring.jta.bitronix.datasource.prepared-statement-cache-size=0 # The target size of the prepared statement cache. 0 disables the cache.
spring.jta.bitronix.datasource.share-transaction-connections=false #  Whether or not connections in the ACCESSIBLE state can be shared within the context of a transaction.
spring.jta.bitronix.datasource.test-query= # SQL query or statement used to validate a connection before returning it.
spring.jta.bitronix.datasource.two-pc-ordering-position=1 # The position that this resource should take during two-phase commit (always first is Integer.MIN_VALUE, always last is Integer.MAX_VALUE).
spring.jta.bitronix.datasource.unique-name=dataSource # The unique name used to identify the resource during recovery.
spring.jta.bitronix.datasource.use-tm-join=true # Whether or not TMJOIN should be used when starting XAResources.
spring.jta.bitronix.properties.allow-multiple-lrc=false # Allow multiple LRC resources to be enlisted into the same transaction.
spring.jta.bitronix.properties.asynchronous2-pc=false # Enable asynchronous execution of two-phase commit.
spring.jta.bitronix.properties.background-recovery-interval-seconds=60 # Interval in seconds at which to run the recovery process in the background.
spring.jta.bitronix.properties.current-node-only-recovery=true # Recover only the current node.
spring.jta.bitronix.properties.debug-zero-resource-transaction=false # Log the creation and commit call stacks of transactions executed without a single enlisted resource.
spring.jta.bitronix.properties.default-transaction-timeout=60 # Default transaction timeout in seconds.
spring.jta.bitronix.properties.disable-jmx=false # Enable JMX support.
spring.jta.bitronix.properties.exception-analyzer= # Set the fully qualified name of the exception analyzer implementation to use.
spring.jta.bitronix.properties.filter-log-status=false # Enable filtering of logs so that only mandatory logs are written.
spring.jta.bitronix.properties.force-batching-enabled=true #  Set if disk forces are batched.
spring.jta.bitronix.properties.forced-write-enabled=true # Set if logs are forced to disk.
spring.jta.bitronix.properties.graceful-shutdown-interval=60 # Maximum amount of seconds the TM will wait for transactions to get done before aborting them at shutdown time.
spring.jta.bitronix.properties.jndi-transaction-synchronization-registry-name= # JNDI name of the TransactionSynchronizationRegistry.
spring.jta.bitronix.properties.jndi-user-transaction-name= # JNDI name of the UserTransaction.
spring.jta.bitronix.properties.journal=disk # Name of the journal. Can be 'disk', 'null' or a class name.
spring.jta.bitronix.properties.log-part1-filename=btm1.tlog # Name of the first fragment of the journal.
spring.jta.bitronix.properties.log-part2-filename=btm2.tlog # Name of the second fragment of the journal.
spring.jta.bitronix.properties.max-log-size-in-mb=2 # Maximum size in megabytes of the journal fragments.
spring.jta.bitronix.properties.resource-configuration-filename= # ResourceLoader configuration file name.
spring.jta.bitronix.properties.server-id= # ASCII ID that must uniquely identify this TM instance. Default to the machine's IP address.
spring.jta.bitronix.properties.skip-corrupted-logs=false # Skip corrupted transactions log entries.
spring.jta.bitronix.properties.warn-about-zero-resource-transaction=true # Log a warning for transactions executed without a single enlisted resource.

# NARAYANA (NarayanaProperties)
spring.jta.narayana.default-timeout=60 # Transaction timeout in seconds.
spring.jta.narayana.expiry-scanners=com.arjuna.ats.internal.arjuna.recovery.ExpiredTransactionStatusManagerScanner # Comma-separated list of expiry scanners.
spring.jta.narayana.log-dir= # Transaction object store directory.
spring.jta.narayana.one-phase-commit=true # Enable one phase commit optimisation.
spring.jta.narayana.periodic-recovery-period=120 # Interval in which periodic recovery scans are performed in seconds.
spring.jta.narayana.recovery-backoff-period=10 # Back off period between first and second phases of the recovery scan in seconds.
spring.jta.narayana.recovery-db-pass= # Database password to be used by recovery manager.
spring.jta.narayana.recovery-db-user= # Database username to be used by recovery manager.
spring.jta.narayana.recovery-jms-pass= # JMS password to be used by recovery manager.
spring.jta.narayana.recovery-jms-user= # JMS username to be used by recovery manager.
spring.jta.narayana.recovery-modules= # Comma-separated list of recovery modules.
spring.jta.narayana.transaction-manager-id=1 # Unique transaction manager id.
spring.jta.narayana.xa-resource-orphan-filters= # Comma-separated list of orphan filters.

# EMBEDDED MONGODB (EmbeddedMongoProperties)
spring.mongodb.embedded.features=SYNC_DELAY # Comma-separated list of features to enable.
spring.mongodb.embedded.storage.databaseDir= # Directory used for data storage.
spring.mongodb.embedded.storage.oplogSize= # Maximum size of the oplog in megabytes.
spring.mongodb.embedded.storage.replSetName= # Name of the replica set.
spring.mongodb.embedded.version=2.6.10 # Version of Mongo to use.

# REDIS (RedisProperties)
spring.redis.cluster.max-redirects= # Maximum number of redirects to follow when executing commands across the cluster.
spring.redis.cluster.nodes= # Comma-separated list of "host:port" pairs to bootstrap from.
spring.redis.database=0 # Database index used by the connection factory.
spring.redis.host=localhost # Redis server host.
spring.redis.password= # Login password of the redis server.
spring.redis.pool.max-active=8 # Max number of connections that can be allocated by the pool at a given time. Use a negative value for no limit.
spring.redis.pool.max-idle=8 # Max number of "idle" connections in the pool. Use a negative value to indicate an unlimited number of idle connections.
spring.redis.pool.max-wait=-1 # Maximum amount of time (in milliseconds) a connection allocation should block before throwing an exception when the pool is exhausted. Use a negative value to block indefinitely.
spring.redis.pool.min-idle=0 # Target for the minimum number of idle connections to maintain in the pool. This setting only has an effect if it is positive.
spring.redis.port=6379 # Redis server port.
spring.redis.sentinel.master= # Name of Redis server.
spring.redis.sentinel.nodes= # Comma-separated list of host:port pairs.
spring.redis.timeout=0 # Connection timeout in milliseconds.


# ----------------------------------------
# INTEGRATION PROPERTIES
# ----------------------------------------

# ACTIVEMQ (ActiveMQProperties)
spring.activemq.broker-url= # URL of the ActiveMQ broker. Auto-generated by default. For instance `tcp://localhost:61616`
spring.activemq.in-memory=true # Specify if the default broker URL should be in memory. Ignored if an explicit broker has been specified.
spring.activemq.password= # Login password of the broker.
spring.activemq.user= # Login user of the broker.
spring.activemq.packages.trust-all=false # Trust all packages.
spring.activemq.packages.trusted= # Comma-separated list of specific packages to trust (when not trusting all packages).
spring.activemq.pool.configuration.*= # See PooledConnectionFactory.
spring.activemq.pool.enabled=false # Whether a PooledConnectionFactory should be created instead of a regular ConnectionFactory.
spring.activemq.pool.expiry-timeout=0 # Connection expiration timeout in milliseconds.
spring.activemq.pool.idle-timeout=30000 # Connection idle timeout in milliseconds.
spring.activemq.pool.max-connections=1 # Maximum number of pooled connections.

# ARTEMIS (ArtemisProperties)
spring.artemis.embedded.cluster-password= # Cluster password. Randomly generated on startup by default.
spring.artemis.embedded.data-directory= # Journal file directory. Not necessary if persistence is turned off.
spring.artemis.embedded.enabled=true # Enable embedded mode if the Artemis server APIs are available.
spring.artemis.embedded.persistent=false # Enable persistent store.
spring.artemis.embedded.queues= # Comma-separated list of queues to create on startup.
spring.artemis.embedded.server-id= # Server id. By default, an auto-incremented counter is used.
spring.artemis.embedded.topics= # Comma-separated list of topics to create on startup.
spring.artemis.host=localhost # Artemis broker host.
spring.artemis.mode= # Artemis deployment mode, auto-detected by default. Can be explicitly set to "native" or "embedded".
spring.artemis.password= # Login password of the broker.
spring.artemis.port=61616 # Artemis broker port.
spring.artemis.user= # Login user of the broker.

# SPRING BATCH (BatchProperties)
spring.batch.initializer.enabled=true # Create the required batch tables on startup if necessary.
spring.batch.job.enabled=true # Execute all Spring Batch jobs in the context on startup.
spring.batch.job.names= # Comma-separated list of job names to execute on startup (For instance `job1,job2`). By default, all Jobs found in the context are executed.
spring.batch.schema=classpath:org/springframework/batch/core/schema-@@platform@@.sql # Path to the SQL file to use to initialize the database schema.
spring.batch.table-prefix= # Table prefix for all the batch meta-data tables.

# HORNETQ (HornetQProperties)
spring.hornetq.embedded.cluster-password= # Cluster password. Randomly generated on startup by default.
spring.hornetq.embedded.data-directory= # Journal file directory. Not necessary if persistence is turned off.
spring.hornetq.embedded.enabled=true # Enable embedded mode if the HornetQ server APIs are available.
spring.hornetq.embedded.persistent=false # Enable persistent store.
spring.hornetq.embedded.queues= # Comma-separated list of queues to create on startup.
spring.hornetq.embedded.server-id= # Server id. By default, an auto-incremented counter is used.
spring.hornetq.embedded.topics= # Comma-separated list of topics to create on startup.
spring.hornetq.host=localhost # HornetQ broker host.
spring.hornetq.mode= # HornetQ deployment mode, auto-detected by default. Can be explicitly set to "native" or "embedded".
spring.hornetq.password= # Login password of the broker.
spring.hornetq.port=5445 # HornetQ broker port.
spring.hornetq.user= # Login user of the broker.

# JMS (JmsProperties)
spring.jms.jndi-name= # Connection factory JNDI name. When set, takes precedence over other connection factory auto-configurations.
spring.jms.listener.acknowledge-mode= # Acknowledge mode of the container. By default, the listener is transacted with automatic acknowledgment.
spring.jms.listener.auto-startup=true # Start the container automatically on startup.
spring.jms.listener.concurrency= # Minimum number of concurrent consumers.
spring.jms.listener.max-concurrency= # Maximum number of concurrent consumers.
spring.jms.pub-sub-domain=false # Specify if the default destination type is topic.

# RABBIT (RabbitProperties)
spring.rabbitmq.addresses= # Comma-separated list of addresses to which the client should connect.
spring.rabbitmq.cache.channel.checkout-timeout= # Number of milliseconds to wait to obtain a channel if the cache size has been reached.
spring.rabbitmq.cache.channel.size= # Number of channels to retain in the cache.
spring.rabbitmq.cache.connection.mode=CHANNEL # Connection factory cache mode.
spring.rabbitmq.cache.connection.size= # Number of connections to cache.
spring.rabbitmq.connection-timeout= # Connection timeout, in milliseconds; zero for infinite.
spring.rabbitmq.dynamic=true # Create an AmqpAdmin bean.
spring.rabbitmq.host=localhost # RabbitMQ host.
spring.rabbitmq.listener.acknowledge-mode= # Acknowledge mode of container.
spring.rabbitmq.listener.auto-startup=true # Start the container automatically on startup.
spring.rabbitmq.listener.concurrency= # Minimum number of consumers.
spring.rabbitmq.listener.default-requeue-rejected= # Whether or not to requeue delivery failures; default `true`.
spring.rabbitmq.listener.max-concurrency= # Maximum number of consumers.
spring.rabbitmq.listener.prefetch= # Number of messages to be handled in a single request. It should be greater than or equal to the transaction size (if used).
spring.rabbitmq.listener.retry.enabled=false # Whether or not publishing retries are enabled.
spring.rabbitmq.listener.retry.initial-interval=1000 # Interval between the first and second attempt to deliver a message.
spring.rabbitmq.listener.retry.max-attempts=3 # Maximum number of attempts to deliver a message.
spring.rabbitmq.listener.retry.max-interval=10000 # Maximum interval, in milliseconds, between attempts to deliver a message.
spring.rabbitmq.listener.retry.multiplier=1.0 # A multiplier to apply to the previous delivery retry interval.
spring.rabbitmq.listener.retry.stateless=true # Whether or not retry is stateless or stateful.
spring.rabbitmq.listener.transaction-size= # Number of messages to be processed in a transaction. For best results it should be less than or equal to the prefetch count.
spring.rabbitmq.password= # Login to authenticate against the broker.
spring.rabbitmq.port=5672 # RabbitMQ port.
spring.rabbitmq.publisher-confirms=false # Enable publisher confirms.
spring.rabbitmq.publisher-returns=false # Enable publisher returns.
spring.rabbitmq.requested-heartbeat= # Requested heartbeat timeout, in seconds; zero for none.
spring.rabbitmq.ssl.enabled=false # Enable SSL support.
spring.rabbitmq.ssl.key-store= # Path to the key store that holds the SSL certificate.
spring.rabbitmq.ssl.key-store-password= # Password used to access the key store.
spring.rabbitmq.ssl.trust-store= # Trust store that holds SSL certificates.
spring.rabbitmq.ssl.trust-store-password= # Password used to access the trust store.
spring.rabbitmq.ssl.algorithm= # SSL algorithm to use. By default, configured by the Rabbit client library.
spring.rabbitmq.template.mandatory=false # Enable mandatory messages.
spring.rabbitmq.template.receive-timeout=0 # Timeout for `receive()` methods.
spring.rabbitmq.template.reply-timeout=5000 # Timeout for `sendAndReceive()` methods.
spring.rabbitmq.template.retry.enabled=false # Set to true to enable retries in the `RabbitTemplate`.
spring.rabbitmq.template.retry.initial-interval=1000 # Interval between the first and second attempt to publish a message.
spring.rabbitmq.template.retry.max-attempts=3 # Maximum number of attempts to publish a message.
spring.rabbitmq.template.retry.max-interval=10000 # Maximum interval, in milliseconds, between attempts to publish a message.
spring.rabbitmq.template.retry.multiplier=1.0 # A multiplier to apply to the previous publishing retry interval.
spring.rabbitmq.username= # Login user to authenticate to the broker.
spring.rabbitmq.virtual-host= # Virtual host to use when connecting to the broker.


# ----------------------------------------
# ACTUATOR PROPERTIES
# ----------------------------------------

# ENDPOINTS (AbstractEndpoint subclasses)
endpoints.enabled=true # Enable endpoints.
endpoints.sensitive= # Default endpoint sensitive setting.
endpoints.actuator.enabled=true # Enable the endpoint.
endpoints.actuator.path= # Endpoint URL path.
endpoints.actuator.sensitive=false # Enable security on the endpoint.
endpoints.autoconfig.enabled= # Enable the endpoint.
endpoints.autoconfig.id= # Endpoint identifier.
endpoints.autoconfig.path= # Endpoint path.
endpoints.autoconfig.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.beans.enabled= # Enable the endpoint.
endpoints.beans.id= # Endpoint identifier.
endpoints.beans.path= # Endpoint path.
endpoints.beans.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.configprops.enabled= # Enable the endpoint.
endpoints.configprops.id= # Endpoint identifier.
endpoints.configprops.keys-to-sanitize=password,secret,key,token,.*credentials.*,vcap_services # Keys that should be sanitized. Keys can be simple strings that the property ends with or regex expressions.
endpoints.configprops.path= # Endpoint path.
endpoints.configprops.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.docs.curies.enabled=false # Enable the curie generation.
endpoints.docs.enabled=true # Enable actuator docs endpoint.
endpoints.docs.path=/docs #
endpoints.docs.sensitive=false #
endpoints.dump.enabled= # Enable the endpoint.
endpoints.dump.id= # Endpoint identifier.
endpoints.dump.path= # Endpoint path.
endpoints.dump.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.env.enabled= # Enable the endpoint.
endpoints.env.id= # Endpoint identifier.
endpoints.env.keys-to-sanitize=password,secret,key,token,.*credentials.*,vcap_services # Keys that should be sanitized. Keys can be simple strings that the property ends with or regex expressions.
endpoints.env.path= # Endpoint path.
endpoints.env.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.flyway.enabled= # Enable the endpoint.
endpoints.flyway.id= # Endpoint identifier.
endpoints.flyway.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.health.enabled= # Enable the endpoint.
endpoints.health.id= # Endpoint identifier.
endpoints.health.mapping.*= # Mapping of health statuses to HttpStatus codes. By default, registered health statuses map to sensible defaults (i.e. UP maps to 200).
endpoints.health.path= # Endpoint path.
endpoints.health.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.health.time-to-live=1000 # Time to live for cached result, in milliseconds.
endpoints.heapdump.enabled= # Enable the endpoint.
endpoints.heapdump.path= # Endpoint path.
endpoints.heapdump.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.info.enabled= # Enable the endpoint.
endpoints.info.id= # Endpoint identifier.
endpoints.info.path= # Endpoint path.
endpoints.info.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.jolokia.enabled=true # Enable Jolokia endpoint.
endpoints.jolokia.path=/jolokia # Endpoint URL path.
endpoints.jolokia.sensitive=true # Enable security on the endpoint.
endpoints.liquibase.enabled= # Enable the endpoint.
endpoints.liquibase.id= # Endpoint identifier.
endpoints.liquibase.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.logfile.enabled=true # Enable the endpoint.
endpoints.logfile.external-file= # External Logfile to be accessed.
endpoints.logfile.path=/logfile # Endpoint URL path.
endpoints.logfile.sensitive=true # Enable security on the endpoint.
endpoints.mappings.enabled= # Enable the endpoint.
endpoints.mappings.id= # Endpoint identifier.
endpoints.mappings.path= # Endpoint path.
endpoints.mappings.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.metrics.enabled= # Enable the endpoint.
endpoints.metrics.filter.enabled=true # Enable the metrics servlet filter.
endpoints.metrics.filter.gauge-submissions=merged # Http filter gauge submissions (merged, per-http-method)
endpoints.metrics.filter.counter-submissions=merged # Http filter counter submissions (merged, per-http-method)
endpoints.metrics.id= # Endpoint identifier.
endpoints.metrics.path= # Endpoint path.
endpoints.metrics.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.shutdown.enabled= # Enable the endpoint.
endpoints.shutdown.id= # Endpoint identifier.
endpoints.shutdown.path= # Endpoint path.
endpoints.shutdown.sensitive= # Mark if the endpoint exposes sensitive information.
endpoints.trace.enabled= # Enable the endpoint.
endpoints.trace.id= # Endpoint identifier.
endpoints.trace.path= # Endpoint path.
endpoints.trace.sensitive= # Mark if the endpoint exposes sensitive information.

# ENDPOINTS CORS CONFIGURATION (EndpointCorsProperties)
endpoints.cors.allow-credentials= # Set whether credentials are supported. When not set, credentials are not supported.
endpoints.cors.allowed-headers= # Comma-separated list of headers to allow in a request. '*' allows all headers.
endpoints.cors.allowed-methods=GET # Comma-separated list of methods to allow. '*' allows all methods.
endpoints.cors.allowed-origins= # Comma-separated list of origins to allow. '*' allows all origins. When not set, CORS support is disabled.
endpoints.cors.exposed-headers= # Comma-separated list of headers to include in a response.
endpoints.cors.max-age=1800 # How long, in seconds, the response from a pre-flight request can be cached by clients.

# JMX ENDPOINT (EndpointMBeanExportProperties)
endpoints.jmx.domain= # JMX domain name. Initialized with the value of 'spring.jmx.default-domain' if set.
endpoints.jmx.enabled=true # Enable JMX export of all endpoints.
endpoints.jmx.static-names= # Additional static properties to append to all ObjectNames of MBeans representing Endpoints.
endpoints.jmx.unique-names=false # Ensure that ObjectNames are modified in case of conflict.

# JOLOKIA (JolokiaProperties)
jolokia.config.*= # See Jolokia manual

# MANAGEMENT HTTP SERVER (ManagementServerProperties)
management.add-application-context-header=true # Add the "X-Application-Context" HTTP header in each response.
management.address= # Network address that the management endpoints should bind to.
management.context-path= # Management endpoint context-path. For instance `/actuator`
management.port= # Management endpoint HTTP port. Uses the same port as the application by default. Configure a different port to use management-specific SSL.
management.security.enabled=true # Enable security.
management.security.roles=ADMIN # Comma-separated list of roles that can access the management endpoint.
management.security.sessions=stateless # Session creating policy to use (always, never, if_required, stateless).
management.ssl.ciphers= # Supported SSL ciphers. Requires a custom management.port.
management.ssl.client-auth= # Whether client authentication is wanted ("want") or needed ("need"). Requires a trust store. Requires a custom management.port.
management.ssl.enabled= # Enable SSL support. Requires a custom management.port.
management.ssl.enabled-protocols= # Enabled SSL protocols. Requires a custom management.port.
management.ssl.key-alias= # Alias that identifies the key in the key store. Requires a custom management.port.
management.ssl.key-password= # Password used to access the key in the key store. Requires a custom management.port.
management.ssl.key-store= # Path to the key store that holds the SSL certificate (typically a jks file). Requires a custom management.port.
management.ssl.key-store-password= # Password used to access the key store. Requires a custom management.port.
management.ssl.key-store-provider= # Provider for the key store. Requires a custom management.port.
management.ssl.key-store-type= # Type of the key store. Requires a custom management.port.
management.ssl.protocol=TLS # SSL protocol to use. Requires a custom management.port.
management.ssl.trust-store= # Trust store that holds SSL certificates. Requires a custom management.port.
management.ssl.trust-store-password= # Password used to access the trust store. Requires a custom management.port.
management.ssl.trust-store-provider= # Provider for the trust store. Requires a custom management.port.
management.ssl.trust-store-type= # Type of the trust store. Requires a custom management.port.

# HEALTH INDICATORS (previously health.*)
management.health.db.enabled=true # Enable database health check.
management.health.defaults.enabled=true # Enable default health indicators.
management.health.diskspace.enabled=true # Enable disk space health check.
management.health.diskspace.path= # Path used to compute the available disk space.
management.health.diskspace.threshold=0 # Minimum disk space that should be available, in bytes.
management.health.elasticsearch.enabled=true # Enable elasticsearch health check.
management.health.elasticsearch.indices= # Comma-separated index names.
management.health.elasticsearch.response-timeout=100 # The time, in milliseconds, to wait for a response from the cluster.
management.health.jms.enabled=true # Enable JMS health check.
management.health.mail.enabled=true # Enable Mail health check.
management.health.mongo.enabled=true # Enable MongoDB health check.
management.health.rabbit.enabled=true # Enable RabbitMQ health check.
management.health.redis.enabled=true # Enable Redis health check.
management.health.solr.enabled=true # Enable Solr health check.
management.health.status.order=DOWN, OUT_OF_SERVICE, UNKNOWN, UP # Comma-separated list of health statuses in order of severity.

# INFO CONTRIBUTORS (InfoContributorProperties)
management.info.build.enabled=true # Enable build info.
management.info.defaults.enabled=true # Enable default info contributors.
management.info.env.enabled=true # Enable environment info.
management.info.git.enabled=true # Enable git info.
management.info.git.mode=simple # Mode to use to expose git information.

# REMOTE SHELL (ShellProperties)
management.shell.auth.type=simple # Authentication type. Auto-detected according to the environment.
management.shell.auth.jaas.domain=my-domain # JAAS domain.
management.shell.auth.key.path= # Path to the authentication key. This should point to a valid ".pem" file.
management.shell.auth.simple.user.name=user # Login user.
management.shell.auth.simple.user.password= # Login password.
management.shell.auth.spring.roles=ADMIN # Comma-separated list of required roles to login to the CRaSH console.
management.shell.command-path-patterns=classpath*:/commands/**,classpath*:/crash/commands/** # Patterns to use to look for commands.
management.shell.command-refresh-interval=-1 # Scan for changes and update the command if necessary (in seconds).
management.shell.config-path-patterns=classpath*:/crash/* # Patterns to use to look for configurations.
management.shell.disabled-commands=jpa*,jdbc*,jndi* # Comma-separated list of commands to disable.
management.shell.disabled-plugins= # Comma-separated list of plugins to disable. Certain plugins are disabled by default based on the environment.
management.shell.ssh.auth-timeout= # Number of milliseconds after which the user will be prompted to log in again.
management.shell.ssh.enabled=true # Enable CRaSH SSH support.
management.shell.ssh.idle-timeout= # Number of milliseconds after which unused connections are closed.
management.shell.ssh.key-path= # Path to the SSH server key.
management.shell.ssh.port=2000 # SSH port.
management.shell.telnet.enabled=false # Enable CRaSH telnet support. Enabled by default if the TelnetPlugin is  available.
management.shell.telnet.port=5000 # Telnet port.

# TRACING (TraceProperties)
management.trace.include=request-headers,response-headers,cookies,errors # Items to be included in the trace.

# METRICS EXPORT (MetricExportProperties)
spring.metrics.export.aggregate.key-pattern= # Pattern that tells the aggregator what to do with the keys from the source repository.
spring.metrics.export.aggregate.prefix= # Prefix for global repository if active.
spring.metrics.export.delay-millis=5000 # Delay in milliseconds between export ticks. Metrics are exported to external sources on a schedule with this delay.
spring.metrics.export.enabled=true # Flag to enable metric export (assuming a MetricWriter is available).
spring.metrics.export.excludes= # List of patterns for metric names to exclude. Applied after the includes.
spring.metrics.export.includes= # List of patterns for metric names to include.
spring.metrics.export.redis.key=keys.spring.metrics # Key for redis repository export (if active).
spring.metrics.export.redis.prefix=spring.metrics # Prefix for redis repository if active.
spring.metrics.export.send-latest= # Flag to switch off any available optimizations based on not exporting unchanged metric values.
spring.metrics.export.statsd.host= # Host of a statsd server to receive exported metrics.
spring.metrics.export.statsd.port=8125 # Port of a statsd server to receive exported metrics.
spring.metrics.export.statsd.prefix= # Prefix for statsd exported metrics.
spring.metrics.export.triggers.*= # Specific trigger properties per MetricWriter bean name.


# ----------------------------------------
# DEVTOOLS PROPERTIES
# ----------------------------------------

# DEVTOOLS (DevToolsProperties)
spring.devtools.livereload.enabled=true # Enable a livereload.com compatible server.
spring.devtools.livereload.port=35729 # Server port.
spring.devtools.restart.additional-exclude= # Additional patterns that should be excluded from triggering a full restart.
spring.devtools.restart.additional-paths= # Additional paths to watch for changes.
spring.devtools.restart.enabled=true # Enable automatic restart.
spring.devtools.restart.exclude=META-INF/maven/**,META-INF/resources/**,resources/**,static/**,public/**,templates/**,**/*Test.class,**/*Tests.class,git.properties # Patterns that should be excluded from triggering a full restart.
spring.devtools.restart.poll-interval=1000 # Amount of time (in milliseconds) to wait between polling for classpath changes.
spring.devtools.restart.quiet-period=400 # Amount of quiet time (in milliseconds) required without any classpath changes before a restart is triggered.
spring.devtools.restart.trigger-file= # Name of a specific file that when changed will trigger the restart check. If not specified any classpath file change will trigger the restart.

# REMOTE DEVTOOLS (RemoteDevToolsProperties)
spring.devtools.remote.context-path=/.~~spring-boot!~ # Context path used to handle the remote connection.
spring.devtools.remote.debug.enabled=true # Enable remote debug support.
spring.devtools.remote.debug.local-port=8000 # Local remote debug server port.
spring.devtools.remote.proxy.host= # The host of the proxy to use to connect to the remote application.
spring.devtools.remote.proxy.port= # The port of the proxy to use to connect to the remote application.
spring.devtools.remote.restart.enabled=true # Enable remote restart.
spring.devtools.remote.secret= # A shared secret required to establish a connection (required to enable remote support).
spring.devtools.remote.secret-header-name=X-AUTH-TOKEN # HTTP header used to transfer the shared secret.


Posted by 1010
카테고리 없음2016. 8. 17. 11:46

--------------  HEX CONVERSION ----------------


CREATE OR REPLACE FUNCTION SCP.DEC_STR (
    I_OWNER    IN   VARCHAR2,
    I_TABLE    IN   VARCHAR2,
    I_COLUMN   IN   VARCHAR2,
    I_DATA     IN   VARCHAR2
)
RETURN VARCHAR2 PARALLEL_ENABLE DETERMINISTIC IS
BEGIN
    IF I_DATA IS NULL THEN
        RETURN NULL;
    END IF;

    RETURN UTL_RAW.CAST_TO_VARCHAR2(I_DATA);
EXCEPTION
    WHEN OTHERS THEN
        RAISE_APPLICATION_ERROR(-20010, TO_CHAR(SQLCODE) || ' ' || SUBSTR(SQLERRM, 11, 1024));
        RETURN NULL;
END;



--------------  HEX CONVERSION ----------------


CREATE OR REPLACE FUNCTION SCP.ENC_STR (
    I_OWNER    IN   VARCHAR2,
    I_TABLE    IN   VARCHAR2,
    I_COLUMN   IN   VARCHAR2,
    I_DATA     IN   VARCHAR2
)
RETURN VARCHAR2 PARALLEL_ENABLE DETERMINISTIC IS
BEGIN
    IF I_DATA IS NULL THEN
        RETURN NULL;
    END IF;

    RETURN UTL_RAW.CAST_TO_RAW(I_DATA);
EXCEPTION
    WHEN OTHERS THEN
        RAISE_APPLICATION_ERROR(-20010, TO_CHAR(SQLCODE) || ' ' || SUBSTR(SQLERRM, 11, 1024));
        RETURN NULL;
END;



Posted by 1010
카테고리 없음2016. 8. 16. 11:35

http://jsfiddle.net/handtrix/yvq5y/

Posted by 1010
카테고리 없음2016. 8. 12. 09:28


GET

$(location).attr('href', 'http://' + window.location.host + '/api/v1/sample/excel');

POST

var jForm = $("#frm");
jForm.attr('action', 'http://localhost:8080/api/v1/sample/excel');
jForm.attr('method', 'post');
jForm.submit();


Posted by 1010
카테고리 없음2016. 8. 11. 17:38
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject(ExcelConstant.EXCEL_TITLE, excelTitle);
modelAndView.addObject(ExcelConstant.EXCEL_SHEET_NAME, excelSheetName);
modelAndView.addObject(ExcelConstant.EXCEL_FILE_NAME, excelTitle);
modelAndView.addObject(ExcelConstant.EXCEL_DATA_LIST, dataMapList);
modelAndView.setViewName("excelDownloadView");
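
For context, here is a minimal sketch of how this ModelAndView might be returned from a controller so that the view name resolves to the Excel view bean defined further below. It assumes a BeanNameViewResolver is registered in the application; the request path, title, sheet name and data list are placeholder values and are not part of the original post (the package of ExcelConstant is inferred from the view class).

// Hypothetical controller sketch (not from the original post).
// Assumes a BeanNameViewResolver so that "excelDownloadView" resolves to the
// @Component("excelDownloadView") bean shown below.
import java.util.ArrayList;
import java.util.List;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;

import kr.go.safepeople.config.component.ExcelConstant;
import kr.go.safepeople.config.util.DataMap;

@Controller
public class SampleExcelController {

    @RequestMapping(value = "/api/v1/sample/excel", method = RequestMethod.GET)
    public ModelAndView downloadExcel() {
        // In a real application the data list would come from a service call.
        List<DataMap> dataMapList = new ArrayList<DataMap>();

        ModelAndView modelAndView = new ModelAndView();
        modelAndView.addObject(ExcelConstant.EXCEL_TITLE, "sample");      // placeholder title
        modelAndView.addObject(ExcelConstant.EXCEL_SHEET_NAME, "sheet1"); // placeholder sheet name
        modelAndView.addObject(ExcelConstant.EXCEL_FILE_NAME, "sample");
        modelAndView.addObject(ExcelConstant.EXCEL_DATA_LIST, dataMapList);
        modelAndView.setViewName("excelDownloadView");
        return modelAndView;
    }
}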



package kr.go.safepeople.config.component;
 
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
 
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
 
import kr.go.safepeople.config.exception.SafePeopleException;
import kr.go.safepeople.config.util.DataMap;
 
import org.apache.poi.hssf.usermodel.HSSFFont;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFFont;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.view.AbstractView;
 
@Component("excelDownloadView")
public class ExcelDownloadView extends AbstractView{
    
    public ExcelDownloadView() {
        // TODO Auto-generated constructor stub
        setContentType("application/download; charset=utf-8");
    }
 
    @Override
    protected void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request, HttpServletResponse response) throws SafePeopleException {
        // TODO Auto-generated method stub
        try{
            String excelFileName = (String)model.get(ExcelConstant.EXCEL_FILE_NAME);
            StringBuffer fileName = new StringBuffer();
            ServletOutputStream out = response.getOutputStream();
            
            fileName.append(excelFileName);
            fileName.append("_");
            fileName.append(currentDate("yyyyMMdd_HHmmss"));
            List<DataMap> excelDataList = (List<DataMap>)model.get(ExcelConstant.EXCEL_DATA_LIST);
            if(excelDataList.size()==0){
                out.print("NODATA");
            }else{
                Workbook workbook = new SXSSFWorkbook(500);
                buildExcelDocument(model, workbook);
                fileName.append(".xlsx");
                setResponse(request,response,fileName.toString());
                workbook.write(out);
            }
            if(out != null){out.close();}
        } catch (Exception e) {
            logger.debug(e.getMessage(), e);
            throw new SafePeopleException(e);
        }
    }
 
    private void setResponse(HttpServletRequest request, HttpServletResponse response, String fileName) throws UnsupportedEncodingException{
        String userAgent = request.getHeader("User-Agent");
        String downFileName = "";
        if(fileName!=null){
            downFileName = fileName;
        }
        if (userAgent.indexOf("MSIE 5.5"> -1) { // MS IE 5.5 이하
            response.setHeader("Content-Disposition""filename=" + URLEncoder.encode(downFileName, "UTF-8").replaceAll("\\+""\\ "+ ";");
        } else if (userAgent.indexOf("MSIE"> -1) { // MS IE (보통은 6.x 이상 가정)
            response.setHeader("Content-Disposition""attachment; filename=" + java.net.URLEncoder.encode(downFileName, "UTF-8").replaceAll("\\+""\\ "+ ";");
        } else if (userAgent.indexOf("Trident"> -1) { // MS IE 11
            response.setHeader("Content-Disposition""attachment; filename=" + java.net.URLEncoder.encode(downFileName, "UTF-8").replaceAll("\\+""\\ "+ ";");
        } else { // 모질라나 오페라
            downFileName = URLEncoder.encode(downFileName,"UTF-8");
            response.setHeader("Content-Disposition""attachment; filename=\""+downFileName.replaceAll("\\+""\\ ")+"\";");
        }
    }
    
    private String currentDate(String format){
        SimpleDateFormat sd = new SimpleDateFormat(format, Locale.KOREA);
        return sd.format(new Date()); 
    }
    
    private void buildExcelDocument(Map<String, Object> model, Workbook workbook) {
        // TODO Auto-generated method stub
        
        // init
        String excelTitle = ExcelConstant.EXCEL_TITLE_EMPTY;
        if(model.get(ExcelConstant.EXCEL_TITLE) != null){
            excelTitle = (String)model.get(ExcelConstant.EXCEL_TITLE);
        }
        String excelSheetName = ExcelConstant.EXCEL_SHEET_NAME_EMPTY;
        if(model.get(ExcelConstant.EXCEL_SHEET_NAME) != null){
            excelSheetName = (String)model.get(ExcelConstant.EXCEL_SHEET_NAME);
        }
        List<DataMap> excelDataList = (List<DataMap>)model.get(ExcelConstant.EXCEL_DATA_LIST);
        DataMap headerDataMap = excelDataList.get(0);
        
        int rowTitleNum = 0;
        int rowTitleSubNum = 2;
        int rowHeaderNum = 3;
        int rowDataNum = 4;
        
        String subTitle = "작성일자 :"+currentDate("YYYY년 MM월 d일 HH:mm:ss");
        
        // create title(style,font)
        XSSFFont titleFont =  (XSSFFont)workbook.createFont();
        titleFont.setFontHeightInPoints((short)16);
        titleFont.setBoldweight(HSSFFont.BOLDWEIGHT_BOLD);
        
        XSSFCellStyle  titleCellStyle = (XSSFCellStyle)workbook.createCellStyle();
        titleCellStyle.setBorderRight(XSSFCellStyle.BORDER_THIN);              
        titleCellStyle.setBorderLeft(XSSFCellStyle.BORDER_THIN);   
        titleCellStyle.setBorderTop(XSSFCellStyle.BORDER_THIN);   
        titleCellStyle.setBorderBottom(XSSFCellStyle.BORDER_THIN); 
        titleCellStyle.setFillForegroundColor(IndexedColors.GREY_25_PERCENT.index);
        titleCellStyle.setFillPattern(CellStyle.SOLID_FOREGROUND);
        titleCellStyle.setAlignment(XSSFCellStyle.ALIGN_CENTER_SELECTION);
        titleCellStyle.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
        titleCellStyle.setFont(titleFont);
 
        XSSFCellStyle  titleSubCellStyle = (XSSFCellStyle)workbook.createCellStyle();
        titleSubCellStyle.setAlignment(XSSFCellStyle.ALIGN_RIGHT);
        titleSubCellStyle.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
        
        // create hearder(style,font)
        XSSFFont hearderFont =  (XSSFFont)workbook.createFont();
        hearderFont.setFontHeightInPoints((short)10);
        
        XSSFCellStyle  headerCellStyle = (XSSFCellStyle)workbook.createCellStyle();
        headerCellStyle.setBorderRight(XSSFCellStyle.BORDER_THIN);              
        headerCellStyle.setBorderLeft(XSSFCellStyle.BORDER_THIN);   
        headerCellStyle.setBorderTop(XSSFCellStyle.BORDER_THIN);   
        headerCellStyle.setBorderBottom(XSSFCellStyle.BORDER_THIN);   
        headerCellStyle.setFillForegroundColor(IndexedColors.GREY_25_PERCENT.index);
        headerCellStyle.setFillPattern(XSSFCellStyle.SOLID_FOREGROUND);
        headerCellStyle.setFont(hearderFont);
        
        // create data(style,font)
        XSSFFont dataFont =  (XSSFFont)workbook.createFont();
        dataFont.setFontHeightInPoints((short)10);
        
        XSSFCellStyle  dataCellStyle = (XSSFCellStyle)workbook.createCellStyle();
        dataCellStyle.setBorderRight(XSSFCellStyle.BORDER_THIN);              
        dataCellStyle.setBorderLeft(XSSFCellStyle.BORDER_THIN);   
        dataCellStyle.setBorderTop(XSSFCellStyle.BORDER_THIN);   
        dataCellStyle.setBorderBottom(XSSFCellStyle.BORDER_THIN);   
        dataCellStyle.setFont(dataFont);
        
        // create sheet
        Sheet sheet = workbook.createSheet(excelSheetName);
                
        // create title row
        Row titleRow = sheet.createRow(rowTitleNum);
        titleRow.setHeightInPoints(34);
        Cell titleCell;
        for(int i = 0 ; i < headerDataMap.keyList().size() ; i++){
            titleCell = titleRow.createCell(i);
            if(i == 0){
                titleCell.setCellValue(excelTitle);
            }
            titleCell.setCellStyle(titleCellStyle);
        }
        sheet.addMergedRegion(new CellRangeAddress(rowTitleNum,rowTitleNum,0,(headerDataMap.keyList().size()-1)));
        
        // create titleSub row
        Row titleSubRow = sheet.createRow(rowTitleSubNum);
        Cell titleSubCell;
        for(int i = 0 ; i < headerDataMap.keyList().size() ; i++){
            titleSubCell = titleSubRow.createCell(i);
            if(i == 0){
                titleSubCell.setCellValue(subTitle);
            }
            titleSubCell.setCellStyle(titleSubCellStyle);
        }
        sheet.addMergedRegion(new CellRangeAddress(rowTitleSubNum,rowTitleSubNum,0,(headerDataMap.keyList().size()-1)));
        
        // create header row
        Row headerRow = sheet.createRow(rowHeaderNum);
        Cell headerCell;
        for(int i = 0 ; i < headerDataMap.keyList().size() ; i++){
            headerCell = headerRow.createCell(i);
            headerCell.setCellStyle(headerCellStyle);
            headerCell.setCellValue((String)headerDataMap.get(i));
        }
        
        // create data row
        Row row = null;
        Cell cell = null;
        for(int rowIdx = 0 ; rowIdx < excelDataList.size() ; rowIdx++){
            row = sheet.createRow(rowDataNum+rowIdx);
            DataMap rowData = excelDataList.get(rowIdx);
            for(int cellIdx = 0 ; cellIdx < headerDataMap.keyList().size() ; cellIdx++){
                cell = row.createCell(cellIdx);
                String cellKey = (String)headerDataMap.keyList().get(cellIdx);
                Object cellData = rowData.get(cellKey);
                if(cellData instanceof String){
                    cell.setCellValue((String)cellData);
                }else if(cellData instanceof Integer){
                    cell.setCellValue((Integer)cellData);
                }else if(cellData instanceof Long){
                    cell.setCellValue((Long)cellData);
                }else if(cellData instanceof Double){
                    cell.setCellValue((Double)cellData);
                }else if(cellData != null){
                    // any other non-null type: fall back to its string representation
                    cell.setCellValue(cellData.toString());
                }
                cell.setCellStyle(dataCellStyle);
            }
        }
        // create cell size 
        for(int i = 0 ; i < headerDataMap.keyList().size() ; i++){
          sheet.autoSizeColumn(i,true);
        }
    }
}
cs


Posted by 1010
Uncategorized 2016. 8. 11. 13:28

Setting up routing with two network cards


route add -net 10.0.0.0 netmask 255.255.255.0 gw 10.0.0.1 eth0

route add -net 192.0.0.0 netmask 255.255.255.0 gw 192.168.0.1 eth1
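
These route add commands are lost at reboot. A minimal sketch of making them persistent, assuming a RHEL/CentOS-style system (file names and paths vary by distribution), is to put equivalent entries into per-interface route files that are applied on ifup:

# /etc/sysconfig/network-scripts/route-eth0
10.0.0.0/24 via 10.0.0.1 dev eth0

# /etc/sysconfig/network-scripts/route-eth1
192.0.0.0/24 via 192.168.0.1 dev eth1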

Posted by 1010
Uncategorized 2016. 8. 8. 16:41



mvn dependency:tree -Dverbose -Dincludes=org.slf4j


After checking the dependency tree, exclude or pin the conflicting version, as in the sketch below.
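
For example, once the tree shows which artifact pulls in an unwanted org.slf4j version, one option (a sketch; the version number is only an example) is to pin a single version in dependencyManagement so every transitive reference resolves to it:

<dependencyManagement>
    <dependencies>
        <!-- force one slf4j-api version for all transitive uses -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.21</version>
        </dependency>
    </dependencies>
</dependencyManagement>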

Posted by 1010
Uncategorized 2016. 8. 3. 15:26

Detected both log4j-over-slf4j.jar AND slf4j-log4j12.jar



Detected both log4j-over-slf4j.jar AND slf4j-log4j12.jar on the class path, preempting StackOverflowError.

The purpose of the slf4j-log4j12 module is to delegate or redirect calls made to an SLF4J logger to log4j. The purpose of the log4j-over-slf4j module is to redirect calls made to a log4j logger to SLF4J. If SLF4J is bound with slf4j-log4j12.jar and log4j-over-slf4j.jar is also present on the class path, a StackOverflowError will inevitably occur immediately after the first invocation of an SLF4J or a log4j logger.

Here is what the exception might look like:

Exception in thread "main" java.lang.StackOverflowError
  at java.util.Hashtable.containsKey(Hashtable.java:306)
  at org.apache.log4j.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:36)
  at org.apache.log4j.LogManager.getLogger(LogManager.java:39)
  at org.slf4j.impl.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:73)
  at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:249)
  at org.apache.log4j.Category.<init>(Category.java:53)
  at org.apache.log4j.Logger.<init>(Logger.java:35)
  at org.apache.log4j.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:39)
  at org.apache.log4j.LogManager.getLogger(LogManager.java:39)
  at org.slf4j.impl.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:73)
  at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:249)
  at org.apache.log4j.Category.<init>(Category.java:53)
  at org.apache.log4j.Logger.<init>(Logger.java:35)
  at org.apache.log4j.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:39)
  at org.apache.log4j.LogManager.getLogger(LogManager.java:39)
  subsequent lines omitted ...

SINCE 1.5.11 SLF4J software preempts the inevitable stack overflow error by throwing an exception with details about the actual cause of the problem. This is deemed to be better than leaving the user wondering about the reasons of the StackOverflowError.

For more background on this topic see Bridging legacy APIs.



-----------------------------------------------


Check the dependency hierarchy in pom.xml and add an exclusion for the conflicting artifact, as sketched below.
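
A sketch of the exclusion itself: keep the binding you actually want (here slf4j-log4j12) and exclude log4j-over-slf4j from whichever dependency drags it in. The groupId/artifactId below are placeholders; use the artifact that mvn dependency:tree blames.

<dependency>
    <groupId>com.example</groupId>                         <!-- placeholder -->
    <artifactId>library-with-log4j-bridge</artifactId>     <!-- placeholder -->
    <version>1.0.0</version>
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>log4j-over-slf4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>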

Posted by 1010
Uncategorized 2016. 8. 3. 10:57

Problem
CentOS installation fails with the message:
“Unable to read package metadata. This may be due to missing repodata directory. Please ensure that your install tree has been correctly generated”.

Solution 1
Add the corresponding repository server. In case of CentOS 6.5 64bit: http://mirror.centos.org/centos/6.5/os/x86_64/

Solution 2
See the following stackoverflow thread: http://stackoverflow.com/questions/21637068/unable-to-read-package-metadata-this-may-be-due-to-missing-repodata-directory


It turned out the mirror path had changed; the earlier troubleshooting was wasted effort.


http://mirror.centos.org/centos/6/os/x86_64/

Posted by 1010
Uncategorized 2016. 8. 2. 15:45

http://mirror.oasis.onnetcorp.com/centos/6.5/isos/x86_64/

Posted by 1010
Uncategorized 2016. 8. 1. 13:24


/standalone/configuration/standalone.xml

<interfaces>
    <interface name="management">
        <inet-address value="${jboss.bind.address.management:192.168.33.10}"/>
    </interface>
    <interface name="public">
        <inet-address value="${jboss.bind.address:192.168.33.10}"/>
    </interface>
</interfaces>
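
Since these values are just system properties, the same bind addresses can usually be set at startup instead of editing standalone.xml; a sketch for a JBoss AS 7 / WildFly style server:

./standalone.sh -b 192.168.33.10 -bmanagement 192.168.33.10
# equivalent to
./standalone.sh -Djboss.bind.address=192.168.33.10 -Djboss.bind.address.management=192.168.33.10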


Posted by 1010
Uncategorized 2016. 8. 1. 12:47

 - Add the configuration file (it does not exist by default).

 - The SVN home directory, listen port, etc. can be set here.

 - $ vi /etc/sysconfig/svnserve

OPTIONS="--threads --root /home/svn"


 - Looking at /etc/init.d/svnserve shows that the /etc/sysconfig/svnserve file is required for the service to start.

 - After that, the daemon can be controlled with service svnserve start|stop|restart.

 

Make svnserve start automatically when the server reboots.

 - Check the current setting.

 - $ chkconfig --list svnserve


 - Enable it.

 - $ chkconfig svnserve on

Posted by 1010
Uncategorized 2016. 8. 1. 12:46

1. /etc : run postinstall.httpd.bat


2. /bin : run httpd -k install (registers the Windows service)


3. /bin : net start Apache2.2 : starts the service


Check that it is registered in the Windows Services manager.


4. h

Posted by 1010