Browse Source

Configure local DB setup via Maven resource filtering.

trautonen 11 years ago
parent
commit
e89d49fa64

+ 7 - 0
spark/pom.xml

@@ -15,6 +15,7 @@
         <gson-version>2.2.4</gson-version>
         <gson-version>2.2.4</gson-version>
         <mysql-connector-version>5.1.28</mysql-connector-version>
         <mysql-connector-version>5.1.28</mysql-connector-version>
         <slf4j-version>1.7.5</slf4j-version>
         <slf4j-version>1.7.5</slf4j-version>
+        <db-host>localhost</db-host>
     </properties>
     </properties>
 
 
     <prerequisites>
     <prerequisites>
@@ -104,6 +105,12 @@
     </profiles>
     </profiles>
     
     
     <build>
     <build>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+                <filtering>true</filtering>
+            </resource>
+        </resources>
         <plugins>
         <plugins>
             <plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <groupId>org.apache.maven.plugins</groupId>

+ 1 - 1
spark/setup.py

@@ -7,7 +7,7 @@ def start(args, logfile, errfile):
   setup_util.replace_text("spark/src/main/webapp/WEB-INF/resin-web.xml", "mysql:\/\/.*:3306", "mysql://" + args.database_host + ":3306")
   setup_util.replace_text("spark/src/main/webapp/WEB-INF/resin-web.xml", "mysql:\/\/.*:3306", "mysql://" + args.database_host + ":3306")
   
   
   try:
   try:
-    subprocess.check_call("mvn clean package", shell=True, cwd="spark", stderr=errfile, stdout=logfile)
+    subprocess.check_call("mvn clean package -Ddb-host=" + args.database_host, shell=True, cwd="spark", stderr=errfile, stdout=logfile)
     subprocess.check_call("rm -rf $RESIN_HOME/webapps/*", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("rm -rf $RESIN_HOME/webapps/*", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("cp spark/target/spark.war $RESIN_HOME/webapps/spark.war", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("cp spark/target/spark.war $RESIN_HOME/webapps/spark.war", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("$RESIN_HOME/bin/resinctl start", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("$RESIN_HOME/bin/resinctl start", shell=True, stderr=errfile, stdout=logfile)

+ 2 - 0
spark/src/main/java/hello/web/HibernateUtil.java

@@ -38,6 +38,8 @@ public class HibernateUtil {
     private static SessionFactory createSessionFactory() {
     private static SessionFactory createSessionFactory() {
         try {
         try {
             Configuration configuration = configuration();
             Configuration configuration = configuration();
+            String url = configuration.getProperty(AvailableSettings.URL);
+            configuration.setProperty(AvailableSettings.URL, url.replace("${db-host}", "localhost"));
             configuration.setProperty(AvailableSettings.DIALECT, MySQLDialect.class.getName());
             configuration.setProperty(AvailableSettings.DIALECT, MySQLDialect.class.getName());
             configuration.setProperty(AvailableSettings.USE_QUERY_CACHE, "false");
             configuration.setProperty(AvailableSettings.USE_QUERY_CACHE, "false");
             configuration.setProperty(AvailableSettings.SHOW_SQL, "false");
             configuration.setProperty(AvailableSettings.SHOW_SQL, "false");

+ 2 - 11
spark/src/main/java/hello/web/JsonTransformer.java

@@ -1,8 +1,5 @@
 package hello.web;
 package hello.web;
 
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import spark.Request;
 import spark.Request;
 import spark.Response;
 import spark.Response;
 import spark.ResponseTransformerRoute;
 import spark.ResponseTransformerRoute;
@@ -11,7 +8,6 @@ import com.google.gson.Gson;
 
 
 public abstract class JsonTransformer extends ResponseTransformerRoute {
 public abstract class JsonTransformer extends ResponseTransformerRoute {
 
 
-    private static final Logger LOGGER            = LoggerFactory.getLogger(JsonTransformer.class);
     private static final Gson   GSON              = new Gson();
     private static final Gson   GSON              = new Gson();
     private static final String CONTENT_TYPE_JSON = "application/json";
     private static final String CONTENT_TYPE_JSON = "application/json";
     
     
@@ -26,13 +22,8 @@ public abstract class JsonTransformer extends ResponseTransformerRoute {
 
 
     @Override
     @Override
     public Object handle(final Request request, final Response response) {
     public Object handle(final Request request, final Response response) {
-        try {
-            response.type(CONTENT_TYPE_JSON);
-            return handleInternal(request, response);
-        } catch (RuntimeException ex) {
-            LOGGER.error("Request handling failed", ex);
-            throw ex;
-        }
+        response.type(CONTENT_TYPE_JSON);
+        return handleInternal(request, response);
     }
     }
     
     
     protected abstract Object handleInternal(Request request, Response response);
     protected abstract Object handleInternal(Request request, Response response);

+ 0 - 29
spark/src/main/java/hello/web/LoggingRoute.java

@@ -1,29 +0,0 @@
-package hello.web;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import spark.Request;
-import spark.Response;
-import spark.Route;
-
-public abstract class LoggingRoute extends Route {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingRoute.class);
-    
-    public LoggingRoute(final String path) {
-        super(path);
-    }
-    
-    @Override
-    public Object handle(final Request request, final Response response) {
-        try {
-            return handleInternal(request, response);
-        } catch (RuntimeException ex) {
-            LOGGER.error("Request handling failed", ex);
-            throw ex;
-        }
-    }
-    
-    protected abstract Object handleInternal(Request request, Response response);
-}

+ 3 - 2
spark/src/main/java/hello/web/SparkApplication.java

@@ -14,6 +14,7 @@ import org.hibernate.Session;
 import spark.Filter;
 import spark.Filter;
 import spark.Request;
 import spark.Request;
 import spark.Response;
 import spark.Response;
+import spark.Route;
 
 
 public class SparkApplication implements spark.servlet.SparkApplication {
 public class SparkApplication implements spark.servlet.SparkApplication {
 
 
@@ -50,9 +51,9 @@ public class SparkApplication implements spark.servlet.SparkApplication {
                 return (param == null ? 1 : Integer.parseInt(param));
                 return (param == null ? 1 : Integer.parseInt(param));
             }
             }
         });
         });
-        get(new LoggingRoute("/plaintext") {
+        get(new Route("/plaintext") {
             @Override
             @Override
-            protected Object handleInternal(final Request request, final Response response) {
+            public Object handle(final Request request, final Response response) {
                 response.type(CONTENT_TYPE_TEXT);
                 response.type(CONTENT_TYPE_TEXT);
                 return MESSAGE;
                 return MESSAGE;
             }
             }

+ 1 - 1
spark/src/main/resources/hibernate-local.cfg.xml

@@ -4,7 +4,7 @@
 <hibernate-configuration>
 <hibernate-configuration>
     <session-factory>
     <session-factory>
         <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
         <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
-        <property name="hibernate.connection.url">jdbc:mysql://localhost:3306/hello_world?jdbcCompliantTruncation=false&amp;elideSetAutoCommits=true&amp;useLocalSessionState=true&amp;cachePrepStmts=true&amp;cacheCallableStmts=true&amp;alwaysSendSetIsolation=false&amp;prepStmtCacheSize=4096&amp;cacheServerConfiguration=true&amp;prepStmtCacheSqlLimit=2048&amp;zeroDateTimeBehavior=convertToNull&amp;traceProtocol=false&amp;useUnbufferedInput=false&amp;useReadAheadInput=false&amp;maintainTimeStats=false&amp;useServerPrepStmts&amp;cacheRSMetadata=true</property>
+        <property name="hibernate.connection.url">jdbc:mysql://${db-host}:3306/hello_world?jdbcCompliantTruncation=false&amp;elideSetAutoCommits=true&amp;useLocalSessionState=true&amp;cachePrepStmts=true&amp;cacheCallableStmts=true&amp;alwaysSendSetIsolation=false&amp;prepStmtCacheSize=4096&amp;cacheServerConfiguration=true&amp;prepStmtCacheSqlLimit=2048&amp;zeroDateTimeBehavior=convertToNull&amp;traceProtocol=false&amp;useUnbufferedInput=false&amp;useReadAheadInput=false&amp;maintainTimeStats=false&amp;useServerPrepStmts&amp;cacheRSMetadata=true</property>
         <property name="hibernate.connection.username">benchmarkdbuser</property>
         <property name="hibernate.connection.username">benchmarkdbuser</property>
         <property name="hibernate.connection.password">benchmarkdbpass</property>
         <property name="hibernate.connection.password">benchmarkdbpass</property>
         <property name="hibernate.c3p0.min_size">32</property>
         <property name="hibernate.c3p0.min_size">32</property>