Browse Source

Merge pull request #767 from trautonen/master

Attempt to fix spark tests
Mike Smith 11 years ago
parent
commit
a7037bf2fc

+ 11 - 4
spark/pom.xml

@@ -10,11 +10,12 @@
 
     <properties>
         <java-version>1.7</java-version>
-        <spark-version>1.1</spark-version>
-        <hibernate-version>4.2.6.Final</hibernate-version>
+        <spark-version>1.1.1</spark-version>
+        <hibernate-version>4.3.0.Final</hibernate-version>
         <gson-version>2.2.4</gson-version>
-        <mysql-connector-version>5.1.26</mysql-connector-version>
+        <mysql-connector-version>5.1.28</mysql-connector-version>
         <slf4j-version>1.7.5</slf4j-version>
+        <db-host>localhost</db-host>
     </properties>
 
     <prerequisites>
@@ -29,7 +30,7 @@
         <dependency>
             <groupId>javax.servlet</groupId>
             <artifactId>javax.servlet-api</artifactId>
-            <version>3.0.1</version>
+            <version>3.1.0</version>
             <scope>provided</scope>
         </dependency>
         <dependency>
@@ -104,6 +105,12 @@
     </profiles>
     
     <build>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+                <filtering>true</filtering>
+            </resource>
+        </resources>
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>

+ 1 - 1
spark/setup.py

@@ -7,7 +7,7 @@ def start(args, logfile, errfile):
   setup_util.replace_text("spark/src/main/webapp/WEB-INF/resin-web.xml", "mysql:\/\/.*:3306", "mysql://" + args.database_host + ":3306")
   
   try:
-    subprocess.check_call("mvn clean package", shell=True, cwd="spark", stderr=errfile, stdout=logfile)
+    subprocess.check_call("mvn clean package -Ddb-host=" + args.database_host, shell=True, cwd="spark", stderr=errfile, stdout=logfile)
     subprocess.check_call("rm -rf $RESIN_HOME/webapps/*", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("cp spark/target/spark.war $RESIN_HOME/webapps/spark.war", shell=True, stderr=errfile, stdout=logfile)
     subprocess.check_call("$RESIN_HOME/bin/resinctl start", shell=True, stderr=errfile, stdout=logfile)

+ 20 - 9
spark/src/main/java/hello/web/HibernateUtil.java

@@ -4,13 +4,17 @@ import hello.domain.World;
 
 import org.hibernate.Session;
 import org.hibernate.SessionFactory;
+import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.cfg.Configuration;
 import org.hibernate.dialect.MySQLDialect;
-import org.hibernate.service.ServiceRegistryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HibernateUtil {
 
+    private static final Logger LOGGER = LoggerFactory.getLogger(HibernateUtil.class);
+    
     private static final SessionFactory SESSION_FACTORY = createSessionFactory();
     private static final ThreadLocal<Session> SESSIONS = new ThreadLocal<>();
     
@@ -32,14 +36,21 @@ public class HibernateUtil {
     }
     
     private static SessionFactory createSessionFactory() {
-        Configuration configuration = configuration();
-        configuration.setProperty(AvailableSettings.DIALECT, MySQLDialect.class.getName());
-        configuration.setProperty(AvailableSettings.USE_QUERY_CACHE, "false");
-        configuration.setProperty(AvailableSettings.SHOW_SQL, "false");
-        configuration.setProperty(AvailableSettings.CURRENT_SESSION_CONTEXT_CLASS, "thread");
-        configuration.addAnnotatedClass(World.class);
-        ServiceRegistryBuilder serviceRegistryBuilder = new ServiceRegistryBuilder().applySettings(configuration.getProperties());
-        return configuration.buildSessionFactory(serviceRegistryBuilder.buildServiceRegistry());
+        try {
+            Configuration configuration = configuration();
+            String url = configuration.getProperty(AvailableSettings.URL);
+            configuration.setProperty(AvailableSettings.URL, url.replace("{db-host}", "localhost"));
+            configuration.setProperty(AvailableSettings.DIALECT, MySQLDialect.class.getName());
+            configuration.setProperty(AvailableSettings.USE_QUERY_CACHE, "false");
+            configuration.setProperty(AvailableSettings.SHOW_SQL, "false");
+            configuration.setProperty(AvailableSettings.CURRENT_SESSION_CONTEXT_CLASS, "thread");
+            configuration.addAnnotatedClass(World.class);
+            StandardServiceRegistryBuilder serviceRegistryBuilder = new StandardServiceRegistryBuilder().applySettings(configuration.getProperties());
+            return configuration.buildSessionFactory(serviceRegistryBuilder.build());
+        } catch (RuntimeException ex) {
+            LOGGER.error("Failed to create session factory");
+            throw ex;
+        }
     }
     
     private static Configuration configuration() {

+ 19 - 4
spark/src/main/java/hello/web/SparkApplication.java

@@ -42,13 +42,28 @@ public class SparkApplication implements spark.servlet.SparkApplication {
                 for (int i = 0; i < queries; i++) {
                     worlds[i] = (World) session.byId(World.class).load(random.nextInt(DB_ROWS) + 1);
                 }
-
-                return worlds;
+                
+                return (request.queryParams("queries") == null ? worlds[0] : worlds);
             }
             
             private int getQueries(final Request request) {
-                String param = request.queryParams("queries");
-                return (param == null ? 1 : Integer.parseInt(param));
+                try {
+                    String param = request.queryParams("queries");
+                    if (param == null) {
+                        return 1;
+                    }
+                    
+                    int queries = Integer.parseInt(param);
+                    if (queries < 1) {
+                        return 1;
+                    }
+                    if (queries > 500) {
+                        return 500;
+                    }
+                    return queries;
+                } catch (NumberFormatException ex) {
+                    return 1;
+                }
             }
         });
         get(new Route("/plaintext") {

+ 3 - 3
spark/src/main/resources/hibernate-local.cfg.xml

@@ -4,11 +4,11 @@
 <hibernate-configuration>
     <session-factory>
         <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
-        <property name="hibernate.connection.url">jdbc:mysql://localhost:3306/hello_world?jdbcCompliantTruncation=false&amp;elideSetAutoCommits=true&amp;useLocalSessionState=true&amp;cachePrepStmts=true&amp;cacheCallableStmts=true&amp;alwaysSendSetIsolation=false&amp;prepStmtCacheSize=4096&amp;cacheServerConfiguration=true&amp;prepStmtCacheSqlLimit=2048&amp;zeroDateTimeBehavior=convertToNull&amp;traceProtocol=false&amp;useUnbufferedInput=false&amp;useReadAheadInput=false&amp;maintainTimeStats=false&amp;useServerPrepStmts&amp;cacheRSMetadata=true</property>
+        <property name="hibernate.connection.url">jdbc:mysql://${db-host}:3306/hello_world?jdbcCompliantTruncation=false&amp;elideSetAutoCommits=true&amp;useLocalSessionState=true&amp;cachePrepStmts=true&amp;cacheCallableStmts=true&amp;alwaysSendSetIsolation=false&amp;prepStmtCacheSize=4096&amp;cacheServerConfiguration=true&amp;prepStmtCacheSqlLimit=2048&amp;zeroDateTimeBehavior=convertToNull&amp;traceProtocol=false&amp;useUnbufferedInput=false&amp;useReadAheadInput=false&amp;maintainTimeStats=false&amp;useServerPrepStmts&amp;cacheRSMetadata=true</property>
         <property name="hibernate.connection.username">benchmarkdbuser</property>
         <property name="hibernate.connection.password">benchmarkdbpass</property>
-        <property name="hibernate.c3p0.min_size">20</property>
-        <property name="hibernate.c3p0.max_size">20</property>
+        <property name="hibernate.c3p0.min_size">32</property>
+        <property name="hibernate.c3p0.max_size">256</property>
         <property name="hibernate.c3p0.timeout">1800</property>
         <property name="hibernate.c3p0.max_statements">50</property>
     </session-factory>

+ 1 - 0
spark/src/main/resources/log4j.properties

@@ -3,5 +3,6 @@ log4j.rootLogger=WARN, console
 log4j.appender.console=org.apache.log4j.ConsoleAppender
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d %-5p %c %x - %m%n
+log4j.appender.console.Target=System.err
 
 log4j.logger.hello=DEBUG