
Added weppy to python frameworks (#2228)

Giovanni Barillari 8 years ago
parent
commit
c0294a6796

+ 1 - 0
.travis.yml

@@ -167,6 +167,7 @@ env:
     - "TESTDIR=Python/turbogears"
     - "TESTDIR=Python/uwsgi"
     - "TESTDIR=Python/web2py"
+    - "TESTDIR=Python/weppy"
     - "TESTDIR=Python/wheezyweb"
     - "TESTDIR=Python/wsgi"
     - "TESTDIR=Racket/racket-ws"

+ 33 - 0
frameworks/Python/weppy/README.md

@@ -0,0 +1,33 @@
+# weppy Benchmark Test (ported from Flask example)
+
+This is the weppy portion of a [benchmarking test suite](../../)
+comparing a variety of web development platforms.
+
+The information below is specific to weppy. For further guidance, 
+review the [documentation](http://frameworkbenchmarks.readthedocs.org/en/latest/). 
+Also note that there is additional information provided in 
+the [Python README](../).
+
+[weppy](http://weppy.org/) is a fullstack Python 2/3 web framework.
+This test uses the included ORM and templating system, with gunicorn+meinheld as the application server on CPython and Tornado on PyPy.
+
+## Test Paths & Source
+
+* [JSON Serialization](app.py): "/json"
+* [Single Database Query](app.py): "/db"
+* [Multiple Database Queries](app.py): "/queries?queries=#"
+* [Fortunes](app.py): "/fortunes"
+* [Database Updates](app.py): "/updates?queries=#"
+* [Plaintext](app.py): "/plaintext"
+
+*Replace # with an actual number.*
+
+### Resources
+
+* [weppy Source Code](https://github.com/gi0baro/weppy)
+* [weppy Documentation](http://weppy.org/docs)
+
+### Community
+
+* [weppy (weppy-talk) Google Group](https://groups.google.com/forum/#!forum/weppy-talk)
+
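A quick way to sanity-check the endpoints listed in the README above once the app is running. The sketch below is illustrative only: it assumes the server is bound to port 8080, as the gunicorn_conf.py added further down in this commit does, and the expected response shapes follow from the `serialize` row methods in app.py.

```python
# Illustrative only: exercise the endpoints from the README against a local
# instance. Assumes the app is already running on port 8080 (the bind
# address used by gunicorn_conf.py in this commit).
import json
from urllib.request import urlopen

BASE = "http://127.0.0.1:8080"

def fetch(path):
    with urlopen(BASE + path) as resp:
        return resp.read()

print(json.loads(fetch("/json")))               # {'message': 'Hello, World!'}
print(json.loads(fetch("/db")))                 # {'id': ..., 'randomNumber': ...}
print(json.loads(fetch("/queries?queries=5")))  # list of 5 such objects
print(fetch("/plaintext").decode())             # Hello, World!
```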

+ 139 - 0
frameworks/Python/weppy/app.py

@@ -0,0 +1,139 @@
+import cgi
+import os
+import sys
+from functools import partial
+from random import randint
+from weppy import App, request, response
+from weppy.dal import Model, Field, DAL, rowmethod
+from weppy.tools import service
+
+_is_pypy = hasattr(sys, 'pypy_version_info')
+if sys.version_info[0] == 3:
+    xrange = range
+
+# NOTE on html escaping: weppy normally escapes every character to its ascii
+# html representation. The 'fortunes' test seems not to like that, so we
+# re-define the escaping to cover only the following characters:
+# _html_escape_table = {
+#     "&": "&amp;",
+#     '"': "&quot;",
+#     "'": "&#x27;",
+#     ">": "&gt;",
+#     "<": "&lt;",
+# }
+
+
+def light_html_escape(text):
+    # return "".join(_html_escape_table.get(c, c) for c in text)
+    return cgi.escape(text, True).replace("'", "&#x27;")
+
+
+DBHOSTNAME = os.environ.get('DBHOST', 'localhost')
+
+
+app = App(__name__)
+
+
+class World(Model):
+    tablename = "world"
+    randomnumber = Field('int')
+
+    @rowmethod('serialize')
+    def _serialize(self, row):
+        return {'id': row.id, 'randomNumber': row.randomnumber}
+
+
+class Fortune(Model):
+    tablename = "fortune"
+    message = Field()
+
+    @rowmethod('serialize')
+    def _serialize(self, row):
+        return {'id': row.id, 'message': row.message}
+
+
+app.config.db.adapter = 'postgres3:psycopg2' \
+    if not _is_pypy else 'postgres3:pg8000'
+app.config.db.host = DBHOSTNAME
+app.config.db.user = 'benchmarkdbuser'
+app.config.db.password = 'benchmarkdbpass'
+app.config.db.database = 'hello_world'
+app.config.db.pool_size = 100
+
+db = DAL(app, auto_migrate=False)
+db.define_models(World, Fortune)
+
+
+@app.route()
+@service.json
+def json():
+    return {'message': 'Hello, World!'}
+
+
+@app.route("/db", handlers=[db.handler])
+@service.json
+def get_random_world():
+    return World.get(randint(1, 10000)).serialize()
+
+
+def get_qparam():
+    try:
+        rv = int(request.query_params.queries)
+        if rv < 1:
+            rv = 1
+        if rv > 500:
+            rv = 500
+    except:
+        rv = 1
+    return rv
+
+
+@app.route("/queries", handlers=[db.handler])
+@service.json
+def get_random_worlds():
+    num_queries = get_qparam()
+    worlds = [
+        World.get(randint(1, 10000)).serialize() for _ in xrange(num_queries)]
+    return worlds
+
+
+@app.route(handlers=[db.handler])
+def fortunes():
+    fortunes = Fortune.all().select()
+    fortunes.append(
+        Fortune.new(id=0, message="Additional fortune added at request time."))
+    fortunes.sort(lambda m: m.message)
+    return dict(fortunes=fortunes, escape=light_html_escape)
+
+
+@app.route(handlers=[db.handler])
+@service.json
+def updates():
+    num_queries = get_qparam()
+    worlds = []
+    rp = partial(randint, 1, 10000)
+    ids = [rp() for _ in xrange(num_queries)]
+    ids.sort()  # To avoid deadlock
+    for id in ids:
+        world = World.get(id)
+        world.update_record(randomnumber=rp())
+        worlds.append(world.serialize())
+    return worlds
+
+
+@app.route()
+def plaintext():
+    response.headers["Content-Type"] = "text/plain"
+    return 'Hello, World!'
+
+
+try:
+    import meinheld
+    meinheld.server.set_access_logger(None)
+    meinheld.set_keepalive(120)
+except ImportError:
+    pass
+
+# entry point for debugging
+if __name__ == "__main__":
+    app.run(debug=True)
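The `light_html_escape` helper in app.py only covers the five characters from the commented table, which is what the fortunes test expects. A minimal standalone sketch of the same substitutions: `html.escape(text, quote=True)` from the standard library is used here as a stand-in for the `cgi.escape(text, True).replace("'", "&#x27;")` call in app.py and should produce the same output.

```python
# Standalone sketch of the escaping applied in the fortunes test.
# html.escape(text, quote=True) performs the same five substitutions as
# cgi.escape(text, True).replace("'", "&#x27;") used in app.py.
import html

def light_html_escape(text):
    return html.escape(text, quote=True)

sample = '<script>alert("fortune & cookie");</script>'
print(light_html_escape(sample))
# &lt;script&gt;alert(&quot;fortune &amp; cookie&quot;);&lt;/script&gt;
```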

+ 97 - 0
frameworks/Python/weppy/benchmark_config.json

@@ -0,0 +1,97 @@
+{
+  "framework": "weppy",
+  "tests": [{
+    "default": {
+      "setup_file": "setup_py2",
+      "json_url": "/json",
+      "db_url": "/db",
+      "query_url": "/queries?queries=",
+      "fortune_url": "/fortunes",
+      "update_url": "/updates?queries=",
+      "plaintext_url": "/plaintext",
+      "port": 8080,
+      "approach": "Realistic",
+      "classification": "Fullstack",
+      "database": "Postgres",
+      "framework": "weppy",
+      "language": "Python",
+      "orm": "Full",
+      "platform": "Meinheld",
+      "webserver": "None",
+      "os": "Linux",
+      "database_os": "Linux",
+      "display_name": "weppy-Py2",
+      "notes": "CPython 2.7",
+      "versus": "wsgi"
+    },
+    "py3": {
+      "setup_file": "setup_py3",
+      "json_url": "/json",
+      "db_url": "/db",
+      "query_url": "/queries?queries=",
+      "fortune_url": "/fortunes",
+      "update_url": "/updates?queries=",
+      "plaintext_url": "/plaintext",
+      "port": 8080,
+      "approach": "Realistic",
+      "classification": "Fullstack",
+      "database": "Postgres",
+      "framework": "weppy",
+      "language": "Python",
+      "orm": "Full",
+      "platform": "Meinheld",
+      "webserver": "None",
+      "os": "Linux",
+      "database_os": "Linux",
+      "display_name": "weppy-Py3",
+      "notes": "CPython 3.4",
+      "versus": "wsgi"
+    },
+    "pypy": {
+      "setup_file": "setup_pypy",
+      "json_url": "/json",
+      "db_url": "/db",
+      "query_url": "/queries?queries=",
+      "fortune_url": "/fortunes",
+      "update_url": "/updates?queries=",
+      "plaintext_url": "/plaintext",
+      "port": 8080,
+      "approach": "Realistic",
+      "classification": "Fullstack",
+      "database": "Postgres",
+      "framework": "weppy",
+      "language": "Python",
+      "orm": "Full",
+      "platform": "Tornado",
+      "webserver": "None",
+      "os": "Linux",
+      "database_os": "Linux",
+      "display_name": "weppy-PyPy",
+      "notes": "PyPy 2.5",
+      "versus": "wsgi"
+    },
+    "nginx-uwsgi": {
+      "setup_file": "setup_nginxuwsgi",
+      "json_url": "/json",
+      "db_url": "/db",
+      "query_url": "/queries?queries=",
+      "fortune_url": "/fortunes",
+      "update_url": "/updates?queries=",
+      "plaintext_url": "/plaintext",
+      "port": 8080,
+      "approach": "Realistic",
+      "classification": "Fullstack",
+      "database": "Postgres",
+      "framework": "weppy",
+      "language": "Python",
+      "orm": "Full",
+      "platform": "uWSGI",
+      "webserver": "nginx",
+      "os": "Linux",
+      "database_os": "Linux",
+      "display_name": "weppy-nginx-uWSGI",
+      "notes": "CPython 2.7",
+      "versus": "wsgi"
+    }
+  }]
+}
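The config above declares four variants of the same app that differ mainly in setup script, serving platform, and interpreter. A small illustrative sketch to list those differences, assuming benchmark_config.json is read from the working directory:

```python
# List the test variants declared in benchmark_config.json and the fields
# in which they differ. Assumes the file sits in the working directory.
import json

with open("benchmark_config.json") as f:
    config = json.load(f)

for name, test in config["tests"][0].items():
    print("{:<12} setup_file={:<16} platform={:<9} {}".format(
        name, test["setup_file"], test["platform"], test["notes"]))
# default      setup_file=setup_py2        platform=Meinheld  CPython 2.7
# py3          setup_file=setup_py3        platform=Meinheld  CPython 3.4
# pypy         setup_file=setup_pypy       platform=Tornado   PyPy 2.5
# nginx-uwsgi  setup_file=setup_nginxuwsgi platform=uWSGI     CPython 2.7
```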

+ 26 - 0
frameworks/Python/weppy/gunicorn_conf.py

@@ -0,0 +1,26 @@
+import multiprocessing
+import os
+import sys
+
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_travis = os.environ.get('TRAVIS') == 'true'
+
+workers = multiprocessing.cpu_count() * 3
+if _is_travis:
+    workers = 2
+
+bind = "0.0.0.0:8080"
+keepalive = 120
+errorlog = '-'
+pidfile = 'gunicorn.pid'
+
+if _is_pypy:
+    worker_class = "tornado"
+else:
+    worker_class = "meinheld.gmeinheld.MeinheldWorker"
+
+    def post_fork(server, worker):
+        # Disable access log.
+        # (Until https://github.com/mopemope/meinheld/pull/42 is released)
+        import meinheld.server
+        meinheld.server.set_access_logger(None)
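Since the gunicorn config above chooses the worker class at import time based on the interpreter, a quick way to see what a given machine would use is simply to import it. Illustrative only; assumes gunicorn_conf.py is on the import path.

```python
# Inspect the values gunicorn would pick up from gunicorn_conf.py on this
# interpreter. Importing the module is enough, since everything is computed
# at module level.
import gunicorn_conf

print("workers:     ", gunicorn_conf.workers)       # cpu_count() * 3, or 2 on Travis
print("worker_class:", gunicorn_conf.worker_class)  # "tornado" on PyPy, the Meinheld worker otherwise
print("bind:        ", gunicorn_conf.bind)          # "0.0.0.0:8080"
```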

+ 48 - 0
frameworks/Python/weppy/nginx.conf

@@ -0,0 +1,48 @@
+# This file is based on /usr/local/nginx/conf/nginx.conf.default.
+
+# One worker process per core
+error_log stderr error;
+
+events {
+    # This needed to be increased because the nginx error log said so.
+    # http://nginx.org/en/docs/ngx_core_module.html#worker_connections
+    worker_connections  65535;
+    multi_accept on;
+}
+
+http {
+    default_type  application/octet-stream;
+    client_body_temp_path      /tmp;
+
+    # turn off request logging for performance
+    access_log off;
+
+    # I think these options only affect static file serving
+    sendfile        on;
+    tcp_nopush      on;
+
+    # Allow many HTTP Keep-Alive requests in a single TCP connection before
+    # closing it (the default is 100). This will minimize the total number
+    # of TCP connections opened/closed. The problem is that this may cause
+    # some worker processes to be handling too many connections relative to the
+    # other workers based on an initial imbalance, so this is disabled for
+    # now.
+#    keepalive_requests 1000;
+
+    #keepalive_timeout  0;
+    keepalive_timeout  65;
+
+    server {
+        # For information on deferred, see:
+        # http://nginx.org/en/docs/http/ngx_http_core_module.html#listen
+        # http://www.techrepublic.com/article/take-advantage-of-tcp-ip-options-to-optimize-data-transmission/
+        # The backlog argument to listen() is set to match net.ipv4.tcp_max_syn_backlog and net.core.somaxconn
+        listen       8080 default_server deferred backlog=65535;
+        server_name  localhost;
+
+        location / {
+            uwsgi_pass unix:/tmp/uwsgi.sock;
+            include /usr/local/nginx/conf/uwsgi_params;
+        }
+    }
+}

+ 4 - 0
frameworks/Python/weppy/requirements-pypy.txt

@@ -0,0 +1,4 @@
+pg8000==1.10.6
+weppy==0.7.9
+gunicorn==19.4.5
+tornado==4.3

+ 6 - 0
frameworks/Python/weppy/requirements.txt

@@ -0,0 +1,6 @@
+psycopg2==2.6.1
+weppy==0.7.9
+gunicorn==19.4.5
+meinheld==0.5.9
+uwsgi==2.0.12
+greenlet==0.4.9

+ 11 - 0
frameworks/Python/weppy/setup_nginxuwsgi.sh

@@ -0,0 +1,11 @@
+#!/bin/bash
+
+fw_depends python2 nginx
+
+sed -i 's|include .*/conf/uwsgi_params;|include '"${NGINX_HOME}"'/conf/uwsgi_params;|g' nginx.conf
+
+pip install --install-option="--prefix=${PY2_ROOT}" -r $TROOT/requirements.txt
+
+nginx -c $TROOT/nginx.conf
+
+uwsgi --ini $TROOT/uwsgi.ini --processes $MAX_THREADS --wsgi app:app &

+ 7 - 0
frameworks/Python/weppy/setup_py2.sh

@@ -0,0 +1,7 @@
+#!/bin/bash
+
+fw_depends python2
+
+pip install --install-option="--prefix=${PY2_ROOT}" -r $TROOT/requirements.txt
+
+gunicorn app:app -c gunicorn_conf.py &

+ 7 - 0
frameworks/Python/weppy/setup_py3.sh

@@ -0,0 +1,7 @@
+#!/bin/bash
+
+fw_depends python3
+
+pip3 install --install-option="--prefix=${PY3_ROOT}" -r $TROOT/requirements.txt
+
+gunicorn app:app -c gunicorn_conf.py &

+ 7 - 0
frameworks/Python/weppy/setup_pypy.sh

@@ -0,0 +1,7 @@
+#!/bin/bash
+
+fw_depends pypy
+
+pip install --install-option="--prefix=${PYPY_ROOT}" -r $TROOT/requirements-pypy.txt
+
+gunicorn app:app -c gunicorn_conf.py &

+ 20 - 0
frameworks/Python/weppy/templates/fortunes.html

@@ -0,0 +1,20 @@
+<!DOCTYPE html>
+<html>
+    <head>
+        <title>Fortunes</title>
+    </head>
+    <body>
+        <table>
+            <tr>
+                <th>id</th>
+                <th>message</th>
+            </tr>
+            {{for fortune in fortunes:}}
+            <tr>
+                <td>{{=fortune.id}}</td>
+                <td>{{=asis(escape(fortune.message))}}</td>
+            </tr>
+            {{pass}}
+        </table>
+    </body>
+</html>

+ 19 - 0
frameworks/Python/weppy/uwsgi.ini

@@ -0,0 +1,19 @@
+[uwsgi]
+master
+; Increase listen queue used for nginx connecting to uWSGI. This matches
+; net.ipv4.tcp_max_syn_backlog and net.core.somaxconn.
+listen = 128
+; for performance
+disable-logging
+; use UNIX sockets instead of TCP loopback for performance
+socket = /tmp/uwsgi.sock
+; allow nginx to access the UNIX socket
+chmod-socket = 666
+; Avoid thundering herd problem http://uwsgi-docs.readthedocs.org/en/latest/articles/SerializingAccept.html .
+; This is currently disabled because when I tried it with flask, it caused a
+; 20% performance hit. The CPU cores could not be saturated with thunder-lock.
+; I'm not yet sure of the full story, so this is presently disabled. Also,
+; disabling this caused bottle to get ~13% faster.
+;thunder-lock
+; used by uwsgi_stop.ini
+pidfile = /tmp/uwsgi.pid