Browse Source

Remove old Weppy framework (#9104)

Weppy was renamed to Emmett which already is tested.
See:
- https://github.com/emmett-framework/emmett/commit/11fe3cac4838c6f1b79339b50a0bcf3557a17069
- https://github.com/gi0baro/weppy (weppy redirects to emmett)
Petrik de Heus 1 year ago
parent
commit
da0744cb25

+ 0 - 33
frameworks/Python/weppy/README.md

@@ -1,33 +0,0 @@
-# weppy Benchmark Test (ported from Flask example)
-
-This is the weppy portion of a [benchmarking tests suite](../../) 
-comparing a variety of web development platforms.
-
-The information below is specific to weppy. For further guidance, 
-review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). 
-Also note that there is additional information provided in 
-the [Python README](../).
-
-[weppy](http://weppy.org/) is a fullstack Python 2/3 web framework.
-This test uses the included ORM and templating system, and gunicorn+meinheld for the application server on CPython and Tornado on pypy.
-
-## Test Paths & Source
-
-* [JSON Serialization](app.py): "/json"
-* [Single Database Query](app.py): "/db"
-* [Multiple Database Queries](app.py): "queries?queries=#"
-* [Fortunes](app.py): "/fortunes"
-* [Database Updates](app.py): "updates?queries=#"
-* [Plaintext](app.py): "/plaintext"
-
-*Replace # with an actual number.
-
-### Resources
-
-* [weppy Source Code](https://github.com/gi0baro/weppy)
-* [weppy Documentation](http://weppy.org/docs)
-
-### Community
-
-* [weppy (weppy-talk) Google Group](https://groups.google.com/forum/#!forum/weppy-talk)
-

+ 0 - 128
frameworks/Python/weppy/app.py

@@ -1,128 +0,0 @@
-import sys
-from functools import partial
-from random import randint
-from weppy import App, Pipe, request, response
-from weppy.orm import Database, Model, Field, rowmethod
-from weppy.tools import service
-from email.utils import formatdate
-
-_is_pypy = hasattr(sys, 'pypy_version_info')
-if sys.version_info[0] == 3:
-    xrange = range
-
-DBHOSTNAME = 'tfb-database'
-
-app = App(__name__)
-
-
-class World(Model):
-    tablename = "world"
-    randomnumber = Field.int()
-
-    @rowmethod('serialize')
-    def _serialize(self, row):
-        return {'id': row.id, 'randomNumber': row.randomnumber}
-
-
-class Fortune(Model):
-    tablename = "fortune"
-    message = Field.string()
-
-    @rowmethod('serialize')
-    def _serialize(self, row):
-        return {'id': row.id, 'message': row.message}
-
-
-class DateHeaderPipe(Pipe):
-    def open(self):
-        response.headers["Date"] = formatdate(timeval=None, localtime=False, usegmt=True)
-
-
-app.config.handle_static = False
-app.config.db.adapter = 'postgres:psycopg2' \
-    if not _is_pypy else 'postgres:pg8000'
-app.config.db.host = DBHOSTNAME
-app.config.db.user = 'benchmarkdbuser'
-app.config.db.password = 'benchmarkdbpass'
-app.config.db.database = 'hello_world'
-app.config.db.pool_size = 100
-
-app.pipeline = [DateHeaderPipe()]
-
-db = Database(app, auto_migrate=False)
-db.define_models(World, Fortune)
-
-
[email protected]()
[email protected]
-def json():
-    return {'message': 'Hello, World!'}
-
-
[email protected]("/db", pipeline=[db.pipe])
[email protected]
-def get_random_world():
-    return World.get(randint(1, 10000)).serialize()
-
-
-def get_qparam():
-    try:
-        rv = int(request.query_params.queries)
-        if rv < 1:
-            rv = 1
-        if rv > 500:
-            rv = 500
-    except:
-        rv = 1
-    return rv
-
-
[email protected]("/queries", pipeline=[db.pipe])
[email protected]
-def get_random_worlds():
-    num_queries = get_qparam()
-    worlds = [
-        World.get(randint(1, 10000)).serialize() for _ in xrange(num_queries)]
-    return worlds
-
-
[email protected](pipeline=[db.pipe])
-def fortunes():
-    fortunes = Fortune.all().select()
-    fortunes.append(
-        Fortune.new(id=0, message="Additional fortune added at request time."))
-    fortunes.sort(lambda m: m.message)
-    return {'fortunes': fortunes}
-
-
[email protected](pipeline=[db.pipe])
[email protected]
-def updates():
-    num_queries = get_qparam()
-    worlds = []
-    rp = partial(randint, 1, 10000)
-    ids = [rp() for _ in xrange(num_queries)]
-    ids.sort()  # To avoid deadlock
-    for id in ids:
-        world = World.get(id)
-        world.update_record(randomnumber=rp())
-        worlds.append(world.serialize())
-    return worlds
-
-
[email protected]()
-def plaintext():
-    response.headers["Content-Type"] = "text/plain"
-    return 'Hello, World!'
-
-
-try:
-    import meinheld
-    meinheld.server.set_access_logger(None)
-    meinheld.set_keepalive(120)
-except ImportError:
-    pass
-
-# entry point for debugging
-if __name__ == "__main__":
-    app.run(debug=True)

+ 0 - 97
frameworks/Python/weppy/benchmark_config.json

@@ -1,97 +0,0 @@
-{
-  "framework": "weppy",
-  "tests": [{
-    "default": {
-      "json_url": "/json",
-      "db_url": "/db",
-      "query_url": "/queries?queries=",
-      "fortune_url": "/fortunes",
-      "update_url": "/updates?queries=",
-      "plaintext_url": "/plaintext",
-      "port": 8080,
-      "approach": "Realistic",
-      "classification": "Fullstack",
-      "database": "Postgres",
-      "framework": "weppy",
-      "language": "Python",
-      "orm": "Full",
-      "platform": "Meinheld",
-      "webserver": "None",
-      "os": "Linux",
-      "database_os": "Linux",
-      "display_name": "weppy-Py2",
-      "notes": "CPython 2.7",
-      "versus": "wsgi",
-      "tags": ["broken"]
-    },
-    "py3": {
-      "json_url": "/json",
-      "db_url": "/db",
-      "query_url": "/queries?queries=",
-      "fortune_url": "/fortunes",
-      "update_url": "/updates?queries=",
-      "plaintext_url": "/plaintext",
-      "port": 8080,
-      "approach": "Realistic",
-      "classification": "Fullstack",
-      "database": "Postgres",
-      "framework": "weppy",
-      "language": "Python",
-      "orm": "Full",
-      "platform": "Meinheld",
-      "webserver": "None",
-      "os": "Linux",
-      "database_os": "Linux",
-      "display_name": "weppy-Py3",
-      "notes": "CPython 3.6",
-      "versus": "wsgi"
-    },
-    "pypy2": {
-      "json_url": "/json",
-      "db_url": "/db",
-      "query_url": "/queries?queries=",
-      "fortune_url": "/fortunes",
-      "update_url": "/updates?queries=",
-      "plaintext_url": "/plaintext",
-      "port": 8080,
-      "approach": "Realistic",
-      "classification": "Fullstack",
-      "database": "Postgres",
-      "framework": "weppy",
-      "language": "Python",
-      "flavor": "PyPy2",
-      "orm": "Full",
-      "platform": "Tornado",
-      "webserver": "None",
-      "os": "Linux",
-      "database_os": "Linux",
-      "display_name": "weppy-PyPy2",
-      "notes": "PyPy2",
-      "versus": "wsgi",
-      "tags": ["broken"]
-    },
-    "nginx-uwsgi": {
-      "json_url": "/json",
-      "db_url": "/db",
-      "query_url": "/queries?queries=",
-      "fortune_url": "/fortunes",
-      "update_url": "/updates?queries=",
-      "plaintext_url": "/plaintext",
-      "port": 8080,
-      "approach": "Realistic",
-      "classification": "Fullstack",
-      "database": "Postgres",
-      "framework": "weppy",
-      "language": "Python",
-      "orm": "Full",
-      "platform": "uWSGI",
-      "webserver": "nginx",
-      "os": "Linux",
-      "database_os": "Linux",
-      "display_name": "weppy-nginx-uWSGI",
-      "notes": "CPython 3.6",
-      "versus": "wsgi",
-      "tags": ["broken"]
-    }
-  }]
-}

+ 0 - 70
frameworks/Python/weppy/config.toml

@@ -1,70 +0,0 @@
-[framework]
-name = "weppy"
-
-[main]
-urls.plaintext = "/plaintext"
-urls.json = "/json"
-urls.db = "/db"
-urls.query = "/queries?queries="
-urls.update = "/updates?queries="
-urls.fortune = "/fortunes"
-approach = "Realistic"
-classification = "Fullstack"
-database = "Postgres"
-database_os = "Linux"
-os = "Linux"
-orm = "Full"
-platform = "Meinheld"
-webserver = "None"
-versus = "wsgi"
-
-[py3]
-urls.plaintext = "/plaintext"
-urls.json = "/json"
-urls.db = "/db"
-urls.query = "/queries?queries="
-urls.update = "/updates?queries="
-urls.fortune = "/fortunes"
-approach = "Realistic"
-classification = "Fullstack"
-database = "Postgres"
-database_os = "Linux"
-os = "Linux"
-orm = "Full"
-platform = "Meinheld"
-webserver = "None"
-versus = "wsgi"
-
-[nginx-uwsgi]
-urls.plaintext = "/plaintext"
-urls.json = "/json"
-urls.db = "/db"
-urls.query = "/queries?queries="
-urls.update = "/updates?queries="
-urls.fortune = "/fortunes"
-approach = "Realistic"
-classification = "Fullstack"
-database = "Postgres"
-database_os = "Linux"
-os = "Linux"
-orm = "Full"
-platform = "uWSGI"
-webserver = "nginx"
-versus = "wsgi"
-
-[pypy2]
-urls.plaintext = "/plaintext"
-urls.json = "/json"
-urls.db = "/db"
-urls.query = "/queries?queries="
-urls.update = "/updates?queries="
-urls.fortune = "/fortunes"
-approach = "Realistic"
-classification = "Fullstack"
-database = "Postgres"
-database_os = "Linux"
-os = "Linux"
-orm = "Full"
-platform = "Tornado"
-webserver = "None"
-versus = "wsgi"

+ 0 - 26
frameworks/Python/weppy/gunicorn_conf.py

@@ -1,26 +0,0 @@
-import multiprocessing
-import os
-import sys
-
-_is_pypy = hasattr(sys, 'pypy_version_info')
-_is_travis = os.environ.get('TRAVIS') == 'true'
-
-workers = multiprocessing.cpu_count() * 3
-if _is_travis:
-    workers = 2
-
-bind = "0.0.0.0:8080"
-keepalive = 120
-errorlog = '-'
-pidfile = 'gunicorn.pid'
-
-if _is_pypy:
-    worker_class = "tornado"
-else:
-    worker_class = "meinheld.gmeinheld.MeinheldWorker"
-
-    def post_fork(server, worker):
-        # Disable access log.
-        # (Until https://github.com/mopemope/meinheld/pull/42 is released)
-        import meinheld.server
-        meinheld.server.set_access_logger(None)

+ 0 - 48
frameworks/Python/weppy/nginx.conf

@@ -1,48 +0,0 @@
-# This file is based on /usr/local/nginx/conf/nginx.conf.default.
-
-# One worker process per core
-error_log stderr error;
-
-events {
-    # This needed to be increased because the nginx error log said so.
-    # http://nginx.org/en/docs/ngx_core_module.html#worker_connections
-    worker_connections  65535;
-    multi_accept on;
-}
-
-http {
-    default_type  application/octet-stream;
-    client_body_temp_path      /tmp;
-
-    # turn off request logging for performance
-    access_log off;
-
-    # I think these options only affect static file serving
-    sendfile        on;
-    tcp_nopush      on;
-
-    # Allow many HTTP Keep-Alive requests in a single TCP connection before
-    # closing it (the default is 100). This will minimize the total number
-    # of TCP connections opened/closed. The problem is that this may cause
-    # some worker processes to be handling too many connections relative to the
-    # other workers based on an initial imbalance, so this is disabled for
-    # now.
-#    keepalive_requests 1000;
-
-    #keepalive_timeout  0;
-    keepalive_timeout  65;
-
-    server {
-        # For information on deferred, see:
-        # http://nginx.org/en/docs/http/ngx_http_core_module.html#listen
-        # http://www.techrepublic.com/article/take-advantage-of-tcp-ip-options-to-optimize-data-transmission/
-        # The backlog argument to listen() is set to match net.ipv4.tcp_max_syn_backlog and net.core.somaxconn
-        listen       8080 default_server deferred reuseport backlog=65535;
-        server_name  localhost;
-
-        location / {
-            uwsgi_pass unix:/var/tmp/uwsgi.sock;
-            include /usr/local/nginx/conf/uwsgi_params;
-        }
-    }
-}

+ 0 - 4
frameworks/Python/weppy/requirements-pypy.txt

@@ -1,4 +0,0 @@
-pg8000==1.10.6
-weppy==1.3
-gunicorn==19.9.0
-tornado==6.3.3

+ 0 - 6
frameworks/Python/weppy/requirements.txt

@@ -1,6 +0,0 @@
-psycopg2==2.7.5
-weppy==1.3
-gunicorn==19.9.0
-meinheld==1.0.2
-uwsgi==2.0.22
-greenlet==0.4.14

+ 0 - 20
frameworks/Python/weppy/templates/fortunes.html

@@ -1,20 +0,0 @@
-<!DOCTYPE html>
-<html>
-    <head>
-        <title>Fortunes</title>
-    </head>
-    <body>
-        <table>
-            <tr>
-                <th>id</th>
-                <th>message</th>
-            </tr>
-            {{for fortune in fortunes:}}
-            <tr>
-                <td>{{=fortune.id}}</td>
-                <td>{{=fortune.message}}</td>
-            </tr>
-            {{pass}}
-        </table>
-    </body>
-</html>

+ 0 - 19
frameworks/Python/weppy/uwsgi.ini

@@ -1,19 +0,0 @@
-[uwsgi]
-master
-; Increase listen queue used for nginx connecting to uWSGI. This matches
-; net.ipv4.tcp_max_syn_backlog and net.core.somaxconn.
-listen = 16384
-; for performance
-disable-logging
-; use UNIX sockets instead of TCP loopback for performance
-socket = /var/tmp/uwsgi.sock
-; allow nginx to access the UNIX socket
-chmod-socket = 666
-; Avoid thundering herd problem http://uwsgi-docs.readthedocs.org/en/latest/articles/SerializingAccept.html .
-; This is currently disabled because when I tried it with flask, it caused a
-; 20% performance hit. The CPU cores could not be saturated with thunder-lock.
-; I'm not yet sure the full story, so this is presently disabled. Also,
-; disabling this caused bottle to get ~13% faster.
-;thunder-lock
-; used by uwsgi_stop.ini
-pidfile = /var/tmp/uwsgi.pid

+ 0 - 19
frameworks/Python/weppy/weppy-nginx-uwsgi.dockerfile

@@ -1,19 +0,0 @@
-FROM python:3.6.6-stretch
-
-RUN curl -s http://nginx.org/keys/nginx_signing.key | apt-key add -
-RUN echo "deb http://nginx.org/packages/debian/ stretch nginx" >> /etc/apt/sources.list
-RUN echo "deb-src http://nginx.org/packages/debian/ stretch nginx" >> /etc/apt/sources.list
-
-RUN apt-get update -yqq && apt-get install -yqq nginx
-
-ADD ./ /weppy
-
-WORKDIR /weppy
-
-RUN pip install -r /weppy/requirements.txt
-
-RUN sed -i 's|include .*/conf/uwsgi_params;|include /etc/nginx/uwsgi_params;|g' /weppy/nginx.conf
-
-EXPOSE 8080
-
-CMD nginx -c /weppy/nginx.conf && uwsgi --ini /weppy/uwsgi.ini --processes $(nproc) --wsgi app:app

+ 0 - 11
frameworks/Python/weppy/weppy-py3.dockerfile

@@ -1,11 +0,0 @@
-FROM python:3.6.6-stretch
-
-ADD ./ /weppy
-
-WORKDIR /weppy
-
-RUN pip3 install -r /weppy/requirements.txt
-
-EXPOSE 8080
-
-CMD gunicorn app:app -c gunicorn_conf.py

+ 0 - 11
frameworks/Python/weppy/weppy-pypy2.dockerfile

@@ -1,11 +0,0 @@
-FROM pypy:2-5.10
-
-ADD ./ /weppy
-
-WORKDIR /weppy
-
-RUN pip install -r /weppy/requirements-pypy.txt
-
-EXPOSE 8080
-
-CMD gunicorn app:app -c gunicorn_conf.py

+ 0 - 11
frameworks/Python/weppy/weppy.dockerfile

@@ -1,11 +0,0 @@
-FROM python:2.7.15-stretch
-
-ADD ./ /weppy
-
-WORKDIR /weppy
-
-RUN pip install -r /weppy/requirements.txt
-
-EXPOSE 8080
-
-CMD gunicorn app:app -c gunicorn_conf.py