diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f7c8b403346..bd513625f73 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -154,6 +154,9 @@ jobs: dependabot: needs: verify runs-on: ubuntu-latest + permissions: + pull-requests: write + contents: write if: ${{ github.actor == 'dependabot[bot]' }} steps: - name: Dependabot metadata diff --git a/README.md b/README.md index a630313260a..345cbe50aa1 100644 --- a/README.md +++ b/README.md @@ -89,7 +89,8 @@ Results of continuous benchmarking runs are available in real time [here](https: ### Data Visualization -If you have a `results.json` file that you would like to visualize, you can [do that here](https://tfb-status.techempower.com/share). You can also attach a `runid` parameter to that url where `runid` is a run listed on [tfb-status](https://tfb-status.techempower.com) like so: https://www.techempower.com/benchmarks/#section=test&runid=fd07b64e-47ce-411e-8b9b-b13368e988c6 +If you have a `results.json` file that you would like to visualize, you can [do that here](https://tfb-status.techempower.com/share). You can also attach a `runid` parameter to that URL, where `runid` is a run listed on [tfb-status](https://tfb-status.techempower.com), like so: https://www.techempower.com/benchmarks/#section=test&runid=fd07b64e-47ce-411e-8b9b-b13368e988c6. +If you want to visualize results or compare different results files from the command line, there is an unofficial [plaintext results parser](https://github.com/joeyleeeeeee97/PlainTextResultsParser). ## Contributing diff --git a/frameworks/C++/drogon/drogon-core.dockerfile b/frameworks/C++/drogon/drogon-core.dockerfile index 2d68723d1b6..7d02dc6637f 100644 --- a/frameworks/C++/drogon/drogon-core.dockerfile +++ b/frameworks/C++/drogon/drogon-core.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:18.04 +FROM ubuntu:20.04 COPY ./ ./ @@ -11,7 +11,7 @@ RUN apt-get update -yqq && \ zlib1g-dev && \ add-apt-repository ppa:ubuntu-toolchain-r/test -y && \ apt-get update -yqq && \ - apt-get install -yqq gcc-8 g++-8 + apt-get install -yqq gcc-10 g++-10 RUN locale-gen en_US.UTF-8 @@ -19,10 +19,10 @@ ENV LANG en_US.UTF-8 ENV LANGUAGE en_US:en ENV LC_ALL en_US.UTF-8 -ENV CC=gcc-8 -ENV CXX=g++-8 -ENV AR=gcc-ar-8 -ENV RANLIB=gcc-ranlib-8 +ENV CC=gcc-10 +ENV CXX=g++-10 +ENV AR=gcc-ar-10 +ENV RANLIB=gcc-ranlib-10 ENV IROOT=/install ENV DROGON_ROOT=$IROOT/drogon @@ -32,26 +32,22 @@ ENV TEST_PATH=/drogon_benchmark/build WORKDIR $IROOT -RUN wget https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz -RUN tar -xvzf batch_mode_ubuntu.tar.gz -WORKDIR $PG_ROOT - -RUN ./configure --prefix=/usr CFLAGS='-O2 -pipe' -RUN make && make install - -WORKDIR $IROOT +RUN sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - +RUN sudo apt -y update +RUN sudo apt -y install postgresql-server-dev-all RUN git clone https://github.com/an-tao/drogon WORKDIR $DROGON_ROOT -RUN git checkout fb17efe765d162be01680b05a3a387c7a182a4c5 +RUN git checkout ebf87d69d7bb45dfa478ba364ef9374d9be25092 RUN git submodule update --init RUN mkdir build WORKDIR $DROGON_ROOT/build -RUN cmake -DCMAKE_BUILD_TYPE=release .. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto .. 
RUN make && make install WORKDIR $IROOT @@ -62,12 +58,12 @@ WORKDIR $MIMALLOC_ROOT RUN git checkout v1.6.7 -b v1.6.7 RUN mkdir -p out/release WORKDIR $MIMALLOC_ROOT/out/release -RUN cmake ../.. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto ../.. RUN make && make install WORKDIR $TEST_PATH -RUN cmake -DCMAKE_BUILD_TYPE=release .. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto .. RUN make EXPOSE 8080 diff --git a/frameworks/C++/drogon/drogon.dockerfile b/frameworks/C++/drogon/drogon.dockerfile index 7dcdf769f41..c94d2f43525 100644 --- a/frameworks/C++/drogon/drogon.dockerfile +++ b/frameworks/C++/drogon/drogon.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:18.04 +FROM ubuntu:20.04 COPY ./ ./ @@ -11,7 +11,7 @@ RUN apt-get update -yqq && \ zlib1g-dev && \ add-apt-repository ppa:ubuntu-toolchain-r/test -y && \ apt-get update -yqq && \ - apt-get install -yqq gcc-8 g++-8 + apt-get install -yqq gcc-10 g++-10 RUN locale-gen en_US.UTF-8 @@ -19,10 +19,10 @@ ENV LANG en_US.UTF-8 ENV LANGUAGE en_US:en ENV LC_ALL en_US.UTF-8 -ENV CC=gcc-8 -ENV CXX=g++-8 -ENV AR=gcc-ar-8 -ENV RANLIB=gcc-ranlib-8 +ENV CC=gcc-10 +ENV CXX=g++-10 +ENV AR=gcc-ar-10 +ENV RANLIB=gcc-ranlib-10 ENV IROOT=/install ENV DROGON_ROOT=$IROOT/drogon @@ -32,26 +32,22 @@ ENV TEST_PATH=/drogon_benchmark/build WORKDIR $IROOT -RUN wget https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz -RUN tar -xvzf batch_mode_ubuntu.tar.gz -WORKDIR $PG_ROOT - -RUN ./configure --prefix=/usr CFLAGS='-O2 -pipe' -RUN make && make install - -WORKDIR $IROOT +RUN sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - +RUN sudo apt -y update +RUN sudo apt -y install postgresql-server-dev-all RUN git clone https://github.com/an-tao/drogon WORKDIR $DROGON_ROOT -RUN git checkout fb17efe765d162be01680b05a3a387c7a182a4c5 +RUN git checkout ebf87d69d7bb45dfa478ba364ef9374d9be25092 RUN git submodule update --init RUN mkdir build WORKDIR $DROGON_ROOT/build -RUN cmake -DCMAKE_BUILD_TYPE=release .. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto .. RUN make && make install WORKDIR $IROOT @@ -62,12 +58,12 @@ WORKDIR $MIMALLOC_ROOT RUN git checkout v1.6.7 -b v1.6.7 RUN mkdir -p out/release WORKDIR $MIMALLOC_ROOT/out/release -RUN cmake ../.. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto ../.. RUN make && make install WORKDIR $TEST_PATH -RUN cmake -DCMAKE_BUILD_TYPE=release .. +RUN cmake -DCMAKE_BUILD_TYPE=release -DCMAKE_CXX_FLAGS=-flto .. RUN make EXPOSE 8080 diff --git a/frameworks/C++/drogon/drogon_benchmark/config-core.json b/frameworks/C++/drogon/drogon_benchmark/config-core.json index b199ea8622e..8dc98ee005f 100644 --- a/frameworks/C++/drogon/drogon_benchmark/config-core.json +++ b/frameworks/C++/drogon/drogon_benchmark/config-core.json @@ -34,7 +34,8 @@ //any synchronous interface of it. 
"is_fast": true, //connection_number:1 by default - "connection_number": 1 + "connection_number": 1, + "auto_batch": false }], "app": { //threads_num:the number of IO threads,1 by default, if the value is set to 0, the number of threads diff --git a/frameworks/C++/drogon/drogon_benchmark/config.json b/frameworks/C++/drogon/drogon_benchmark/config.json index dfff821be9a..40f7fd0a66b 100644 --- a/frameworks/C++/drogon/drogon_benchmark/config.json +++ b/frameworks/C++/drogon/drogon_benchmark/config.json @@ -34,7 +34,8 @@ //any synchronous interface of it. "is_fast": true, //connection_number:1 by default - "connection_number": 1 + "connection_number": 1, + "auto_batch": false }], "app": { //threads_num:the number of IO threads,1 by default, if the value is set to 0, the number of threads @@ -185,4 +186,4 @@ }], //custom_config: custom configuration for users. This object can be get by the app().getCustomConfig() method. "custom_config": {} -} \ No newline at end of file +} diff --git a/frameworks/C++/drogon/drogon_benchmark/plugins/SyncPlugin.cc b/frameworks/C++/drogon/drogon_benchmark/plugins/SyncPlugin.cc index a774eefffd3..4f00fa7cb88 100644 --- a/frameworks/C++/drogon/drogon_benchmark/plugins/SyncPlugin.cc +++ b/frameworks/C++/drogon/drogon_benchmark/plugins/SyncPlugin.cc @@ -34,8 +34,7 @@ void SyncPlugin::initAndStart(const Json::Value &config) { auto resp = HttpResponse::newHttpResponse(); resp->setBody("Hello, World!"); - resp->setContentTypeCodeAndCustomString( - CT_TEXT_PLAIN, "Content-Type: text/plain\r\n"); + resp->setContentTypeCode(CT_TEXT_PLAIN); return resp; } break; diff --git a/frameworks/C++/ffead-cpp/benchmark_config.json b/frameworks/C++/ffead-cpp/benchmark_config.json index 7e4a96798e8..5a5e577b17c 100644 --- a/frameworks/C++/ffead-cpp/benchmark_config.json +++ b/frameworks/C++/ffead-cpp/benchmark_config.json @@ -95,7 +95,7 @@ "display_name": "ffead-cpp [v-prof-b]", "notes": "", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw-profiled": { "db_url": "/t3/d", @@ -137,7 +137,7 @@ "display_name": "ffead-cpp [pg-raw-prof-b]", "notes": "memory libpq batch patch profiled", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw-async-profiled": { "db_url": "/t4/d", @@ -219,7 +219,7 @@ "display_name": "ffead-cpp [pg-raw-async-prof-b]", "notes": "async memory libpq batch patch profiled", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw-async-clibpqb-pool-profiled": { "db_url": "/t4/d", @@ -240,7 +240,7 @@ "display_name": "ffead-cpp [pg-raw-async-prof-b-pool]", "notes": "async memory libpq batch patch profiled", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw-async-qw-profiled": { "db_url": "/t5/d", @@ -282,7 +282,7 @@ "display_name": "ffead-cpp [pg-raw-async-qw-prof-b]", "notes": "async memory libpq batch patch profiled", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw-async-qw-pool-profiled-m": { "query_url": "/t5/quem?queries=", @@ -304,4 +304,4 @@ "tags": [] } }] -} \ No newline at end of file +} diff --git a/frameworks/C++/lithium/benchmark_config.json b/frameworks/C++/lithium/benchmark_config.json index 78ccbe25ac2..c1626ff791b 100755 --- a/frameworks/C++/lithium/benchmark_config.json +++ b/frameworks/C++/lithium/benchmark_config.json @@ -73,51 +73,7 @@ "display_name": "Lithium-postgres-beta", "notes": "", "versus": "None" - }, - "postgres-batch": { - "db_url" : "/db", - "query_url" : "/queries?N=", - "fortune_url" : "/fortunes", - "update_url" : "/updates?N=", - "port": 8080, - "approach": 
"Realistic", - "classification": "Micro", - "database": "Postgres", - "framework": "Lithium", - "language": "C++", - "flavor": "None", - "orm": "Full", - "platform": "None", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "Lithium-postgres-batch", - "notes": "", - "versus": "None" - }, - - "postgres-batch-beta": { - "db_url" : "/db", - "query_url" : "/queries?N=", - "fortune_url" : "/fortunes", - "update_url" : "/updates?N=", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "Postgres", - "framework": "Lithium", - "language": "C++", - "flavor": "None", - "orm": "Full", - "platform": "None", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "Lithium-postgres-batch-beta", - "notes": "", - "versus": "None" } - } ] } diff --git a/frameworks/C++/poco/poco.dockerfile b/frameworks/C++/poco/poco.dockerfile index 99f760cf44f..1f592e48872 100644 --- a/frameworks/C++/poco/poco.dockerfile +++ b/frameworks/C++/poco/poco.dockerfile @@ -9,7 +9,9 @@ ENV POCO_VERSION 1.6.1 ENV POCO_HOME /poco WORKDIR ${POCO_HOME} -RUN curl -sL http://pocoproject.org/releases/poco-${POCO_VERSION}/poco-${POCO_VERSION}-all.tar.gz | tar xz --strip-components=1 +RUN wget https://pocoproject.org/releases/poco-${POCO_VERSION}/poco-${POCO_VERSION}-all.zip +RUN unzip poco-${POCO_VERSION}-all.zip +RUN mv ./poco-${POCO_VERSION}-all/* ./ RUN ./configure --no-tests --no-samples RUN make --quiet PageCompiler-libexec XML-libexec JSON-libexec diff --git a/frameworks/C++/ulib/benchmark_config.json b/frameworks/C++/ulib/benchmark_config.json index 7a45cd66cf5..86e4e513f40 100644 --- a/frameworks/C++/ulib/benchmark_config.json +++ b/frameworks/C++/ulib/benchmark_config.json @@ -16,7 +16,8 @@ "database_os": "Linux", "display_name": "ULib", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "plaintext_fit": { "plaintext_url": "/plaintext", @@ -33,7 +34,8 @@ "database_os": "Linux", "display_name": "ULib-fit", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "json": { "json_url": "/json", @@ -50,7 +52,8 @@ "database_os": "Linux", "display_name": "ULib", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "json_fit": { "json_url": "/json", @@ -67,7 +70,8 @@ "database_os": "Linux", "display_name": "ULib-fit", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "mysql": { "db_url": "/db", @@ -87,7 +91,8 @@ "database_os": "Linux", "display_name": "ULib-mysql", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "postgres": { "db_url": "/db", @@ -107,7 +112,8 @@ "database_os": "Linux", "display_name": "ULib-postgres", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "postgres_fit": { "db_url": "/db", @@ -125,7 +131,8 @@ "database_os": "Linux", "display_name": "ULib-fit", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] }, "mongodb": { "setup_file": "setup_mongodb", @@ -146,7 +153,8 @@ "database_os": "Linux", "display_name": "ULib-mongodb", "notes": "", - "versus": "" + "versus": "", + "tags": ["broken"] } }] } diff --git a/frameworks/C++/wt/benchmark.cpp b/frameworks/C++/wt/benchmark.cpp deleted file mode 100644 index 97a8ac8d3d6..00000000000 --- a/frameworks/C++/wt/benchmark.cpp +++ /dev/null @@ -1,376 +0,0 @@ -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include - -#include -#include -#ifndef BENCHMARK_USE_POSTGRES -#include -#else -#include -#endif - -#include 
- -#ifndef WT_WIN32 -extern char **environ; -#endif // WT_WIN32 - -class MyMessage { -public: - std::string message; - - template - void persist(Action& a) { - Wt::Dbo::field(a, message, "message"); - } -}; - -class World { -public: - int randomNumber; - - template - void persist(Action& a) { - Wt::Dbo::field(a, randomNumber, "randomnumber"); - } -}; - -class Fortune { -public: - std::string message; - - template - void persist(Action& a) { - Wt::Dbo::field(a, message, "message"); - } -}; - -namespace Wt { - namespace Dbo { - template<> - struct dbo_traits : public dbo_default_traits { - static const char *versionField() { - return 0; - } - static IdType invalidId() { - return 0; - } - }; - template<> - struct dbo_traits : public dbo_default_traits { - static const char *versionField() { - return 0; - } - static IdType invalidId() { - return 0; - } - }; - } -} - -class JsonResource : public Wt::WResource { -public: - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - response.setMimeType("application/json"); - response.addHeader("Server", "Wt"); - - MyMessage message; - message.message = "Hello, World!"; - - Wt::Dbo::JsonSerializer writer(response.out()); - writer.serialize(message); - } -}; - -class MyConnection : public -#ifdef BENCHMARK_USE_POSTGRES - Wt::Dbo::backend::Postgres -#else - Wt::Dbo::backend::MySQL -#endif -{ -public: -#ifdef BENCHMARK_USE_POSTGRES - MyConnection(const std::string &db) : - Wt::Dbo::backend::Postgres(db) {} -#else - MyConnection(const std::string &db, const std::string &dbuser, const std::string &dbpasswd, const std::string &dbhost, unsigned int dbport) : - Wt::Dbo::backend::MySQL(db, dbuser, dbpasswd, dbhost, dbport) {} -#endif - - virtual void startTransaction() { } - virtual void commitTransaction() { } - virtual void rollbackTransaction() { } -}; - -struct DbStruct { - MyConnection *connection; - Wt::Dbo::Session session; - - std::default_random_engine rng; - std::uniform_int_distribution distribution; - - DbStruct() - : connection(0), - rng(clock()), - distribution(1, 10000) { - std::string dbHostStr = "localhost"; - char *dbHost = std::getenv("DBHOST"); - if (dbHost) - dbHostStr = std::string(dbHost); -#ifndef BENCHMARK_USE_POSTGRES - auto c = Wt::cpp14::make_unique("hello_world", "benchmarkdbuser", "benchmarkdbpass", dbHostStr, 3306); -#else - auto connStr = std::string("host=") + dbHostStr + " port=5432 user=benchmarkdbuser password=benchmarkdbpass dbname=hello_world"; - auto c = Wt::cpp14::make_unique(connStr); -#endif - - connection = c.get(); - session.setConnection(std::move(c)); - session.mapClass("world"); - session.mapClass("fortune"); - } - - int rand() { - return distribution(rng); - } -}; - -namespace { - thread_local DbStruct *dbStruct_; -} - -class DbResource : public Wt::WResource { -public: - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - response.setMimeType("application/json"); - response.addHeader("Server", "Wt"); - - if (!dbStruct_) { - dbStruct_ = new DbStruct(); - } - - Wt::Dbo::Transaction transaction(dbStruct_->session); - Wt::Dbo::ptr entry = dbStruct_->session.load(dbStruct_->rand()); - - Wt::Dbo::JsonSerializer writer(response.out()); - writer.serialize(entry); - } -}; - -class QueriesResource : public Wt::WResource { -public: - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - int n; - if (const std::string *queries = request.getParameter("queries")) { - n = atoi(queries->c_str()); - if (n < 
1) - n = 1; - else if (n > 500) - n = 500; - } else { - n = 1; - } - - response.setMimeType("application/json"); - response.addHeader("Server", "Wt"); - - if (!dbStruct_) { - dbStruct_ = new DbStruct(); - } - - Wt::Dbo::Transaction transaction(dbStruct_->session); - std::vector > results; - results.reserve(n); - for (int i = 0; i < n; ++i) { - results.push_back(dbStruct_->session.load(dbStruct_->rand())); - } - Wt::Dbo::JsonSerializer writer(response.out()); - writer.serialize(results); - } -}; - -typedef Wt::Dbo::collection< Wt::Dbo::ptr > Fortunes; -typedef std::vector > VFortunes; - -bool fortuneCmp(const Wt::Dbo::ptr& f1, const Wt::Dbo::ptr& f2) { - return strcmp(f1->message.c_str(), f2->message.c_str()) < 0; -} - -class FortuneTemplate : public Wt::WTemplate { -private: - const VFortunes *fortunes_; - mutable std::vector >::const_iterator it_; -public: - FortuneTemplate(const std::vector >& fortunes) - : Wt::WTemplate(tr("fortunes")), - fortunes_(&fortunes), - it_(fortunes.end()) - { - addFunction("while", &Wt::WTemplate::Functions::while_f); - } - - virtual bool conditionValue(const std::string& name) const { - if (name == "next-fortune") { - if (it_ == fortunes_->end()) - it_ = fortunes_->begin(); - else - ++it_; - - if (it_ == fortunes_->end()) - return false; - - return true; - } else - return Wt::WTemplate::conditionValue(name); - } - - virtual void resolveString(const std::string& varName, const std::vector& vars, std::ostream& result) { - if (varName == "id") - result << it_->id(); - else if (varName == "message") - format(result, Wt::WString((*it_)->message)); - else - Wt::WTemplate::resolveString(varName, vars, result); - } -}; - -class FortuneResource : public Wt::WResource { -public: - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - response.setMimeType("text/html; charset=utf-8"); - response.addHeader("Server", "Wt"); - - if (!dbStruct_) { - dbStruct_ = new DbStruct(); - } - - Wt::Dbo::Transaction transaction(dbStruct_->session); - Fortunes fortunes = dbStruct_->session.find(); - VFortunes vFortunes; - for (Fortunes::const_iterator i = fortunes.begin(); i != fortunes.end(); ++i) - vFortunes.push_back(*i); - auto additionalFortune = Wt::cpp14::make_unique(); - additionalFortune->message = "Additional fortune added at request time."; - vFortunes.push_back(Wt::Dbo::ptr(std::move(additionalFortune))); - - std::sort(vFortunes.begin(), vFortunes.end(), fortuneCmp); - - FortuneTemplate tpl(vFortunes); - - response.out() << ""; - tpl.renderTemplate(response.out()); - } -}; - -class UpdateResource : public Wt::WResource { -public: - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - int n; - if (const std::string *queries = request.getParameter("queries")) { - n = atoi(queries->c_str()); - if (n < 1) - n = 1; - else if (n > 500) - n = 500; - } else { - n = 1; - } - - response.setMimeType("application/json"); - response.addHeader("Server", "Wt"); - - if (!dbStruct_) { - dbStruct_ = new DbStruct(); - } - - std::vector > results; - - for (int i = 0; i < n; ++i) { - bool success = false; - while (!success) { - try { - Wt::Dbo::Transaction transaction(dbStruct_->session); - Wt::Dbo::ptr world = dbStruct_->session.load(dbStruct_->rand()); - world.modify()->randomNumber = dbStruct_->rand(); - transaction.commit(); - results.push_back(world); - success = true; - } catch (Wt::Dbo::Exception& e) { - // Retry - } - } - } - - Wt::Dbo::JsonSerializer writer(response.out()); - writer.serialize(results); - 
} -}; - -class PlaintextResource : public Wt::WResource { - virtual void handleRequest(const Wt::Http::Request &request, Wt::Http::Response &response) { - response.setMimeType("text/plain"); - response.addHeader("Server", "Wt"); - - response.out() << "Hello, World!"; - } -}; - -int main(int argc, char** argv) { - try { - Wt::WServer server(argv[0]); - - server.setServerConfiguration(argc, argv, WTHTTP_CONFIGURATION); - - auto bundle = std::make_shared(); - bundle->use(server.appRoot() + "fortunes"); - server.setLocalizedStrings(bundle); - - JsonResource jsonResource; - server.addResource(&jsonResource, "/json"); - - DbResource dbResource; - server.addResource(&dbResource, "/db"); - - QueriesResource queriesResource; - server.addResource(&queriesResource, "/queries"); - - FortuneResource fortuneResource; - server.addResource(&fortuneResource, "/fortune"); - - UpdateResource updateResource; - server.addResource(&updateResource, "/updates"); - - PlaintextResource plaintextResource; - server.addResource(&plaintextResource, "/plaintext"); - - if (server.start()) { - int sig = Wt::WServer::waitForShutdown(); - - std::cerr << "Shutdown (signal = " << sig << ")" << std::endl; - server.stop(); - -#ifndef WT_WIN32 - if (sig == SIGHUP) - Wt::WServer::restart(argc, argv, environ); -#endif // WT_WIN32 - } - } catch (Wt::WServer::Exception& e) { - std::cerr << e.what() << "\n"; - return 1; - } catch (std::exception& e) { - std::cerr << "exception: " << e.what() << "\n"; - return 1; - } -} diff --git a/frameworks/C++/wt/benchmark_config.json b/frameworks/C++/wt/benchmark_config.json deleted file mode 100644 index 5c6d1be233e..00000000000 --- a/frameworks/C++/wt/benchmark_config.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "framework": "wt", - "tests": [{ - "default": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortune", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "MySQL", - "framework": "wt", - "language": "C++", - "flavor": "None", - "orm": "Full", - "platform": "None", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "wt", - "notes": "", - "versus": "wt" - }, - "postgres": { - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortune", - "update_url": "/updates?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "wt", - "language": "C++", - "flavor": "None", - "orm": "Full", - "platform": "None", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "wt-postgres", - "notes": "", - "versus": "wt", - "tags": ["broken"] - } - }] -} diff --git a/frameworks/C++/wt/fortunes.xml b/frameworks/C++/wt/fortunes.xml deleted file mode 100644 index 21e177937e2..00000000000 --- a/frameworks/C++/wt/fortunes.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - Fortunes - - - - - - - - ${while:next-fortune fortune-table-row} -
idmessage
- -
- - - - ${id} - ${message} - - -
diff --git a/frameworks/C++/wt/wt-postgres.dockerfile b/frameworks/C++/wt/wt-postgres.dockerfile deleted file mode 100644 index c78ac52e355..00000000000 --- a/frameworks/C++/wt/wt-postgres.dockerfile +++ /dev/null @@ -1,86 +0,0 @@ -FROM buildpack-deps:xenial - -RUN apt-get update -yqq && apt-get install -yqq software-properties-common unzip cmake - -RUN add-apt-repository ppa:ubuntu-toolchain-r/test -y -RUN apt-get update -yqq -RUN apt-get install -yqq gcc-6 g++-6 - -ENV WT_VERSION 4.0.2 -ENV BOOST_ROOT /boost -ENV BOOST_INC ${BOOST_ROOT}/include -ENV BOOST_LIB ${BOOST_ROOT}/lib -ENV WT_ROOT /wt -ENV WT_LIB ${WT_ROOT}/lib -ENV WT_INC ${WT_ROOT}/include -ENV LD_LIBRARY_PATH ${BOOST_LIB}:${WT_LIB}:${LD_LIBRARY_PATH} -ENV CPLUS_INCLUDE_PATH /usr/include/postgresql:/usr/include/postgresql/9.3/server:${CPLUS_INCLUDE_PATH} - -WORKDIR ${WT_ROOT} -COPY benchmark.cpp benchmark.cpp -COPY fortunes.xml fortunes.xml -COPY wt_config.xml wt_config.xml - -# Build boost_thread, boost_system, boost_filesystem and boost_program_options -RUN wget -q https://dl.bintray.com/boostorg/release/1.65.1/source/boost_1_65_1.tar.gz -RUN tar xf boost_1_65_1.tar.gz -RUN cd boost_1_65_1 && \ - ./bootstrap.sh && \ - ./b2 \ - -d0 \ - toolset=gcc-6 \ - variant=release \ - link=static \ - cxxflags="-std=c++14 -march=native" \ - cflags="-march=native" \ - --prefix=${BOOST_ROOT} \ - --with-system \ - --with-thread \ - --with-program_options \ - --with-filesystem \ - install - -RUN wget -q https://github.com/emweb/wt/archive/${WT_VERSION}.tar.gz -RUN mv ${WT_VERSION}.tar.gz wt-${WT_VERSION}.tar.gz -RUN tar xf wt-${WT_VERSION}.tar.gz - -RUN cd wt-$WT_VERSION && \ - mkdir -p build && \ - cd build && \ - cmake .. -DCMAKE_CXX_STANDARD=14 -DCMAKE_BUILD_TYPE=Release \ - -DBOOST_PREFIX=${BOOST_ROOT} \ - -DCMAKE_INSTALL_PREFIX=${WT_ROOT} -DCONFIGDIR=${WT_ROOT}/etc \ - -DCMAKE_C_COMPILER=$(which gcc-6) \ - -DCMAKE_CXX_COMPILER=$(which g++-6) -DDESTDIR=${WT_ROOT} \ - -DWEBUSER=$(id -u -n) -DWEBGROUP=$(id -g -n) \ - -DENABLE_SSL=OFF -DHTTP_WITH_ZLIB=OFF \ - -DCMAKE_C_FLAGS_RELEASE="-O3 -march=native -DNDEBUG" \ - -DCMAKE_CXX_FLAGS_RELEASE="-O3 -march=native -DNDEBUG" \ - -DBUILD_TESTS=OFF -DENABLE_LIBWTTEST=OFF \ - -DSHARED_LIBS=OFF >/dev/null && \ - make && make install - -RUN g++-6 \ - -std=c++14 \ - -O3 -march=native -DNDEBUG \ - -I${BOOST_INC} \ - -L${BOOST_LIB} \ - -I${WT_INC} \ - -L${WT_LIB} \ - -o te-benchmark-pg.wt \ - -DBENCHMARK_USE_POSTGRES \ - benchmark.cpp \ - -lwthttp -lwt \ - -lwtdbo -lwtdbopostgres \ - -lboost_system \ - -lboost_program_options \ - -lboost_thread \ - -lboost_filesystem \ - -lpthread \ - -lpq - -ENV DBHOST tfb-database - -EXPOSE 8080 - -CMD ./te-benchmark-pg.wt -c wt_config.xml -t $(nproc) --docroot . --approot . 
--http-listen 0.0.0.0:8080 --accesslog=- --no-compression diff --git a/frameworks/C++/wt/wt.dockerfile b/frameworks/C++/wt/wt.dockerfile deleted file mode 100644 index 2ee34165d5e..00000000000 --- a/frameworks/C++/wt/wt.dockerfile +++ /dev/null @@ -1,85 +0,0 @@ -FROM buildpack-deps:xenial - -RUN apt-get update -yqq && apt-get install -yqq software-properties-common unzip cmake - -RUN add-apt-repository ppa:ubuntu-toolchain-r/test -y -RUN apt-get update -yqq -RUN apt-get install -yqq gcc-6 g++-6 - -ENV WT_VERSION 4.0.2 -ENV BOOST_ROOT /boost -ENV BOOST_INC ${BOOST_ROOT}/include -ENV BOOST_LIB ${BOOST_ROOT}/lib -ENV WT_ROOT /wt -ENV WT_LIB ${WT_ROOT}/lib -ENV WT_INC ${WT_ROOT}/include -ENV LD_LIBRARY_PATH ${BOOST_LIB}:${WT_LIB}:${LD_LIBRARY_PATH} -ENV CPLUS_INCLUDE_PATH /usr/include/postgresql:/usr/include/postgresql/9.3/server:${CPLUS_INCLUDE_PATH} - -WORKDIR ${WT_ROOT} -COPY benchmark.cpp benchmark.cpp -COPY fortunes.xml fortunes.xml -COPY wt_config.xml wt_config.xml - -# Build boost_thread, boost_system, boost_filesystem and boost_program_options -RUN wget -q https://dl.bintray.com/boostorg/release/1.65.1/source/boost_1_65_1.tar.gz -RUN tar xf boost_1_65_1.tar.gz -RUN cd boost_1_65_1 && \ - ./bootstrap.sh && \ - ./b2 \ - -d0 \ - toolset=gcc-6 \ - variant=release \ - link=static \ - cxxflags="-std=c++14 -march=native" \ - cflags="-march=native" \ - --prefix=${BOOST_ROOT} \ - --with-system \ - --with-thread \ - --with-program_options \ - --with-filesystem \ - install - -RUN wget -q https://github.com/emweb/wt/archive/${WT_VERSION}.tar.gz -RUN mv ${WT_VERSION}.tar.gz wt-${WT_VERSION}.tar.gz -RUN tar xf wt-${WT_VERSION}.tar.gz - -RUN cd wt-$WT_VERSION && \ - mkdir -p build && \ - cd build && \ - cmake .. -DCMAKE_CXX_STANDARD=14 -DCMAKE_BUILD_TYPE=Release \ - -DBOOST_PREFIX=${BOOST_ROOT} \ - -DCMAKE_INSTALL_PREFIX=${WT_ROOT} -DCONFIGDIR=${WT_ROOT}/etc \ - -DCMAKE_C_COMPILER=$(which gcc-6) \ - -DCMAKE_CXX_COMPILER=$(which g++-6) -DDESTDIR=${WT_ROOT} \ - -DWEBUSER=$(id -u -n) -DWEBGROUP=$(id -g -n) \ - -DENABLE_SSL=OFF -DHTTP_WITH_ZLIB=OFF \ - -DCMAKE_C_FLAGS_RELEASE="-O3 -march=native -DNDEBUG" \ - -DCMAKE_CXX_FLAGS_RELEASE="-O3 -march=native -DNDEBUG" \ - -DBUILD_TESTS=OFF -DENABLE_LIBWTTEST=OFF \ - -DSHARED_LIBS=OFF >/dev/null && \ - make && make install - -RUN g++-6 \ - -std=c++14 \ - -O3 -march=native -DNDEBUG \ - -I${BOOST_INC} \ - -L${BOOST_LIB} \ - -I${WT_INC} \ - -L${WT_LIB} \ - -o te-benchmark.wt \ - benchmark.cpp \ - -lwthttp -lwt \ - -lwtdbo -lwtdbomysql \ - -lboost_system \ - -lboost_program_options \ - -lboost_thread \ - -lboost_filesystem \ - -lpthread \ - -lmysqlclient - -ENV DBHOST tfb-database - -EXPOSE 8080 - -CMD ./te-benchmark.wt -c wt_config.xml -t $(nproc) --docroot . --approot . 
--http-listen 0.0.0.0:8080 --accesslog=- --no-compression diff --git a/frameworks/C++/wt/wt_config.xml b/frameworks/C++/wt/wt_config.xml deleted file mode 100644 index cd1338efbdb..00000000000 --- a/frameworks/C++/wt/wt_config.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - -* - - diff --git a/frameworks/C/h2o/CMakeLists.txt b/frameworks/C/h2o/CMakeLists.txt index 6712c50ebdf..2f1fc3ecce7 100644 --- a/frameworks/C/h2o/CMakeLists.txt +++ b/frameworks/C/h2o/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 2.8.0) +cmake_minimum_required(VERSION 3.16.0) project(h2o_app) find_library(H2O_LIB h2o-evloop) find_library(MUSTACHE_C_LIB mustache_c) diff --git a/frameworks/C/h2o/benchmark_config.json b/frameworks/C/h2o/benchmark_config.json index c866f43129d..1e222e90ce8 100644 --- a/frameworks/C/h2o/benchmark_config.json +++ b/frameworks/C/h2o/benchmark_config.json @@ -13,7 +13,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "libh2o", "language": "C", "flavor": "None", "orm": "Raw", diff --git a/frameworks/C/h2o/h2o.dockerfile b/frameworks/C/h2o/h2o.dockerfile index 4a710f82ac8..0c1a53bec8f 100644 --- a/frameworks/C/h2o/h2o.dockerfile +++ b/frameworks/C/h2o/h2o.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:22.04 WORKDIR /h2o_app_src COPY ./ ./ @@ -43,6 +43,8 @@ RUN mkdir -p "${H2O_BUILD_DIR}/build" && \ cd ../.. && \ rm -rf "$H2O_BUILD_DIR" +ARG BENCHMARK_ENV +ENV BENCHMARK_ENV=$BENCHMARK_ENV EXPOSE 8080 CMD ["./h2o.sh"] diff --git a/frameworks/C/h2o/h2o.sh b/frameworks/C/h2o/h2o.sh index 3187419cf4e..bdf24a92ebd 100755 --- a/frameworks/C/h2o/h2o.sh +++ b/frameworks/C/h2o/h2o.sh @@ -24,12 +24,9 @@ if [[ -z "$MUSTACHE_C_PREFIX" ]]; then MUSTACHE_C_PREFIX=/opt/mustache-c fi -# A hacky way to detect whether we are running in the physical hardware or the cloud environment. -if [[ $(nproc) -gt 16 ]]; then - echo "Running h2o_app in the physical hardware environment." +if [[ "$BENCHMARK_ENV" = "Azure" ]]; then DB_CONN=5 else - echo "Running h2o_app in the cloud environment." DB_CONN=5 fi @@ -81,6 +78,7 @@ build_h2o_app "-fprofile-use" cmake --install . popd rm -rf "$H2O_APP_BUILD_DIR" +echo "Running h2o_app in the $BENCHMARK_ENV environment." 
echo "Maximum database connections per thread: $DB_CONN" run_h2o_app 0 "${H2O_APP_PREFIX}/bin" "${H2O_APP_PREFIX}/share/h2o_app" wait diff --git a/frameworks/C/lwan/Makefile b/frameworks/C/lwan/Makefile index f39433a82fc..b4d0fea2bf0 100644 --- a/frameworks/C/lwan/Makefile +++ b/frameworks/C/lwan/Makefile @@ -4,15 +4,15 @@ CFLAGS = -mtune=native -march=native -O3 -fno-plt -flto -ffat-lto-objects -DNDEB -falign-functions=32 -malign-data=abi \ -include /lwan/build/lwan-build-config.h \ -I /lwan/src/lib \ - `pkg-config mariadb --cflags` \ - `pkg-config sqlite3 --cflags` \ - `pkg-config luajit --cflags` + $(shell pkg-config mariadb --cflags) \ + $(shell pkg-config sqlite3 --cflags) \ + $(shell pkg-config luajit --cflags) LDFLAGS = -mtune=native -march=native -O3 -flto -ffat-lto-objects -Wl,-z,now,-z,relro \ -Wl,-whole-archive /lwan/build/src/lib/liblwan.a -Wl,-no-whole-archive \ - `pkg-config mariadb --libs` \ - `pkg-config sqlite3 --libs` \ - `pkg-config luajit --libs` \ + $(shell pkg-config mariadb --libs) \ + $(shell pkg-config sqlite3 --libs) \ + $(shell pkg-config luajit --libs) \ -lpthread \ -lz @@ -26,7 +26,7 @@ all: techempower $(CC) $(CFLAGS) -c $< -o $@ techempower: $(OBJS) - $(CC) $(LDFLAGS) $(OBJS) -o techempower + $(CC) -o techempower $(OBJS) $(LDFLAGS) clean: rm -f techempower $(OBJS) diff --git a/frameworks/C/lwan/lwan-lua.dockerfile b/frameworks/C/lwan/lwan-lua.dockerfile index 4a9a5a4cded..f98738eb889 100644 --- a/frameworks/C/lwan/lwan-lua.dockerfile +++ b/frameworks/C/lwan/lwan-lua.dockerfile @@ -1,24 +1,19 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 RUN apt-get update -yqq && \ apt-get install -yqq \ git pkg-config build-essential cmake zlib1g-dev \ - libsqlite3-dev libmariadbclient-dev wget + libsqlite3-dev libmariadb-dev wget libluajit-5.1-dev ADD ./ /lwan WORKDIR /lwan RUN mkdir mimalloc && \ - wget https://github.com/microsoft/mimalloc/archive/6e1ca96a4965c776c10698c24dae576523178ef5.tar.gz -O - | tar xz --strip-components=1 -C mimalloc && \ + wget https://github.com/microsoft/mimalloc/archive/817569dfad79732233fb86649c89e04387ce02e9.tar.gz -O - | tar xz --strip-components=1 -C mimalloc && \ cd mimalloc && mkdir build && cd build && \ CFLAGS="-flto -ffat-lto-objects" cmake .. 
-DCMAKE_BUILD_TYPE=Release -DMI_SECURE=OFF && make -j install -RUN mkdir luajit && \ - wget http://luajit.org/download/LuaJIT-2.0.5.tar.gz -O - | tar xz --strip-components=1 -C luajit && \ - cd luajit && \ - PREFIX=/usr CFLAGS="-O3 -mtune=native -march=native -flto -ffat-lto-objects" make -j install - -RUN wget https://github.com/lpereira/lwan/archive/3bb25e235e76028d7687ae4636a059474f42b19c.tar.gz -O - | tar xz --strip-components=1 && \ +RUN wget https://github.com/lpereira/lwan/archive/e637f1ea724389a36dcab02affb6ec3fe5ecb0b6.tar.gz -O - | tar xz --strip-components=1 && \ mkdir build && cd build && \ cmake /lwan -DCMAKE_BUILD_TYPE=Release -DUSE_ALTERNATIVE_MALLOC=mimalloc && \ make lwan-static @@ -26,7 +21,7 @@ RUN wget https://github.com/lpereira/lwan/archive/3bb25e235e76028d7687ae4636a059 RUN make clean && make ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/lib -ENV LD_PRELOAD=/usr/local/lib/mimalloc-1.6/libmimalloc.so +ENV LD_PRELOAD=/usr/local/lib/libmimalloc.so EXPOSE 8080 diff --git a/frameworks/C/lwan/lwan.dockerfile b/frameworks/C/lwan/lwan.dockerfile index ff7f8e740f8..262ed935394 100644 --- a/frameworks/C/lwan/lwan.dockerfile +++ b/frameworks/C/lwan/lwan.dockerfile @@ -1,24 +1,19 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 RUN apt-get update -yqq && \ apt-get install -yqq \ git pkg-config build-essential cmake zlib1g-dev \ - libsqlite3-dev libmariadbclient-dev wget + libsqlite3-dev libmariadb-dev wget libluajit-5.1-dev ADD ./ /lwan WORKDIR /lwan RUN mkdir mimalloc && \ - wget https://github.com/microsoft/mimalloc/archive/6e1ca96a4965c776c10698c24dae576523178ef5.tar.gz -O - | tar xz --strip-components=1 -C mimalloc && \ + wget https://github.com/microsoft/mimalloc/archive/817569dfad79732233fb86649c89e04387ce02e9.tar.gz -O - | tar xz --strip-components=1 -C mimalloc && \ cd mimalloc && mkdir build && cd build && \ CFLAGS="-flto -ffat-lto-objects" cmake .. -DCMAKE_BUILD_TYPE=Release -DMI_SECURE=OFF && make -j install -RUN mkdir luajit && \ - wget http://luajit.org/download/LuaJIT-2.0.5.tar.gz -O - | tar xz --strip-components=1 -C luajit && \ - cd luajit && \ - PREFIX=/usr CFLAGS="-O3 -mtune=native -march=native -flto -ffat-lto-objects" make -j install - -RUN wget https://github.com/lpereira/lwan/archive/3bb25e235e76028d7687ae4636a059474f42b19c.tar.gz -O - | tar xz --strip-components=1 && \ +RUN wget https://github.com/lpereira/lwan/archive/e637f1ea724389a36dcab02affb6ec3fe5ecb0b6.tar.gz -O - | tar xz --strip-components=1 && \ mkdir build && cd build && \ cmake /lwan -DCMAKE_BUILD_TYPE=Release -DUSE_ALTERNATIVE_MALLOC=mimalloc && \ make lwan-static @@ -31,7 +26,7 @@ ENV MYSQL_USER=benchmarkdbuser ENV MYSQL_PASS=benchmarkdbpass ENV MYSQL_DB=hello_world ENV MYSQL_HOST=tfb-database -ENV LD_PRELOAD=/usr/local/lib/mimalloc-1.6/libmimalloc.so +ENV LD_PRELOAD=/usr/local/lib/libmimalloc.so EXPOSE 8080 diff --git a/frameworks/C/lwan/src/database.c b/frameworks/C/lwan/src/database.c index e1ff94042bb..5f88236600d 100644 --- a/frameworks/C/lwan/src/database.c +++ b/frameworks/C/lwan/src/database.c @@ -1,6 +1,6 @@ /* * lwan - simple web server - * Copyright (c) 2014 Leandro A. F. Pereira + * Copyright (c) 2022 L. A. F. 
Pereira * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License diff --git a/frameworks/C/lwan/src/database.h b/frameworks/C/lwan/src/database.h index 0af4f79a94d..987d0ec9b96 100644 --- a/frameworks/C/lwan/src/database.h +++ b/frameworks/C/lwan/src/database.h @@ -1,6 +1,6 @@ /* * lwan - simple web server - * Copyright (c) 2014 Leandro A. F. Pereira + * Copyright (c) 2022 L. A. F. Pereira * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License diff --git a/frameworks/C/lwan/src/json.c b/frameworks/C/lwan/src/json.c index 125f1281d17..98daaab4f40 100644 --- a/frameworks/C/lwan/src/json.c +++ b/frameworks/C/lwan/src/json.c @@ -1,6 +1,6 @@ /* * Copyright (c) 2017 Intel Corporation - * Copyright (c) 2020 Leandro A. F. Pereira + * Copyright (c) 2022 L. A. F. Pereira * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/frameworks/C/lwan/src/techempower.c b/frameworks/C/lwan/src/techempower.c index a8ef463592e..decad0dcbe0 100644 --- a/frameworks/C/lwan/src/techempower.c +++ b/frameworks/C/lwan/src/techempower.c @@ -1,6 +1,6 @@ /* * lwan - simple web server - * Copyright (c) 2014 Leandro A. F. Pereira + * Copyright (c) 2022 L. A. F. Pereira * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License diff --git a/frameworks/C/lwan/techempower.conf b/frameworks/C/lwan/techempower.conf index 274e83e8d47..45b09c08e8b 100644 --- a/frameworks/C/lwan/techempower.conf +++ b/frameworks/C/lwan/techempower.conf @@ -1,4 +1,6 @@ -listener *:8080 { +listener *:8080 + +site { # For main TWFB benchmarks &plaintext /plaintext &json /json @@ -15,11 +17,13 @@ listener *:8080 { function handle_get_plaintext(req) req:set_response("Hello, World!") + return nil end function handle_get_json(req) req:set_headers({['Content-Type']='application/json'}) req:set_response(json.encode({message="Hello, World!"})) + return nil end''' } diff --git a/frameworks/C/nginx/nginx.dockerfile b/frameworks/C/nginx/nginx.dockerfile index 3ae9f5bf769..91678bbaee6 100644 --- a/frameworks/C/nginx/nginx.dockerfile +++ b/frameworks/C/nginx/nginx.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive diff --git a/frameworks/CSharp/appmpower/appmpower-odbc-pg.dockerfile b/frameworks/CSharp/appmpower/appmpower-odbc-pg.dockerfile index 84f0e035a6b..aab3957a316 100644 --- a/frameworks/CSharp/appmpower/appmpower-odbc-pg.dockerfile +++ b/frameworks/CSharp/appmpower/appmpower-odbc-pg.dockerfile @@ -8,20 +8,20 @@ RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ WORKDIR /odbc # To compile the latest postgresql odbc driver, postgresql itself needs to be installed -RUN curl -L -o postgresql-14.1.tar.gz https://ftp.postgresql.org/pub/source/v14.1/postgresql-14.1.tar.gz +#RUN curl -L -o postgresql-14.1.tar.gz https://ftp.postgresql.org/pub/source/v14.1/postgresql-14.1.tar.gz RUN curl -L -o unixODBC-2.3.9.tar.gz ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.9.tar.gz -RUN curl -L -o psqlodbc-13.02.0000.tar.gz https://ftp.postgresql.org/pub/odbc/versions/src/psqlodbc-13.02.0000.tar.gz +#RUN curl -L -o psqlodbc-13.02.0000.tar.gz https://ftp.postgresql.org/pub/odbc/versions/src/psqlodbc-13.02.0000.tar.gz -RUN tar -xvf postgresql-14.1.tar.gz +#RUN tar -xvf postgresql-14.1.tar.gz RUN tar -xvf unixODBC-2.3.9.tar.gz -RUN tar -xvf psqlodbc-13.02.0000.tar.gz +#RUN tar -xvf 
psqlodbc-13.02.0000.tar.gz -WORKDIR /odbc/postgresql-14.1 -RUN ./configure -RUN make -RUN make install +#WORKDIR /odbc/postgresql-14.1 +#RUN ./configure +#RUN make +#RUN make install -ENV PATH=/usr/local/pgsql/bin:$PATH +#ENV PATH=/usr/local/pgsql/bin:$PATH WORKDIR /odbc/unixODBC-2.3.9 RUN ./configure --prefix=/usr/local/unixODBC @@ -30,10 +30,10 @@ RUN make install ENV PATH=/usr/local/unixODBC/lib:$PATH -WORKDIR /odbc/psqlodbc-13.02.0000 -RUN ./configure --with-unixodbc=/usr/local/unixODBC --with-libpq=/usr/local/pgsql --prefix=/usr/local/pgsqlodbc -RUN make -RUN make install +#WORKDIR /odbc/psqlodbc-13.02.0000 +#RUN ./configure --with-unixodbc=/usr/local/unixODBC --with-libpq=/usr/local/pgsql --prefix=/usr/local/pgsqlodbc +#RUN make +#RUN make install WORKDIR /app COPY src . @@ -44,19 +44,19 @@ FROM mcr.microsoft.com/dotnet/aspnet:6.0.0 AS runtime RUN apt-get update # The following installs standard versions unixodbc 2.3.6 and pgsqlodbc 11 -#RUN apt-get install -y unixodbc odbc-postgresql +RUN apt-get install -y unixodbc odbc-postgresql # unixodbc still needs to be installed even if compiled locally -RUN apt-get install -y unixodbc wget curl libpq-dev build-essential +#RUN apt-get install -y unixodbc wget curl libpq-dev build-essential WORKDIR /odbc -RUN curl -L -o pgpool-II-4.2.3.tar.gz https://www.pgpool.net/mediawiki/download.php?f=pgpool-II-4.2.3.tar.gz -RUN tar -xvf pgpool-II-4.2.3.tar.gz +#RUN curl -L -o pgpool-II-4.2.3.tar.gz https://www.pgpool.net/mediawiki/download.php?f=pgpool-II-4.2.3.tar.gz +#RUN tar -xvf pgpool-II-4.2.3.tar.gz -WORKDIR /odbc/pgpool-II-4.2.3 -RUN ./configure -RUN make -RUN make install +#WORKDIR /odbc/pgpool-II-4.2.3 +#RUN ./configure +#RUN make +#RUN make install COPY --from=build /usr/local/unixODBC /usr/local/unixODBC @@ -66,7 +66,7 @@ COPY --from=build /usr/local/unixODBC /usr/local/unixODBC ENV PATH=/usr/local/unixODBC/bin:$PATH -COPY --from=build /usr/local/pgsqlodbc /usr/local/pgsqlodbc +#COPY --from=build /usr/local/pgsqlodbc /usr/local/pgsqlodbc WORKDIR /etc/ COPY odbcinst.ini . diff --git a/frameworks/CSharp/appmpower/odbcinst.ini b/frameworks/CSharp/appmpower/odbcinst.ini index 510be155b9d..ff3db34179a 100644 --- a/frameworks/CSharp/appmpower/odbcinst.ini +++ b/frameworks/CSharp/appmpower/odbcinst.ini @@ -16,8 +16,8 @@ Description=ODBC for PostgreSQL ; in version 08.x. Note that the library can also be installed under an other ; path than /usr/local/lib/ following your installation. 
; This is the standard location used by apt-get install -y unixodbc -;Driver = /usr/lib/x86_64-linux-gnu/odbc/psqlodbcw.so -Driver =/usr/local/pgsqlodbc/lib/psqlodbcw.so +Driver = /usr/lib/x86_64-linux-gnu/odbc/psqlodbcw.so +;Driver =/usr/local/pgsqlodbc/lib/psqlodbcw.so Threading = 0 CPTimeout = 0 diff --git a/frameworks/CSharp/appmpower/src/Db/PooledCommand.cs b/frameworks/CSharp/appmpower/src/Data/DbCommand.cs similarity index 70% rename from frameworks/CSharp/appmpower/src/Db/PooledCommand.cs rename to frameworks/CSharp/appmpower/src/Data/DbCommand.cs index 46255e86bad..2086068c802 100644 --- a/frameworks/CSharp/appmpower/src/Db/PooledCommand.cs +++ b/frameworks/CSharp/appmpower/src/Data/DbCommand.cs @@ -1,32 +1,36 @@ using System.Data; -using System.Data.Common; using System.Threading.Tasks; -namespace appMpower.Db +namespace appMpower.Data { - public class PooledCommand : IDbCommand + public class DbCommand : IDbCommand { private IDbCommand _dbCommand; - private PooledConnection _pooledConnection; + private DbConnection _dbConnection; - public PooledCommand(PooledConnection pooledConnection) + public DbCommand(DbConnection dbConnection) { - _dbCommand = pooledConnection.CreateCommand(); - _pooledConnection = pooledConnection; + _dbCommand = dbConnection.CreateCommand(); + _dbConnection = dbConnection; } - public PooledCommand(string commandText, PooledConnection pooledConnection) + public DbCommand(string commandText, DbConnection dbConnection) { - pooledConnection.GetCommand(commandText, this); + dbConnection.GetCommand(commandText, CommandType.Text, this); } - internal PooledCommand(IDbCommand dbCommand, PooledConnection pooledConnection) + public DbCommand(string commandText, CommandType commandType, DbConnection dbConnection) + { + dbConnection.GetCommand(commandText, commandType, this); + } + + internal DbCommand(IDbCommand dbCommand, DbConnection dbConnection) { _dbCommand = dbCommand; - _pooledConnection = pooledConnection; + _dbConnection = dbConnection; } - internal IDbCommand DbCommand + internal IDbCommand Command { get { @@ -38,15 +42,15 @@ internal IDbCommand DbCommand } } - internal PooledConnection PooledConnection + internal DbConnection DbConnection { get { - return _pooledConnection; + return _dbConnection; } set { - _pooledConnection = value; + _dbConnection = value; } } @@ -99,7 +103,6 @@ public IDbConnection? 
Connection } #nullable disable - public IDataParameterCollection Parameters { get @@ -143,6 +146,11 @@ public IDbDataParameter CreateParameter() return _dbCommand.CreateParameter(); } + public IDbDataParameter CreateParameter(string name, object value) + { + return CreateParameter(name, DbType.String, value); + } + public IDbDataParameter CreateParameter(string name, DbType dbType, object value) { IDbDataParameter dbDataParameter = null; @@ -177,12 +185,12 @@ public IDataReader ExecuteReader() public async Task ExecuteNonQueryAsync() { - return await (_dbCommand as DbCommand).ExecuteNonQueryAsync(); + return await (_dbCommand as System.Data.Common.DbCommand).ExecuteNonQueryAsync(); } - public async Task ExecuteReaderAsync(CommandBehavior behavior) + public async Task ExecuteReaderAsync(CommandBehavior behavior) { - return await (_dbCommand as DbCommand).ExecuteReaderAsync(behavior); + return await (_dbCommand as System.Data.Common.DbCommand).ExecuteReaderAsync(behavior); } public IDataReader ExecuteReader(CommandBehavior behavior) @@ -197,19 +205,21 @@ public IDataReader ExecuteReader(CommandBehavior behavior) } #nullable disable - public void Prepare() +#nullable enable + public async Task ExecuteScalarAsync() { - _dbCommand.Prepare(); + return await ((System.Data.Common.DbCommand)_dbCommand).ExecuteScalarAsync(); } +#nullable disable - public void Release() + public void Prepare() { - _pooledConnection.ReleaseCommand(this); + _dbCommand.Prepare(); } public void Dispose() { - _pooledConnection.ReleaseCommand(this); + _dbConnection.ReleaseCommand(this); } } } \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/Data/DbConnection.cs b/frameworks/CSharp/appmpower/src/Data/DbConnection.cs new file mode 100644 index 00000000000..31dc5a783dc --- /dev/null +++ b/frameworks/CSharp/appmpower/src/Data/DbConnection.cs @@ -0,0 +1,203 @@ +using System.Collections.Concurrent; +using System.Data; +using System.Threading.Tasks; + +namespace appMpower.Data +{ + public class DbConnection : IDbConnection + { + private string _connectionString; + internal InternalConnection _internalConnection; + + public DbConnection() + { + _connectionString = DbProviderFactory.ConnectionString; + } + + public DbConnection(string connectionString) + { + _connectionString = connectionString; + } + + internal ConcurrentDictionary DbCommands + { + get + { + return _internalConnection.DbCommands; + } + set + { + _internalConnection.DbCommands = value; + } + } + + public short Number + { + get + { + return _internalConnection.Number; + } + set + { + _internalConnection.Number = value; + } + } + + public IDbConnection Connection + { + get + { + return _internalConnection.DbConnection; + } + set + { + _internalConnection.DbConnection = value; + } + } + + public string ConnectionString + { + get + { + return _internalConnection.DbConnection.ConnectionString; + } + set + { + _internalConnection.DbConnection.ConnectionString = value; + } + } + + public int ConnectionTimeout + { + get + { + return _internalConnection.DbConnection.ConnectionTimeout; + } + } + + public string Database + { + get + { + return _internalConnection.DbConnection.Database; + } + } + + public ConnectionState State + { + get + { + if (_internalConnection is null) return ConnectionState.Closed; + return _internalConnection.DbConnection.State; + } + } + + public IDbTransaction BeginTransaction() + { + return _internalConnection.DbConnection.BeginTransaction(); + } + + public IDbTransaction BeginTransaction(IsolationLevel il) + { + return 
_internalConnection.DbConnection.BeginTransaction(il); + } + + public void ChangeDatabase(string databaseName) + { + _internalConnection.DbConnection.ChangeDatabase(databaseName); + } + + public void Close() + { + _internalConnection.DbConnection.Close(); + } + + public async Task CloseAsync() + { + await (_internalConnection.DbConnection as System.Data.Common.DbConnection).CloseAsync(); + } + + public IDbCommand CreateCommand() + { + return _internalConnection.DbConnection.CreateCommand(); + } + + public void Open() + { + if (_internalConnection.DbConnection.State == ConnectionState.Closed) + { + _internalConnection.DbConnection.Open(); + } + } + + public void Dispose() + { +#if ADO + _internalConnection.DbConnection.Dispose(); + _internalConnection.Dispose(); +#else + DbConnections.Release(_internalConnection); +#endif + } + + public async Task OpenAsync() + { +#if ADO && SQLSERVER + _internalConnection = new(); + _internalConnection.DbConnection = new System.Data.SqlClient.SqlConnection(_connectionString); +#elif ADO && POSTGRESQL + _internalConnection = new(); + _internalConnection.DbConnection = new Npgsql.NpgsqlConnection(_connectionString); +#else + if (_internalConnection is null) + { + _internalConnection = await DbConnections.GetConnection(_connectionString); + } +#endif + + if (_internalConnection.DbConnection.State == ConnectionState.Closed) + { + await (_internalConnection.DbConnection as System.Data.Common.DbConnection).OpenAsync(); + } + } + + internal DbCommand GetCommand(string commandText, CommandType commandType, DbCommand dbCommand) + { +#if ADO + dbCommand.Command = _internalConnection.DbConnection.CreateCommand(); + dbCommand.Command.CommandText = commandText; + dbCommand.Command.CommandType = commandType; + dbCommand.DbConnection = this; +#else + DbCommand internalCommand; + + if (_internalConnection.DbCommands.TryRemove(commandText, out internalCommand)) + { + dbCommand.Command = internalCommand.Command; + dbCommand.DbConnection = internalCommand.DbConnection; + } + else + { + dbCommand.Command = _internalConnection.DbConnection.CreateCommand(); + dbCommand.Command.CommandText = commandText; + dbCommand.Command.CommandType = commandType; + dbCommand.DbConnection = this; + + //For non odbc drivers like Npgsql which do not support Prepare + dbCommand.Command.Prepare(); + + //Console.WriteLine("prepare pool connection: " + this._internalConnection.Number + " for command " + _internalConnection.DbCommands.Count); + } +#endif + + return dbCommand; + } + + public void ReleaseCommand(DbCommand dbCommand) + { +#if !ADO + _internalConnection.DbCommands.TryAdd(dbCommand.CommandText, dbCommand); +#endif + } + } +} \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/Data/DbConnections.cs b/frameworks/CSharp/appmpower/src/Data/DbConnections.cs new file mode 100644 index 00000000000..d7658b41ebe --- /dev/null +++ b/frameworks/CSharp/appmpower/src/Data/DbConnections.cs @@ -0,0 +1,67 @@ +using System.Collections.Concurrent; +using System.Threading.Tasks; + +namespace appMpower.Data +{ + public static class DbConnections + { + private static bool _connectionsCreated = false; + private static short _createdConnections = 0; + private static short _maxConnections = 250; + + private static ConcurrentStack _stack = new(); + private static ConcurrentQueue> _waitingQueue = new(); + + public static async Task GetConnection(string connectionString) + { + InternalConnection internalConnection = null; + + if (_connectionsCreated) + { + if (!_stack.TryPop(out 
internalConnection)) + { + internalConnection = await GetDbConnectionAsync(); + } + + return internalConnection; + } + else + { + internalConnection = new InternalConnection(); + internalConnection.DbConnection = new System.Data.Odbc.OdbcConnection(connectionString); + + _createdConnections++; + + if (_createdConnections == _maxConnections) _connectionsCreated = true; + + internalConnection.Number = _createdConnections; + internalConnection.DbCommands = new ConcurrentDictionary(); + //Console.WriteLine("opened connection number: " + dbConnection.Number); + + return internalConnection; + } + } + + public static Task GetDbConnectionAsync() + { + var taskCompletionSource = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + _waitingQueue.Enqueue(taskCompletionSource); + return taskCompletionSource.Task; + } + + public static void Release(InternalConnection internalConnection) + { + TaskCompletionSource taskCompletionSource; + + if (_waitingQueue.TryDequeue(out taskCompletionSource)) + { + taskCompletionSource.SetResult(internalConnection); + } + else + { + _stack.Push(internalConnection); + } + } + } +} \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/Db/DataProvider.cs b/frameworks/CSharp/appmpower/src/Data/DbProviderFactory.cs similarity index 72% rename from frameworks/CSharp/appmpower/src/Db/DataProvider.cs rename to frameworks/CSharp/appmpower/src/Data/DbProviderFactory.cs index f466a503585..5f19a054204 100644 --- a/frameworks/CSharp/appmpower/src/Db/DataProvider.cs +++ b/frameworks/CSharp/appmpower/src/Data/DbProviderFactory.cs @@ -1,12 +1,14 @@ -namespace appMpower.Db +using System.Data; + +namespace appMpower.Data { - public static class DataProvider + public static class DbProviderFactory { #if MYSQL public const string ConnectionString = "Driver={MariaDB};Server=tfb-database;Database=hello_world;Uid=benchmarkdbuser;Pwd=benchmarkdbpass;Pooling=false;OPTIONS=67108864;FLAG_FORWARD_CURSOR=1"; #elif ADO - public const string ConnectionString = "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;SSL Mode=Disable;Maximum Pool Size=8;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Buffer Threshold Bytes=1000"; - //public const string ConnectionString = "Server=localhost;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;SSL Mode=Disable;Maximum Pool Size=9;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Buffer Threshold Bytes=1000"; + public const string ConnectionString = "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;SSL Mode=Disable;Maximum Pool Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Buffer Threshold Bytes=1000"; + //public const string ConnectionString = "Server=localhost;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;SSL Mode=Disable;Maximum Pool Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Buffer Threshold Bytes=1000"; #else public const string ConnectionString = "Driver={PostgreSQL};Server=tfb-database;Database=hello_world;Uid=benchmarkdbuser;Pwd=benchmarkdbpass;UseServerSidePrepare=1;Pooling=false"; //public const string ConnectionString = "Driver={PostgreSQL};Server=localhost;Database=hello_world;Uid=benchmarkdbuser;Pwd=benchmarkdbpass;UseServerSidePrepare=1;Pooling=false"; diff --git 
a/frameworks/CSharp/appmpower/src/Data/InternalConnection.cs b/frameworks/CSharp/appmpower/src/Data/InternalConnection.cs new file mode 100644 index 00000000000..25cecfeb4f8 --- /dev/null +++ b/frameworks/CSharp/appmpower/src/Data/InternalConnection.cs @@ -0,0 +1,16 @@ +using System.Collections.Concurrent; +using System.Data; + +namespace appMpower.Data +{ + public class InternalConnection : System.IDisposable + { + public short Number { get; set; } + public IDbConnection DbConnection { get; set; } + public ConcurrentDictionary DbCommands { get; set; } + + public void Dispose() + { + } + } +} \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/Db/PooledConnection.cs b/frameworks/CSharp/appmpower/src/Db/PooledConnection.cs deleted file mode 100644 index 67e9cb9b068..00000000000 --- a/frameworks/CSharp/appmpower/src/Db/PooledConnection.cs +++ /dev/null @@ -1,198 +0,0 @@ -using System.Collections.Concurrent; -using System.Data; -using System.Data.Common; -using System.Threading.Tasks; - -namespace appMpower.Db -{ - public class PooledConnection : IDbConnection - { - private bool _released = false; - private short _number = 0; - private IDbConnection _dbConnection; - private ConcurrentDictionary _pooledCommands; - - internal PooledConnection() - { - } - - internal PooledConnection(IDbConnection dbConnection) - { - _dbConnection = dbConnection; - _pooledCommands = new ConcurrentDictionary(); - } - - internal ConcurrentDictionary PooledCommands - { - get - { - return _pooledCommands; - } - set - { - _pooledCommands = value; - } - } - - public short Number - { - get - { - return _number; - } - set - { - _number = value; - } - } - - public IDbConnection DbConnection - { - get - { - return _dbConnection; - } - set - { - _dbConnection = value; - } - } - - public string ConnectionString - { - get - { - return _dbConnection.ConnectionString; - } - set - { - _dbConnection.ConnectionString = value; - } - } - - public int ConnectionTimeout - { - get - { - return _dbConnection.ConnectionTimeout; - } - } - - public string Database - { - get - { - return _dbConnection.Database; - } - } - - public ConnectionState State - { - get - { - return _dbConnection.State; - } - } - - public bool Released - { - get - { - return _released; - } - internal set - { - _released = value; - } - } - - public IDbTransaction BeginTransaction() - { - return _dbConnection.BeginTransaction(); - } - - public IDbTransaction BeginTransaction(IsolationLevel il) - { - return _dbConnection.BeginTransaction(il); - } - - public void ChangeDatabase(string databaseName) - { - _dbConnection.ChangeDatabase(databaseName); - } - - public void Close() - { - _dbConnection.Close(); - _released = true; - } - - public IDbCommand CreateCommand() - { - return _dbConnection.CreateCommand(); - } - - public void Open() - { - if (_dbConnection.State == ConnectionState.Closed) - { - _dbConnection.Open(); - } - } - - public void Release() - { - if (!_released && _dbConnection.State == ConnectionState.Open) - { - PooledConnections.Release(this); - } - } - - public void Dispose() - { - if (!_released && _dbConnection.State == ConnectionState.Open) - { - PooledConnections.Dispose(this); - } - } - - public async Task OpenAsync() - { - if (_dbConnection.State == ConnectionState.Closed) - { - await (_dbConnection as DbConnection).OpenAsync(); - } - } - - internal PooledCommand GetCommand(string commandText, PooledCommand pooledCommand) - { - PooledCommand internalCommand; - - if (_pooledCommands.TryRemove(commandText, out internalCommand)) - { 
- pooledCommand.DbCommand = internalCommand.DbCommand; - pooledCommand.PooledConnection = internalCommand.PooledConnection; - } - else - { - pooledCommand.DbCommand = this.DbConnection.CreateCommand(); - pooledCommand.DbCommand.CommandText = commandText; - pooledCommand.PooledConnection = this; - - //For non odbc drivers like Npgsql which do not support Prepare -#if !ADO - pooledCommand.DbCommand.Prepare(); -#endif - - //Console.WriteLine("prepare pool connection: " + this._number + " for command " + _pooledCommands.Count); - } - - return pooledCommand; - } - - public void ReleaseCommand(PooledCommand pooledCommand) - { - _pooledCommands.TryAdd(pooledCommand.CommandText, pooledCommand); - } - } -} \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/Db/PooledConnections.cs b/frameworks/CSharp/appmpower/src/Db/PooledConnections.cs deleted file mode 100644 index 718974379e8..00000000000 --- a/frameworks/CSharp/appmpower/src/Db/PooledConnections.cs +++ /dev/null @@ -1,88 +0,0 @@ -using System.Collections.Concurrent; -using System.Threading.Tasks; - -namespace appMpower.Db -{ - public static class PooledConnections - { - private static bool _connectionsCreated = false; - private static short _createdConnections = 0; - private static short _maxConnections = 240; - - private static ConcurrentStack _stack = new ConcurrentStack(); - private static ConcurrentQueue> _waitingQueue = new ConcurrentQueue>(); - - public static async Task GetConnection(string connectionString) - { - PooledConnection pooledConnection = null; - - if (_connectionsCreated) - { - if (_stack.TryPop(out pooledConnection)) - { - pooledConnection.Released = false; - } - else - { - pooledConnection = await GetPooledConnectionAsync(); - } - - return pooledConnection; - } - else - { - pooledConnection = new PooledConnection(); - -#if ADO - pooledConnection.DbConnection = new Npgsql.NpgsqlConnection(connectionString); -#else - pooledConnection.DbConnection = new System.Data.Odbc.OdbcConnection(connectionString); -#endif - - _createdConnections++; - - if (_createdConnections == _maxConnections) _connectionsCreated = true; - - pooledConnection.Number = _createdConnections; - pooledConnection.PooledCommands = new ConcurrentDictionary(); - //Console.WriteLine("opened connection number: " + pooledConnection.Number); - - return pooledConnection; - } - } - - public static Task GetPooledConnectionAsync() - { - var taskCompletionSource = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); - - _waitingQueue.Enqueue(taskCompletionSource); - return taskCompletionSource.Task; - } - - public static void Dispose(PooledConnection pooledConnection) - { - PooledConnection newPooledConnection = new PooledConnection(); - - newPooledConnection.DbConnection = pooledConnection.DbConnection; - newPooledConnection.Number = pooledConnection.Number; - newPooledConnection.PooledCommands = pooledConnection.PooledCommands; - - Release(newPooledConnection); - } - - public static void Release(PooledConnection pooledConnection) - { - TaskCompletionSource taskCompletionSource; - - if (_waitingQueue.TryDequeue(out taskCompletionSource)) - { - taskCompletionSource.SetResult(pooledConnection); - } - else - { - pooledConnection.Released = true; - _stack.Push(pooledConnection); - } - } - } -} diff --git a/frameworks/CSharp/appmpower/src/RawDb.cs b/frameworks/CSharp/appmpower/src/RawDb.cs index 4b65bc15b83..8813ce6dfcc 100644 --- a/frameworks/CSharp/appmpower/src/RawDb.cs +++ b/frameworks/CSharp/appmpower/src/RawDb.cs @@ -1,11 
+1,10 @@ using System; using System.Collections.Generic; using System.Data; -using System.Data.Common; using System.Linq; using System.Threading.Tasks; using Microsoft.Extensions.Caching.Memory; -using appMpower.Db; +using appMpower.Data; using PlatformBenchmarks; namespace appMpower @@ -32,36 +31,37 @@ public static class RawDb public static async Task LoadSingleQueryRow() { - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var (pooledCommand, _) = CreateReadCommand(pooledConnection); - var world = await ReadSingleRow(pooledCommand); + var (dbCommand, _) = CreateReadCommand(pooledConnection); - pooledCommand.Release(); - pooledConnection.Release(); + using (dbCommand) + { + var world = await ReadSingleRow(dbCommand); - return world; + return world; + } } public static async Task LoadMultipleQueriesRows(int count) { var worlds = new World[count]; - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var (pooledCommand, dbDataParameter) = CreateReadCommand(pooledConnection); + var (dbCommand, dbDataParameter) = CreateReadCommand(pooledConnection); - for (int i = 0; i < count; i++) + using (dbCommand) { - worlds[i] = await ReadSingleRow(pooledCommand); - dbDataParameter.Value = _random.Next(1, 10001); + for (int i = 0; i < count; i++) + { + worlds[i] = await ReadSingleRow(dbCommand); + dbDataParameter.Value = _random.Next(1, 10001); + } } - pooledCommand.Release(); - pooledConnection.Release(); - return worlds; } @@ -69,17 +69,20 @@ public static async Task> LoadFortunesRows() { var fortunes = new List(); - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var pooledCommand = new PooledCommand("SELECT * FROM fortune", pooledConnection); - var dataReader = await pooledCommand.ExecuteReaderAsync(CommandBehavior.SingleResult & CommandBehavior.SequentialAccess); + var dbCommand = new DbCommand("SELECT * FROM fortune", pooledConnection); - while (dataReader.Read()) + using (dbCommand) { - fortunes.Add(new Fortune - ( - id: dataReader.GetInt32(0), + var dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.SingleResult & CommandBehavior.SequentialAccess); + + while (dataReader.Read()) + { + fortunes.Add(new Fortune + ( + id: dataReader.GetInt32(0), #if MYSQL //MariaDB ODBC connector does not correctly support Japanese characters in combination with default ADO.NET; //as a solution we custom read this string @@ -88,11 +91,10 @@ public static async Task> LoadFortunesRows() message: dataReader.GetString(1) #endif )); - } + } - dataReader.Close(); - pooledCommand.Release(); - pooledConnection.Release(); + dataReader.Close(); + } fortunes.Add(new Fortune(id: 0, message: "Additional fortune added at request time.")); fortunes.Sort(); @@ -104,20 +106,21 @@ public static async Task LoadMultipleUpdatesRows(int count) { var worlds = new World[count]; - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new 
DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); var (queryCommand, dbDataParameter) = CreateReadCommand(pooledConnection); - for (int i = 0; i < count; i++) + using (queryCommand) { - worlds[i] = await ReadSingleRow(queryCommand); - dbDataParameter.Value = _random.Next(1, 10001); + for (int i = 0; i < count; i++) + { + worlds[i] = await ReadSingleRow(queryCommand); + dbDataParameter.Value = _random.Next(1, 10001); + } } - queryCommand.Release(); - - var updateCommand = new PooledCommand(PlatformBenchmarks.BatchUpdateString.Query(count), pooledConnection); + using var updateCommand = new DbCommand(PlatformBenchmarks.BatchUpdateString.Query(count), pooledConnection); var ids = PlatformBenchmarks.BatchUpdateString.Ids; var randoms = PlatformBenchmarks.BatchUpdateString.Randoms; @@ -145,26 +148,23 @@ public static async Task LoadMultipleUpdatesRows(int count) await updateCommand.ExecuteNonQueryAsync(); - updateCommand.Release(); - pooledConnection.Release(); - return worlds; } - private static (PooledCommand pooledCommand, IDbDataParameter dbDataParameter) CreateReadCommand(PooledConnection pooledConnection) + private static (DbCommand dbCommand, IDbDataParameter dbDataParameter) CreateReadCommand(DbConnection pooledConnection) { #if ADO - var pooledCommand = new PooledCommand("SELECT * FROM world WHERE id=@Id", pooledConnection); + var dbCommand = new DbCommand("SELECT * FROM world WHERE id=@Id", pooledConnection); #else - var pooledCommand = new PooledCommand("SELECT * FROM world WHERE id=?", pooledConnection); + var dbCommand = new DbCommand("SELECT * FROM world WHERE id=?", pooledConnection); #endif - return (pooledCommand, pooledCommand.CreateParameter("Id", DbType.Int32, _random.Next(1, 10001))); + return (dbCommand, dbCommand.CreateParameter("Id", DbType.Int32, _random.Next(1, 10001))); } - private static async Task ReadSingleRow(PooledCommand pooledCommand) + private static async Task ReadSingleRow(DbCommand dbCommand) { - var dataReader = await pooledCommand.ExecuteReaderAsync(CommandBehavior.SingleRow & CommandBehavior.SequentialAccess); + var dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.SingleRow & CommandBehavior.SequentialAccess); dataReader.Read(); @@ -202,17 +202,17 @@ public static async Task ReadMultipleRows(int count) queryString = _queriesMultipleRows[count] = PlatformBenchmarks.StringBuilderCache.GetStringAndRelease(stringBuilder); } - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var pooledCommand = new PooledCommand(queryString, pooledConnection); + using var dbCommand = new DbCommand(queryString, pooledConnection); for (int i = 0; i < count; i++) { - pooledCommand.CreateParameter(ids[i], DbType.Int32, _random.Next(1, 10001)); + dbCommand.CreateParameter(ids[i], DbType.Int32, _random.Next(1, 10001)); } - var dataReader = await pooledCommand.ExecuteReaderAsync(CommandBehavior.Default & CommandBehavior.SequentialAccess); + var dataReader = await dbCommand.ExecuteReaderAsync(CommandBehavior.Default & CommandBehavior.SequentialAccess); do { @@ -228,15 +228,13 @@ public static async Task ReadMultipleRows(int count) } while (await dataReader.NextResultAsync()); dataReader.Close(); - pooledCommand.Release(); - pooledConnection.Release(); return worlds; } - public static string ReadColumn(DbDataReader dbDataReader, int column) 
+ public static string ReadColumn(IDataReader dataReader, int column) { - long size = dbDataReader.GetBytes(column, 0, null, 0, 0); //get the length of data + long size = dataReader.GetBytes(column, 0, null, 0, 0); //get the length of data byte[] values = new byte[size]; int bufferSize = 64; @@ -245,7 +243,7 @@ public static string ReadColumn(DbDataReader dbDataReader, int column) while (bytesRead < size) { - bytesRead += dbDataReader.GetBytes(column, currentPosition, values, currentPosition, bufferSize); + bytesRead += dataReader.GetBytes(column, currentPosition, values, currentPosition, bufferSize); currentPosition += bufferSize; } @@ -254,12 +252,12 @@ public static string ReadColumn(DbDataReader dbDataReader, int column) public static async Task PopulateCache() { - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var (pooledCommand, dbDataParameter) = CreateReadCommand(pooledConnection); + var (dbCommand, dbDataParameter) = CreateReadCommand(pooledConnection); - using (pooledCommand) + using (dbCommand) { var cacheKeys = _cacheKeys; var cache = _cache; @@ -267,12 +265,9 @@ public static async Task PopulateCache() for (var i = 1; i < 10001; i++) { dbDataParameter.Value = i; - cache.Set(cacheKeys[i], await ReadSingleRow(pooledCommand)); + cache.Set(cacheKeys[i], await ReadSingleRow(dbCommand)); } } - - pooledCommand.Release(); - pooledConnection.Release(); } public static Task LoadCachedQueries(int count) @@ -301,19 +296,18 @@ public static Task LoadCachedQueries(int count) return Task.FromResult(result); } - //static async Task LoadUncachedQueries(int id, int i, int count, RawDb rawdb, CachedWorld[] result) static async Task LoadUncachedQueries(int id, int i, int count, CachedWorld[] result) { - var pooledConnection = await PooledConnections.GetConnection(DataProvider.ConnectionString); - pooledConnection.Open(); + using var pooledConnection = new DbConnection(DbProviderFactory.ConnectionString); + await pooledConnection.OpenAsync(); - var (pooledCommand, dbDataParameter) = CreateReadCommand(pooledConnection); + var (dbCommand, dbDataParameter) = CreateReadCommand(pooledConnection); - using (pooledCommand) + using (dbCommand) { Func> create = async (entry) => { - return await ReadSingleRow(pooledCommand); + return await ReadSingleRow(dbCommand); }; var cacheKeys = _cacheKeys; @@ -329,9 +323,6 @@ static async Task LoadUncachedQueries(int id, int i, int count, C dbDataParameter.Value = id; key = cacheKeys[id]; } - - pooledCommand.Release(); - pooledConnection.Release(); } return result; diff --git a/frameworks/CSharp/appmpower/src/appMpower.ado b/frameworks/CSharp/appmpower/src/appMpower.ado index c15e464b05f..a438de011dc 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.ado +++ b/frameworks/CSharp/appmpower/src/appMpower.ado @@ -17,12 +17,15 @@ - + + + $(DefineConstants);ADO + $(DefineConstants);POSTGRESQL \ No newline at end of file diff --git a/frameworks/CSharp/appmpower/src/appMpower.csproj b/frameworks/CSharp/appmpower/src/appMpower.csproj index f38c1a11e0e..961fe29434c 100644 --- a/frameworks/CSharp/appmpower/src/appMpower.csproj +++ b/frameworks/CSharp/appmpower/src/appMpower.csproj @@ -33,7 +33,8 @@ - $(DefineConstants);MYSQL + $(DefineConstants);ODBC + $(DefineConstants);MYSQL \ No newline at end of file diff --git a/frameworks/CSharp/aspnetcore/benchmark_config.json 
b/frameworks/CSharp/aspnetcore/benchmark_config.json index 770d463dfe7..b742c109df2 100644 --- a/frameworks/CSharp/aspnetcore/benchmark_config.json +++ b/frameworks/CSharp/aspnetcore/benchmark_config.json @@ -65,7 +65,7 @@ "approach": "Realistic", "classification": "Micro", "database": "None", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -83,7 +83,7 @@ "approach": "Realistic", "classification": "Micro", "database": "None", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -103,7 +103,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -121,7 +121,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -141,7 +141,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Full", "platform": ".NET", @@ -161,7 +161,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Micro", "platform": ".NET", @@ -179,7 +179,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Micro", "platform": ".NET", @@ -200,7 +200,7 @@ "approach": "Realistic", "classification": "Micro", "database": "MySQL", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -221,7 +221,7 @@ "approach": "Realistic", "classification": "Micro", "database": "MySQL", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [Middleware]", "language": "C#", "orm": "Micro", "platform": ".NET", @@ -240,7 +240,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "None", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -260,7 +260,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -278,7 +278,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -298,7 +298,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Full", "platform": ".NET", @@ -318,7 +318,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Micro", "platform": ".NET", @@ -336,7 +336,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Micro", "platform": ".NET", @@ -357,7 +357,7 @@ "approach": 
"Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Raw", "platform": ".NET", @@ -378,7 +378,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "ASP.NET Core", + "framework": "ASP.NET Core [MVC]", "language": "C#", "orm": "Micro", "platform": ".NET", diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.csproj index f04cb044d3d..edfa5d791f8 100644 --- a/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.csproj +++ b/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.csproj @@ -8,7 +8,7 @@ - + diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.sln b/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.sln deleted file mode 100644 index a8ad56f39a3..00000000000 --- a/frameworks/CSharp/fastendpoints/Benchmarks/Benchmarks.sln +++ /dev/null @@ -1,25 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.32002.185 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Benchmarks", "Benchmarks.csproj", "{95F15ACC-FFB8-4C45-BF4E-6E2B602C1EBA}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {95F15ACC-FFB8-4C45-BF4E-6E2B602C1EBA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {95F15ACC-FFB8-4C45-BF4E-6E2B602C1EBA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {95F15ACC-FFB8-4C45-BF4E-6E2B602C1EBA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {95F15ACC-FFB8-4C45-BF4E-6E2B602C1EBA}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {975848F3-00CE-49FC-A82F-86DDC0A3CC6F} - EndGlobalSection -EndGlobal diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/JsonEndpoint.cs b/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/JsonEndpoint.cs index 20cd12068dc..ac904b48071 100644 --- a/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/JsonEndpoint.cs +++ b/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/JsonEndpoint.cs @@ -1,6 +1,6 @@ namespace Benchmarks.Endpoints; -public class JsonEndpoint : Endpoint +public class JsonEndpoint : Endpoint { public override void Configure() { @@ -8,7 +8,7 @@ public override void Configure() AllowAnonymous(); } - public override Task HandleAsync(object _, CancellationToken __) + public override Task HandleAsync(EmptyRequest _, CancellationToken __) { HttpContext.Response.ContentLength = 27; return SendAsync(new { message = "Hello, World!" 
}); diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/PlainTextEndpoint.cs b/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/PlainTextEndpoint.cs index 5468ab35ae4..6899e55c2e3 100644 --- a/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/PlainTextEndpoint.cs +++ b/frameworks/CSharp/fastendpoints/Benchmarks/Endpoints/PlainTextEndpoint.cs @@ -1,6 +1,6 @@ namespace Benchmarks.Endpoints; -public class PlainTextEndpoint : Endpoint +public class PlainTextEndpoint : Endpoint { private static readonly byte[] payload = System.Text.Encoding.UTF8.GetBytes("Hello, World!"); @@ -10,7 +10,7 @@ public override void Configure() AllowAnonymous(); } - public override Task HandleAsync(object _, CancellationToken __) + public override Task HandleAsync(EmptyRequest _, CancellationToken __) { HttpContext.Response.StatusCode = StatusCodes.Status200OK; HttpContext.Response.ContentType = "text/plain"; diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/Program.cs b/frameworks/CSharp/fastendpoints/Benchmarks/Program.cs index 1b36635cb43..0cafb905e95 100644 --- a/frameworks/CSharp/fastendpoints/Benchmarks/Program.cs +++ b/frameworks/CSharp/fastendpoints/Benchmarks/Program.cs @@ -6,4 +6,4 @@ var app = builder.Build(); app.UseFastEndpoints(); -app.Run(); +app.Run("http://0.0.0.0:8080"); diff --git a/frameworks/CSharp/fastendpoints/fastendpoints.dockerfile b/frameworks/CSharp/fastendpoints/fastendpoints.dockerfile index 19bf5d3ef3f..207c43c5ee9 100644 --- a/frameworks/CSharp/fastendpoints/fastendpoints.dockerfile +++ b/frameworks/CSharp/fastendpoints/fastendpoints.dockerfile @@ -1,12 +1,16 @@ -FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build +FROM mcr.microsoft.com/dotnet/sdk:6.0.100 AS build WORKDIR /app COPY Benchmarks . RUN dotnet publish -c Release -o out -FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +FROM mcr.microsoft.com/dotnet/aspnet:6.0.0 AS runtime WORKDIR /app COPY --from=build /app/out ./ +ENV DOTNET_TieredPGO 1 +ENV DOTNET_TC_QuickJitForLoops 1 +ENV DOTNET_ReadyToRun 0 + EXPOSE 8080 ENTRYPOINT ["dotnet", "Benchmarks.dll"] \ No newline at end of file diff --git a/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj index 9eec55e889b..d5ffe0609bc 100644 --- a/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj +++ b/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj @@ -26,11 +26,11 @@ - - - - - + + + + + \ No newline at end of file diff --git a/frameworks/CSharp/revenj/revenj.dockerfile b/frameworks/CSharp/revenj/revenj.dockerfile index 1d901389b38..5ba717ac7fd 100644 --- a/frameworks/CSharp/revenj/revenj.dockerfile +++ b/frameworks/CSharp/revenj/revenj.dockerfile @@ -1,4 +1,4 @@ -FROM mono:5.12.0.226 +FROM mono:6.12.0.122 RUN apt-get update -yqq && apt-get install -yqq unzip RUN mkdir /java diff --git a/frameworks/CSharp/servicestack-v6/.gitignore b/frameworks/CSharp/servicestack-v6/.gitignore new file mode 100644 index 00000000000..708c4155fa7 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/.gitignore @@ -0,0 +1,37 @@ +[Oo]bj/ +[Bb]in/ +TestResults/ +.nuget/ +*.sln.ide/ +_ReSharper.*/ +.idea/ +packages/ +artifacts/ +PublishProfiles/ +.vs/ +*.user +*.suo +*.cache +*.docstates +_ReSharper.* +nuget.exe +*net45.csproj +*net451.csproj +*k10.csproj +*.psess +*.vsp +*.pidb +*.userprefs +*DS_Store +*.ncrunchsolution +*.*sdf +*.ipch +*.swp +*~ +.build/ +.testPublish/ +launchSettings.json +BenchmarkDotNet.Artifacts/ +BDN.Generated/ +binaries/ +global.json diff --git 
a/frameworks/CSharp/servicestack-v6/Benchmarks/Configure.AppHost.cs b/frameworks/CSharp/servicestack-v6/Benchmarks/Configure.AppHost.cs new file mode 100644 index 00000000000..b5e9ee53496 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/Configure.AppHost.cs @@ -0,0 +1,26 @@ +using Funq; +using ServiceStack; +using ServicestackV6.ServiceInterface; + +[assembly: HostingStartup(typeof(ServicestackV6.AppHost))] + +namespace ServicestackV6; + +public class AppHost : AppHostBase, IHostingStartup +{ + public void Configure(IWebHostBuilder builder) => builder.Configure(app => + { + if (!HasInit) + app.UseServiceStack(new AppHost()); + }); + + public AppHost() : base("ServicestackV6", typeof(MyServices).Assembly) { } + + public override void Configure(Container container) + { + SetConfig(new HostConfig + { + UseSameSiteCookies = true, + }); + } +} diff --git a/frameworks/CSharp/servicestack-v6/Benchmarks/Models/Json.cs b/frameworks/CSharp/servicestack-v6/Benchmarks/Models/Json.cs new file mode 100644 index 00000000000..8b41fa8b8f8 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/Models/Json.cs @@ -0,0 +1,11 @@ +using ServiceStack; + +namespace ServicestackV6.ServiceModel; + +[Route("/json")] +public class JsonRequest : IReturn { } + +public class JsonResponse +{ + public string message => "Hello, World!"; +} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/Benchmarks/Models/PlainText.cs b/frameworks/CSharp/servicestack-v6/Benchmarks/Models/PlainText.cs new file mode 100644 index 00000000000..2ca5572fb93 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/Models/PlainText.cs @@ -0,0 +1,6 @@ +using ServiceStack; + +namespace ServicestackV6.ServiceModel; + +[Route("/plaintext")] +public class PlainTextRequest : IReturnVoid { } \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/Benchmarks/Program.cs b/frameworks/CSharp/servicestack-v6/Benchmarks/Program.cs new file mode 100644 index 00000000000..ebe45483b50 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/Program.cs @@ -0,0 +1,4 @@ +WebApplication + .CreateBuilder(args) + .Build() + .Run(); \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/Benchmarks/Services/MyServices.cs b/frameworks/CSharp/servicestack-v6/Benchmarks/Services/MyServices.cs new file mode 100644 index 00000000000..cca44f3e624 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/Services/MyServices.cs @@ -0,0 +1,22 @@ +using ServiceStack; +using ServicestackV6.ServiceModel; + +namespace ServicestackV6.ServiceInterface; + +public class MyServices : Service +{ + private static readonly byte[] payload = System.Text.Encoding.UTF8.GetBytes("Hello, World!"); + + public object Get(JsonRequest _) + { + Response.SetContentLength(27); + return new JsonResponse(); + } + + public byte[] Get(PlainTextRequest _) + { + Response.SetContentLength(payload.Length); + Response.ContentType = "text/plain"; + return payload; + } +} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/Benchmarks/ServicestackV6.csproj b/frameworks/CSharp/servicestack-v6/Benchmarks/ServicestackV6.csproj new file mode 100644 index 00000000000..c853d5cb021 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/Benchmarks/ServicestackV6.csproj @@ -0,0 +1,13 @@ + + + + net6.0 + enable + enable + + + + + + + diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/appsettings.Development.json b/frameworks/CSharp/servicestack-v6/Benchmarks/appsettings.Development.json 
similarity index 100% rename from frameworks/CSharp/fastendpoints/Benchmarks/appsettings.Development.json rename to frameworks/CSharp/servicestack-v6/Benchmarks/appsettings.Development.json diff --git a/frameworks/CSharp/fastendpoints/Benchmarks/appsettings.json b/frameworks/CSharp/servicestack-v6/Benchmarks/appsettings.json similarity index 100% rename from frameworks/CSharp/fastendpoints/Benchmarks/appsettings.json rename to frameworks/CSharp/servicestack-v6/Benchmarks/appsettings.json diff --git a/frameworks/CSharp/servicestack-v6/README.md b/frameworks/CSharp/servicestack-v6/README.md new file mode 100644 index 00000000000..1cdf7d086d2 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/README.md @@ -0,0 +1,26 @@ +# Servicestack-V6 Tests on Windows and Linux +This includes tests for plaintext and JSON serialization. + +## Infrastructure Software Versions + +**Language** + +* C# 10.0 + +**Platforms** + +* .NET 6.0 (Windows and Linux) + +**Web Servers** + +* [Kestrel](https://github.com/dotnet/aspnetcore/tree/main/src/Servers/Kestrel) + +**Web Stack** + +* [ServiceStack](https://servicestack.net/) +* ASP.NET 6 + +## Paths & Source for Tests + +* [Plaintext](Benchmarks/Services/MyServices.cs#L10): "http://localhost:8080/plaintext" +* [JSON Serialization](Benchmarks/Services/MyServices.cs#L16): "http://localhost:8080/json" \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/benchmark_config.json b/frameworks/CSharp/servicestack-v6/benchmark_config.json new file mode 100644 index 00000000000..b481ae213b5 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "servicestack-v6", + "tests": [ + { + "default": { + "plaintext_url": "/plaintext", + "json_url": "/json", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "None", + "framework": "ServiceStack-V6", + "language": "C#", + "orm": "Raw", + "platform": ".NET", + "flavor": "CoreCLR", + "webserver": "Kestrel", + "os": "Linux", + "database_os": "Linux", + "display_name": "ServiceStack-V6", + "notes": "", + "versus": "aspcore-mvc" + } + } + ] +} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/config.toml b/frameworks/CSharp/servicestack-v6/config.toml similarity index 58% rename from frameworks/CSharp/servicestack/config.toml rename to frameworks/CSharp/servicestack-v6/config.toml index a602ae621c5..2d443087327 100644 --- a/frameworks/CSharp/servicestack/config.toml +++ b/frameworks/CSharp/servicestack-v6/config.toml @@ -1,15 +1,15 @@ [framework] -name = "servicestack" +name = "servicestack-v6" [main] urls.plaintext = "/plaintext" urls.json = "/json" approach = "Realistic" -classification = "Fullstack" +classification = "Micro" database = "None" database_os = "Linux" os = "Linux" orm = "Raw" -platform = "None" -webserver = "nginx" -versus = "servicestack" +platform = ".NET" +webserver = "Kestrel" +versus = "aspcore-mvc" \ No newline at end of file diff --git a/frameworks/CSharp/servicestack-v6/servicestack-v6.dockerfile b/frameworks/CSharp/servicestack-v6/servicestack-v6.dockerfile new file mode 100644 index 00000000000..d61fa34d913 --- /dev/null +++ b/frameworks/CSharp/servicestack-v6/servicestack-v6.dockerfile @@ -0,0 +1,12 @@ +FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build +WORKDIR /app +COPY Benchmarks .
+RUN dotnet publish -c Release -o out + +FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +WORKDIR /app +COPY --from=build /app/out ./ + +EXPOSE 8080 + +ENTRYPOINT ["dotnet", "ServicestackV6.dll"] \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/README.md b/frameworks/CSharp/servicestack/README.md deleted file mode 100644 index c05a1ad1500..00000000000 --- a/frameworks/CSharp/servicestack/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# ServiceStack on Mono and Windows - -## Tests - -### JSON Response - -* `http://localhost:8080/json` - -### Plain Text Response - -* `http://localhost:8080/plaintext` - -### Database Responses - -**Microsoft SQL Server** using ORMLite - -* `http://localhost:8080/sqlserver/db` -* `http://localhost:8080/sqlserver/queries/10` -* `http://localhost:8080/sqlserver/fortunes` -* `http://localhost:8080/sqlserver/updates/25` - -**MySQL** using ORMLite - -* `http://localhost:8080/mysql/db` -* `http://localhost:8080/mysql/queries/10` -* `http://localhost:8080/mysql/fortunes` -* `http://localhost:8080/mysql/updates/25` - -**PostgreSQL** using ORMLite - -* `http://localhost:8080/postgresql/db` -* `http://localhost:8080/postgresql/queries/10` -* `http://localhost:8080/postgresql/fortunes` -* `http://localhost:8080/postgresql/updates/25` - -**MongoDB** - -* `http://localhost:8080/mongodb/db` -* `http://localhost:8080/mongodb/queries/10` -* `http://localhost:8080/mongodb/fortunes` -* `http://localhost:8080/mongodb/updates/25` - ---- - -## Mono Installation - - sudo apt-get install build-essential autoconf automake libtool zlib1g-dev git - - git clone git://github.com/mono/mono - cd mono - git checkout mono-3.0.10 - ./autogen.sh --prefix=/usr/local - make get-monolite-latest - make EXTERNAL_MCS=${PWD}/mcs/class/lib/monolite/basic.exe - sudo make install - - cd .. 
- - git clone git://github.com/mono/xsp - cd xsp - git checkout 3.0 - ./autogen.sh --prefix=/usr/local - make - sudo make install - -## Versions - -**Language** - -* C# 5.0 - -**Platforms** - -* .NET Framework 4.5 (Windows) -* Mono 3.0.X (Linux) - -**Web Servers** - -* Self Hosting using HTTPListener (Windows/Linux) -* IIS 8 (Windows) - includes [Swagger](http://www.nuget.org/packages/ServiceStack.Api.Swagger/) -* nginx 1.4.0 & XSP FastCGI (Linux) - -**Web Stack** - -* ASP.NET 4.5 -* [ServiceStack](https://github.com/servicestack/servicestack/wiki) - -**Database Providers** - -* Microsoft SQL Server - [interface](http://www.nuget.org/packages/ServiceStack.OrmLite.SqlServer/) -* MySQL - [driver](http://www.nuget.org/packages/MySql.Data/) + [interface](http://www.nuget.org/packages/ServiceStack.OrmLite.MySql/) -* PostgreSQL - [driver](http://www.nuget.org/packages/Npgsql/) + [interface](http://www.nuget.org/packages/ServiceStack.OrmLite.PostgreSQL/) -* MongoDB - [driver](http://www.nuget.org/packages/mongocsharpdriver/) - -**Caching Providers** - -* In-Memory -* Redis NoSQL Db - [client w/interface](http://www.nuget.org/packages/ServiceStack.Redis) -* MemCache - [client](http://www.nuget.org/packages/EnyimMemcached) + [interface](http://www.nuget.org/packages/ServiceStack.Caching.Memcached) -* Amazon Web Services In-Memory DynamoDb DataCache - [client](http://www.nuget.org/packages/AWSSDK) + [interface](http://www.nuget.org/packages/ServiceStack.Caching.AwsDynamoDb) -* Microsoft Azure In-Memory DataCache - [client](http://www.nuget.org/packages/WindowsAzure.Caching) + [interface](http://www.nuget.org/packages/ServiceStack.Caching.Azure) - -**Developer Tools** - -* Visual Studio 2012 diff --git a/frameworks/CSharp/servicestack/benchmark_config.json b/frameworks/CSharp/servicestack/benchmark_config.json deleted file mode 100644 index 355dda825ae..00000000000 --- a/frameworks/CSharp/servicestack/benchmark_config.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "framework": "servicestack", - "tests": [{ - "default": { - "json_url": "/json", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "None", - "framework": "servicestack", - "language": "C#", - "flavor": "Mono", - "orm": "Raw", - "platform": "None", - "webserver": "nginx", - "os": "Linux", - "database_os": "Linux", - "display_name": "servicestack", - "notes": "", - "versus": "servicestack" - } - }] -} diff --git a/frameworks/CSharp/servicestack/nginx.conf b/frameworks/CSharp/servicestack/nginx.conf deleted file mode 100644 index 70fa2d0fee0..00000000000 --- a/frameworks/CSharp/servicestack/nginx.conf +++ /dev/null @@ -1,21 +0,0 @@ -pid /tmp/nginx.pid; -error_log stderr error; - -events { - worker_connections 8192; -} - -http { - access_log off; - include nginx.upstream.conf; - - server { - listen 8080; - - location / { - fastcgi_pass mono; - include /etc/nginx/fastcgi_params; - fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; - } - } -} diff --git a/frameworks/CSharp/servicestack/run.sh b/frameworks/CSharp/servicestack/run.sh deleted file mode 100644 index 23b215f604e..00000000000 --- a/frameworks/CSharp/servicestack/run.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -CPU_COUNT=$(nproc) - -# one fastcgi instance for each thread -# load balanced by nginx -port_start=9001 -port_end=$(($port_start+$CPU_COUNT)) - -# To debug, use --printlog --verbose --loglevels=All -for port in $(seq $port_start $port_end); do - MONO_OPTIONS=--gc=sgen 
fastcgi-mono-server4 --applications=/:/servicestack/src --socket=tcp:127.0.0.1:$port & -done - -sleep 5s - -# nginx -conf="upstream mono {\n" -for port in $(seq $port_start $port_end); do - conf+="\tserver 127.0.0.1:${port};\n" -done -conf+="}" - -echo -e $conf > nginx.upstream.conf -nginx -c /servicestack/nginx.conf -g "worker_processes ${CPU_COUNT};" - -wait diff --git a/frameworks/CSharp/servicestack/servicestack.dockerfile b/frameworks/CSharp/servicestack/servicestack.dockerfile deleted file mode 100644 index f6cc4a2ca80..00000000000 --- a/frameworks/CSharp/servicestack/servicestack.dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM mono:5.8.0.127 -RUN apt-get update -yqq && apt-get install -yqq nginx wget mono-fastcgi-server - -WORKDIR /servicestack -COPY src src -COPY nginx.conf nginx.conf -COPY run.sh run.sh - -RUN mkdir lib -RUN curl -sL -O https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -RUN mono nuget.exe install src/packages.config -OutputDirectory lib/ -RUN xbuild src/ServiceStackBenchmark.csproj /t:Clean -RUN xbuild src/ServiceStackBenchmark.csproj /t:Build - -EXPOSE 8080 - -CMD bash run.sh diff --git a/frameworks/CSharp/servicestack/src/AppHost.cs b/frameworks/CSharp/servicestack/src/AppHost.cs deleted file mode 100644 index 26c83a4076e..00000000000 --- a/frameworks/CSharp/servicestack/src/AppHost.cs +++ /dev/null @@ -1,64 +0,0 @@ -using System; -using System.Linq; -using System.Collections.Generic; - -using ServiceStack; -using ServiceStack.Api.Swagger; -using ServiceStack.CacheAccess; -using ServiceStack.CacheAccess.Providers; -using ServiceStack.Common; -using ServiceStack.Common.Web; -using ServiceStack.Redis; -using ServiceStack.ServiceHost; -using ServiceStack.WebHost.Endpoints; -using ServiceStack.WebHost.Endpoints.Formats; - - -namespace ServiceStackBenchmark -{ - - public class AppHost : AppHostBase - { - - public AppHost() : base("ServiceStackBenchmark", typeof(AppHost).Assembly) { } - - public override void Configure(Funq.Container container) - { - ServiceStack.Text.JsConfig.EmitCamelCaseNames = true; - - // Remove some unused features that by default are included - Plugins.RemoveAll(p => p is CsvFormat); - Plugins.RemoveAll(p => p is MetadataFeature); - - // Add plugins - Plugins.Add(new SwaggerFeature()); - - // Get disable features specified in Config file (i.e. Soap, Metadata, etc.) 
- var disabled = AppHostConfigHelper.GetDisabledFeatures(); - - // Construct Service Endpoint Host Configuration store - var config = new EndpointHostConfig - { - DefaultRedirectPath = "/swagger-ui/index.html", // default to the Swagger page - DefaultContentType = ContentType.Json, - WriteErrorsToResponse = false, - EnableFeatures = Feature.All.Remove(disabled), - AppendUtf8CharsetOnContentTypes = new HashSet { ContentType.Html }, - }; - - // Apply configuration - SetConfig(config); - - // Initialize Databases & associated Routes - container.InitDatabaseRoutes(Routes); - - // Register Cache Clients - container.Register(new MemoryCacheClient()); - - // Register Redis Client Manager - container.Register(c => - new PooledRedisClientManager("localhost:6379")); - } - - } -} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/AppHostConfigHelper.cs b/frameworks/CSharp/servicestack/src/AppHostConfigHelper.cs deleted file mode 100644 index 95653577495..00000000000 --- a/frameworks/CSharp/servicestack/src/AppHostConfigHelper.cs +++ /dev/null @@ -1,208 +0,0 @@ -using System; -using System.Linq; -using System.Configuration; -using System.Collections.Generic; -using System.Threading; - -using MongoDB.Bson; -using MongoDB.Bson.Serialization; -using MongoDB.Driver; - -using ServiceStack.ServiceHost; -using ServiceStackBenchmark.Model; - -namespace ServiceStackBenchmark -{ - public static class AppHostConfigHelper - { - public static bool InitMongoDB(this Funq.Container container) - { - try - { - // Register the MySql Database Connection Factory - var mongoDbConnectionString = ConfigurationManager.ConnectionStrings["MongoDB"].ConnectionString; - var client = new MongoClient(mongoDbConnectionString); - var server = client.GetServer(); - var database = server.GetDatabase("hello_world"); - container.Register(c => database); - - BsonClassMap.RegisterClassMap(cm => { - cm.MapProperty(c => c.id); - cm.MapProperty(c => c.randomNumber); - }); - - BsonClassMap.RegisterClassMap(cm => { - cm.MapProperty(c => c.id); - cm.MapProperty(c => c.message); - }); - - // Create needed tables in MySql Server if they do not exist - return database.CreateWorldTable() && database.CreateFortuneTable(); - } - catch - { - // Unregister failed database connection factory - container.Register(c => null); - - return false; - } - - } - - public static bool InitMySQL(this Funq.Container container) - { - try - { - // Register the MySql Database Connection Factory - var mySqlConnectionString = ConfigurationManager.ConnectionStrings["MySQL"]; - var mySqlFactory = new MySqlOrmLiteConnectionFactory(mySqlConnectionString.ConnectionString); - mySqlFactory.DialectProvider.UseUnicode = true; - container.Register(c => mySqlFactory); - - // Create needed tables in MySql Server if they do not exist - using (var conn = mySqlFactory.OpenDbConnection()) - { - return conn.CreateWorldTable() && conn.CreateFortuneTable(); - } - } - catch (Exception ex) - { - // Unregister failed database connection factory - container.Register(c => null); - - return false; - } - - } - - public static bool InitPostgreSQL(this Funq.Container container) - { - try - { - // Register the PostgreSQL Database Connection Factory - var postgreSqlConnectionString = ConfigurationManager.ConnectionStrings["PostgreSQL"]; - var postgreSqlFactory = new PostgreSqlOrmLiteConnectionFactory(postgreSqlConnectionString.ConnectionString); - postgreSqlFactory.DialectProvider.UseUnicode = true; - container.Register(c => postgreSqlFactory); - - // Create needed tables in 
PostgreSql Server if they do not exist - using (var conn = postgreSqlFactory.OpenDbConnection()) - { - return conn.CreateWorldTable() && conn.CreateFortuneTable(); - } - } - catch (Exception ex) - { - // Unregister failed database connection factory - container.Register(c => null); - - return false; - } - - } - - public static bool InitSQLServer(this Funq.Container container) - { - try - { - // Register the Microsoft Sql Server Database Connection Factory - var sqlServerConnectionString = ConfigurationManager.ConnectionStrings["SQLServer"]; - var sqlServerFactory = new SqlServerOrmLiteConnectionFactory(sqlServerConnectionString.ConnectionString); - sqlServerFactory.DialectProvider.UseUnicode = true; - container.Register(c => sqlServerFactory); - - // Create needed tables in Microsoft Sql Server if they do not exist - using (var conn = sqlServerFactory.OpenDbConnection()) - { - return conn.CreateWorldTable() && conn.CreateFortuneTable(); - } - } - catch (Exception ex) - { - // Unregister failed database connection factory - container.Register(c => null); - - return false; - } - - } - - public static void InitDatabaseRoutes(this Funq.Container container, IServiceRoutes routes) - { - if (container.InitMongoDB()) - { - routes.Add("/mongodb/db", "GET"); - routes.Add("/mongodb/queries/{queries}", "GET"); - routes.Add("/mongodb/fortunes", "GET"); - routes.Add("/mongodb/updates/{queries}", "GET"); - routes.Add("/mongodb/cached/db", "GET"); - } - - if (container.InitMySQL()) - { - routes.Add("/mysql/db", "GET"); - routes.Add("/mysql/queries/{queries}", "GET"); - routes.Add("/mysql/fortunes", "GET"); - routes.Add("/mysql/updates/{queries}", "GET"); - routes.Add("/mysql/cached/db", "GET"); - } - - if (container.InitPostgreSQL()) - { - routes.Add("/postgresql/db", "GET"); - routes.Add("/postgresql/queries/{queries}", "GET"); - routes.Add("/postgresql/fortunes", "GET"); - routes.Add("/postgresql/updates/{queries}", "GET"); - routes.Add("/postgresql/cached/db", "GET"); - } - - if (container.InitSQLServer()) - { - routes.Add("/sqlserver/db", "GET"); - routes.Add("/sqlserver/queries/{queries}", "GET"); - routes.Add("/sqlserver/fortunes", "GET"); - routes.Add("/sqlserver/updates/{queries}", "GET"); - routes.Add("/sqlserver/cached/db", "GET"); - } - } - - public static Feature GetDisabledFeatures() - { - try - { - var disabled = ConfigurationManager.AppSettings.Get("DisabledFeatures"); - - Feature d; - if (Enum.TryParse(disabled, true, out d)) - return d; - - return Feature.None; - } - catch - { - return Feature.None; - } - - } - - /// - /// Method to config the Minimum number of Worker Threads per Logical Processor Count. 
- /// - /// the Completion Port Threads are set to their defaults as there is no IO concerrency in our app - public static void ConfigThreadPool() - { - string minTPLPSetting = ConfigurationManager.AppSettings["minWorkerThreadsPerLogicalProcessor"]; - - if (minTPLPSetting == null) - return; - - int sysMinWorkerThreads, sysMinCompletionPortThreads; - ThreadPool.GetMinThreads(out sysMinWorkerThreads, out sysMinCompletionPortThreads); - - int newMinWorkerThreadsPerCPU = Math.Max(1, Convert.ToInt32(minTPLPSetting)); - - var minWorkerThreads = Environment.ProcessorCount * newMinWorkerThreadsPerCPU; - ThreadPool.SetMinThreads(minWorkerThreads, sysMinCompletionPortThreads); - } - } -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/IMySqlOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/IMySqlOrmLiteConnectionFactory.cs deleted file mode 100644 index 4f093869924..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/IMySqlOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,8 +0,0 @@ -using System; - -using ServiceStack.OrmLite; - -namespace ServiceStackBenchmark -{ - public interface IMySqlOrmLiteConnectionFactory : IDbConnectionFactory { } -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/IPostgreSqlOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/IPostgreSqlOrmLiteConnectionFactory.cs deleted file mode 100644 index 2d95beff24b..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/IPostgreSqlOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,8 +0,0 @@ -using System; - -using ServiceStack.OrmLite; - -namespace ServiceStackBenchmark -{ - public interface IPostgreSqlOrmLiteConnectionFactory : IDbConnectionFactory { } -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/ISqlServerOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/ISqlServerOrmLiteConnectionFactory.cs deleted file mode 100644 index c423a27a952..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/ISqlServerOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,8 +0,0 @@ -using System; - -using ServiceStack.OrmLite; - -namespace ServiceStackBenchmark -{ - public interface ISqlServerOrmLiteConnectionFactory : IDbConnectionFactory { } -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/MySqlOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/MySqlOrmLiteConnectionFactory.cs deleted file mode 100644 index a78d6755704..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/MySqlOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; - -using ServiceStack.OrmLite; -using ServiceStack.OrmLite.MySql; - -namespace ServiceStackBenchmark -{ - public class MySqlOrmLiteConnectionFactory : OrmLiteConnectionFactory, IMySqlOrmLiteConnectionFactory - { - public MySqlOrmLiteConnectionFactory(string s) : base(s, MySqlDialectProvider.Instance) { } - } -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/PostgreSqlOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/PostgreSqlOrmLiteConnectionFactory.cs deleted file mode 100644 index ce29b4aa9cd..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/PostgreSqlOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; - -using ServiceStack.OrmLite; -using ServiceStack.OrmLite.PostgreSQL; - -namespace ServiceStackBenchmark -{ - public class PostgreSqlOrmLiteConnectionFactory : OrmLiteConnectionFactory, IPostgreSqlOrmLiteConnectionFactory - { - public 
PostgreSqlOrmLiteConnectionFactory(string s) : base(s, PostgreSQLDialectProvider.Instance) { - this.DialectProvider.NamingStrategy = new LowercaseNamingStrategy(); - } - } - - public class LowercaseNamingStrategy : OrmLiteNamingStrategyBase - { - public override string GetTableName(string name) - { - return name.ToLower(); - } - - public override string GetColumnName(string name) - { - return name.ToLower(); - } - - } - -} diff --git a/frameworks/CSharp/servicestack/src/DbFactories/SqlServerOrmLiteConnectionFactory.cs b/frameworks/CSharp/servicestack/src/DbFactories/SqlServerOrmLiteConnectionFactory.cs deleted file mode 100644 index cf60c6a7ffc..00000000000 --- a/frameworks/CSharp/servicestack/src/DbFactories/SqlServerOrmLiteConnectionFactory.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; - -using ServiceStack.OrmLite; -using ServiceStack.OrmLite.SqlServer; - -namespace ServiceStackBenchmark -{ - public class SqlServerOrmLiteConnectionFactory : OrmLiteConnectionFactory, ISqlServerOrmLiteConnectionFactory - { - public SqlServerOrmLiteConnectionFactory(string s) : base(s, SqlServerOrmLiteDialectProvider.Instance) { } - } -} diff --git a/frameworks/CSharp/servicestack/src/Global.asax b/frameworks/CSharp/servicestack/src/Global.asax deleted file mode 100644 index 192bec597be..00000000000 --- a/frameworks/CSharp/servicestack/src/Global.asax +++ /dev/null @@ -1 +0,0 @@ -<%@ Application Codebehind="Global.asax.cs" Inherits="ServiceStackBenchmark.Global" Language="C#" %> diff --git a/frameworks/CSharp/servicestack/src/Global.asax.cs b/frameworks/CSharp/servicestack/src/Global.asax.cs deleted file mode 100644 index 3830d776c9e..00000000000 --- a/frameworks/CSharp/servicestack/src/Global.asax.cs +++ /dev/null @@ -1,22 +0,0 @@ -using System; -using System.Threading; -using System.Web; - -namespace ServiceStackBenchmark -{ - public class Global : HttpApplication - { - protected void Application_Start(object sender, EventArgs e) - { - AppHostConfigHelper.ConfigThreadPool(); - - new AppHost().Init(); - } - - protected void Application_BeginRequest(object src, EventArgs e) - { } - - protected void Application_EndRequest(object src, EventArgs e) - { } - } -} diff --git a/frameworks/CSharp/servicestack/src/Model/Fortune.cs b/frameworks/CSharp/servicestack/src/Model/Fortune.cs deleted file mode 100644 index 8def2df2a06..00000000000 --- a/frameworks/CSharp/servicestack/src/Model/Fortune.cs +++ /dev/null @@ -1,114 +0,0 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel.DataAnnotations; -using System.Data; -using System.Linq; - -using MongoDB.Driver; -using MongoDB.Driver.Builders; - -using ServiceStack.DataAnnotations; -using ServiceStack.Html; -using ServiceStack.OrmLite; -using ServiceStack.Text; - -namespace ServiceStackBenchmark.Model -{ - [Alias("Fortune")] - public class Fortune : IComparable - { - [PrimaryKey()] - public int id { get; set; } - - [StringLength(100)] - public string message { get; set; } - - public int CompareTo(Fortune fortune) - { - return message.CompareTo(fortune.message); - } - - } - - public static class FortuneMethods - { - - public static List GetFortunes(this IDbConnection db) - { - return db.Select(); - } - - public static List GetFortunes(this MongoDatabase db) - { - var collection = db.GetCollection("Fortune"); - return collection.FindAll().ToList(); - } - - public static bool CreateFortuneTable(this IDbConnection db) - { - if (db.TableExists("Fortune")) - return true; - - try - { - db.CreateTable(); - - // Populate the collection - 
db.Insert(GetFortunes().ToArray()); - - return true; - } - catch - { - return false; - } - } - - public static bool CreateFortuneTable(this MongoDatabase db) - { - if (db.CollectionExists("Fortune")) - return true; - - try - { - // Populate the collection - var collection = db.GetCollection("Fortune"); - collection.InsertBatch(GetFortunes()); - - return true; - } - catch - { - return false; - } - } - - private static IEnumerable GetFortunes() - { - var fortunes = new List(); - fortunes.Add(new Fortune() { id = 1, message = "fortune: No such file or directory" }); - fortunes.Add(new Fortune() { id = 2, message = "A computer scientist is someone who fixes things that aren't broken." }); - fortunes.Add(new Fortune() { id = 3, message = "After enough decimal places, nobody gives a damn." }); - fortunes.Add(new Fortune() { id = 4, message = "A bad random number generator: 1, 1, 1, 1, 1, 4.33e+67, 1, 1, 1" }); - fortunes.Add(new Fortune() { id = 5, message = "A computer program does what you tell it to do, not what you want it to do." }); - fortunes.Add(new Fortune() { id = 6, message = "Emacs is a nice operating system, but I prefer UNIX. — Tom Christaensen" }); - fortunes.Add(new Fortune() { id = 7, message = "Any program that runs right is obsolete." }); - fortunes.Add(new Fortune() { id = 8, message = "A list is only as strong as its weakest link. — Donald Knuth" }); - fortunes.Add(new Fortune() { id = 9, message = "Feature: A bug with seniority." }); - fortunes.Add(new Fortune() { id = 10, message = "Computers make very fast, very accurate mistakes." }); - fortunes.Add(new Fortune() { id = 11, message = "" }); - fortunes.Add(new Fortune() { id = 12, message = "フレームワークのベンチマーク" }); - return fortunes; - } - - public static string ToHtml(List fortunes) - { - string page = @"Fortunes"; - HtmlHelper htmlHelper = new HtmlHelper(); - fortunes.ForEach(f => page += @"".Fmt(f.id, htmlHelper.Encode(f.message))); - page += @"
<tr><th>id</th><th>message</th></tr>
<tr><td>{0}</td><td>{1}</td></tr>
"; - return page; - } - - } -} diff --git a/frameworks/CSharp/servicestack/src/Model/World.cs b/frameworks/CSharp/servicestack/src/Model/World.cs deleted file mode 100644 index d7d0fd18939..00000000000 --- a/frameworks/CSharp/servicestack/src/Model/World.cs +++ /dev/null @@ -1,211 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Data; -using System.Linq; -using System.Threading.Tasks; - -using ServiceStack.CacheAccess; -using ServiceStack.Common; -using ServiceStack.DataAnnotations; -using ServiceStack.OrmLite; - -using MongoDB.Driver; -using MongoDB.Driver.Builders; - -namespace ServiceStackBenchmark.Model -{ - [Alias("World")] - public class World - { - [PrimaryKey()] - public int id { get; set; } - public int randomNumber { get; set; } - } - - public static class WorldMethods - { - - public static World GetWorld(this IDbConnection db, int id) - { - // retrieve the World with passed id - return db.GetById(id); - } - - public static World GetWorld(this MongoDatabase db, int id) - { - // retrieve the World with passed id - var collection = db.GetCollection("World"); - return collection.FindOne(Query.EQ(w => w.id, id)); - } - - public static List GetWorlds(this IDbConnection db) - { - // retrieve all Worlds - return db.Select(); - } - - public static List GetWorlds(this MongoDatabase db) - { - // retrieve all Worlds - var collection = db.GetCollection("World"); - return collection.FindAll().ToList(); - } - - public static List GetWorlds(this IDbConnection db, IEnumerable ids) - { - // retrieve the Worlds included passed ids - return db.GetByIds(ids); - } - - public static List GetWorlds(this MongoDatabase db, IEnumerable ids) - { - // retrieve the Worlds included passed ids - var collection = db.GetCollection("World"); - return collection.Find(Query.In(w => w.id, ids)).ToList(); - } - - public static List UpdateWorlds(this IDbConnection db, IEnumerable ids) - { - // get the worlds for the passed ids - var worlds = db.GetByIds(ids); - - // concurrently update each world with a new random number - Parallel.ForEach(worlds, w => - { - lock (worlds) - { - w.randomNumber = SafeRandom.Instance.Next(0, 10000) + 1; - } - }); - - // update the dataase with the above changes - db.UpdateAll(worlds); - - // return updated collection - return worlds; - } - - public static List UpdateWorlds(this MongoDatabase db, IEnumerable ids, int recordCount) - { - var collection = db.GetCollection("World"); - - // get the worlds for the passed ids - var worlds = collection.Find(Query.In(w => w.id, ids)).ToList(); - - // concurrently update each world with a new random number - Parallel.ForEach(worlds, w => - { - lock (worlds) - { - w.randomNumber = SafeRandom.Instance.Next(0, recordCount) + 1; - } - }); - - // TODO: look into how to make this a single statement - foreach (var w in worlds) - { - // update the database with the above changes - collection.Update( - Query.EQ(t => t.id, w.id), - Update.Set(t => t.randomNumber, w.randomNumber)); - } - - // return updated collection - return worlds; - } - - public static void CacheAllWorlds(this IDbConnection db, ICacheClient cache) - { - cache.FlushAll(); - - // concurrently create a list of world ids - var worlds = db.GetWorlds(); - - Parallel.ForEach(worlds, w => - { - var cacheKey = UrnId.Create("Id", w.id.ToString()); - - cache.Set(cacheKey, w); - }); - } - - public static void CacheAllWorlds(this MongoDatabase db, ICacheClient cache, string dbType) - { - cache.FlushAll(); - - // concurrently create a list of world ids - var worlds = 
db.GetWorlds(); - - Parallel.ForEach(worlds, w => - { - var cacheKey = UrnId.CreateWithParts(new string[] { dbType, w.id.ToString() }); - - cache.Set(cacheKey, w); - }); - } - - public static bool CreateWorldTable(this IDbConnection db) - { - // only create table if it does not already exist - if (db.TableExists("World")) - return true; - - try - { - // create the database table based on model - db.CreateTable(); - - // populate the table - var worlds = new List(10000); - Parallel.For(0, 10000, i => - { - lock (worlds) - { - worlds.Add(new World() { id = i, randomNumber = SafeRandom.Instance.Next(0, 10000) + 1 }); - } - - }); - - // insert new records into database - db.Insert(worlds.ToArray()); - - return true; - } - catch - { - return false; - } - } - - public static bool CreateWorldTable(this MongoDatabase db) - { - // only create table if it does not already exist - if (db.CollectionExists("World")) - return true; - - try - { - // populate the table - var worlds = new List(10000); - Parallel.For(0, 10000, i => - { - lock (worlds) - { - worlds.Add(new World() { id = i, randomNumber = SafeRandom.Instance.Next(0, 10000) + 1 }); - } - - }); - - // insert new records into database - var collection = db.GetCollection("World"); - collection.InsertBatch(worlds); - - return true; - } - catch - { - return false; - } - } - } -} diff --git a/frameworks/CSharp/servicestack/src/NuGet.config b/frameworks/CSharp/servicestack/src/NuGet.config deleted file mode 100644 index d8ce1e8ef17..00000000000 --- a/frameworks/CSharp/servicestack/src/NuGet.config +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/Properties/AssemblyInfo.cs b/frameworks/CSharp/servicestack/src/Properties/AssemblyInfo.cs deleted file mode 100644 index 97790ecd6ad..00000000000 --- a/frameworks/CSharp/servicestack/src/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,33 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("ServiceStackBenchmark.Properties")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("ServiceStackBenchmark.Properties")] -[assembly: AssemblyCopyright("Copyright © 2013")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("b93603f5-a9d3-483e-b26b-1e6fc272c078")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/frameworks/CSharp/servicestack/src/Properties/PublishProfiles/IIS.pubxml b/frameworks/CSharp/servicestack/src/Properties/PublishProfiles/IIS.pubxml deleted file mode 100644 index a8c1a0a4641..00000000000 --- a/frameworks/CSharp/servicestack/src/Properties/PublishProfiles/IIS.pubxml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - MSDeploy - localhost - Benchmarks - - False - InProc - - <_SavePWD>False - - - Debug - x64 - True - False - / - True - False - False - DonotMerge - - - - \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/SafeRandom.cs b/frameworks/CSharp/servicestack/src/SafeRandom.cs deleted file mode 100644 index 94533f325e5..00000000000 --- a/frameworks/CSharp/servicestack/src/SafeRandom.cs +++ /dev/null @@ -1,72 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; - -namespace ServiceStackBenchmark -{ - internal sealed class SafeRandom - { - private static volatile SafeRandom instance; - - private static object syncRoot = new object(); - - private static Random random; - - private SafeRandom() - { - random = new Random(); - } - - public int Next() - { - int result; - - lock (random) - { - result = random.Next(); - } - return result; - } - - public int Next(int maxValue) - { - int result; - - lock (random) - { - result = random.Next(maxValue); - } - return result; - } - - public int Next(int minValue, int maxValue) - { - int result; - - lock (random) - { - result = random.Next(minValue, maxValue); - } - return result; - } - - public static SafeRandom Instance - { - get - { - if (instance == null) - { - lock (syncRoot) - { - if (instance == null) - { - instance = new SafeRandom(); - } - } - } - - return instance; - } - } - } -} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/SelfHost/App.config b/frameworks/CSharp/servicestack/src/SelfHost/App.config deleted file mode 100644 index b7c0ddb8dbf..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/App.config +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/SelfHost/AppHostSelfHelper.cs b/frameworks/CSharp/servicestack/src/SelfHost/AppHostSelfHelper.cs deleted file mode 100644 index bed2637cc2a..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/AppHostSelfHelper.cs +++ /dev/null @@ -1,85 +0,0 @@ -using System; -using System.Linq; -using System.Net; -using System.Collections.Generic; -using System.Diagnostics; - -namespace ServiceStackBenchmark -{ - public static class AppHostSelfHelper - { - public static bool IsMono() - { - return Type.GetType("Mono.Runtime") != null; - } - - public static void StartListening(this AppSelfHost appHost, string urlBase) - { - var addedURLToACL = false; - - try - { - appHost.Start(urlBase); - } - catch (HttpListenerException ex) - { - if (IsMono()) - throw ex; - - if (ex.ErrorCode == 5) - { - AppHostSelfHelper.AddAddress(urlBase); - addedURLToACL = true; - appHost.Start(urlBase); - } - } - - Console.WriteLine("AppHost Created at {0}, listening on {1}", DateTime.Now, urlBase); - 
Console.WriteLine("Press to stop."); - do { } while (Console.ReadKey(true).Key != ConsoleKey.Escape); - - if (addedURLToACL) - AppHostSelfHelper.DeleteAddress(urlBase); - } - - - #region Methods to Handle URL Listening with ACL Permissions - - public static void AddAddress(string address) - { - AddAddress(address, Environment.UserDomainName, Environment.UserName); - } - - public static void AddAddress(string address, string domain, string user) - { - string args = string.Format(@"http add urlacl url={0} user={1}\{2} listen=yes", address, domain, user); - - var psi = new ProcessStartInfo("netsh", args) - { - Verb = "runas", - CreateNoWindow = true, - WindowStyle = ProcessWindowStyle.Hidden, - UseShellExecute = true - }; - - Process.Start(psi).WaitForExit(); - } - - public static void DeleteAddress(string address) - { - string args = string.Format(@"http delete urlacl url={0}", address); - - var psi = new ProcessStartInfo("netsh", args) - { - CreateNoWindow = true, - WindowStyle = ProcessWindowStyle.Hidden, - UseShellExecute = true - }; - - Process.Start(psi).WaitForExit(); - } - - - #endregion - } -} diff --git a/frameworks/CSharp/servicestack/src/SelfHost/AppSelfHost.cs b/frameworks/CSharp/servicestack/src/SelfHost/AppSelfHost.cs deleted file mode 100644 index 62c2bb18b4e..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/AppSelfHost.cs +++ /dev/null @@ -1,55 +0,0 @@ -using System; -using System.Linq; -using System.Collections.Generic; -using ServiceStack; -using ServiceStack.CacheAccess; -using ServiceStack.CacheAccess.Providers; -using ServiceStack.Common; -using ServiceStack.Common.Web; -using ServiceStack.Redis; -using ServiceStack.ServiceHost; -using ServiceStack.WebHost.Endpoints; -using ServiceStack.WebHost.Endpoints.Formats; - -namespace ServiceStackBenchmark -{ - public class AppSelfHost : AppHostHttpListenerBase - { - - public AppSelfHost() : base("ServiceStackBenchmark", typeof(AppHost).Assembly) { } - - public override void Configure(Funq.Container container) - { - ServiceStack.Text.JsConfig.EmitCamelCaseNames = true; - - // Remove some unused features that by default are included - Plugins.RemoveAll(p => p is CsvFormat); - Plugins.RemoveAll(p => p is MetadataFeature); - - // Get disable features specified in Config file (i.e. Soap, Metadata, etc.) - var disabled = AppHostConfigHelper.GetDisabledFeatures(); - - // Construct Service Endpoint Host Configuration store - var config = new EndpointHostConfig - { - DefaultContentType = ContentType.Json, - WriteErrorsToResponse = false, - EnableFeatures = Feature.All.Remove(disabled), - AppendUtf8CharsetOnContentTypes = new HashSet { ContentType.Html }, - }; - - // Apply configuration - SetConfig(config); - - // Initialize Databases & associated Routes - container.InitDatabaseRoutes(Routes); - - // Register Cache Clients - container.Register(new MemoryCacheClient()); - - // Register Redis Client Manager - container.Register(c => - new PooledRedisClientManager("localhost:6379")); - } - } -} diff --git a/frameworks/CSharp/servicestack/src/SelfHost/Program.cs b/frameworks/CSharp/servicestack/src/SelfHost/Program.cs deleted file mode 100644 index f6c645e2bf7..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/Program.cs +++ /dev/null @@ -1,37 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; - -using ServiceStackBenchmark; - -namespace ServiceStackBenchmark.SelfHost -{ - class Program - { - static void Main(string[] args) - { - var listeningOn = args.Length == 0 ? 
"http://*:1337/" : args[0]; - - using (var appHost = new AppSelfHost()) - { - try - { - appHost.Init(); - - // TODO: switch to Start after the next ServiceStack deployment (added to framework on commit #806) - appHost.StartListening(listeningOn); - } - catch (Exception ex) - { - Console.WriteLine("ERROR: {0}: {1}", ex.GetType().Name, ex.Message); - } - finally - { - appHost.Stop(); - } - } - - Console.WriteLine("AppHost has finished"); - } - } -} diff --git a/frameworks/CSharp/servicestack/src/SelfHost/Properties/AssemblyInfo.cs b/frameworks/CSharp/servicestack/src/SelfHost/Properties/AssemblyInfo.cs deleted file mode 100644 index 2eb024f2548..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("ServiceStackBenchmark.SelfHost")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("ServiceStackBenchmark.SelfHost")] -[assembly: AssemblyCopyright("Copyright © 2013")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. -[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("0c254029-d754-40d1-a2a6-4c0ef648417f")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/frameworks/CSharp/servicestack/src/SelfHost/ServiceStackBenchmark.SelfHost.csproj b/frameworks/CSharp/servicestack/src/SelfHost/ServiceStackBenchmark.SelfHost.csproj deleted file mode 100644 index 58c9a730277..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/ServiceStackBenchmark.SelfHost.csproj +++ /dev/null @@ -1,154 +0,0 @@ - - - - - Debug - AnyCPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3} - Exe - Properties - ServiceStackBenchmark.SelfHost - ServiceStackBenchmark.SelfHost - v4.5 - 512 - ..\src\ - true - - - AnyCPU - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - - - AnyCPU - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - - - app.manifest - - - true - bin\x86\Debug\ - DEBUG;TRACE - full - x86 - prompt - MinimumRecommendedRules.ruleset - true - - - bin\x86\Release\ - TRACE - true - pdbonly - x86 - prompt - MinimumRecommendedRules.ruleset - true - - - true - bin\x64\Debug\ - DEBUG;TRACE - full - x64 - prompt - MinimumRecommendedRules.ruleset - true - - - bin\x64\Release\ - TRACE - true - pdbonly - x64 - prompt - MinimumRecommendedRules.ruleset - true - - - - False - ..\..\lib\ServiceStack.3.9.59\lib\net35\ServiceStack.dll - - - False - ..\..\lib\ServiceStack.Common.3.9.59\lib\net35\ServiceStack.Common.dll - - - False - 
..\..\lib\ServiceStack.Common.3.9.59\lib\net35\ServiceStack.Interfaces.dll - - - False - ..\..\lib\ServiceStack.OrmLite.SqlServer.3.9.59\lib\ServiceStack.OrmLite.dll - - - False - ..\..\lib\ServiceStack.OrmLite.SqlServer.3.9.59\lib\ServiceStack.OrmLite.SqlServer.dll - - - False - ..\..\lib\ServiceStack.Redis.3.9.59\lib\net35\ServiceStack.Redis.dll - - - False - ..\..\lib\ServiceStack.3.9.59\lib\net35\ServiceStack.ServiceInterface.dll - - - False - ..\..\lib\ServiceStack.Text.3.9.59\lib\net35\ServiceStack.Text.dll - - - - - - - - - - - - - - - - - - - - - - - {80cf41ab-455b-4eb9-bfcc-3f8c4e1d8354} - ServiceStackBenchmark - - - - - false - - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/SelfHost/app.manifest b/frameworks/CSharp/servicestack/src/SelfHost/app.manifest deleted file mode 100644 index 03041dc36b5..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/app.manifest +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/SelfHost/packages.config b/frameworks/CSharp/servicestack/src/SelfHost/packages.config deleted file mode 100644 index 064e4480bfb..00000000000 --- a/frameworks/CSharp/servicestack/src/SelfHost/packages.config +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/Service/MongoDBService.cs b/frameworks/CSharp/servicestack/src/Service/MongoDBService.cs deleted file mode 100644 index b7aea1e9c65..00000000000 --- a/frameworks/CSharp/servicestack/src/Service/MongoDBService.cs +++ /dev/null @@ -1,172 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -using ServiceStack.Common; -using ServiceStack.ServiceHost; -using ServiceStack.ServiceInterface; - -using ServiceStackBenchmark.Model; - -using MongoDB.Bson; -using MongoDB.Driver.Builders; -using MongoDB.Driver; - -namespace ServiceStackBenchmark -{ - - #region MongoDB Service Requests - - [Api("Test #2 using Service Stack and MongoDB")] - public class MongoDBDbRequest : IReturn - { } - - [Api("Test #3 using Service Stack and MongoDB")] - public class MongoDBQueriesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #4 using Service Stack, and MongoDB")] - public class MongoDBFortunesRequest : IReturn> - { } - - [Api("Test #5 using Service Stack, and MongoDB")] - public class MongoDBUpdatesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #7 using Service Stack, and MongoDB with Caching")] - public class MongoDBCachedDbRequest : IReturn - { } - - #endregion - - /// Service Stack tests using MongoDB provider - public class MongoDBService : Service - { - private const string dbType = "MongoDB"; - - #region Public Properties - - public MongoDatabase db { get; set; } - - #endregion - - #region Public Service Methods - - public object Get(MongoDBDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // retrieve world from database - return db.GetWorld(id); - } - - public object Get(MongoDBQueriesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, 
(int)request.queries)); - - // concurrently create a list of random world ids to retrieve - var ids = new List(); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // retrieve worlds associated with ids - return db.GetWorlds(ids); - } - - [AddHeader(ContentType = ServiceStack.Common.Web.ContentType.Html)] - public object Get(MongoDBFortunesRequest request) - { - // retrieve fortunes from database - var fortunes = db.GetFortunes(); - - // add additional fortune record - fortunes.Add(new Fortune { id = 0, message = "Additional fortune added at request time." }); - - // sort fortunes - fortunes.Sort(); - - // construct HTML page using template and return - return FortuneMethods.ToHtml(fortunes); - } - - public object Get(MongoDBUpdatesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to update - var ids = new List(worldCount); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // purge cache client - Cache.FlushAll(); - - // update the worlds - return db.UpdateWorlds(ids, 10000); - } - - public object Get(MongoDBCachedDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // create the cache key for the random world id - var cacheKey = UrnId.CreateWithParts(new string[] { dbType, id.ToString() }); - - // if world is cached, return it - var world = Cache.Get(cacheKey); - if (world != null) - return world; - - // get all of the worlds form the database - var worlds = db.GetWorlds(); - - // construct a cache dictionary - var cacheDict = new Dictionary(); - Parallel.ForEach(worlds, w => - { - // collect the current result - if (w.id == id) - world = w; - - // add world to cache dictionary - var key = UrnId.CreateWithParts(new string[] { dbType, w.id.ToString() }); - lock (cacheDict) - { - cacheDict.Add(key, w); - } - }); - - // populate cache - Cache.SetAll(cacheDict); - - // return current request - return world; - } - - #endregion - } - -} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/Service/MySqlService.cs b/frameworks/CSharp/servicestack/src/Service/MySqlService.cs deleted file mode 100644 index 0d569ded7ef..00000000000 --- a/frameworks/CSharp/servicestack/src/Service/MySqlService.cs +++ /dev/null @@ -1,186 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -using ServiceStack.Common; -using ServiceStack.ServiceHost; -using ServiceStack.ServiceInterface; - -using ServiceStackBenchmark.Model; - -namespace ServiceStackBenchmark -{ - - #region MySQL Service Requests - - [Api("Test #2 using Service Stack, ORMLite, and MySQL")] - public class MySqlDbRequest : IReturn - { } - - [Api("Test #3 using Service Stack, ORMLite, and MySQL")] - public class MySqlQueriesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #4 using Service Stack, ORMLite, and MySQL")] - public class MySqlFortunesRequest : IReturn> - { } - - [Api("Test #5 using Service Stack, ORMLite, and MySQL")] - public class MySqlUpdatesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", 
DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #7 using Service Stack, ORMLite, and MySQL with Caching")] - public class MySqlCachedDbRequest : IReturn - { } - - #endregion - - /// Service Stack tests using MySQL provider and ORMLite - public class MySqlService : Service - { - private const string dbType = "MySql"; - - #region Public Properties - - public IMySqlOrmLiteConnectionFactory dbFactory { get; set; } - - #endregion - - #region Public Service Methods - - public object Get(MySqlDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // retrieve world from database - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorld(id); - } - } - - public object Get(MySqlQueriesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to retrieve - var ids = new List(); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // retrieve worlds associated with ids - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorlds(ids); - } - } - - [AddHeader(ContentType = ServiceStack.Common.Web.ContentType.Html)] - public object Get(MySqlFortunesRequest request) - { - var fortunes = new List(); - - // retrieve fortunes from database - using (var db = dbFactory.OpenDbConnection()) - { - fortunes = db.GetFortunes(); - } - - // add additional fortune record - fortunes.Add(new Fortune { id = 0, message = "Additional fortune added at request time." }); - - // sort fortunes - fortunes.Sort(); - - // construct HTML page using template and return - return FortuneMethods.ToHtml(fortunes); - } - - public object Get(MySqlUpdatesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to update - var ids = new List(worldCount); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // purge cache client - Cache.FlushAll(); - - // update the worlds - using (var db = dbFactory.OpenDbConnection()) - { - return db.UpdateWorlds(ids); - } - } - - public object Get(MySqlCachedDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // create the cache key for the random world id - var cacheKey = UrnId.CreateWithParts(new string[] { dbType, id.ToString() }); - - // if world is cached, return it - var world = Cache.Get(cacheKey); - if (world != null) - return world; - - // get all of the worlds form the database - List worlds; - using (var db = dbFactory.OpenDbConnection()) - { - worlds = db.GetWorlds(); - } - - // construct a cache dictionary - var cacheDict = new Dictionary(); - Parallel.ForEach(worlds, w => - { - // collect the current result - if (w.id == id) - world = w; - - // add world to cache dictionary - var key = UrnId.CreateWithParts(new string[] { dbType, w.id.ToString() }); - lock (cacheDict) - { - cacheDict.Add(key, w); - } - }); - - // populate cache - Cache.SetAll(cacheDict); - - // return current request - return world; - } - - #endregion - } - -} diff --git a/frameworks/CSharp/servicestack/src/Service/PostgreSqlService.cs b/frameworks/CSharp/servicestack/src/Service/PostgreSqlService.cs 
deleted file mode 100644 index 2e74d5c5d1d..00000000000 --- a/frameworks/CSharp/servicestack/src/Service/PostgreSqlService.cs +++ /dev/null @@ -1,187 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -using ServiceStack.CacheAccess; -using ServiceStack.Common; -using ServiceStack.ServiceHost; -using ServiceStack.ServiceInterface; - -using ServiceStackBenchmark.Model; - -namespace ServiceStackBenchmark -{ - - #region PostgreSQL Service Requests - - [Api("Test #2 using Service Stack, ORMLite, and PostgreSQL")] - public class PostgreSqlDbRequest : IReturn - { } - - [Api("Test #3 using Service Stack, ORMLite, and PostgreSQL")] - public class PostgreSqlQueriesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #4 using Service Stack, ORMLite, and PostgreSQL")] - public class PostgreSqlFortunesRequest : IReturn> - { } - - [Api("Test #5 using Service Stack, ORMLite, and PostgreSQL")] - public class PostgreSqlUpdatesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #7 using Service Stack, ORMLite, and PostgreSQL with Caching")] - public class PostgreSqlCachedDbRequest : IReturn - { } - - #endregion - - /// Service Stack tests using PostgreSQL provider and ORMLite - public class PostgreSqlService : Service - { - private const string dbType = "PgSql"; - - #region Public Properties - - public IPostgreSqlOrmLiteConnectionFactory dbFactory { get; set; } - - #endregion - - #region Public Service Methods - - public object Get(PostgreSqlDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // retrieve world from database - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorld(id); - } - } - - public object Get(PostgreSqlQueriesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to retrieve - var ids = new List(); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // retrieve worlds associated with ids - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorlds(ids); - } - } - - [AddHeader(ContentType = ServiceStack.Common.Web.ContentType.Html)] - public object Get(PostgreSqlFortunesRequest request) - { - var fortunes = new List(); - - // retrieve fortunes from database - using (var db = dbFactory.OpenDbConnection()) - { - fortunes = db.GetFortunes(); - } - - // add additional fortune record - fortunes.Add(new Fortune { id = 0, message = "Additional fortune added at request time." 
}); - - // sort fortunes - fortunes.Sort(); - - // construct HTML page using template and return - return FortuneMethods.ToHtml(fortunes); - } - - public object Get(PostgreSqlUpdatesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to update - var ids = new List(worldCount); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // purge cache client - Cache.FlushAll(); - - // update the worlds - using (var db = dbFactory.OpenDbConnection()) - { - return db.UpdateWorlds(ids); - } - } - - public object Get(PostgreSqlCachedDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // create the cache key for the random world id - var cacheKey = UrnId.CreateWithParts(new string[] { dbType, id.ToString() }); - - // if world is cached, return it - var world = Cache.Get(cacheKey); - if (world != null) - return world; - - // get all of the worlds form the database - List worlds; - using (var db = dbFactory.OpenDbConnection()) - { - worlds = db.GetWorlds(); - } - - // construct a cache dictionary - var cacheDict = new Dictionary(); - Parallel.ForEach(worlds, w => - { - // collect the current result - if (w.id == id) - world = w; - - // add world to cache dictionary - var key = UrnId.CreateWithParts(new string[] { dbType, w.id.ToString() }); - lock (cacheDict) - { - cacheDict.Add(key, w); - } - }); - - // populate cache - Cache.SetAll(cacheDict); - - // return current request - return world; - } - - #endregion - } - -} diff --git a/frameworks/CSharp/servicestack/src/Service/Services.cs b/frameworks/CSharp/servicestack/src/Service/Services.cs deleted file mode 100644 index ee909dd0836..00000000000 --- a/frameworks/CSharp/servicestack/src/Service/Services.cs +++ /dev/null @@ -1,99 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; - -using ServiceStack.CacheAccess; -using ServiceStack.CacheAccess.Providers; -using ServiceStack.CacheAccess.Memcached; -using ServiceStack.CacheAccess.AwsDynamoDb; -using ServiceStack.CacheAccess.Azure; -using ServiceStack.Common.Web; -using ServiceStack.Redis; -using ServiceStack.ServiceHost; -using ServiceStack.ServiceInterface; - -namespace ServiceStackBenchmark -{ - - #region Hello World Services - - [Api("Test #1 (JSON serialization) using Service Stack")] - [Route("/json", "GET")] - public class JsonRequest { } - - public class JsonService : Service - { - public object Get(JsonRequest request) - { - var response = new { message = "Hello, World!" 
}; - return response; - } - } - - [Api("Test #6 (Plaintext) using Service Stack")] - [Route("/plaintext", "GET")] - public class PlainTextRequest { } - - public class PlainTextService : Service - { - public object Get(PlainTextRequest request) - { - var response = new HttpResult("Hello, World!", "text/plain"); - return response; - } - } - - - [Api("Set Cache Provider")] - [Route("/cacheprovider/{provider}", "GET")] - public class SetCacheProviderRequest - { - [ApiMember(Name ="provider", Description = "Cache Provider", DataType = "string", IsRequired = true)] - [ApiAllowableValues("provider", new string[] { "inmem", "memcache", "redis", "aws", "azure" })] - public string provider { get; set; } - } - - public class CacheProviderService : Service - { - public object Any(SetCacheProviderRequest request) - { - try - { - switch (request.provider) - { - case "memcache": - var memcache = new MemcachedClientCache(); - AppHost.Instance.Container.Register(memcache); - return new HttpResult("Cache Provider switched to MemCache."); - - case "redis": - AppHost.Instance.Container.Register(c => c.Resolve().GetCacheClient()); - return new HttpResult("Cache Provider switched to Redis."); - - case "aws": - var aws = new DynamoDbCacheClient("", "", Amazon.RegionEndpoint.APSoutheast1); - AppHost.Instance.Container.Register(aws); - return new HttpResult("Cache Provider switched to Amazon Web Service DynamoDb Cache Client."); - - case "azure": - AppHost.Instance.Container.Register(new AzureCacheClient("default")); - return new HttpResult("Cache Provider switched to Microsoft Azure Cache Client."); - - default: - AppHost.Instance.Container.Register(new MemoryCacheClient()); - return new HttpResult("Cache Provider switched to In-Memory Cache Client."); - } - } - catch - { - throw; - } - } - } - - - #endregion - - - -} \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/Service/SqlServerService.cs b/frameworks/CSharp/servicestack/src/Service/SqlServerService.cs deleted file mode 100644 index eecff2f0e88..00000000000 --- a/frameworks/CSharp/servicestack/src/Service/SqlServerService.cs +++ /dev/null @@ -1,187 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -using ServiceStack.Common; -using ServiceStack.ServiceHost; -using ServiceStack.ServiceInterface; - -using ServiceStackBenchmark.Model; - -namespace ServiceStackBenchmark -{ - - #region Microsoft SQL Server Service Requests - - [Api("Test #2 using Service Stack, ORMLite, and Microsoft SQL Server")] - public class SqlServerDbRequest : IReturn - { } - - [Api("Test #3 using Service Stack, ORMLite, and Microsoft SQL Server")] - public class SqlServerQueriesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #4 using Service Stack, ORMLite, and Microsoft SQL Server")] - public class SqlServerFortunesRequest : IReturn> - { } - - [Api("Test #5 using Service Stack, ORMLite, and Microsoft SQL Server")] - public class SqlServerUpdatesRequest : IReturn> - { - [ApiMember(Name = "queries", Description = "Number of Queries to Execute", DataType = "int", IsRequired = true)] - [ApiAllowableValues("queries", 1, 500)] - public int queries { get; set; } - } - - [Api("Test #7 using Service Stack, ORMLite, and Microsoft SQL Server with Caching")] - public class SqlServerCachedDbRequest : IReturn - { } - - #endregion - - 
/// Service Stack tests using Microsoft SQL Server provider and ORMLite - public class SqlServerService : Service - { - private const string dbType = "SqlSrvr"; - - #region Public Properties - - public ISqlServerOrmLiteConnectionFactory dbFactory { get; set; } - - #endregion - - #region Public Service Methods - - public object Get(SqlServerDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // retrieve world from database - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorld(id); - } - } - - public object Get(SqlServerQueriesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to retrieve - var ids = new List(); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // retrieve worlds associated with ids - using (var db = dbFactory.OpenDbConnection()) - { - return db.GetWorlds(ids); - } - } - - [AddHeader(ContentType = ServiceStack.Common.Web.ContentType.Html)] - public object Get(SqlServerFortunesRequest request) - { - var fortunes = new List(); - - // retrieve fortunes from database - using (var db = dbFactory.OpenDbConnection()) - { - fortunes = db.GetFortunes(); - } - - // add additional fortune record - fortunes.Add(new Fortune { id = 0, message = "Additional fortune added at request time." }); - - // sort fortunes - fortunes.Sort(); - - // construct HTML page using template and return - return FortuneMethods.ToHtml(fortunes); - } - - public object Get(SqlServerUpdatesRequest request) - { - // limit queries to be between 1 and 500 iterations - var worldCount = Math.Max(1, Math.Min(500, (int)request.queries)); - - // concurrently create a list of random world ids to update - var ids = new List(worldCount); - Parallel.For(0, worldCount, i => - { - lock (ids) - { - ids.Add(SafeRandom.Instance.Next(0, 10000) + 1); - } - }); - - // purge cache client - Cache.FlushAll(); - - // update the worlds - using (var db = dbFactory.OpenDbConnection()) - { - return db.UpdateWorlds(ids); - } - } - - public object Get(SqlServerCachedDbRequest request) - { - // get a random world id - var id = SafeRandom.Instance.Next(0, 10000) + 1; - - // create the cache key for the random world id - var cacheKey = UrnId.CreateWithParts(new string[] { dbType, id.ToString() }); - - // if world is cached, return it - var world = Cache.Get(cacheKey); - if (world != null) - return world; - - // get all of the worlds form the database - List worlds; - using (var db = dbFactory.OpenDbConnection()) - { - worlds = db.GetWorlds(); - } - - // construct a cache dictionary - var cacheDict = new Dictionary(); - Parallel.ForEach(worlds, w => - { - // collect the current result - if (w.id == id) - world = w; - - // add world to cache dictionary - var key = UrnId.CreateWithParts(new string[] { dbType, w.id.ToString() }); - lock (cacheDict) - { - cacheDict.Add(key, w); - } - }); - - // populate cache - Cache.SetAll(cacheDict); - - // return current request - return world; - } - - #endregion - - } - -} diff --git a/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.csproj b/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.csproj deleted file mode 100644 index 62151012f94..00000000000 --- a/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.csproj +++ /dev/null @@ -1,282 +0,0 @@ - - - - - Debug - AnyCPU - 
{80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354} - {349c5851-65df-11da-9384-00065b846f21};{fae04ec0-301f-11d3-bf4b-00c04f79efbc} - Library - Properties - ServiceStackBenchmark - ServiceStackBenchmark - v4.5 - 512 - - .\ - false - - - AnyCPU - true - full - false - bin\ - DEBUG;TRACE - prompt - 4 - false - - - AnyCPU - pdbonly - true - bin\ - TRACE - prompt - 4 - false - - - - - - true - bin\ - DEBUG;TRACE - full - x86 - prompt - MinimumRecommendedRules.ruleset - - - bin\ - TRACE - true - pdbonly - x86 - prompt - MinimumRecommendedRules.ruleset - - - true - bin\ - DEBUG;TRACE - full - x64 - prompt - MinimumRecommendedRules.ruleset - - - bin\ - TRACE - true - pdbonly - x64 - prompt - MinimumRecommendedRules.ruleset - - - - False - ..\lib\AWSSDK.2.0.0.4-beta\lib\net45\AWSSDK.dll - - - ..\lib\EnyimMemcached.2.12\lib\net35\Enyim.Caching.dll - - - ..\lib\WindowsAzure.Caching.1.7.0.0\lib\net35-full\Microsoft.ApplicationServer.Caching.Client.dll - - - ..\lib\WindowsAzure.Caching.1.7.0.0\lib\net35-full\Microsoft.ApplicationServer.Caching.Core.dll - - - ..\lib\WindowsAzure.Caching.1.7.0.0\lib\net35-full\Microsoft.Web.DistributedCache.dll - - - ..\lib\WindowsAzure.Caching.1.7.0.0\lib\net35-full\Microsoft.WindowsFabric.Common.dll - - - ..\lib\WindowsAzure.Caching.1.7.0.0\lib\net35-full\Microsoft.WindowsFabric.Data.Common.dll - - - ..\lib\mongocsharpdriver.1.8.2\lib\net35\MongoDB.Bson.dll - - - ..\lib\mongocsharpdriver.1.8.2\lib\net35\MongoDB.Driver.dll - - - False - ..\lib\Npgsql.2.0.13-beta1\lib\net45\Mono.Security.dll - - - False - ..\lib\MySql.Data.6.7.4\lib\net40\MySql.Data.dll - - - False - ..\lib\Npgsql.2.0.13-beta1\lib\net45\Npgsql.dll - - - False - ..\lib\Npgsql.2.0.13-beta1\lib\net45\policy.2.0.Npgsql.dll - - - False - ..\lib\ServiceStack.3.9.59\lib\net35\ServiceStack.dll - - - False - ..\lib\ServiceStack.Api.Swagger.3.9.59\lib\net35\ServiceStack.Api.Swagger.dll - - - ..\lib\ServiceStack.Caching.AwsDynamoDb.3.9.55\lib\ServiceStack.CacheAccess.AwsDynamoDb.dll - - - ..\lib\ServiceStack.Caching.Azure.3.9.55\lib\ServiceStack.CacheAccess.Azure.dll - - - ..\lib\ServiceStack.Caching.Memcached.3.9.55\lib\ServiceStack.CacheAccess.Memcached.dll - - - False - ..\lib\ServiceStack.Common.3.9.59\lib\net35\ServiceStack.Common.dll - - - False - ..\lib\ServiceStack.Common.3.9.59\lib\net35\ServiceStack.Interfaces.dll - - - False - ..\lib\ServiceStack.OrmLite.SqlServer.3.9.59\lib\ServiceStack.OrmLite.dll - - - False - ..\lib\ServiceStack.OrmLite.MySql.3.9.59\lib\ServiceStack.OrmLite.MySql.dll - - - False - ..\lib\ServiceStack.OrmLite.PostgreSQL.3.9.59\lib\ServiceStack.OrmLite.PostgreSQL.dll - - - False - ..\lib\ServiceStack.OrmLite.SqlServer.3.9.59\lib\ServiceStack.OrmLite.SqlServer.dll - - - False - ..\lib\ServiceStack.Redis.3.9.59\lib\net35\ServiceStack.Redis.dll - - - False - ..\lib\ServiceStack.3.9.59\lib\net35\ServiceStack.ServiceInterface.dll - - - False - ..\lib\ServiceStack.Text.3.9.59\lib\net35\ServiceStack.Text.dll - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Designer - - - - - - - - - - - - - - - - - - - - - - - Global.asax - - - - - - - - - Web.config - - - Web.config - - - - - false - - - - - - 11.0 - $(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion) - - - - - - - - - - - - - - True - - - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.sln b/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.sln deleted file mode 100644 index e41be833101..00000000000 --- a/frameworks/CSharp/servicestack/src/ServiceStackBenchmark.sln 
+++ /dev/null @@ -1,58 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 2012 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ServiceStackBenchmark", "ServiceStackBenchmark.csproj", "{80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{0C24E4BD-94BD-4AFD-B912-00A5FF825E6B}" - ProjectSection(SolutionItems) = preProject - NuGet.config = NuGet.config - EndProjectSection -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".nuget", ".nuget", "{E408A80A-0E70-4FD8-9441-76C90F37D955}" - ProjectSection(SolutionItems) = preProject - .nuget\NuGet.Config = .nuget\NuGet.Config - .nuget\NuGet.exe = .nuget\NuGet.exe - .nuget\NuGet.targets = .nuget\NuGet.targets - EndProjectSection -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ServiceStackBenchmark.SelfHost", "SelfHost\ServiceStackBenchmark.SelfHost.csproj", "{64B2E432-0D03-40CE-9086-81FC7190D9A3}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|Any CPU.Build.0 = Debug|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|x64.ActiveCfg = Debug|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|x64.Build.0 = Debug|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|x86.ActiveCfg = Debug|x86 - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Debug|x86.Build.0 = Debug|x86 - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|Any CPU.ActiveCfg = Release|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|Any CPU.Build.0 = Release|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|x64.ActiveCfg = Release|x64 - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|x64.Build.0 = Release|x64 - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|x86.ActiveCfg = Release|Any CPU - {80CF41AB-455B-4EB9-BFCC-3F8C4E1D8354}.Release|x86.Build.0 = Release|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|x64.ActiveCfg = Debug|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|x64.Build.0 = Debug|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|x86.ActiveCfg = Debug|x86 - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Debug|x86.Build.0 = Debug|x86 - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|Any CPU.Build.0 = Release|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|x64.ActiveCfg = Release|x64 - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|x64.Build.0 = Release|x64 - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|x86.ActiveCfg = Release|Any CPU - {64B2E432-0D03-40CE-9086-81FC7190D9A3}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/frameworks/CSharp/servicestack/src/Web.Debug.config b/frameworks/CSharp/servicestack/src/Web.Debug.config deleted file mode 100644 index 2e302f9f954..00000000000 --- 
a/frameworks/CSharp/servicestack/src/Web.Debug.config +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/Web.Release.config b/frameworks/CSharp/servicestack/src/Web.Release.config deleted file mode 100644 index c35844462ba..00000000000 --- a/frameworks/CSharp/servicestack/src/Web.Release.config +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/Web.config b/frameworks/CSharp/servicestack/src/Web.config deleted file mode 100644 index 89b68c087fd..00000000000 --- a/frameworks/CSharp/servicestack/src/Web.config +++ /dev/null @@ -1,74 +0,0 @@ - - - - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/CSharp/servicestack/src/packages.config b/frameworks/CSharp/servicestack/src/packages.config deleted file mode 100644 index 466abbf0a98..00000000000 --- a/frameworks/CSharp/servicestack/src/packages.config +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/Clojure/aleph/README.md b/frameworks/Clojure/aleph/README.md index 08f8046b4aa..56d56191516 100644 --- a/frameworks/Clojure/aleph/README.md +++ b/frameworks/Clojure/aleph/README.md @@ -1,24 +1,32 @@ -# Compojure Benchmarking Test +# Aleph Benchmarking Test -This is the [Aleph](https://github.com/ztellman/aleph) portion of a [benchmarking test suite](../) comparing a variety of web development platforms. - -### JSON Encoding Test - -* [JSON test source](hello/src/hello/handler.clj) +This is the [Aleph](https://github.com/clj-commons/aleph) portion of a [benchmarking test suite](../) comparing a variety of web development platforms. ## Infrastructure Software Versions -The dependencies are documented in [project.clj](hello/project.clj), +The dependencies are documented in [project.clj](project.clj), but the main ones are: -* [Aleph 0.4.5-alpha6](https://github.com/ztellman/aleph) -* [Clojure 1.9.0](http://clojure.org/) -* [metosin/jsonista 0.2.0](https://github.com/metosin/jsonista), which in turn uses [Jackson](http://jackson.codehaus.org/) +* [Aleph 0.4.7](https://github.com/clj-commons/aleph) +* [Clojure 1.11.0](http://clojure.org/) +* [metosin/jsonista 0.3.5](https://github.com/metosin/jsonista), which in turn uses [Jackson](http://jackson.codehaus.org/) +* [hiccup 1.0.5](https://github.com/weavejester/hiccup) +* [porsas 0.0.1-alpha14](https://github.com/arnaudgeiser/porsas) ## Test URLs ### JSON Encoding Test +`http://localhost:8080/json` + +### Single Query Test +`http://localhost:8080/db` + +### Multiple Query Test +`http://localhost:8080/queries?queries=number` -http://localhost/json +### Fortune Test +`http://localhost:8080/fortunes` -### Plaintext Test +### Database Updates +`http://localhost:8080/updates?queries=number` -http://localhost/plaintext +### Plaintext +`http://localhost:8080/plaintext` diff --git a/frameworks/Clojure/aleph/aleph.dockerfile b/frameworks/Clojure/aleph/aleph.dockerfile index ae4d41a2bcc..8714c56a871 100644 --- a/frameworks/Clojure/aleph/aleph.dockerfile +++ b/frameworks/Clojure/aleph/aleph.dockerfile @@ -1,9 +1,20 @@ -FROM clojure:lein-2.8.1 +FROM clojure:openjdk-17-lein-2.9.8 WORKDIR /aleph COPY src src COPY project.clj project.clj RUN lein uberjar +# HTTP server EXPOSE 8080 +# async-profiler HTTP-server +EXPOSE 8081 +# JMX port +EXPOSE 9999 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:+AggressiveOpts", "-jar", "target/hello-aleph-standalone.jar"] +RUN apt update -y +RUN apt install perl -y + +CMD ["java", "-server", "-Xms2G", "-Xmx2G", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dvertx.disableMetrics=true", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Djava.net.preferIPv4Stack=true", "-jar", "target/hello-aleph-standalone.jar"] + +# To enable JMX and async-profiler +#CMD ["java", "-XX:+UnlockDiagnosticVMOptions", "-XX:+DebugNonSafepoints", "-Djdk.attach.allowAttachSelf", "-Dcom.sun.management.jmxremote=true", 
"-Djava.rmi.server.hostname=0.0.0.0","-Dcom.sun.management.jmxremote.rmi.port=9999" ,"-Dcom.sun.management.jmxremote.port=9999", "-Dcom.sun.management.jmxremote.ssl=false", "-Dcom.sun.management.jmxremote.authenticate=false", "-server", "-Xms2G", "-Xmx2G", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dvertx.disableMetrics=true", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Djava.net.preferIPv4Stack=true", "-jar", "target/hello-aleph-standalone.jar"] diff --git a/frameworks/Clojure/aleph/benchmark_config.json b/frameworks/Clojure/aleph/benchmark_config.json index 39f9a3597bb..91dbcef2649 100755 --- a/frameworks/Clojure/aleph/benchmark_config.json +++ b/frameworks/Clojure/aleph/benchmark_config.json @@ -4,10 +4,14 @@ "default": { "json_url": "/json", "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "fortune_url": "/fortunes", "port": 8080, "approach": "Realistic", "classification": "Micro", - "database": "None", + "database": "Postgres", "framework": "aleph", "language": "Clojure", "flavor": "None", diff --git a/frameworks/Clojure/aleph/config.toml b/frameworks/Clojure/aleph/config.toml index 06d8b24f010..7314043c7c8 100644 --- a/frameworks/Clojure/aleph/config.toml +++ b/frameworks/Clojure/aleph/config.toml @@ -4,9 +4,13 @@ name = "aleph" [main] urls.plaintext = "/plaintext" urls.json = "/json" +urls.db = "/db" +urls.query = "/queries?queries=" +urls.update = "/updates?queries=" +urls.fortune = "/fortunes" approach = "Realistic" classification = "Micro" -database = "None" +database = "Postgres" database_os = "Linux" os = "Linux" orm = "Raw" diff --git a/frameworks/Clojure/aleph/project.clj b/frameworks/Clojure/aleph/project.clj index 815fdefec26..7243aaa6c95 100644 --- a/frameworks/Clojure/aleph/project.clj +++ b/frameworks/Clojure/aleph/project.clj @@ -1,10 +1,24 @@ (defproject hello "aleph" :description "JSON/plaintext tests" - :dependencies [[org.clojure/clojure "1.9.0"] - [clj-tuple "0.2.2"] - [org.clojure/tools.cli "0.3.7"] - [aleph "0.4.5-alpha6"] - [javax.xml.bind/jaxb-api "2.3.0"] - [metosin/jsonista "0.2.0"]] + :dependencies [[org.clojure/clojure "1.11.0"] + [aleph "0.4.7"] + [metosin/jsonista "0.3.5"] + [hiccup "1.0.5"] + [io.netty/netty-transport-native-epoll "4.1.65.Final" :classifier "linux-x86_64"] + [com.github.arnaudgeiser/porsas "0.0.1-alpha14" + :exclusions [io.netty/netty-codec-dns + io.netty/netty-codec + io.netty/netty-buffer + io.netty/netty-common + io.netty/netty-codec-http + io.netty/netty-codec-http2 + io.netty/netty-codec-socks + io.netty/netty-handler + io.netty/netty-handler-proxy + io.netty/netty-transport + io.netty/netty-resolver-dns + io.netty/netty-resolver]] + [com.clojure-goes-fast/clj-async-profiler "0.5.1"]] :main hello.handler + :jvm-opts ^:replace ["-Dclojure.compiler.direct-linking=true"] :aot :all) diff --git a/frameworks/Clojure/aleph/src/hello/handler.clj b/frameworks/Clojure/aleph/src/hello/handler.clj index fc42d53a1a7..2130bae1136 100644 --- a/frameworks/Clojure/aleph/src/hello/handler.clj +++ b/frameworks/Clojure/aleph/src/hello/handler.clj @@ -1,45 +1,188 @@ (ns hello.handler (:require - [byte-streams :as bs] - [clojure.tools.cli :as cli] - [aleph.http :as http] - [jsonista.core :as json] - [clj-tuple :as t]) + [aleph.http :as http] + [aleph.netty :as netty] + [byte-streams :as bs] + [clj-async-profiler.core :as prof] + [hiccup.page :as hp] + [hiccup.util :as hu] + [jsonista.core :as json] + [manifold.deferred :as d] + 
[porsas.async :as async]) + (:import (clojure.lang IDeref) + (io.netty.channel ChannelOption) + (io.netty.buffer PooledByteBufAllocator) + (java.util.function Supplier) + (java.util.concurrent ThreadLocalRandom) + (porsas.async Context)) (:gen-class)) (def plaintext-response - (t/hash-map - :status 200 - :headers (t/hash-map "content-type" "text/plain; charset=utf-8") - :body (bs/to-byte-array "Hello, World!"))) + {:status 200 + :headers {"Content-Type" "text/plain"} + :body (bs/to-byte-array "Hello, World!")}) (def json-response - (t/hash-map - :status 200 - :headers (t/hash-map "content-type" "application/json"))) + {:status 200 + :headers {"Content-Type" "application/json"}}) -(defn handler [req] +(def html-response + {:status 200 + :headers {"Content-Type" "text/html; charset=utf-8"}}) + +(def db-spec + {:uri "postgresql://tfb-database:5432/hello_world" + :user "benchmarkdbuser" + :password "benchmarkdbpass" + :size 1}) + +(defmacro thread-local [& body] + `(let [tl# (ThreadLocal/withInitial (reify Supplier (get [_] ~@body)))] + (reify IDeref (deref [_] (.get tl#))))) + +(def pool + "PostgreSQL pool of connections (`PgPool`)." + (thread-local (async/pool db-spec))) + +(defn random + "Generate a random number between 1 and 10'000." + [] + (unchecked-inc (.nextInt (ThreadLocalRandom/current) 10000))) + +(defn sanitize-queries-param + "Sanitizes the `queries` parameter. Clamps the value between 1 and 500. + Invalid (string) values become 1." + [request] + (let [queries (-> request + :query-string + (subs 8)) + n (try (Integer/parseInt queries) + (catch Exception _ 1))] ; default to 1 on parse failure + (cond + (< n 1) 1 + (> n 500) 500 + :else n))) + +(def ^Context + query-mapper + "Map each row into a record." + (async/context {:row (async/rs->compiled-record)})) + +(defn query-one-random-world + "Query a random world on the database. + Return a `CompletableFuture`." + [] + (async/query-one query-mapper + @pool + ["SELECT id, randomnumber FROM world WHERE id=$1" (random)])) + +(defn update-world + "Update a world on the database. + Return a `CompletableFuture`." + [{:keys [randomNumber id]}] + (async/query @pool + ["UPDATE world SET randomnumber=$1 WHERE id=$2" randomNumber id])) + +(defn run-queries + "Run a number of `queries` on the database to fetch a random world. + Return a `manifold.deferred`." + [queries] + (apply d/zip + (take queries + (repeatedly query-one-random-world)))) + +(defn query-fortunes + "Query the fortunes on database. + Return a `CompletableFuture`." + [] + (async/query query-mapper @pool ["SELECT id, message from FORTUNE"])) + +(defn get-fortunes + "Fetch the full list of Fortunes from the database, sort them by the fortune + message text. + Return a `CompletableFuture` with the results." + [] + (d/chain (query-fortunes) + (fn [fortunes] + (sort-by :message + (conj fortunes + {:id 0 + :message "Additional fortune added at request time."}))))) + +(defn update-and-persist + "Fetch a number of `queries` random world from the database. + Compute a new `randomNumber` for each of them a return a `CompletableFuture` + with the updated worlds." + [queries] + (d/chain' (run-queries queries) + (fn [worlds] + (let [worlds' (mapv #(assoc % :randomNumber (random)) worlds)] + (d/chain' (apply d/zip (mapv update-world worlds')) + (fn [_] worlds')))))) + +(defn fortunes-hiccup + "Render the given fortunes to simple HTML using Hiccup." 
+ [fortunes] + (hp/html5 + [:head + [:title "Fortunes"]] + [:body + [:table + [:tr + [:th "id"] + [:th "message"]] + (for [x fortunes] + [:tr + [:td (:id x)] + [:td (hu/escape-html (:message x))]])]])) + +(defn handler + "Ring handler representing the different tests." + [req] (let [uri (:uri req)] (cond (.equals "/plaintext" uri) plaintext-response - (.equals "/json" uri) (assoc json-response - :body (json/write-value-as-bytes (t/hash-map :message "Hello, World!"))) + (.equals "/json" uri) (assoc json-response + :body (json/write-value-as-bytes {:message "Hello, World!"})) + (.equals "/db" uri) (-> (query-one-random-world) + (d/chain (fn [world] + (assoc json-response + :body (json/write-value-as-bytes world))))) + (.equals "/queries" uri) (-> (sanitize-queries-param req) + (run-queries) + (d/chain (fn [worlds] + (assoc json-response + :body (json/write-value-as-bytes worlds))))) + (.equals "/fortunes" uri) (d/chain' (get-fortunes) + fortunes-hiccup + (fn [body] + (assoc html-response :body body))) + (.equals "/updates" uri) (-> (sanitize-queries-param req) + (update-and-persist) + (d/chain (fn [worlds] + (assoc json-response + :body (json/write-value-as-bytes worlds))))) :else {:status 404}))) ;;; -(defn -main [& args] - - (let [[{:keys [help port]} _ banner] - (cli/cli args - ["-p" "--port" "Server port" - :default 8080 - :parse-fn #(Integer/parseInt %)] - ["-h" "--[no-]help"])] - - (when help - (println banner) - (System/exit 0)) - - (aleph.netty/leak-detector-level! :disabled) - (http/start-server handler {:port port, :executor :none}))) +(defn -main [& _] + (netty/leak-detector-level! :disabled) + (http/start-server handler {:port 8080 + :raw-stream? true + :epoll? true + :executor :none + :bootstrap-transform (fn [bootstrap] + (.option bootstrap ChannelOption/ALLOCATOR PooledByteBufAllocator/DEFAULT) + (.childOption bootstrap ChannelOption/ALLOCATOR PooledByteBufAllocator/DEFAULT)) + :pipeline-transform (fn [pipeline] + (.remove pipeline "continue-handler"))}) + ;; Uncomment to enable async-profiler + #_ + (do + (prof/profile-for 60 + #_ + {:transform (fn [s] + (when-not (re-find #"(writev|__libc|epoll_wait|write|__pthread)" s) + s))}) + (prof/serve-files 8081))) diff --git a/frameworks/Crystal/orion/README.md b/frameworks/Crystal/orion/README.md deleted file mode 100755 index d9f2e30f42d..00000000000 --- a/frameworks/Crystal/orion/README.md +++ /dev/null @@ -1,46 +0,0 @@ -This is the [Orion](https://github.com/obsidian/orion) test of the Framework Benchmarks. -Crystal is a new language that closely resembles Ruby with a goal of removing typed variables and parameters (instead inferencing), whilst maintaining top speed through bindings into C. 
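> Editor's note on the Aleph handler added above: the new `hello.handler` wraps its porsas `PgPool` in a `thread-local` macro built from `ThreadLocal/withInitial` plus Clojure's `IDeref`. The sketch below restates that pattern in isolation with a trivial initializer; `per-thread-id` is a hypothetical name for illustration only and is not part of the diff.

```clojure
(import '(java.util.function Supplier))

;; One lazily created value per thread: ThreadLocal/withInitial takes a
;; Supplier, and wrapping the ThreadLocal in IDeref lets callers use `@`.
(def per-thread-id
  (let [tl (ThreadLocal/withInitial
             (reify Supplier
               (get [_] (java.util.UUID/randomUUID))))]
    (reify clojure.lang.IDeref
      (deref [_] (.get tl)))))

;; Repeated derefs on the same thread return the same UUID; another
;; thread (here via future) sees its own, independently created value.
@per-thread-id
@(future @per-thread-id)
```

> Combined with `:size 1` in `db-spec` and `:executor :none` on the server, this appears intended to give each Netty event-loop thread its own dedicated database connection instead of sharing one locked pool across threads.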
- -Orion is a powerful, simple, rails-esque routing library for HTTP::Server - - -# orion Benchmarking Test - -### Test Type Implementation Source Code - -* [JSON](orion.cr) -* [PLAINTEXT](orion.cr) -* [DB](orion.cr) -* [QUERY](orion.cr) -* [CACHED QUERY](orion.cr) -* [UPDATE](orion.cr) -* [FORTUNES](orion.cr) - -## Test URLs -### JSON - -http://localhost:8080/json - -### PLAINTEXT - -http://localhost:8080/plaintext - -### DB - -http://localhost:8080/db - -### QUERY - -http://localhost:8080/query?queries= - -### CACHED QUERY - -http://localhost:8080/cached_query?queries= - -### UPDATE - -http://localhost:8080/update?queries= - -### FORTUNES - -http://localhost:8080/fortunes diff --git a/frameworks/Crystal/orion/orion.cr b/frameworks/Crystal/orion/orion.cr deleted file mode 100644 index 6ed2b5a9557..00000000000 --- a/frameworks/Crystal/orion/orion.cr +++ /dev/null @@ -1,116 +0,0 @@ -require "orion" -require "json" -require "ecr/macros" -require "pg" - -APPDB = DB.open(ENV["DATABASE_URL"]) -ID_MAXIMUM = 10_000 - -private def random_world - id = rand(1..ID_MAXIMUM) - id, random_number = APPDB.query_one("SELECT id, randomNumber FROM world WHERE id = $1", id, as: {Int32, Int32}) - {id: id, randomNumber: random_number} -end - -private def set_world(world) - APPDB.exec("UPDATE world SET randomNumber = $1 WHERE id = $2", world[:randomNumber], world[:id]) - world -end - -private def fortunes - data = Array(NamedTuple(id: Int32, message: String)).new - - APPDB.query_each("SELECT id, message FROM Fortune") do |rs| - data.push({id: rs.read(Int32), message: rs.read(String)}) - end - - data -end - -private def sanitized_query_count(request) - queries = request.query_params["queries"]? || "1" - queries = queries.to_i? || 1 - queries.clamp(1..500) -end - -router Bench do - # - # Basic Tests - # - - # Test 1: JSON Serialization - get "/json" do |context| - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "application/json" - - context.response.puts({message: "Hello, World!"}.to_json) - end - - # Test 2: Plaintext - get "/plaintext" do |context| - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "text/plain" - - context.response.print "Hello, World!" - end - - # - # Postgres DatabaseTests - # - - # Postgres Test 3: Single database query - get "/db" do |context| - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "application/json" - - context.response.puts random_world.to_json - end - - # Postgres Test 4: Multiple database query - get "/queries" do |context| - results = (1..sanitized_query_count(context.request)).map do - random_world - end - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "application/json" - - context.response.puts results.to_json - end - - # Postgres Test 5: HMTL template render - get "/fortunes" do |context| - data = fortunes - additional_fortune = { - id: 0, - message: "Additional fortune added at request time.", - } - data.push(additional_fortune) - data.sort_by! 
{ |fortune| fortune[:message] } - io = IO::Memory.new - ECR.embed "views/fortunes.ecr", io - - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "text/html; charset=UTF-8" - - context.response.puts io.to_s - end - - # Postgres Test 6: Data updates - get "/updates" do |context| - updated = (1..sanitized_query_count(context.request)).map do - set_world({id: random_world[:id], randomNumber: rand(1..ID_MAXIMUM)}) - end - context.response.headers["Server"] = "Orion" - context.response.headers["Date"] = HTTP.format_time(Time.utc) - context.response.headers["content-type"] = "application/json" - - context.response.puts updated.to_json - end -end - -Bench.listen(host: "0.0.0.0", port: 8080, reuse_port: true) \ No newline at end of file diff --git a/frameworks/Crystal/orion/orion.dockerfile b/frameworks/Crystal/orion/orion.dockerfile deleted file mode 100644 index 74cc4f248b0..00000000000 --- a/frameworks/Crystal/orion/orion.dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM crystallang/crystal:0.34.0 - -WORKDIR /orion -COPY views views -COPY run.sh run.sh -COPY orion.cr orion.cr -COPY shard.yml shard.yml - -ENV DATABASE_URL postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?initial_pool_size=56&max_idle_pool_size=56 - -RUN shards install -RUN crystal build --release --no-debug orion.cr - -EXPOSE 8080 - -CMD bash run.sh diff --git a/frameworks/Crystal/orion/run.sh b/frameworks/Crystal/orion/run.sh deleted file mode 100644 index 35ae5ca3d2f..00000000000 --- a/frameworks/Crystal/orion/run.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -for i in $(seq 1 $(nproc --all)); do - ./orion & -done - -wait diff --git a/frameworks/Crystal/orion/shard.yml b/frameworks/Crystal/orion/shard.yml deleted file mode 100644 index a451ef90aea..00000000000 --- a/frameworks/Crystal/orion/shard.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: orion -version: 0.1.0 - -authors: - - Carlos Donderis - -dependencies: - orion: - github: obsidian/orion - pg: - github: will/crystal-pg - -targets: - orion: - main: orion.cr - -crystal: 0.34.0 \ No newline at end of file diff --git a/frameworks/Crystal/orion/views/fortunes.ecr b/frameworks/Crystal/orion/views/fortunes.ecr deleted file mode 100644 index d2842b8bf4b..00000000000 --- a/frameworks/Crystal/orion/views/fortunes.ecr +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - <% data.each do |fortune| %> - - - - - <% end %> -
-      <tr><th>id</th><th>message</th></tr>
-      <tr><td><%= fortune[:id] %></td><td><%= HTML.escape(fortune[:message]) %></td></tr>
- - diff --git a/frameworks/D/hunt/README.md b/frameworks/D/hunt/README.md deleted file mode 100644 index 5c226e123b0..00000000000 --- a/frameworks/D/hunt/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Hunt Benchmarking Test - -This is the Hunt portion of a [benchmarking test suite](../) comparing a variety of web development platforms. - - -## Requirements -* Dlang > 2.077 - -## Test URLs - -### PlanText Test - - http://localhost:8080/plaintext - -### JSON Encoding Test - - http://localhost:8080/json - -### Single database query - - http://localhost:8080/db - -### Multiple database queries - - http://localhost:8080//queries?queries=10 - -### Database updates - - http://localhost:8080/updates?queries=10 diff --git a/frameworks/D/hunt/build.sh b/frameworks/D/hunt/build.sh deleted file mode 100755 index 74161415831..00000000000 --- a/frameworks/D/hunt/build.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -rm -rf picohttpparser -git clone https://github.com/h2o/picohttpparser.git -cp patches/Makefile picohttpparser -cd picohttpparser -make package - - -rm -rf http-parser -git clone https://github.com/nodejs/http-parser.git -cd http-parser -make package \ No newline at end of file diff --git a/frameworks/D/hunt/dub.json b/frameworks/D/hunt/dub.json deleted file mode 100644 index f8b061449f5..00000000000 --- a/frameworks/D/hunt/dub.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "hunt-minihttp", - "targetType": "executable", - "description": "A mini http server powered by Hunt.", - "copyright": "Copyright (C) 2017-2020, HuntLabs", - "homepage": "https://www.huntlabs.net", - "license": "Apache-2.0", - "dependencies": { - "hunt": "~>1.5.0-beta.3", - "std_data_json": "~>0.18.2" - }, - "versions": [ - "POSTGRESQL1" - ], - "configurations": [ - { - "name": "default", - "sourcePaths": ["http"], - "libs-posix": [ - "http_parser" - ], - "lflags-posix": [ - "-Lhttp-parser/" - ], - "versions": [ - "HTTP" - ] - }, - { - "name": "minihttp", - "sourcePaths": ["pico"], - "libs-posix": [ - "picohttpparser" - ], - "lflags-posix": [ - "-Lpicohttpparser/" - ], - "versions": [ - "MINIHTTP" - ] - }, - { - "name": "mmap", - "sourcePaths": ["mmap"], - "libs-posix": [ - "picohttpparser" - ], - "lflags-posix": [ - "-Lpicohttpparser/" - ], - "versions": [ - "MMAP" - ] - } - ], - "subConfigurations": { - "hunt-database": "postgresql" - } - -} diff --git a/frameworks/D/hunt/http/app.d b/frameworks/D/hunt/http/app.d deleted file mode 100644 index fc9dd2c4706..00000000000 --- a/frameworks/D/hunt/http/app.d +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Collie - An asynchronous event-driven network framework using Dlang development - * - * Copyright (C) 2015-2018 Shanghai Putao Technology Co., Ltd - * - * Developer: Putao's Dlang team - * - * Licensed under the Apache-2.0 License. 
- * - */ -import std.getopt; -import std.stdio; - -//import hunt.database; -import hunt.io; -import hunt.system.Memory : totalCPUs; -import http.Processor; -import http.Server; -import http.DemoProcessor; - -void main(string[] args) { - ushort port = 8080; - GetoptResult o = getopt(args, "port|p", "Port (default 8080)", &port); - if (o.helpWanted) { - defaultGetoptPrinter("A simple http server powered by Hunt!", o.options); - return; - } - - //version (POSTGRESQL) { - // DatabaseOption options; - // debug { - // options = new DatabaseOption( - // "postgresql://benchmarkdbuser:benchmarkdbpass@10.1.11.44:5432/hello_world?charset=utf-8"); - // } else { - // options = new DatabaseOption( - // "postgresql://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?charset=utf-8"); - // } - // - // options.setMinimumConnection(totalCPUs*3); - // options.setMaximumConnection(totalCPUs*3); - // dbConnection = new Database(options); - //} - - AbstractTcpServer httpServer = new HttpServer!(DemoProcessor)("0.0.0.0", port, totalCPUs); - writefln("listening on http://%s", httpServer.bindingAddress.toString()); - httpServer.start(); -} diff --git a/frameworks/D/hunt/http/http/DemoProcessor.d b/frameworks/D/hunt/http/http/DemoProcessor.d deleted file mode 100644 index 81c51c494e4..00000000000 --- a/frameworks/D/hunt/http/http/DemoProcessor.d +++ /dev/null @@ -1,252 +0,0 @@ -module http.DemoProcessor; - - - -// import stdx.data.json; -import std.json; - -//import hunt.database; -import hunt.io; -import http.Processor; -import http.HttpURI; -import http.UrlEncoded; -import hunt.logging.ConsoleLogger : trace, warning, tracef; - -import std.algorithm; -import std.array; -import std.exception; -import std.random; -import std.string; - -version (POSTGRESQL) { - __gshared Database dbConnection; -} - -enum HttpHeader textHeader = HttpHeader("Content-Type", "text/plain; charset=UTF-8"); -enum HttpHeader htmlHeader = HttpHeader("Content-Type", "text/html; charset=UTF-8"); -enum HttpHeader jsonHeader = HttpHeader("Content-Type", "application/json; charset=UTF-8"); - - -enum plaintextLength = "/plaintext".length; -enum jsonLength = "/json".length; -enum dbLength = "/db".length; -enum fortunesLength = "/fortunes".length; - -class DemoProcessor : HttpProcessor { - version (POSTGRESQL) HttpURI uri; - - this(TcpStream client) { - version (POSTGRESQL) uri = new HttpURI(); - super(client); - } - - override void onComplete(HttpRequest req) { - string path = req.uri; - if(path.length == plaintextLength) { // plaintext - respondWith("Hello, World!", 200, textHeader); - } else if(path.length == jsonLength) { // json - JSONValue js = JSONValue(["message" : JSONValue("Hello, World!")]); - respondWith(js.toJSON(), 200, jsonHeader); - } else { - - version (POSTGRESQL) { - if(path.length == dbLength) { - respondSingleQuery(); - } else if(path.length == fortunesLength) { - respondFortunes(); - } else { - handleDbUpdate(path); - } - - } else { - respondWith404(); - } - } - } - - - private void respondWith404() { - version (POSTGRESQL) { - respondWith("The available paths are: /plaintext, /json, /db, /fortunes," ~ - " /queries?queries=number, /updates?queries=number", 404); - } else { - respondWith("The available paths are: /plaintext, /json", 404); - } - } - - version (POSTGRESQL) { - private void handleDbUpdate(string url) { - uri.parse(url); - - switch(uri.getPath()) { - case "/queries": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - trace(queriesMap.toString()); - if 
(!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - - respondMultipleQuery(number); - break; - - - case "/updates": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - if (!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - respondUpdates(number); - break; - - default: - respondWith404(); - break; - } - } - - - private void respondSingleQuery() { - int id = uniform(1, 10001); - string query = "SELECT id, randomNumber FROM world WHERE id = " ~ id.to!string; - ResultSet rs = dbConnection.query(query); - - JSONValue js = JSONValue(["id" : JSONValue(id), "randomNumber" - : JSONValue(to!int(rs.front()[0]))]); - - respondWith(js.toJSON(), 200, jsonHeader); - } - - private void respondMultipleQuery(int queries) { - if (queries < 1) - queries = 1; - else if (queries > 500) - queries = 500; - - JSONValue[] arr = new JSONValue[queries]; - for (int i = 0; i < queries; i++) { - immutable id = uniform(1, 10001); - immutable query = "SELECT id, randomNumber FROM world WHERE id = " ~ id.to!string; - ResultSet rs = dbConnection.query(query); - - arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" - : JSONValue(to!int(rs.front()[0]))]); - } - JSONValue js = JSONValue(arr); - respondWith(js.toJSON(), 200, jsonHeader); - } - - private void respondFortunes() { - immutable query = "SELECT id, message::text FROM Fortune"; - ResultSet rs = dbConnection.query(query); - FortuneModel[] data = rs.map!(f => FortuneModel(f["id"].to!int, f["message"])).array; - data ~= FortuneModel(0, "Additional fortune added at request time."); - data.sort!((a, b) => a.message < b.message); - // trace(data); - - respondWith(randerFortunes(data), 200, htmlHeader); - } - - static string randerFortunes(FortuneModel[] data) { - Appender!string sb; - sb.put(` - - - Fortunes - - - - - - -`); - - foreach (FortuneModel f; data) { - string message = replace(f.message, ">", ">"); - message = replace(message, "<", "<"); - message = replace(message, "\"", """); - sb.put(format(" \n \n \n", f.id, message)); - } - - sb.put("
idmessage
%d%s
\n \n"); - - return sb.data; - } - - private void respondUpdates(int queries) { - if (queries < 1) - queries = 1; - else if (queries > 500) - queries = 500; - - JSONValue[] arr = new JSONValue[queries]; - for (int i = 0; i < queries; i++) { - immutable id = uniform(1, 10001); - immutable idString = id.to!string; - immutable query = "SELECT id, randomNumber FROM world WHERE id = " ~ idString; - ResultSet rs = dbConnection.query(query); - int randomNumber = to!int(rs.front()[0]); - debug tracef("id=%d, randomNumber=%d", id, randomNumber); - - randomNumber = uniform(1, 10001); - string updateSql = "UPDATE world SET randomNumber = " - ~ randomNumber.to!string ~ " WHERE id = " ~ idString; - int r = dbConnection.execute(updateSql); - // debug tracef("r=%d", r); - - arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" : JSONValue(randomNumber)]); - } - - JSONValue js = JSONValue(arr); - respondWith(js.toJSON(), 200, jsonHeader); - } - } -} - -struct FortuneModel { - int id; - string message; -} diff --git a/frameworks/D/hunt/http/http/HttpURI.d b/frameworks/D/hunt/http/http/HttpURI.d deleted file mode 100644 index 64c7b695d50..00000000000 --- a/frameworks/D/hunt/http/http/HttpURI.d +++ /dev/null @@ -1,1168 +0,0 @@ -module http.HttpURI; - - - -import hunt.collection.MultiMap; - -import hunt.Exceptions; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; -import http.UrlEncoded; - -import std.array; -import std.conv; -import std.string; - -import hunt.logging; - - -/** - * Http URI. Parse a HTTP URI from a string or byte array. Given a URI - * http://user@host:port/path/info;param?query#fragment this class - * will split it into the following undecoded optional elements: - *
- * {@link #getScheme()} - http:
- * {@link #getAuthority()} - //name@host:port
- * {@link #getHost()} - host
- * {@link #getPort()} - port
- * {@link #getPath()} - /path/info
- * {@link #getParam()} - param
- * {@link #getQuery()} - query
- * {@link #getFragment()} - fragment
- *
-   https://bob:bobby@www.lunatech.com:8080/file;p=1?q=2#third
-   \___/  \_/ \___/  \______________/ \__/\_______/ \_/ \___/
-     |     |    |            |          |      |  \_/  |   |
-  Scheme  User Password     Host       Port   Path  |  | Fragment
-     \_____________________________/                |  Query
-                  |                        Path parameter
-              Authority
- *
- * Any parameters will be returned from {@link #getPath()}, but are excluded - * from the return value of {@link #getDecodedPath()}. If there are multiple - * parameters, the {@link #getParam()} method returns only the last one. - * - * See_Also: - * https://stackoverflow.com/questions/1634271/url-encoding-the-space-character-or-20 - * https://web.archive.org/web/20151218094722/http://blog.lunatech.com/2009/02/03/what-every-web-developer-must-know-about-url-encoding - */ -class HttpURI { - private enum State { - START, HOST_OR_PATH, SCHEME_OR_PATH, HOST, IPV6, PORT, PATH, PARAM, QUERY, FRAGMENT, ASTERISK - } - - private string _scheme; - private string _user; - private string _host; - private int _port; - private string _path; - private string _param; - private string _query; - private string _fragment; - - string _uri; - string _decodedPath; - - /** - * Construct a normalized URI. Port is not set if it is the default port. - * - * @param scheme - * the URI scheme - * @param host - * the URI hose - * @param port - * the URI port - * @param path - * the URI path - * @param param - * the URI param - * @param query - * the URI query - * @param fragment - * the URI fragment - * @return the normalized URI - */ - static HttpURI createHttpURI(string scheme, string host, int port, string path, string param, string query, - string fragment) { - if (port == 80 && (scheme == "http")) - port = 0; - if (port == 443 && (scheme == "https")) - port = 0; - return new HttpURI(scheme, host, port, path, param, query, fragment); - } - - this() { - } - - this(string scheme, string host, int port, string path, string param, string query, string fragment) { - _scheme = scheme; - _host = host; - _port = port; - _path = path; - _param = param; - _query = query; - _fragment = fragment; - } - - this(HttpURI uri) { - this(uri._scheme, uri._host, uri._port, uri._path, uri._param, uri._query, uri._fragment); - _uri = uri._uri; - } - - this(string uri) { - _port = -1; - parse(State.START, uri); - } - - // this(URI uri) { - // _uri = null; - - // _scheme = uri.getScheme(); - // _host = uri.getHost(); - // if (_host == null && uri.getRawSchemeSpecificPart().startsWith("//")) - // _host = ""; - // _port = uri.getPort(); - // _user = uri.getUserInfo(); - // _path = uri.getRawPath(); - - // _decodedPath = uri.getPath(); - // if (_decodedPath != null) { - // int p = _decodedPath.lastIndexOf(';'); - // if (p >= 0) - // _param = _decodedPath.substring(p + 1); - // } - // _query = uri.getRawQuery(); - // _fragment = uri.getFragment(); - - // _decodedPath = null; - // } - - this(string scheme, string host, int port, string pathQuery) { - _uri = null; - - _scheme = scheme; - _host = host; - _port = port; - - parse(State.PATH, pathQuery); - - } - - void parse(string uri) { - clear(); - _uri = uri; - parse(State.START, uri); - } - - /** - * Parse according to https://tools.ietf.org/html/rfc7230#section-5.3 - * - * @param method - * the request method - * @param uri - * the request uri - */ - void parseRequestTarget(string method, string uri) { - clear(); - _uri = uri; - - if (method == "CONNECT") - _path = uri; - else - parse(uri.startsWith("/") ? 
State.PATH : State.START, uri); - } - - // deprecated("") - // void parseConnect(string uri) { - // clear(); - // _uri = uri; - // _path = uri; - // } - - void parse(string uri, int offset, int length) { - clear(); - int end = offset + length; - _uri = uri.substring(offset, end); - parse(State.START, uri); - } - - private void parse(State state, string uri) { - bool encoded = false; - int end = cast(int)uri.length; - int mark = 0; - int path_mark = 0; - char last = '/'; - for (int i = 0; i < end; i++) { - char c = uri[i]; - - final switch (state) { - case State.START: { - switch (c) { - case '/': - mark = i; - state = State.HOST_OR_PATH; - break; - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - // assume empty path (if seen at start) - _path = ""; - mark = i + 1; - state = State.QUERY; - break; - case '#': - mark = i + 1; - state = State.FRAGMENT; - break; - case '*': - _path = "*"; - state = State.ASTERISK; - break; - - case '.': - path_mark = i; - state = State.PATH; - encoded = true; - break; - - default: - mark = i; - if (_scheme == null) - state = State.SCHEME_OR_PATH; - else { - path_mark = i; - state = State.PATH; - } - break; - } - - continue; - } - - case State.SCHEME_OR_PATH: { - switch (c) { - case ':': - // must have been a scheme - _scheme = uri.substring(mark, i); - // Start again with scheme set - state = State.START; - break; - - case '/': - // must have been in a path and still are - state = State.PATH; - break; - - case ';': - // must have been in a path - mark = i + 1; - state = State.PARAM; - break; - - case '?': - // must have been in a path - _path = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - - case '%': - // must have be in an encoded path - encoded = true; - state = State.PATH; - break; - - case '#': - // must have been in a path - _path = uri.substring(mark, i); - state = State.FRAGMENT; - break; - - default: - break; - } - continue; - } - - case State.HOST_OR_PATH: { - switch (c) { - case '/': - _host = ""; - mark = i + 1; - state = State.HOST; - break; - - case '@': - case ';': - case '?': - case '#': - // was a path, look again - i--; - path_mark = mark; - state = State.PATH; - break; - - case '.': - // it is a path - encoded = true; - path_mark = mark; - state = State.PATH; - break; - - default: - // it is a path - path_mark = mark; - state = State.PATH; - } - continue; - } - - case State.HOST: { - switch (c) { - case '/': - _host = uri.substring(mark, i); - path_mark = mark = i; - state = State.PATH; - break; - case ':': - if (i > mark) - _host = uri.substring(mark, i); - mark = i + 1; - state = State.PORT; - break; - case '@': - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - _user = uri.substring(mark, i); - mark = i + 1; - break; - - case '[': - state = State.IPV6; - break; - - default: - break; - } - break; - } - - case State.IPV6: { - switch (c) { - case '/': - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - case ']': - c = uri.charAt(++i); - _host = uri.substring(mark, i); - if (c == ':') { - mark = i + 1; - state = State.PORT; - } else { - path_mark = mark = i; - state = State.PATH; - } - break; - - default: - break; - } - - break; - } - - case State.PORT: { - if (c == '@') { - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - // It wasn't a port, but a password! 
- _user = _host ~ ":" ~ uri.substring(mark, i); - mark = i + 1; - state = State.HOST; - } else if (c == '/') { - // _port = TypeUtils.parseInt(uri, mark, i - mark, 10); - _port = to!int(uri[mark .. i], 10); - path_mark = mark = i; - state = State.PATH; - } - break; - } - - case State.PATH: { - switch (c) { - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '%': - encoded = true; - break; - case '.': - if ('/' == last) - encoded = true; - break; - - default: - break; - } - break; - } - - case State.PARAM: { - switch (c) { - case '?': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '/': - encoded = true; - // ignore internal params - state = State.PATH; - break; - case ';': - // multiple parameters - mark = i + 1; - break; - - default: - break; - } - break; - } - - case State.QUERY: { - if (c == '#') { - _query = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - } - break; - } - - case State.ASTERISK: { - throw new IllegalArgumentException("Bad character '*'"); - } - - case State.FRAGMENT: { - _fragment = uri.substring(mark, end); - i = end; - break; - } - } - last = c; - } - - final switch (state) { - case State.START: - break; - case State.SCHEME_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST: - if (end > mark) - _host = uri.substring(mark, end); - break; - - case State.IPV6: - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - - case State.PORT: - // _port = TypeUtils.parseInt(uri, mark, end - mark, 10); - _port = to!int(uri[mark .. end], 10); - break; - - case State.ASTERISK: - break; - - case State.FRAGMENT: - _fragment = uri.substring(mark, end); - break; - - case State.PARAM: - _path = uri.substring(path_mark, end); - _param = uri.substring(mark, end); - break; - - case State.PATH: - _path = uri.substring(path_mark, end); - break; - - case State.QUERY: - _query = uri.substring(mark, end); - break; - } - - if (!encoded) { - if (_param == null) - _decodedPath = _path; - else - _decodedPath = _path[0 .. _path.length - _param.length - 1]; - } - } - - string getScheme() { - return _scheme; - } - - string getHost() { - // Return null for empty host to retain compatibility with java.net.URI - if (_host != null && _host.length == 0) - return null; - return _host; - } - - int getPort() { - return _port; - } - - /** - * The parsed Path. - * - * @return the path as parsed on valid URI. null for invalid URI. 
- */ - string getPath() { - return _path; - } - - string getDecodedPath() { - if (_decodedPath.empty && !_path.empty) - _decodedPath = URIUtils.canonicalPath(URIUtils.decodePath(_path)); - return _decodedPath; - } - - string getParam() { - return _param; - } - - string getQuery() { - return _query; - } - - bool hasQuery() { - return _query != null && _query.length > 0; - } - - string getFragment() { - return _fragment; - } - - void decodeQueryTo(MultiMap!string parameters, string encoding = StandardCharsets.UTF_8) { - if (_query == _fragment) - return; - - UrlEncoded.decodeTo(_query, parameters, encoding); - } - - void clear() { - _uri = null; - - _scheme = null; - _host = null; - _port = -1; - _path = null; - _param = null; - _query = null; - _fragment = null; - - _decodedPath = null; - } - - bool isAbsolute() { - return _scheme != null && _scheme.length > 0; - } - - override - string toString() { - if (_uri is null) { - StringBuilder ot = new StringBuilder(); - - if (_scheme != null) - ot.append(_scheme).append(':'); - - if (_host != null) { - ot.append("//"); - if (_user != null) - ot.append(_user).append('@'); - ot.append(_host); - } - - if (_port > 0) - ot.append(':').append(_port); - - if (_path != null) - ot.append(_path); - - if (_query != null) - ot.append('?').append(_query); - - if (_fragment != null) - ot.append('#').append(_fragment); - - if (ot.length > 0) - _uri = ot.toString(); - else - _uri = ""; - } - return _uri; - } - - bool equals(Object o) { - if (o is this) - return true; - if (!(typeid(o) == typeid(HttpURI))) - return false; - return toString().equals(o.toString()); - } - - void setScheme(string scheme) { - _scheme = scheme; - _uri = null; - } - - /** - * @param host - * the host - * @param port - * the port - */ - void setAuthority(string host, int port) { - _host = host; - _port = port; - _uri = null; - } - - /** - * @param path - * the path - */ - void setPath(string path) { - _uri = null; - _path = path; - _decodedPath = null; - } - - /** - * @param path - * the decoded path - */ - // void setDecodedPath(string path) { - // _uri = null; - // _path = URIUtils.encodePath(path); - // _decodedPath = path; - // } - - void setPathQuery(string path) { - _uri = null; - _path = null; - _decodedPath = null; - _param = null; - _fragment = null; - if (path != null) - parse(State.PATH, path); - } - - void setQuery(string query) { - _query = query; - _uri = null; - } - - // URI toURI() { - // return new URI(_scheme, null, _host, _port, _path, _query == null ? null : UrlEncoded.decodestring(_query), - // _fragment); - // } - - string getPathQuery() { - if (_query == null) - return _path; - return _path ~ "?" ~ _query; - } - - bool hasAuthority() { - return _host != null; - } - - string getAuthority() { - if (_port > 0) - return _host ~ ":" ~ to!string(_port); - return _host; - } - - string getUser() { - return _user; - } - -} - - -/** - * Parse an authority string into Host and Port - *

- * Parse a string in the form "host:port", handling IPv4 and IPv6 hosts

- * - */ -class URIUtils -{ - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters - */ - static string decodePath(string path) { - return decodePath(path, 0, cast(int)path.length); - } - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of UTF-8 path - */ - static string decodePath(string path, int offset, int length) { - try { - StringBuilder builder = null; - - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - //builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - //builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException("Bad URI % encoding"); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - - break; - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } catch (Exception e) { - // System.err.println(path.substring(offset, offset + length) + " " + e); - error(e.toString); - return decodeISO88591Path(path, offset, length); - } - } - - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of ISO-8859-1 path - */ - private static string decodeISO88591Path(string path, int offset, int length) { - StringBuilder builder = null; - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - // builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - //builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException(""); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - break; - - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } - - /* ------------------------------------------------------------ */ - - /** - * Convert a decoded path to a canonical form. - *

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.
- *
- * - * @param path the path to convert, decoded, with path separators '/' and no queries. - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalPath(string path) { - if (path.empty) - return path; - - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - break; - - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a cananonical form. - *

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.
- *
- * - * @param path the path to convert (expects URI/URL form, encoded, and with path separators '/') - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalEncodedPath(string path) { - if (path.empty) - return path; - - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - case '?': - return path; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - case '?': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - if (c == '?') - canonical.append(c); - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - if (c == '?') - canonical.append(c); - break; - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a compact form. - * All instances of "//" and "///" etc. are factored out to single "/" - * - * @param path the path to compact - * @return the compacted path - */ - static string compactPath(string path) { - if (path == null || path.length == 0) - return path; - - int state = 0; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - return path; - case '/': - state++; - if (state == 2) - break loop; - break; - default: - state = 0; - } - i++; - } - - if (state < 2) - return path; - - StringBuilder buf = new StringBuilder(path.length); - buf.append(path, 0, i); - - loop2: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - buf.append(path, i, end); - break loop2; - case '/': - if (state++ == 0) - buf.append(c); - break; - default: - state = 0; - buf.append(c); - } - i++; - } - - return buf.toString(); - } - - /* ------------------------------------------------------------ */ - - /** - * @param uri URI - * @return True if the uri has a scheme - */ - static bool hasScheme(string uri) { - for (int i = 0; i < uri.length; i++) { - char c = uri[i]; - if (c == ':') - return true; - if (!(c >= 'a' && c <= 'z' || - c >= 'A' && c <= 'Z' || - (i > 0 && (c >= '0' && c <= '9' || - c == '.' 
|| - c == '+' || - c == '-')) - )) - break; - } - return false; - } -} diff --git a/frameworks/D/hunt/http/http/Parser.d b/frameworks/D/hunt/http/http/Parser.d deleted file mode 100644 index a68ee4f8a60..00000000000 --- a/frameworks/D/hunt/http/http/Parser.d +++ /dev/null @@ -1,283 +0,0 @@ -/// Minimalistic low-overhead wrapper for nodejs/http-parser -/// Used for benchmarks with simple server -module http.Parser; - - - -private: - -import std.range.primitives; -import core.stdc.string; - -alias http_data_cb = extern (C) int function(http_parser*, const ubyte* at, size_t length); -alias http_cb = extern (C) int function(http_parser*); - -public enum HttpParserType : uint { - request = 0, - response = 1, - both = 2 -} - -public enum HttpMethod : uint { - DELETE = 0, - GET = 1, - HEAD = 2, - POST = 3, - PUT = 4, - /* pathological */ - CONNECT = 5, - OPTIONS = 6, - TRACE = 7, - /* WebDAV */ - COPY = 8, - LOCK = 9, - MKCOL = 10, - MOVE = 11, - PROPFIND = 12, - PROPPATCH = 13, - SEARCH = 14, - UNLOCK = 15, - BIND = 16, - REBIND = 17, - UNBIND = 18, - ACL = 19, - /* subversion */ - REPORT = 20, - MKACTIVITY = 21, - CHECKOUT = 22, - MERGE = 23, - /* upnp */ - MSEARCH = 24, - NOTIFY = 25, - SUBSCRIBE = 26, - UNSUBSCRIBE = 27, - /* RFC-5789 */ - PATCH = 28, - PURGE = 29, - /* CalDAV */ - MKCALENDAR = 30, - /* RFC-2068, section 19.6.1.2 */ - LINK = 31, - UNLINK = 32, - /* icecast */ - SOURCE = 33, -} - -enum HttpError : uint { - OK, - /* Callback-related errors */ - CB_message_begin, - CB_url, - CB_header_field, - CB_header_value, - CB_headers_complete, - CB_body, - CB_message_complete, - CB_status, - CB_chunk_header, - CB_chunk_complete, - /* Parsing-related errors */ - INVALID_EOF_STATE, - HEADER_OVERFLOW, - CLOSED_CONNECTION, - INVALID_VERSION, - INVALID_STATUS, - INVALID_METHOD, - INVALID_URL, - INVALID_HOST, - INVALID_PORT, - INVALID_PATH, - INVALID_QUERY_STRING, - INVALID_FRAGMENT, - LF_EXPECTED, - INVALID_HEADER_TOKEN, - INVALID_CONTENT_LENGTH, - UNEXPECTED_CONTENT_LENGTH, - INVALID_CHUNK_SIZE, - INVALID_CONSTANT, - INVALID_INTERNAL_STATE, - STRICT, - PAUSED, - UNKNOWN, -} - -struct http_parser { - /** PRIVATE **/ - uint state; // bitfield - uint nread; /* # bytes read in various scenarios */ - ulong content_length; /* # bytes in body (0 if no Content-Length header) */ - - /** READ-ONLY **/ - ushort http_major; - ushort http_minor; - // bitfield - uint status_code_method_http_errono_upgrade; - /** PUBLIC **/ - void* data; /* A pointer to get hook to the "connection" or "socket" object */ -} - -struct http_parser_settings { - http_cb on_message_begin; - http_data_cb on_url; - http_data_cb on_status; - http_data_cb on_header_field; - http_data_cb on_header_value; - http_cb on_headers_complete; - http_data_cb on_body; - http_cb on_message_complete; - /* When on_chunk_header is called, the current chunk length is stored - * in parser->content_length. - */ - http_cb on_chunk_header; - http_cb on_chunk_complete; -} - -extern (C) pure @nogc nothrow void http_parser_init(http_parser* parser, HttpParserType type); - -extern (C) pure @nogc nothrow int http_should_keep_alive(const http_parser* parser); - -/* Return a string description of the given error */ -extern (C) pure @nogc nothrow immutable(char)* http_errno_description(HttpError err); - -/* Checks if this is the final chunk of the body. */ -extern (C) pure @nogc nothrow int http_body_is_final(const http_parser* parser); - -/* Executes the parser. Returns number of parsed bytes. Sets -* `parser->http_errno` on error. 
*/ -extern (C) pure @nogc nothrow size_t http_parser_execute(http_parser* parser, - const http_parser_settings* settings, const ubyte* data, size_t len); - -// extern (C) uint http_parser_flags(const http_parser* parser); - -uint http_parser_flags(const http_parser* parser) { - // return parser.status_code | (parser.method<<16) | (parser.http_errno << 24) | (parser.upgrade << 31); - return parser.status_code_method_http_errono_upgrade; -} - -// =========== Public interface starts here ============= - -public: - -class HttpException : Exception { - HttpError error; - - pure @nogc nothrow this(HttpError error, string file = __FILE__, - size_t line = __LINE__, Throwable nextInChain = null) { - this.error = error; - immutable char* str = http_errno_description(error); - super(str[0 .. strlen(str)], file, line, nextInChain); - } -} - -struct HttpParser(Interceptor) { - http_parser parser; - http_parser_settings settings; - Interceptor interceptor; - Throwable failure; - uint flags; - - static generateCallback(string cName, string dName) { - import std.format; - - return format(` - static if(__traits(hasMember, interceptor, "%2$s")) - { - extern(C) static int %1$s(http_parser* p) { - auto parser = cast(HttpParser*)p; - try { - parser.flags = http_parser_flags(p); - return parser.interceptor.%2$s(parser); - } - catch (Throwable t) { - parser.failure = t; - return 1; - } - } - settings.%1$s = &%1$s; - } - `, cName, dName); - } - - static generateCallbackWithData(string cName, string dName) { - import std.format; - - return format(` - static if(__traits(hasMember, interceptor, "%2$s")) - { - extern(C) static int %1$s(http_parser* p, const ubyte* at, size_t size) { - auto parser = cast(HttpParser*)p; - try { - parser.flags = http_parser_flags(p); - return parser.interceptor.%2$s(parser, at[0..size]); - } - catch (Throwable t) { - parser.failure = t; - return 1; - } - } - settings.%1$s = &%1$s; - } - `, cName, dName); - } - - @property HttpError errorCode() pure @safe nothrow { - return cast(HttpError)((flags >> 24) & 0x7f); - } - -public: - alias interceptor this; - - @property uint status() pure @safe nothrow { - return flags & 0xffff; - } - - @property HttpMethod method() pure @safe nothrow { - return cast(HttpMethod)((flags >> 16) & 0xFF); - } - - this(Interceptor interceptor, HttpParserType type) { - this.interceptor = interceptor; - http_parser_init(&parser, type); - mixin(generateCallback("on_message_begin", "onMessageBegin")); - mixin(generateCallbackWithData("on_url", "onUrl")); - mixin(generateCallbackWithData("on_status", "onStatus")); - mixin(generateCallbackWithData("on_body", "onBody")); - mixin(generateCallbackWithData("on_header_field", "onHeaderField")); - mixin(generateCallbackWithData("on_header_value", "onHeaderValue")); - mixin(generateCallback("on_headers_complete", "onHeadersComplete")); - mixin(generateCallback("on_message_complete", "onMessageComplete")); - } - - @property bool shouldKeepAlive() pure nothrow { - return http_should_keep_alive(&parser) == 1; - } - - @property ushort httpMajor() @safe pure nothrow { - return parser.http_major; - } - - @property ushort httpMinor() @safe pure nothrow { - return parser.http_minor; - } - - size_t execute(const(ubyte)[] chunk) { - size_t size = http_parser_execute(&parser, &settings, chunk.ptr, chunk.length); - flags = http_parser_flags(&parser); - if (errorCode) { - auto f = failure; - failure = null; - if (f is null) - f = new HttpException(errorCode); - throw f; - } - return size; - } - - size_t execute(const(char)[] str) { - 
return execute(cast(const(ubyte)[]) str); - } -} - -auto httpParser(Interceptor)(Interceptor interceptor, HttpParserType type) { - return HttpParser!Interceptor(interceptor, type); -} diff --git a/frameworks/D/hunt/http/http/Processor.d b/frameworks/D/hunt/http/http/Processor.d deleted file mode 100644 index c920c738593..00000000000 --- a/frameworks/D/hunt/http/http/Processor.d +++ /dev/null @@ -1,273 +0,0 @@ -/// An example "HTTP server" with poor usability but sensible performance -/// -module http.Processor; - - - -import std.array, std.exception, std.format, std.algorithm.mutation, std.socket; -import core.stdc.stdlib; -import core.thread, core.atomic; -import http.Parser; - -import hunt.collection.ByteBuffer; -import hunt.logging; -import hunt.io; -import hunt.util.DateTime; -import std.array; -import std.string; -import core.stdc.string; -import core.stdc.stdlib; -import std.stdio; - -struct HttpHeader { - string name, value; -} - -struct HttpRequest { - HttpHeader[] headers; - HttpMethod method; - string uri; -} - -version(NO_HTTPPARSER) { -enum string ResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: Keep-Alive\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; -} - - -enum string keepAliveResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: Keep-Alive\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; -enum string nokeepAliveResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: close\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; - -abstract class HttpProcessor { -private: - enum State { - url, - field, - value, - done - } - - Appender!(char[]) outBuf; - HttpHeader[] headers; // buffer for headers - size_t header; // current header - string url; // url - alias Parser = HttpParser!HttpProcessor; - Parser parser; - ScratchPad pad; - HttpRequest request; - State state; - bool serving; - string resData; - long index1; - long index2; - long length1; - long length2; - void * keepAliveValue; - void * nokeepAliveValue; - -public: - TcpStream client; - - this(TcpStream sock) { - serving = true; - client = sock; - headers = new HttpHeader[1]; - pad = ScratchPad(16 * 1024); - parser = httpParser(this, HttpParserType.request); - index1 = keepAliveResponseData.indexOf("Date:") + 6; - length1 = keepAliveResponseData.length; - length2 = nokeepAliveResponseData.length; - keepAliveValue = malloc(length1); - nokeepAliveValue = malloc(length2); - memcpy(keepAliveValue , (cast(ubyte[])keepAliveResponseData).ptr, length1); - memcpy(nokeepAliveValue , (cast(ubyte[])nokeepAliveResponseData).ptr, length2); - } - - void run() { - client.received((ByteBuffer buffer) { - version(NO_HTTPPARSER) { - client.write(cast(ubyte[])ResponseData); - } else { - parser.execute(cast(ubyte[]) buffer.getRemaining()); - } - - }) - .onClosed(() { - //notifyClientClosed(); - }) - .onError((string msg) { - debug warning("Error: ", msg); - }) - .start(); - } - - protected void notifyClientClosed() { - debug tracef("The connection[%s] is closed", client.remoteAddress()); - } - - void respondWith(string _body, uint status, HttpHeader[] headers...) { - //outBuf.clear(); - - if (parser.shouldKeepAlive) - { - memcpy(keepAliveValue + index1 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - client.write(cast(ubyte[]) keepAliveValue[0 .. 
length1]); - }else - { - memcpy(nokeepAliveValue + index2 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - client.write(cast(ubyte[]) nokeepAliveValue[0 .. length2]); - } - - //return respondWith( _body, status, headers); - } - - //void respondWith(string _body, uint status, HttpHeader[] headers...) { - // outBuf.clear(); - // //if (parser.shouldKeepAlive) - // //{ - // // formattedWrite(outBuf, "HTTP/1.1 %s OK\r\n Server: Hunt/1.0\r\n Date: %s\r\n Content-Type: text/plain; charset=UTF-8\r\n Content-Length: %d\r\n\r\n%s", status, DateTimeHelper.getDateAsGMT(), _body.length, _body); - // //}else - // //{ - // // formattedWrite(outBuf, "HTTP/1.1 %s OK\r\n Server: Hunt/1.0\r\n Date: %s\r\n Connection: close\r\n Content-Type: text/plain; charset=UTF-8\r\n Content-Length: %d\r\n\r\n%s", status, DateTimeHelper.getDateAsGMT(), _body.length, _body); - // //} - // //warning("%s", outBuf.data); - // //client.write(cast(ubyte[]) outBuf.data); - // formattedWrite(outBuf, "HTTP/1.1 %s OK\r\n", status); - // outBuf.put("Server: Hunt/1.0\r\n"); - // - // formattedWrite(outBuf, "Date: %s\r\n", DateTimeHelper.getDateAsGMT()); - // if (!parser.shouldKeepAlive) - // outBuf.put("Connection: close\r\n"); - // foreach (ref hdr; headers) { - // outBuf.put(hdr.name); - // outBuf.put(": "); - // outBuf.put(hdr.value); - // outBuf.put("\r\n"); - // } - // formattedWrite(outBuf, "Content-Length: %d\r\n\r\n", _body.length); - // outBuf.put( _body); - // warning("%s", outBuf.data); - // client.write(cast(ubyte[]) outBuf.data); // TODO: short-writes are quite possible - //} - - void onStart(HttpRequest req) { - } - - void onChunk(HttpRequest req, const(ubyte)[] chunk) { - } - - void onComplete(HttpRequest req); - - final int onMessageBegin(Parser* parser) { - outBuf.clear(); - header = 0; - pad.reset(); - state = State.url; - return 0; - } - - final int onUrl(Parser* parser, const(ubyte)[] chunk) { - pad.put(chunk); - return 0; - } - - final int onBody(Parser* parser, const(ubyte)[] chunk) { - onChunk(request, chunk); - return 0; - } - - final int onHeaderField(Parser* parser, const(ubyte)[] chunk) { - final switch (state) { - case State.url: - url = pad.sliceStr; - break; - case State.value: - headers[header].value = pad.sliceStr; - header += 1; - if (headers.length <= header) - headers.length += 1; - break; - case State.field: - case State.done: - break; - } - state = State.field; - pad.put(chunk); - return 0; - } - - final int onHeaderValue(Parser* parser, const(ubyte)[] chunk) { - if (state == State.field) { - headers[header].name = pad.sliceStr; - } - pad.put(chunk); - state = State.value; - return 0; - } - - final int onHeadersComplete(Parser* parser) { - headers[header].value = pad.sliceStr; - header += 1; - request = HttpRequest(headers[0 .. header], parser.method, url); - onStart(request); - state = State.done; - return 0; - } - - final int onMessageComplete(Parser* parser) { - import std.stdio; - - if (state == State.done) { - try { - onComplete(request); - } catch(Exception ex) { - respondWith(ex.msg, 500); - } - } - if (!parser.shouldKeepAlive) - serving = false; - return 0; - } - -} - -// ==================================== IMPLEMENTATION DETAILS ============================================== -private: - -struct ScratchPad { - ubyte* ptr; - size_t capacity; - size_t last, current; - - this(size_t size) { - ptr = cast(ubyte*) malloc(size); - capacity = size; - } - - void put(const(ubyte)[] slice) { - enforce(current + slice.length <= capacity, "HTTP headers too long"); - ptr[current .. 
current + slice.length] = slice[]; - current += slice.length; - } - - const(ubyte)[] slice() { - auto data = ptr[last .. current]; - last = current; - return data; - } - - string sliceStr() { - return cast(string) slice; - } - - void reset() { - current = 0; - last = 0; - } - - @disable this(this); - - ~this() { - free(ptr); - ptr = null; - } -} diff --git a/frameworks/D/hunt/http/http/Server.d b/frameworks/D/hunt/http/http/Server.d deleted file mode 100644 index 5b58255842a..00000000000 --- a/frameworks/D/hunt/http/http/Server.d +++ /dev/null @@ -1,127 +0,0 @@ -module http.Server; - - - -import hunt.event; -import hunt.io; -import hunt.logging.ConsoleLogger; -import hunt.system.Memory : totalCPUs; -import hunt.util.DateTime; - -import std.array; -import std.conv; -import std.json; -import std.socket; -import std.string; -import std.stdio; - -import http.Parser; -import http.Processor; -import std.experimental.allocator; - -shared static this() { - //DateTimeHelper.startClock(); -} - -import hunt.io.channel; - -/** -*/ -abstract class AbstractTcpServer { - protected EventLoopGroup _group = null; - protected bool _isStarted = false; - protected Address _address; - protected int _workersCount; - TcpStreamOptions _tcpStreamoption; - - - this(Address address, int thread = (totalCPUs - 1), int workersCount = 0) { - this._address = address; - _tcpStreamoption = TcpStreamOptions.create(); - _tcpStreamoption.bufferSize = 1024 * 2; - _tcpStreamoption.isKeepalive = false; - _group = new EventLoopGroup(cast(uint) thread); - this._workersCount = workersCount; - } - - @property Address bindingAddress() { - return _address; - } - - void start() { - if (_isStarted) - return; - _isStarted = true; - - Socket server = new TcpSocket(); - server.setOption(SocketOptionLevel.SOCKET, SocketOption.REUSEADDR, true); - server.bind(new InternetAddress("0.0.0.0", 8080)); - server.listen(8192); - - trace("Launching http server"); - debug { - _group.start(_tcpStreamoption.bufferSize); - } else { - _group.start(100, _tcpStreamoption.bufferSize); - } - - if (_workersCount) { - defaultPoolThreads = _workersCount; - workerPool(); // Initilize worker poll - } - writefln("worker count: %d", _workersCount); - writefln("IO thread: %d", _group.size); - - while (true) { - try { - version (HUNT_DEBUG) - trace("Waiting for server.accept()"); - - Socket socket = server.accept(); - version (HUNT_DEBUG) { - infof("new connection from %s, fd=%d", - socket.remoteAddress.toString(), socket.handle()); - } - // EventLoop loop = _group.nextLoop(); - EventLoop loop = _group.nextLoop(socket.handle); - //TcpStream stream = new TcpStream(loop, socket, _tcpStreamoption); - TcpStream stream = theAllocator.make!TcpStream(loop, socket, _tcpStreamoption); - onConnectionAccepted(stream); - } catch (Exception e) { - warningf("Failure on accepting %s", e); - break; - } - } - _isStarted = false; - } - - protected void onConnectionAccepted(TcpStream client); - - void stop() { - if (!_isStarted) - return; - _isStarted = false; - _group.stop(); - } -} - -alias ProcessorCreater = HttpProcessor delegate(TcpStream client); - -/** -*/ -class HttpServer(T) : AbstractTcpServer if (is(T : HttpProcessor)) { - - this(string ip, ushort port, int thread = (totalCPUs - 1)) { - super(new InternetAddress(ip, port), thread); - } - - this(Address address, int thread = (totalCPUs - 1)) { - super(address, thread); - } - - override protected void onConnectionAccepted(TcpStream client) { - HttpProcessor httpProcessor = new T(client); - httpProcessor.run(); - } - -} diff 
--git a/frameworks/D/hunt/http/http/UrlEncoded.d b/frameworks/D/hunt/http/http/UrlEncoded.d deleted file mode 100644 index 6e111f456aa..00000000000 --- a/frameworks/D/hunt/http/http/UrlEncoded.d +++ /dev/null @@ -1,362 +0,0 @@ -module http.UrlEncoded; - - - -import hunt.collection.List; -import hunt.collection.MultiMap; -import hunt.collection.StringBuffer; -import hunt.Exceptions; -import hunt.logging; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; - -import std.conv; -import std.array; - - -/** - * Handles coding of MIME "x-www-form-urlencoded". - *

- * This class handles the encoding and decoding for either the query string of a
- * URL or the _content of a POST HTTP request.
- *
- * Notes
- *
- * The UTF-8 charset is assumed, unless otherwise defined by either passing a
- * parameter or setting the "org.hunt.utils.UrlEncoding.charset" System
- * property.
- *
- * The hashtable either contains string single values, vectors of string or
- * arrays of Strings.
- *
- * This class is only partially synchronised. In particular, simple get
- * operations are not protected from concurrent updates.
- *
- * - * @see java.net.URLEncoder - */ -class UrlEncoded : MultiMap!string { - - enum string ENCODING = StandardCharsets.UTF_8; - - - this() { - } - - this(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query, string charset) { - decodeTo(query, this, charset); - } - - /** - * Encode MultiMap with % encoding for UTF8 sequences. - * - * @return the MultiMap as a string with % encoding - */ - string encode() { - return encode(ENCODING, false); - } - - /** - * Encode MultiMap with % encoding for arbitrary string sequences. - * - * @param charset the charset to use for encoding - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset) { - return encode(charset, false); - } - - /** - * Encode MultiMap with % encoding. - * - * @param charset the charset to encode with - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset, bool equalsForNullValue) { - return encode(this, charset, equalsForNullValue); - } - - /** - * Encode MultiMap with % encoding. - * - * @param map the map to encode - * @param charset the charset to use for encoding (uses default encoding if null) - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings. - */ - static string encode(MultiMap!string map, string charset, bool equalsForNullValue) { - if (charset is null) - charset = ENCODING; - - StringBuilder result = new StringBuilder(128); - bool delim = false; - foreach(string key, List!string list; map) - { - int s = list.size(); - - if (delim) { - result.append('&'); - } - - if (s == 0) { - result.append(encodeString(key, charset)); - if (equalsForNullValue) - result.append('='); - } else { - for (int i = 0; i < s; i++) { - if (i > 0) - result.append('&'); - string val = list.get(i); - result.append(encodeString(key, charset)); - - if (val != null) { - if (val.length > 0) { - result.append('='); - result.append(encodeString(val, charset)); - } else if (equalsForNullValue) - result.append('='); - } else if (equalsForNullValue) - result.append('='); - } - } - delim = true; - } - return result.toString(); - } - - /** - * Decoded parameters to Map. - * - * @param content the string containing the encoded parameters - * @param map the MultiMap to put parsed query parameters into - * @param charset the charset to use for decoding - */ - static void decodeTo(string content, MultiMap!string map, string charset = ENCODING) { - if (charset.empty) - charset = ENCODING; - - synchronized (map) { - string key = null; - string value = null; - int mark = -1; - bool encoded = false; - for (int i = 0; i < content.length; i++) { - char c = content[i]; - switch (c) { - case '&': - int l = i - mark - 1; - value = l == 0 ? "" : - (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1, i)); - mark = i; - encoded = false; - if (key != null) { - map.add(key, value); - } else if (value != null && value.length > 0) { - map.add(value, ""); - } - key = null; - value = null; - break; - case '=': - if (key != null) - break; - key = encoded ? 
decodeString(content, mark + 1, i - mark - 1) : content.substring(mark + 1, i); - mark = i; - encoded = false; - break; - case '+': - encoded = true; - break; - case '%': - encoded = true; - break; - default: break; - } - } - - int contentLen = cast(int)content.length; - - if (key != null) { - int l = contentLen - mark - 1; - value = l == 0 ? "" : (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1)); - version(HUNT_DEBUG) tracef("key=%s, value=%s", key, value); - map.add(key, value); - } else if (mark < contentLen) { - version(HUNT_DEBUG) tracef("empty value: content=%s, key=%s", content, key); - key = encoded - ? decodeString(content, mark + 1, contentLen - mark - 1, charset) - : content.substring(mark + 1); - if (!key.empty) { - map.add(key, ""); - } - } else { - warningf("No key found."); - } - } - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @return the decoded string - */ - static string decodeString(string encoded) { - return decodeString(encoded, 0, cast(int)encoded.length); - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @param offset the offset in the encoded string to decode from - * @param length the length of characters in the encoded string to decode - * @param charset the charset to use for decoding - * @return the decoded string - */ - static string decodeString(string encoded, int offset, int length, string charset = ENCODING) { - StringBuffer buffer = null; - for (int i = 0; i < length; i++) { - char c = encoded.charAt(offset + i); - if (c < 0 || c > 0xff) { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i + 1); - } else - buffer.append(c); - } else if (c == '+') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - buffer.append(' '); - } else if (c == '%') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - byte[] ba = new byte[length]; - int n = 0; - while (c >= 0 && c <= 0xff) { - if (c == '%') { - if (i + 2 < length) { - int o = offset + i + 1; - i += 3; - //ba[n] = cast(byte) TypeUtils.parseInt(encoded, o, 2, 16); - n++; - } else { - ba[n++] = cast(byte) '?'; - i = length; - } - } else if (c == '+') { - ba[n++] = cast(byte) ' '; - i++; - } else { - ba[n++] = cast(byte) c; - i++; - } - - if (i >= length) - break; - c = encoded.charAt(offset + i); - } - - i--; - buffer.append(cast(string)(ba[0 .. n])); - - } else if (buffer !is null) - buffer.append(c); - } - - if (buffer is null) { - if (offset == 0 && encoded.length == length) - return encoded; - return encoded.substring(offset, offset + length); - } - - return buffer.toString(); - } - - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @return encoded string. - */ - static string encodeString(string string) { - return encodeString(string, ENCODING); - } - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @param charset the charset to use for encoding - * @return encoded string. 
- */ - static string encodeString(string str, string charset) { - if (charset is null) - charset = ENCODING; - byte[] bytes = cast(byte[])str; - // bytes = string.getBytes(charset); - - int len = cast(int)bytes.length; - byte[] encoded = new byte[bytes.length * 3]; - warningf("encoded"); - int n = 0; - bool noEncode = true; - - for (int i = 0; i < len; i++) { - byte b = bytes[i]; - - if (b == ' ') { - noEncode = false; - encoded[n++] = cast(byte) '+'; - } else if (b >= 'a' && b <= 'z' || - b >= 'A' && b <= 'Z' || - b >= '0' && b <= '9') { - encoded[n++] = b; - } else { - noEncode = false; - encoded[n++] = cast(byte) '%'; - byte nibble = cast(byte) ((b & 0xf0) >> 4); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - nibble = cast(byte) (b & 0xf); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - } - } - - if (noEncode) - return str; - - return cast(string)(encoded[0 .. n]); - } -} diff --git a/frameworks/D/hunt/hunt-dmd.dockerfile b/frameworks/D/hunt/hunt-dmd.dockerfile deleted file mode 100644 index 72ca05eeca7..00000000000 --- a/frameworks/D/hunt/hunt-dmd.dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -FROM dlangchina/dlang-dmd:latest - -ADD ./ /hunt -WORKDIR /hunt - -RUN apt-get update -y && apt-get install -y --no-install-recommends git && apt-get install -yqq libpq-dev libsqlite3-dev default-libmysqlclient-dev zlib1g-dev && rm -rf /var/lib/apt/lists/* && rm -rf /var/cache/apt/* - -RUN git clone https://github.com/h2o/picohttpparser.git && \ - cp -rf patches/Makefile picohttpparser && \ - cd picohttpparser && \ - make package && \ - cd .. - -RUN dub upgrade --verbose -RUN dub build --build=release --arch=x86_64 --compiler=dmd -c=mmap -f - -EXPOSE 8080 - -CMD ["./hunt-minihttp"] diff --git a/frameworks/D/hunt/hunt-http.dockerfile b/frameworks/D/hunt/hunt-http.dockerfile deleted file mode 100644 index e9fecc50b00..00000000000 --- a/frameworks/D/hunt/hunt-http.dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -FROM dlangchina/dlang-ldc:latest - -ADD ./ /hunt -WORKDIR /hunt - -RUN apt-get update -y && apt-get install -y --no-install-recommends git && rm -rf /var/lib/apt/lists/* && rm -rf /var/cache/apt/* - -RUN git clone https://github.com/nodejs/http-parser.git && \ - cd http-parser && \ - make package && \ - cd .. - -RUN dub upgrade --verbose -RUN dub build --build=release --arch=x86_64 --compiler=ldc2 -f - -EXPOSE 8080 - -CMD ["./hunt-minihttp"] diff --git a/frameworks/D/hunt/hunt.dockerfile b/frameworks/D/hunt/hunt.dockerfile deleted file mode 100644 index e688f119d8f..00000000000 --- a/frameworks/D/hunt/hunt.dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM dlangchina/dlang-ldc:latest - -ADD ./ /hunt -WORKDIR /hunt - -RUN apt-get update -y -RUN apt-get install -y --no-install-recommends make -RUN apt-get install -y --no-install-recommends git -RUN apt-get install -yqq libpq-dev libsqlite3-dev default-libmysqlclient-dev zlib1g-dev -RUN rm -rf /var/lib/apt/lists/* && rm -rf /var/cache/apt/* - -RUN git clone https://github.com/h2o/picohttpparser.git && \ - cp -rf patches/Makefile picohttpparser && \ - cd picohttpparser && \ - make package && \ - cd .. 
- -RUN dub upgrade --verbose -RUN dub build --build=release --arch=x86_64 --compiler=ldc2 -c=minihttp -f - -EXPOSE 8080 - -CMD ["./hunt-minihttp"] diff --git a/frameworks/D/hunt/mmap/DemoProcessor.d b/frameworks/D/hunt/mmap/DemoProcessor.d deleted file mode 100644 index de85df3d235..00000000000 --- a/frameworks/D/hunt/mmap/DemoProcessor.d +++ /dev/null @@ -1,267 +0,0 @@ -module DemoProcessor; - -// import stdx.data.json; -import std.json; - -import hunt.database; -import hunt.io; -import http.Common; -import http.Processor; -import http.HttpURI; -import http.UrlEncoded; -import hunt.logging.ConsoleLogger : trace, warning, tracef; - -import std.algorithm; -import std.array; -import std.exception; -import std.random; -import std.string; - -version (POSTGRESQL) { - __gshared Database dbConnection; -} - -enum HttpHeader textHeader = HttpHeader("Content-Type", "text/plain; charset=UTF-8"); -enum HttpHeader htmlHeader = HttpHeader("Content-Type", "text/html; charset=UTF-8"); -enum HttpHeader jsonHeader = HttpHeader("Content-Type", "application/json; charset=UTF-8"); - - -enum plaintextLength = "/plaintext".length; -enum jsonLength = "/json".length; -enum dbLength = "/db".length; -enum fortunesLength = "/fortunes".length; - -class DemoProcessor : HttpProcessor { - version (POSTGRESQL) HttpURI uri; - - this(TcpStream client) { - version (POSTGRESQL) uri = new HttpURI(); - super(client); - } - - override void onComplete(ref HttpRequest req) { - debug { - trace(req.uri()); - trace(req.method()); - trace(req.headers()); - } - - string path = req.uri; - // auto uri = new HttpURI(req.uri); - // uri.parse(req.uri); - // if(cmp(path, "/plaintext") == 0) { - // respondWith("Hello, World!", 200, textHeader); - // } else if(cmp(path, "/json") == 0) { - // JSONValue js = JSONValue(["message" : JSONValue("Hello, World!")]); - // respondWith(js.toJSON(), 200, jsonHeader); - // } else { - // respondWith404(); - // } - if(path.length == plaintextLength) { // plaintext - respondWith("Hello, World!", 200, textHeader); - } else if(path.length == jsonLength) { // json - JSONValue js = JSONValue(["message" : JSONValue("Hello, World!")]); - respondWith(js.toJSON(), 200, jsonHeader); - } else { - - version (POSTGRESQL) { - if(path.length == dbLength) { - respondSingleQuery(); - } else if(path.length == fortunesLength) { - respondFortunes(); - } else { - handleDbUpdate(path); - } - - } else { - respondWith404(); - } - } - } - - - private void respondWith404() { - version (POSTGRESQL) { - respondWith("The available paths are: /plaintext, /json, /db, /fortunes," ~ - " /queries?queries=number, /updates?queries=number", 404); - } else { - respondWith("The available paths are: /plaintext, /json", 404); - } - } - - version (POSTGRESQL) { - private void handleDbUpdate(string url) { - uri.parse(url); - - switch(uri.getPath()) { - case "/queries": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - trace(queriesMap.toString()); - if (!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - - respondMultipleQuery(number); - break; - - - case "/updates": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - if 
(!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - respondUpdates(number); - break; - - default: - respondWith404(); - break; - } - } - - - private void respondSingleQuery() { - int id = uniform(1, 10001); - string query = "SELECT id, randomNumber FROM world WHERE id = " ~ id.to!string; - ResultSet rs = dbConnection.query(query); - - JSONValue js = JSONValue(["id" : JSONValue(id), "randomNumber" - : JSONValue(to!int(rs.front()[0]))]); - - respondWith(js.toJSON(), 200, jsonHeader); - } - - private void respondMultipleQuery(int queries) { - if (queries < 1) - queries = 1; - else if (queries > 500) - queries = 500; - - JSONValue[] arr = new JSONValue[queries]; - for (int i = 0; i < queries; i++) { - immutable id = uniform(1, 10001); - immutable query = "SELECT id, randomNumber FROM world WHERE id = " ~ id.to!string; - ResultSet rs = dbConnection.query(query); - - arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" - : JSONValue(to!int(rs.front()[0]))]); - } - JSONValue js = JSONValue(arr); - respondWith(js.toJSON(), 200, jsonHeader); - } - - private void respondFortunes() { - immutable query = "SELECT id, message::text FROM Fortune"; - ResultSet rs = dbConnection.query(query); - FortuneModel[] data = rs.map!(f => FortuneModel(f["id"].to!int, f["message"])).array; - data ~= FortuneModel(0, "Additional fortune added at request time."); - data.sort!((a, b) => a.message < b.message); - // trace(data); - - respondWith(randerFortunes(data), 200, htmlHeader); - } - - static string randerFortunes(FortuneModel[] data) { - Appender!string sb; - sb.put(` - - - Fortunes - - - - - - -`); - - foreach (FortuneModel f; data) { - string message = replace(f.message, ">", ">"); - message = replace(message, "<", "<"); - message = replace(message, "\"", """); - sb.put(format(" \n \n \n", f.id, message)); - } - - sb.put("
</table>\n</body>\n</html>
\n \n"); - - return sb.data; - } - - private void respondUpdates(int queries) { - if (queries < 1) - queries = 1; - else if (queries > 500) - queries = 500; - - JSONValue[] arr = new JSONValue[queries]; - for (int i = 0; i < queries; i++) { - immutable id = uniform(1, 10001); - immutable idString = id.to!string; - immutable query = "SELECT id, randomNumber FROM world WHERE id = " ~ idString; - ResultSet rs = dbConnection.query(query); - int randomNumber = to!int(rs.front()[0]); - debug tracef("id=%d, randomNumber=%d", id, randomNumber); - - randomNumber = uniform(1, 10001); - string updateSql = "UPDATE world SET randomNumber = " - ~ randomNumber.to!string ~ " WHERE id = " ~ idString; - int r = dbConnection.execute(updateSql); - // debug tracef("r=%d", r); - - arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" : JSONValue(randomNumber)]); - } - - JSONValue js = JSONValue(arr); - respondWith(js.toJSON(), 200, jsonHeader); - } - } -} - -struct FortuneModel { - int id; - string message; -} diff --git a/frameworks/D/hunt/mmap/app.d b/frameworks/D/hunt/mmap/app.d deleted file mode 100644 index a074a5ea30d..00000000000 --- a/frameworks/D/hunt/mmap/app.d +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Collie - An asynchronous event-driven network framework using Dlang development - * - * Copyright (C) 2015-2018 Shanghai Putao Technology Co., Ltd - * - * Developer: Putao's Dlang team - * - * Licensed under the Apache-2.0 License. - * - */ -import std.getopt; -import std.stdio; - -import hunt.database; -import hunt.io; -import hunt.system.Memory : totalCPUs; -import http.Processor; -import http.Server; -import DemoProcessor; - -void main(string[] args) { - ushort port = 8080; - GetoptResult o = getopt(args, "port|p", "Port (default 8080)", &port); - if (o.helpWanted) { - defaultGetoptPrinter("A simple http server powered by Hunt!", o.options); - return; - } - - version (POSTGRESQL) { - DatabaseOption options; - debug { - options = new DatabaseOption( - "postgresql://benchmarkdbuser:benchmarkdbpass@10.1.11.44:5432/hello_world?charset=utf-8"); - } else { - options = new DatabaseOption( - "postgresql://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?charset=utf-8"); - } - - options.setMinimumConnection(totalCPUs*3); - options.setMaximumConnection(totalCPUs*3); - dbConnection = new Database(options); - } - - AbstractTcpServer httpServer = new HttpServer!(DemoProcessor)("0.0.0.0", port, totalCPUs); - writefln("listening on http://%s", httpServer.bindingAddress.toString()); - httpServer.start(); -} diff --git a/frameworks/D/hunt/mmap/http/Common.d b/frameworks/D/hunt/mmap/http/Common.d deleted file mode 100644 index a41f5afb4ec..00000000000 --- a/frameworks/D/hunt/mmap/http/Common.d +++ /dev/null @@ -1,84 +0,0 @@ -module http.Common; - - -public enum HttpParserType : uint { - request = 0, - response = 1, - both = 2 -} - -struct HttpHeader { - string name, value; -} - -public enum HttpMethod : uint { - DELETE = 0, - GET = 1, - HEAD = 2, - POST = 3, - PUT = 4, - /* pathological */ - CONNECT = 5, - OPTIONS = 6, - TRACE = 7, - /* WebDAV */ - COPY = 8, - LOCK = 9, - MKCOL = 10, - MOVE = 11, - PROPFIND = 12, - PROPPATCH = 13, - SEARCH = 14, - UNLOCK = 15, - BIND = 16, - REBIND = 17, - UNBIND = 18, - ACL = 19, - /* subversion */ - REPORT = 20, - MKACTIVITY = 21, - CHECKOUT = 22, - MERGE = 23, - /* upnp */ - MSEARCH = 24, - NOTIFY = 25, - SUBSCRIBE = 26, - UNSUBSCRIBE = 27, - /* RFC-5789 */ - PATCH = 28, - PURGE = 29, - /* CalDAV */ - MKCALENDAR = 30, - /* RFC-2068, section 19.6.1.2 */ - LINK = 31, - 
UNLINK = 32, - /* icecast */ - SOURCE = 33, -} - -enum HttpError : uint { - OK, - /* Parsing-related errors */ - INVALID_EOF_STATE, - HEADER_OVERFLOW, - CLOSED_CONNECTION, - INVALID_VERSION, - INVALID_STATUS, - INVALID_METHOD, - INVALID_URL, - INVALID_HOST, - INVALID_PORT, - INVALID_PATH, - INVALID_QUERY_STRING, - INVALID_FRAGMENT, - LF_EXPECTED, - INVALID_HEADER_TOKEN, - INVALID_CONTENT_LENGTH, - UNEXPECTED_CONTENT_LENGTH, - INVALID_CHUNK_SIZE, - INVALID_CONSTANT, - INVALID_INTERNAL_STATE, - STRICT, - PAUSED, - UNKNOWN -} diff --git a/frameworks/D/hunt/mmap/http/HttpURI.d b/frameworks/D/hunt/mmap/http/HttpURI.d deleted file mode 100644 index ed2a646ec51..00000000000 --- a/frameworks/D/hunt/mmap/http/HttpURI.d +++ /dev/null @@ -1,1166 +0,0 @@ -module http.HttpURI; - -import hunt.collection.MultiMap; - -import hunt.Exceptions; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; -import http.UrlEncoded; - -import std.array; -import std.conv; -import std.string; - -import hunt.logging; - - -/** - * Http URI. Parse a HTTP URI from a string or byte array. Given a URI - * http://user@host:port/path/info;param?query#fragment this class - * will split it into the following undecoded optional elements: - *
- * {@link #getScheme()} - http:
- * {@link #getAuthority()} - //name@host:port
- * {@link #getHost()} - host
- * {@link #getPort()} - port
- * {@link #getPath()} - /path/info
- * {@link #getParam()} - param
- * {@link #getQuery()} - query
- * {@link #getFragment()} - fragment
- *
- * - https://bob:bobby@www.lunatech.com:8080/file;p=1?q=2#third - \___/ \_/ \___/ \______________/ \__/\_______/ \_/ \___/ - | | | | | | \_/ | | - Scheme User Password Host Port Path | | Fragment - \_____________________________/ | Query - | Path parameter - Authority - *

- * Any parameters will be returned from {@link #getPath()}, but are excluded - * from the return value of {@link #getDecodedPath()}. If there are multiple - * parameters, the {@link #getParam()} method returns only the last one. - * - * See_Also: - * https://stackoverflow.com/questions/1634271/url-encoding-the-space-character-or-20 - * https://web.archive.org/web/20151218094722/http://blog.lunatech.com/2009/02/03/what-every-web-developer-must-know-about-url-encoding - */ -class HttpURI { - private enum State { - START, HOST_OR_PATH, SCHEME_OR_PATH, HOST, IPV6, PORT, PATH, PARAM, QUERY, FRAGMENT, ASTERISK - } - - private string _scheme; - private string _user; - private string _host; - private int _port; - private string _path; - private string _param; - private string _query; - private string _fragment; - - string _uri; - string _decodedPath; - - /** - * Construct a normalized URI. Port is not set if it is the default port. - * - * @param scheme - * the URI scheme - * @param host - * the URI hose - * @param port - * the URI port - * @param path - * the URI path - * @param param - * the URI param - * @param query - * the URI query - * @param fragment - * the URI fragment - * @return the normalized URI - */ - static HttpURI createHttpURI(string scheme, string host, int port, string path, string param, string query, - string fragment) { - if (port == 80 && (scheme == "http")) - port = 0; - if (port == 443 && (scheme == "https")) - port = 0; - return new HttpURI(scheme, host, port, path, param, query, fragment); - } - - this() { - } - - this(string scheme, string host, int port, string path, string param, string query, string fragment) { - _scheme = scheme; - _host = host; - _port = port; - _path = path; - _param = param; - _query = query; - _fragment = fragment; - } - - this(HttpURI uri) { - this(uri._scheme, uri._host, uri._port, uri._path, uri._param, uri._query, uri._fragment); - _uri = uri._uri; - } - - this(string uri) { - _port = -1; - parse(State.START, uri); - } - - // this(URI uri) { - // _uri = null; - - // _scheme = uri.getScheme(); - // _host = uri.getHost(); - // if (_host == null && uri.getRawSchemeSpecificPart().startsWith("//")) - // _host = ""; - // _port = uri.getPort(); - // _user = uri.getUserInfo(); - // _path = uri.getRawPath(); - - // _decodedPath = uri.getPath(); - // if (_decodedPath != null) { - // int p = _decodedPath.lastIndexOf(';'); - // if (p >= 0) - // _param = _decodedPath.substring(p + 1); - // } - // _query = uri.getRawQuery(); - // _fragment = uri.getFragment(); - - // _decodedPath = null; - // } - - this(string scheme, string host, int port, string pathQuery) { - _uri = null; - - _scheme = scheme; - _host = host; - _port = port; - - parse(State.PATH, pathQuery); - - } - - void parse(string uri) { - clear(); - _uri = uri; - parse(State.START, uri); - } - - /** - * Parse according to https://tools.ietf.org/html/rfc7230#section-5.3 - * - * @param method - * the request method - * @param uri - * the request uri - */ - void parseRequestTarget(string method, string uri) { - clear(); - _uri = uri; - - if (method == "CONNECT") - _path = uri; - else - parse(uri.startsWith("/") ? 
State.PATH : State.START, uri); - } - - // deprecated("") - // void parseConnect(string uri) { - // clear(); - // _uri = uri; - // _path = uri; - // } - - void parse(string uri, int offset, int length) { - clear(); - int end = offset + length; - _uri = uri.substring(offset, end); - parse(State.START, uri); - } - - private void parse(State state, string uri) { - bool encoded = false; - int end = cast(int)uri.length; - int mark = 0; - int path_mark = 0; - char last = '/'; - for (int i = 0; i < end; i++) { - char c = uri[i]; - - final switch (state) { - case State.START: { - switch (c) { - case '/': - mark = i; - state = State.HOST_OR_PATH; - break; - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - // assume empty path (if seen at start) - _path = ""; - mark = i + 1; - state = State.QUERY; - break; - case '#': - mark = i + 1; - state = State.FRAGMENT; - break; - case '*': - _path = "*"; - state = State.ASTERISK; - break; - - case '.': - path_mark = i; - state = State.PATH; - encoded = true; - break; - - default: - mark = i; - if (_scheme == null) - state = State.SCHEME_OR_PATH; - else { - path_mark = i; - state = State.PATH; - } - break; - } - - continue; - } - - case State.SCHEME_OR_PATH: { - switch (c) { - case ':': - // must have been a scheme - _scheme = uri.substring(mark, i); - // Start again with scheme set - state = State.START; - break; - - case '/': - // must have been in a path and still are - state = State.PATH; - break; - - case ';': - // must have been in a path - mark = i + 1; - state = State.PARAM; - break; - - case '?': - // must have been in a path - _path = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - - case '%': - // must have be in an encoded path - encoded = true; - state = State.PATH; - break; - - case '#': - // must have been in a path - _path = uri.substring(mark, i); - state = State.FRAGMENT; - break; - - default: - break; - } - continue; - } - - case State.HOST_OR_PATH: { - switch (c) { - case '/': - _host = ""; - mark = i + 1; - state = State.HOST; - break; - - case '@': - case ';': - case '?': - case '#': - // was a path, look again - i--; - path_mark = mark; - state = State.PATH; - break; - - case '.': - // it is a path - encoded = true; - path_mark = mark; - state = State.PATH; - break; - - default: - // it is a path - path_mark = mark; - state = State.PATH; - } - continue; - } - - case State.HOST: { - switch (c) { - case '/': - _host = uri.substring(mark, i); - path_mark = mark = i; - state = State.PATH; - break; - case ':': - if (i > mark) - _host = uri.substring(mark, i); - mark = i + 1; - state = State.PORT; - break; - case '@': - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - _user = uri.substring(mark, i); - mark = i + 1; - break; - - case '[': - state = State.IPV6; - break; - - default: - break; - } - break; - } - - case State.IPV6: { - switch (c) { - case '/': - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - case ']': - c = uri.charAt(++i); - _host = uri.substring(mark, i); - if (c == ':') { - mark = i + 1; - state = State.PORT; - } else { - path_mark = mark = i; - state = State.PATH; - } - break; - - default: - break; - } - - break; - } - - case State.PORT: { - if (c == '@') { - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - // It wasn't a port, but a password! 
- _user = _host ~ ":" ~ uri.substring(mark, i); - mark = i + 1; - state = State.HOST; - } else if (c == '/') { - // _port = TypeUtils.parseInt(uri, mark, i - mark, 10); - _port = to!int(uri[mark .. i], 10); - path_mark = mark = i; - state = State.PATH; - } - break; - } - - case State.PATH: { - switch (c) { - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '%': - encoded = true; - break; - case '.': - if ('/' == last) - encoded = true; - break; - - default: - break; - } - break; - } - - case State.PARAM: { - switch (c) { - case '?': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '/': - encoded = true; - // ignore internal params - state = State.PATH; - break; - case ';': - // multiple parameters - mark = i + 1; - break; - - default: - break; - } - break; - } - - case State.QUERY: { - if (c == '#') { - _query = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - } - break; - } - - case State.ASTERISK: { - throw new IllegalArgumentException("Bad character '*'"); - } - - case State.FRAGMENT: { - _fragment = uri.substring(mark, end); - i = end; - break; - } - } - last = c; - } - - final switch (state) { - case State.START: - break; - case State.SCHEME_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST: - if (end > mark) - _host = uri.substring(mark, end); - break; - - case State.IPV6: - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - - case State.PORT: - // _port = TypeUtils.parseInt(uri, mark, end - mark, 10); - _port = to!int(uri[mark .. end], 10); - break; - - case State.ASTERISK: - break; - - case State.FRAGMENT: - _fragment = uri.substring(mark, end); - break; - - case State.PARAM: - _path = uri.substring(path_mark, end); - _param = uri.substring(mark, end); - break; - - case State.PATH: - _path = uri.substring(path_mark, end); - break; - - case State.QUERY: - _query = uri.substring(mark, end); - break; - } - - if (!encoded) { - if (_param == null) - _decodedPath = _path; - else - _decodedPath = _path[0 .. _path.length - _param.length - 1]; - } - } - - string getScheme() { - return _scheme; - } - - string getHost() { - // Return null for empty host to retain compatibility with java.net.URI - if (_host != null && _host.length == 0) - return null; - return _host; - } - - int getPort() { - return _port; - } - - /** - * The parsed Path. - * - * @return the path as parsed on valid URI. null for invalid URI. 
- */ - string getPath() { - return _path; - } - - string getDecodedPath() { - if (_decodedPath.empty && !_path.empty) - _decodedPath = URIUtils.canonicalPath(URIUtils.decodePath(_path)); - return _decodedPath; - } - - string getParam() { - return _param; - } - - string getQuery() { - return _query; - } - - bool hasQuery() { - return _query != null && _query.length > 0; - } - - string getFragment() { - return _fragment; - } - - void decodeQueryTo(MultiMap!string parameters, string encoding = StandardCharsets.UTF_8) { - if (_query == _fragment) - return; - - UrlEncoded.decodeTo(_query, parameters, encoding); - } - - void clear() { - _uri = null; - - _scheme = null; - _host = null; - _port = -1; - _path = null; - _param = null; - _query = null; - _fragment = null; - - _decodedPath = null; - } - - bool isAbsolute() { - return _scheme != null && _scheme.length > 0; - } - - override - string toString() { - if (_uri is null) { - StringBuilder ot = new StringBuilder(); - - if (_scheme != null) - ot.append(_scheme).append(':'); - - if (_host != null) { - ot.append("//"); - if (_user != null) - ot.append(_user).append('@'); - ot.append(_host); - } - - if (_port > 0) - ot.append(':').append(_port); - - if (_path != null) - ot.append(_path); - - if (_query != null) - ot.append('?').append(_query); - - if (_fragment != null) - ot.append('#').append(_fragment); - - if (ot.length > 0) - _uri = ot.toString(); - else - _uri = ""; - } - return _uri; - } - - bool equals(Object o) { - if (o is this) - return true; - if (!(typeid(o) == typeid(HttpURI))) - return false; - return toString().equals(o.toString()); - } - - void setScheme(string scheme) { - _scheme = scheme; - _uri = null; - } - - /** - * @param host - * the host - * @param port - * the port - */ - void setAuthority(string host, int port) { - _host = host; - _port = port; - _uri = null; - } - - /** - * @param path - * the path - */ - void setPath(string path) { - _uri = null; - _path = path; - _decodedPath = null; - } - - /** - * @param path - * the decoded path - */ - // void setDecodedPath(string path) { - // _uri = null; - // _path = URIUtils.encodePath(path); - // _decodedPath = path; - // } - - void setPathQuery(string path) { - _uri = null; - _path = null; - _decodedPath = null; - _param = null; - _fragment = null; - if (path != null) - parse(State.PATH, path); - } - - void setQuery(string query) { - _query = query; - _uri = null; - } - - // URI toURI() { - // return new URI(_scheme, null, _host, _port, _path, _query == null ? null : UrlEncoded.decodestring(_query), - // _fragment); - // } - - string getPathQuery() { - if (_query == null) - return _path; - return _path ~ "?" ~ _query; - } - - bool hasAuthority() { - return _host != null; - } - - string getAuthority() { - if (_port > 0) - return _host ~ ":" ~ to!string(_port); - return _host; - } - - string getUser() { - return _user; - } - -} - - -/** - * Parse an authority string into Host and Port - *

- * Parse a string in the form "host:port", handling IPv4 and IPv6 hosts
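// Illustrative examples (mine, not from the original comment) of the "host:port" forms meant above:
//   "127.0.0.1:8080"      -> host "127.0.0.1",      port 8080   (IPv4)
//   "[2001:db8::1]:8080"  -> host "[2001:db8::1]",  port 8080   (IPv6, bracketed so its colons stay unambiguous)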

- * - */ -class URIUtils -{ - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters - */ - static string decodePath(string path) { - return decodePath(path, 0, cast(int)path.length); - } - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of UTF-8 path - */ - static string decodePath(string path, int offset, int length) { - try { - StringBuilder builder = null; - - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException("Bad URI % encoding"); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - - break; - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } catch (Exception e) { - // System.err.println(path.substring(offset, offset + length) + " " + e); - error(e.toString); - return decodeISO88591Path(path, offset, length); - } - } - - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of ISO-8859-1 path - */ - private static string decodeISO88591Path(string path, int offset, int length) { - StringBuilder builder = null; - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException(""); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - break; - - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } - - /* ------------------------------------------------------------ */ - - /** - * Convert a decoded path to a canonical form. - *

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.
- *
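// Worked examples (inferred from the description above, not taken from the original tests):
//   canonicalPath("/foo/bar/../baz") -> "/foo/baz"   (".." removes the previous segment)
//   canonicalPath("/foo/./bar")      -> "/foo/bar"   ("." segments are dropped)
//   canonicalPath("/../bar")         -> null         (attempts to traverse above the root)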

- * - * @param path the path to convert, decoded, with path separators '/' and no queries. - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalPath(string path) { - if (path.empty) - return path; - - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - break; - - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a cananonical form. - *

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.
- *

- * - * @param path the path to convert (expects URI/URL form, encoded, and with path separators '/') - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalEncodedPath(string path) { - if (path.empty) - return path; - - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - case '?': - return path; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - case '?': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - if (c == '?') - canonical.append(c); - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - if (c == '?') - canonical.append(c); - break; - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a compact form. - * All instances of "//" and "///" etc. are factored out to single "/" - * - * @param path the path to compact - * @return the compacted path - */ - static string compactPath(string path) { - if (path == null || path.length == 0) - return path; - - int state = 0; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - return path; - case '/': - state++; - if (state == 2) - break loop; - break; - default: - state = 0; - } - i++; - } - - if (state < 2) - return path; - - StringBuilder buf = new StringBuilder(path.length); - buf.append(path, 0, i); - - loop2: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - buf.append(path, i, end); - break loop2; - case '/': - if (state++ == 0) - buf.append(c); - break; - default: - state = 0; - buf.append(c); - } - i++; - } - - return buf.toString(); - } - - /* ------------------------------------------------------------ */ - - /** - * @param uri URI - * @return True if the uri has a scheme - */ - static bool hasScheme(string uri) { - for (int i = 0; i < uri.length; i++) { - char c = uri[i]; - if (c == ':') - return true; - if (!(c >= 'a' && c <= 'z' || - c >= 'A' && c <= 'Z' || - (i > 0 && (c >= '0' && c <= '9' || - c == '.' 
|| - c == '+' || - c == '-')) - )) - break; - } - return false; - } -} \ No newline at end of file diff --git a/frameworks/D/hunt/mmap/http/Parser.d b/frameworks/D/hunt/mmap/http/Parser.d deleted file mode 100644 index fcb805123b3..00000000000 --- a/frameworks/D/hunt/mmap/http/Parser.d +++ /dev/null @@ -1,202 +0,0 @@ -/// Minimalistic low-overhead wrapper for nodejs/http-parser -/// Used for benchmarks with simple server -module http.Parser; - -import http.Common; - -import hunt.logging.ConsoleLogger; -import std.conv; -import std.range.primitives; -import core.stdc.string; - - - -/* contains name and value of a header (name == NULL if is a continuing line - * of a multiline header */ -struct phr_header { - const char *name; - size_t name_len; - const char *value; - size_t value_len; -} - -/* returns number of bytes consumed if successful, -2 if request is partial, - * -1 if failed */ -extern (C) pure @nogc nothrow int phr_parse_request(const char *buf, size_t len, const char **method, - size_t *method_len, const char **path, size_t *path_len, - int *minor_version, phr_header *headers, size_t *num_headers, size_t last_len); - -/* ditto */ -extern (C) pure @nogc nothrow int phr_parse_response(const char *_buf, size_t len, int *minor_version, - int *status, const char **msg, size_t *msg_len, - phr_header *headers, size_t *num_headers, size_t last_len); - -/* ditto */ -extern (C) pure @nogc nothrow int phr_parse_headers(const char *buf, size_t len, - phr_header *headers, size_t *num_headers, size_t last_len); - -/* should be zero-filled before start */ -struct phr_chunked_decoder { - size_t bytes_left_in_chunk; /* number of bytes left in current chunk */ - char consume_trailer; /* if trailing headers should be consumed */ - char _hex_count; - char _state; -} - -/* the function rewrites the buffer given as (buf, bufsz) removing the chunked- - * encoding headers. When the function returns without an error, bufsz is - * updated to the length of the decoded data available. Applications should - * repeatedly call the function while it returns -2 (incomplete) every time - * supplying newly arrived data. If the end of the chunked-encoded data is - * found, the function returns a non-negative number indicating the number of - * octets left undecoded at the tail of the supplied buffer. Returns -1 on - * error. 
- */ -extern (C) pure @nogc nothrow ptrdiff_t phr_decode_chunked(phr_chunked_decoder *decoder, char *buf, size_t *bufsz); - -/* returns if the chunked decoder is in middle of chunked data */ -extern (C) pure @nogc nothrow int phr_decode_chunked_is_in_data(phr_chunked_decoder *decoder); - - -// =========== Public interface starts here ============= - -public: - -class HttpException : Exception { - HttpError error; - - pure @nogc nothrow this(HttpError error, string file = __FILE__, - size_t line = __LINE__, Throwable nextInChain = null) { - this.error = error; - super("Http exception", file, line, nextInChain); - } -} - -struct HttpParser(Interceptor) { - -private { - Interceptor interceptor; - Throwable failure; - phr_header[50] _headers; - char *_method; - char *path; - - int minor_version; - size_t buflen = 0, prevbuflen = 0, method_len, path_len, num_headers; -} - - - alias interceptor this; - - this(Interceptor interceptor) { - this.interceptor = interceptor; - } - - @property bool status() pure @safe nothrow { - return failure is null; - } - - string uri(bool canCopy=false)() { - static if(canCopy) { - return cast(string)path[0..path_len].dup; - } else { - return cast(string)path[0..path_len]; - } - } - - @property HttpMethod method() { - string s = cast(string)_method[0..method_len]; - return to!HttpMethod(s); - } - - - HttpHeader[] headers(bool canCopy=false)() { - HttpHeader[] hs = new HttpHeader[num_headers]; - - for(int i; i 0) { - /* successfully parsed the request */ - onMessageComplete(); - - if(pret < chunk.length) { - debug infof("try to parse next request"); - pret += doexecute(chunk[pret .. $]); // try to parse next http request data - } - - debug infof("pret=%d", pret); - return pret; - } else if(pret == -2) { - debug warning("parsing incomplete"); - num_headers = 0; - // failure = new HttpException(HttpError.UNKNOWN); - // throw failure; - - debug infof("pret=%d, chunk=%d", pret, chunk.length); - return 0; - } - - warning("wrong data format"); - num_headers = 0; - failure = new HttpException(HttpError.UNKNOWN); - throw failure; - } - - void onMessageComplete() { - // interceptor.onHeadersComplete(); - debug { - tracef("method is %s", _method[0..method_len]); - tracef("path is %s", path[0..path_len]); - tracef("HTTP version is 1.%d", minor_version); - foreach(ref phr_header h; _headers[0..num_headers]) { - tracef("Header: %s = %s", h.name[0..h.name_len], h.value[0..h.value_len]); - } - } - interceptor.onMessageComplete(); - } -} - -auto httpParser(Interceptor)(Interceptor interceptor) { - return HttpParser!Interceptor(interceptor); -} \ No newline at end of file diff --git a/frameworks/D/hunt/mmap/http/Processor.d b/frameworks/D/hunt/mmap/http/Processor.d deleted file mode 100644 index 25dcb32c411..00000000000 --- a/frameworks/D/hunt/mmap/http/Processor.d +++ /dev/null @@ -1,137 +0,0 @@ -/// An example "HTTP server" with poor usability but sensible performance -/// -module http.Processor; - -import std.conv; -import std.array, std.exception, std.format, std.algorithm.mutation, std.socket; -import core.stdc.stdlib; -import core.thread, core.atomic; -import http.Parser; - -import hunt.collection.ByteBuffer; -import http.Common; -import hunt.logging; -import hunt.io; -import hunt.util.DateTime; - - -private alias Parser = HttpParser!HttpProcessor; - - -struct HttpRequest { - private Parser* parser; - - HttpHeader[] headers(bool canCopy=false)() @property { - return parser.headers!canCopy(); - } - - HttpMethod method() @property { - return parser.method(); - } - - string 
uri(bool canCopy=false)() @property { - return parser.uri!(canCopy)(); - } -} - -version(NO_HTTPPARSER) { -enum string ResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: Keep-Alive\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; -} - -abstract class HttpProcessor { - -package: - Appender!(char[]) outBuf; - HttpHeader[] headers; // buffer for headers - Parser parser; - HttpRequest request; - bool serving; - -public: - TcpStream client; - - this(TcpStream sock) { - serving = true; - client = sock; - headers = new HttpHeader[1]; - parser = httpParser(this); - request.parser = &parser; - } - - void run() { - client.onReceived(delegate int (ubyte[] buffer) { - version(NO_HTTPPARSER) { - client.write(cast(ubyte[])ResponseData); - } else { - int len = 0; - try { - len = parser.execute(buffer); - } catch(Exception ex) { - respondWith(ex.msg, 500); - len = cast(int)buffer.length; - } - - return len; - } - }) - .onClosed(() { - // notifyClientClosed(); - }) - .onError((string msg) { - debug warning("Error: ", msg); - }) - .start(); - } - - protected void notifyClientClosed() { - debug tracef("The connection[%s] is closed", client.remoteAddress()); - } - - void respondWith(string _body, uint status, HttpHeader[] headers...) { - return respondWith(cast(const(ubyte)[]) _body, status, headers); - } - - void respondWith(const(ubyte)[] _body, uint status, HttpHeader[] headers...) { - outBuf.clear(); - formattedWrite(outBuf, "HTTP/1.1 %s OK\r\n", status); - outBuf.put("Server: Hunt/1.0\r\n"); - - formattedWrite(outBuf, "Date: %s\r\n", DateTimeHelper.getDateAsGMT()); - if (!parser.shouldKeepAlive) - outBuf.put("Connection: close\r\n"); - foreach (ref hdr; headers) { - outBuf.put(hdr.name); - outBuf.put(": "); - outBuf.put(hdr.value); - outBuf.put("\r\n"); - } - formattedWrite(outBuf, "Content-Length: %d\r\n\r\n", _body.length); - outBuf.put(cast(string) _body); - client.write(cast(ubyte[]) outBuf.data); // TODO: short-writes are quite possible - } - - void onChunk(ref HttpRequest req, const(ubyte)[] chunk) { - // TODO: Tasks pending completion - 5/16/2019, 5:40:18 PM - // - } - - void onComplete(ref HttpRequest req); - - - final int onBody(Parser* parser, const(ubyte)[] chunk) { - onChunk(request, chunk); - return 0; - } - - final int onMessageComplete() { - try { - onComplete(request); - } catch(Exception ex) { - respondWith(ex.msg, 500); - } - if (!parser.shouldKeepAlive) - serving = false; - return 0; - } - -} diff --git a/frameworks/D/hunt/mmap/http/Server.d b/frameworks/D/hunt/mmap/http/Server.d deleted file mode 100644 index f0da1c56b51..00000000000 --- a/frameworks/D/hunt/mmap/http/Server.d +++ /dev/null @@ -1,122 +0,0 @@ -module http.Server; - -import hunt.event; -import hunt.io; -import hunt.logging.ConsoleLogger; -import hunt.system.Memory : totalCPUs; -import hunt.util.DateTime; - -import std.array; -import std.conv; -import std.json; -import std.socket; -import std.string; -import std.stdio; - -import http.Parser; -import http.Processor; - -shared static this() { - //DateTimeHelper.startClock(); -} - -import hunt.io.channel; - -/** -*/ -abstract class AbstractTcpServer { - protected EventLoopGroup _group = null; - protected bool _isStarted = false; - protected Address _address; - protected int _workersCount; - TcpStreamOption _tcpStreamoption; - - this(Address address, int thread = (totalCPUs - 1), int workersCount = 0) { - this._address = address; - _tcpStreamoption = TcpStreamOption.createOption(); - 
_tcpStreamoption.bufferSize = 1024 * 2; - _tcpStreamoption.isKeepalive = false; - _group = new EventLoopGroup(cast(uint) thread); - this._workersCount = workersCount; - } - - @property Address bindingAddress() { - return _address; - } - - void start() { - if (_isStarted) - return; - _isStarted = true; - - Socket server = new TcpSocket(); - server.setOption(SocketOptionLevel.SOCKET, SocketOption.REUSEADDR, true); - server.bind(new InternetAddress("0.0.0.0", 8080)); - server.listen(8192); - - trace("Launching server"); - debug { - _group.start(); - } else { - _group.start(100); - } - - if (_workersCount) { - defaultPoolThreads = _workersCount; - workerPool(); // Initilize worker poll - } - writefln("worker count: %d", _workersCount); - writefln("IO thread: %d", _group.size); - - while (true) { - try { - version (HUNT_DEBUG) - trace("Waiting for server.accept()"); - - Socket socket = server.accept(); - version (HUNT_DEBUG) { - infof("new connection from %s, fd=%d", - socket.remoteAddress.toString(), socket.handle()); - } - // EventLoop loop = _group.nextLoop(); - EventLoop loop = _group.nextLoop(socket.handle); - TcpStream stream = new TcpStream(loop, socket, _tcpStreamoption); - onConnectionAccepted(stream); - } catch (Exception e) { - warningf("Failure on accepting %s", e); - break; - } - } - _isStarted = false; - } - - protected void onConnectionAccepted(TcpStream client); - - void stop() { - if (!_isStarted) - return; - _isStarted = false; - _group.stop(); - } -} - -alias ProcessorCreater = HttpProcessor delegate(TcpStream client); - -/** -*/ -class HttpServer(T) : AbstractTcpServer if (is(T : HttpProcessor)) { - - this(string ip, ushort port, int thread = (totalCPUs - 1)) { - super(new InternetAddress(ip, port), thread); - } - - this(Address address, int thread = (totalCPUs - 1)) { - super(address, thread); - } - - override protected void onConnectionAccepted(TcpStream client) { - HttpProcessor httpProcessor = new T(client); - httpProcessor.run(); - } - -} diff --git a/frameworks/D/hunt/mmap/http/UrlEncoded.d b/frameworks/D/hunt/mmap/http/UrlEncoded.d deleted file mode 100644 index b4087e44e34..00000000000 --- a/frameworks/D/hunt/mmap/http/UrlEncoded.d +++ /dev/null @@ -1,361 +0,0 @@ -module http.UrlEncoded; - -import hunt.collection.List; -import hunt.collection.MultiMap; -import hunt.collection.StringBuffer; -import hunt.Exceptions; -import hunt.logging; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; - -import std.conv; -import std.array; - - -/** - * Handles coding of MIME "x-www-form-urlencoded". - *

- * This class handles the encoding and decoding for either the query string of a
- * URL or the _content of a POST HTTP request.
- *
- * Notes
- *
- * The UTF-8 charset is assumed, unless otherwise defined by either passing a
- * parameter or setting the "org.hunt.utils.UrlEncoding.charset" System
- * property.
- *
- * The hashtable either contains string single values, vectors of string or
- * arrays of Strings.
- *
- * This class is only partially synchronised. In particular, simple get
- * operations are not protected from concurrent updates.
- *

- * - * @see java.net.URLEncoder - */ -class UrlEncoded : MultiMap!string { - - enum string ENCODING = StandardCharsets.UTF_8; - - - this() { - } - - this(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query, string charset) { - decodeTo(query, this, charset); - } - - /** - * Encode MultiMap with % encoding for UTF8 sequences. - * - * @return the MultiMap as a string with % encoding - */ - string encode() { - return encode(ENCODING, false); - } - - /** - * Encode MultiMap with % encoding for arbitrary string sequences. - * - * @param charset the charset to use for encoding - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset) { - return encode(charset, false); - } - - /** - * Encode MultiMap with % encoding. - * - * @param charset the charset to encode with - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset, bool equalsForNullValue) { - return encode(this, charset, equalsForNullValue); - } - - /** - * Encode MultiMap with % encoding. - * - * @param map the map to encode - * @param charset the charset to use for encoding (uses default encoding if null) - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings. - */ - static string encode(MultiMap!string map, string charset, bool equalsForNullValue) { - if (charset is null) - charset = ENCODING; - - StringBuilder result = new StringBuilder(128); - - bool delim = false; - foreach(string key, List!string list; map) - { - int s = list.size(); - - if (delim) { - result.append('&'); - } - - if (s == 0) { - result.append(encodeString(key, charset)); - if (equalsForNullValue) - result.append('='); - } else { - for (int i = 0; i < s; i++) { - if (i > 0) - result.append('&'); - string val = list.get(i); - result.append(encodeString(key, charset)); - - if (val != null) { - if (val.length > 0) { - result.append('='); - result.append(encodeString(val, charset)); - } else if (equalsForNullValue) - result.append('='); - } else if (equalsForNullValue) - result.append('='); - } - } - delim = true; - } - return result.toString(); - } - - /** - * Decoded parameters to Map. - * - * @param content the string containing the encoded parameters - * @param map the MultiMap to put parsed query parameters into - * @param charset the charset to use for decoding - */ - static void decodeTo(string content, MultiMap!string map, string charset = ENCODING) { - if (charset.empty) - charset = ENCODING; - - synchronized (map) { - string key = null; - string value = null; - int mark = -1; - bool encoded = false; - for (int i = 0; i < content.length; i++) { - char c = content[i]; - switch (c) { - case '&': - int l = i - mark - 1; - value = l == 0 ? "" : - (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1, i)); - mark = i; - encoded = false; - if (key != null) { - map.add(key, value); - } else if (value != null && value.length > 0) { - map.add(value, ""); - } - key = null; - value = null; - break; - case '=': - if (key != null) - break; - key = encoded ? 
decodeString(content, mark + 1, i - mark - 1) : content.substring(mark + 1, i); - mark = i; - encoded = false; - break; - case '+': - encoded = true; - break; - case '%': - encoded = true; - break; - default: break; - } - } - - int contentLen = cast(int)content.length; - - if (key != null) { - int l = contentLen - mark - 1; - value = l == 0 ? "" : (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1)); - version(HUNT_DEBUG) tracef("key=%s, value=%s", key, value); - map.add(key, value); - } else if (mark < contentLen) { - version(HUNT_DEBUG) tracef("empty value: content=%s, key=%s", content, key); - key = encoded - ? decodeString(content, mark + 1, contentLen - mark - 1, charset) - : content.substring(mark + 1); - if (!key.empty) { - map.add(key, ""); - } - } else { - warningf("No key found."); - } - } - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @return the decoded string - */ - static string decodeString(string encoded) { - return decodeString(encoded, 0, cast(int)encoded.length); - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @param offset the offset in the encoded string to decode from - * @param length the length of characters in the encoded string to decode - * @param charset the charset to use for decoding - * @return the decoded string - */ - static string decodeString(string encoded, int offset, int length, string charset = ENCODING) { - StringBuffer buffer = null; - - for (int i = 0; i < length; i++) { - char c = encoded.charAt(offset + i); - if (c < 0 || c > 0xff) { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i + 1); - } else - buffer.append(c); - } else if (c == '+') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - buffer.append(' '); - } else if (c == '%') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - byte[] ba = new byte[length]; - int n = 0; - while (c >= 0 && c <= 0xff) { - if (c == '%') { - if (i + 2 < length) { - int o = offset + i + 1; - i += 3; - ba[n] = cast(byte) TypeUtils.parseInt(encoded, o, 2, 16); - n++; - } else { - ba[n++] = cast(byte) '?'; - i = length; - } - } else if (c == '+') { - ba[n++] = cast(byte) ' '; - i++; - } else { - ba[n++] = cast(byte) c; - i++; - } - - if (i >= length) - break; - c = encoded.charAt(offset + i); - } - - i--; - buffer.append(cast(string)(ba[0 .. n])); - - } else if (buffer !is null) - buffer.append(c); - } - - if (buffer is null) { - if (offset == 0 && encoded.length == length) - return encoded; - return encoded.substring(offset, offset + length); - } - - return buffer.toString(); - } - - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @return encoded string. - */ - static string encodeString(string string) { - return encodeString(string, ENCODING); - } - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @param charset the charset to use for encoding - * @return encoded string. 
- */ - static string encodeString(string str, string charset) { - if (charset is null) - charset = ENCODING; - byte[] bytes = cast(byte[])str; - // bytes = string.getBytes(charset); - - int len = cast(int)bytes.length; - byte[] encoded = new byte[bytes.length * 3]; - int n = 0; - bool noEncode = true; - - for (int i = 0; i < len; i++) { - byte b = bytes[i]; - - if (b == ' ') { - noEncode = false; - encoded[n++] = cast(byte) '+'; - } else if (b >= 'a' && b <= 'z' || - b >= 'A' && b <= 'Z' || - b >= '0' && b <= '9') { - encoded[n++] = b; - } else { - noEncode = false; - encoded[n++] = cast(byte) '%'; - byte nibble = cast(byte) ((b & 0xf0) >> 4); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - nibble = cast(byte) (b & 0xf); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - } - } - - if (noEncode) - return str; - - return cast(string)(encoded[0 .. n]); - } -} diff --git a/frameworks/D/hunt/patches/Makefile b/frameworks/D/hunt/patches/Makefile deleted file mode 100644 index bd1b2c34420..00000000000 --- a/frameworks/D/hunt/patches/Makefile +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2009-2014 Kazuho Oku, Tokuhiro Matsuno, Daisuke Murase, -# Shigeo Mitsunari -# -# The software is licensed under either the MIT License (below) or the Perl -# license. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. - -CC?=gcc -AR?=ar -PROVE?=prove - -CFLAGS += -Wall -Wextra -Werror -O3 - -all: package test - -test: test-bin - $(PROVE) -v ./test-bin - -test-bin: picohttpparser.c picotest/picotest.c test.c - $(CC) -Wall $(CFLAGS) $(LDFLAGS) -o $@ $^ - -picohttpparser.o: picohttpparser.c picohttpparser.h Makefile - $(CC) $(CFLAGS) -c picohttpparser.c - -package: picohttpparser.o - $(AR) rcs libpicohttpparser.a picohttpparser.o - -clean: - rm -f test-bin - -.PHONY: test - diff --git a/frameworks/D/hunt/pico/app.d b/frameworks/D/hunt/pico/app.d deleted file mode 100644 index 8a001f4062b..00000000000 --- a/frameworks/D/hunt/pico/app.d +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Collie - An asynchronous event-driven network framework using Dlang development - * - * Copyright (C) 2015-2018 Shanghai Putao Technology Co., Ltd - * - * Developer: Putao's Dlang team - * - * Licensed under the Apache-2.0 License. 
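The removed `encodeString` above builds the x-www-form-urlencoded form of a string byte by byte: space becomes `+`, ASCII letters and digits pass through, and every other byte becomes `%XX`. A minimal standalone sketch of the same scheme using only Phobos (a hedged illustration, not the hunt class; it assumes the input bytes are exactly what should be encoded) might look like:

```d
import std.array : appender;
import std.ascii : isAlphaNum;
import std.format : format;

// Percent-encode one x-www-form-urlencoded component:
// ' ' -> '+', [A-Za-z0-9] unchanged, any other byte -> %XX.
string formEncode(string s)
{
    auto buf = appender!string();
    foreach (ubyte b; cast(const(ubyte)[]) s)
    {
        if (b == ' ')
            buf.put('+');
        else if (isAlphaNum(cast(char) b))
            buf.put(cast(char) b);
        else
            buf.put(format("%%%02X", b));
    }
    return buf.data;
}

unittest
{
    assert(formEncode("Hello, World!") == "Hello%2C+World%21");
}
```

Decoding simply reverses the mapping (`+` back to space, `%XX` back to the raw byte), which is what the `decodeString` path above does when it meets either character.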
- * - */ -import std.getopt; -import std.stdio; - -//import hunt.database; -import hunt.io; -import hunt.system.Memory : totalCPUs; -import http.Processor; -import http.Server; -import http.DemoProcessor; -import std.experimental.allocator; -void main(string[] args) { - ushort port = 8080; - GetoptResult o = getopt(args, "port|p", "Port (default 8080)", &port); - if (o.helpWanted) { - defaultGetoptPrinter("A mini-http server powered by Hunt!", o.options); - return; - } - - //version (POSTGRESQL) { - // DatabaseOption options; - // debug { - // options = new DatabaseOption( - // "postgresql://benchmarkdbuser:benchmarkdbpass@10.1.11.44:5432/hello_world?charset=utf-8"); - // } else { - // options = new DatabaseOption( - // "postgresql://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?charset=utf-8"); - // } - // - // options.setMinimumConnection(totalCPUs*3); - // options.setMaximumConnection(totalCPUs*3); - // dbConnection = new Database(options); - //} - - AbstractTcpServer httpServer = new HttpServer!(DemoProcessor)("0.0.0.0", port, totalCPUs); - writefln("listening on http://%s", httpServer.bindingAddress.toString()); - httpServer.start(); -} diff --git a/frameworks/D/hunt/pico/http/Common.d b/frameworks/D/hunt/pico/http/Common.d deleted file mode 100644 index e1b1b0ade32..00000000000 --- a/frameworks/D/hunt/pico/http/Common.d +++ /dev/null @@ -1,85 +0,0 @@ -module http.Common; - - - -public enum HttpParserType : uint { - request = 0, - response = 1, - both = 2 -} - -struct HttpHeader { - string name, value; -} - -public enum HttpMethod : uint { - DELETE = 0, - GET = 1, - HEAD = 2, - POST = 3, - PUT = 4, - /* pathological */ - CONNECT = 5, - OPTIONS = 6, - TRACE = 7, - /* WebDAV */ - COPY = 8, - LOCK = 9, - MKCOL = 10, - MOVE = 11, - PROPFIND = 12, - PROPPATCH = 13, - SEARCH = 14, - UNLOCK = 15, - BIND = 16, - REBIND = 17, - UNBIND = 18, - ACL = 19, - /* subversion */ - REPORT = 20, - MKACTIVITY = 21, - CHECKOUT = 22, - MERGE = 23, - /* upnp */ - MSEARCH = 24, - NOTIFY = 25, - SUBSCRIBE = 26, - UNSUBSCRIBE = 27, - /* RFC-5789 */ - PATCH = 28, - PURGE = 29, - /* CalDAV */ - MKCALENDAR = 30, - /* RFC-2068, section 19.6.1.2 */ - LINK = 31, - UNLINK = 32, - /* icecast */ - SOURCE = 33, -} - -enum HttpError : uint { - OK, - /* Parsing-related errors */ - INVALID_EOF_STATE, - HEADER_OVERFLOW, - CLOSED_CONNECTION, - INVALID_VERSION, - INVALID_STATUS, - INVALID_METHOD, - INVALID_URL, - INVALID_HOST, - INVALID_PORT, - INVALID_PATH, - INVALID_QUERY_STRING, - INVALID_FRAGMENT, - LF_EXPECTED, - INVALID_HEADER_TOKEN, - INVALID_CONTENT_LENGTH, - UNEXPECTED_CONTENT_LENGTH, - INVALID_CHUNK_SIZE, - INVALID_CONSTANT, - INVALID_INTERNAL_STATE, - STRICT, - PAUSED, - UNKNOWN -} diff --git a/frameworks/D/hunt/pico/http/DemoProcessor.d b/frameworks/D/hunt/pico/http/DemoProcessor.d deleted file mode 100644 index f99562e45e0..00000000000 --- a/frameworks/D/hunt/pico/http/DemoProcessor.d +++ /dev/null @@ -1,253 +0,0 @@ -module http.DemoProcessor; - - - -// import stdx.data.json; -import std.json; - -import hunt.io; -import http.Common; -import http.Processor; -import http.HttpURI; -import http.UrlEncoded; -import hunt.logging.ConsoleLogger : trace, warning, tracef; - -import std.algorithm; -import std.array; -import std.exception; -import std.random; -import std.string; - -version (POSTGRESQL) { - // __gshared Database dbConnection; -} - -enum HttpHeader textHeader = HttpHeader("Content-Type", "text/plain; charset=UTF-8"); -enum HttpHeader htmlHeader = HttpHeader("Content-Type", "text/html; 
charset=UTF-8"); -enum HttpHeader jsonHeader = HttpHeader("Content-Type", "application/json; charset=UTF-8"); - - -enum plaintextLength = "/plaintext".length; -enum jsonLength = "/json".length; -enum dbLength = "/db".length; -enum fortunesLength = "/fortunes".length; - -class DemoProcessor : HttpProcessor { - version (POSTGRESQL) HttpURI uri; - - this(TcpStream client) { - version (POSTGRESQL) uri = new HttpURI(); - super(client); - } - - override void onComplete(ref HttpRequest req) { - - string path = req.uri; - if(path.length == plaintextLength) { // plaintext - respondWith(RET.TEXT, 200, textHeader); - } else if(path.length == jsonLength) { // json - //JSONValue js = JSONValue(["message" : JSONValue("Hello, World!")]); - respondWith(RET.JSON, 200, jsonHeader); - } else { - - version (POSTGRESQL) { - if(path.length == dbLength) { - respondSingleQuery(); - } else if(path.length == fortunesLength) { - respondFortunes(); - } else { - handleDbUpdate(path); - } - - } else { - respondWith404(); - } - } - } - - - private void respondWith404() { - //version (POSTGRESQL) { - // respondWith("The available paths are: /plaintext, /json, /db, /fortunes," ~ - // " /queries?queries=number, /updates?queries=number", 404); - //} else { - // respondWith("The available paths are: /plaintext, /json", 404); - //} - } - - version (POSTGRESQL) { - private void handleDbUpdate(string url) { - uri.parse(url); - - switch(uri.getPath()) { - case "/queries": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - trace(queriesMap.toString()); - if (!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - - respondMultipleQuery(number); - break; - - - case "/updates": - UrlEncoded queriesMap = new UrlEncoded(); - uri.decodeQueryTo(queriesMap); - int number = 1; - debug { - if (!queriesMap.containsKey("queries")) { - respondWith404(); - return; - } - - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - warning(ex.msg); - } - } - } else { - string v = queriesMap.getValue("queries", 0); - if (!v.empty) { - try { - number = to!int(v); - } catch (Exception ex) { - } - } - } - respondUpdates(number); - break; - - default: - respondWith404(); - break; - } - } - - - private void respondSingleQuery() { - //int id = uniform(1, 10001); - //string query = "SELECT randomNumber FROM world WHERE id = " ~ id.to!string; - //ResultSet rs = dbConnection.query(query); - // - //JSONValue js = JSONValue(["id" : JSONValue(id), "randomNumber" - // : JSONValue(to!int(rs.front()[0]))]); - // - //respondWith(js.toJSON(), 200, jsonHeader); - } - - private void respondMultipleQuery(int queries) { - //if (queries < 1) - // queries = 1; - //else if (queries > 500) - // queries = 500; - // - //JSONValue[] arr = new JSONValue[queries]; - //for (int i = 0; i < queries; i++) { - // immutable id = uniform(1, 10001); - // immutable query = "SELECT randomNumber FROM world WHERE id = " ~ id.to!string; - // ResultSet rs = dbConnection.query(query); - // - // arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" - // : JSONValue(to!int(rs.front()[0]))]); - //} - //JSONValue js = JSONValue(arr); - //respondWith(js.toJSON(), 
200, jsonHeader); - } - - private void respondFortunes() { - //immutable query = "SELECT id, message::text FROM Fortune"; - //ResultSet rs = dbConnection.query(query); - //FortuneModel[] data = rs.map!(f => FortuneModel(f["id"].to!int, f["message"])).array; - //data ~= FortuneModel(0, "Additional fortune added at request time."); - //data.sort!((a, b) => a.message < b.message); - //// trace(data); - // - //respondWith(randerFortunes(data), 200, htmlHeader); - } - - static string randerFortunes(FortuneModel[] data) { - Appender!string sb; - sb.put(` - - - Fortunes - - - - - - -`); - - foreach (FortuneModel f; data) { - string message = replace(f.message, ">", ">"); - message = replace(message, "<", "<"); - message = replace(message, "\"", """); - sb.put(format(" \n \n \n", f.id, message)); - } - - sb.put("
-<tr><th>id</th><th>message</th></tr>
-<tr><td>%d</td><td>%s</td></tr>
\n \n"); - - return sb.data; - } - - private void respondUpdates(int queries) { - //if (queries < 1) - // queries = 1; - //else if (queries > 500) - // queries = 500; - // - //JSONValue[] arr = new JSONValue[queries]; - //for (int i = 0; i < queries; i++) { - // immutable id = uniform(1, 10001); - // immutable idString = id.to!string; - // immutable query = "SELECT randomNumber FROM world WHERE id = " ~ idString; - // ResultSet rs = dbConnection.query(query); - // int randomNumber = to!int(rs.front()[0]); - // debug tracef("id=%d, randomNumber=%d", id, randomNumber); - // - // randomNumber = uniform(1, 10001); - // string updateSql = "UPDATE world SET randomNumber = " - // ~ randomNumber.to!string ~ " WHERE id = " ~ idString; - // int r = dbConnection.execute(updateSql); - // // debug tracef("r=%d", r); - // - // arr[i] = JSONValue(["id" : JSONValue(id), "randomNumber" : JSONValue(randomNumber)]); - //} - // - //JSONValue js = JSONValue(arr); - //respondWith(js.toJSON(), 200, jsonHeader); - } - } -} - -struct FortuneModel { - int id; - string message; -} diff --git a/frameworks/D/hunt/pico/http/HttpURI.d b/frameworks/D/hunt/pico/http/HttpURI.d deleted file mode 100644 index 39891e5b0b2..00000000000 --- a/frameworks/D/hunt/pico/http/HttpURI.d +++ /dev/null @@ -1,1167 +0,0 @@ -module http.HttpURI; - - - -import hunt.collection.MultiMap; - -import hunt.Exceptions; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; -import http.UrlEncoded; - -import std.array; -import std.conv; -import std.string; - -import hunt.logging; - - -/** - * Http URI. Parse a HTTP URI from a string or byte array. Given a URI - * http://user@host:port/path/info;param?query#fragment this class - * will split it into the following undecoded optional elements: - *
    - *
- * <ul>
- * <li>{@link #getScheme()} - http:</li>
- * <li>{@link #getAuthority()} - //name@host:port</li>
- * <li>{@link #getHost()} - host</li>
- * <li>{@link #getPort()} - port</li>
- * <li>{@link #getPath()} - /path/info</li>
- * <li>{@link #getParam()} - param</li>
- * <li>{@link #getQuery()} - query</li>
- * <li>{@link #getFragment()} - fragment</li>
- * </ul>
- * - https://bob:bobby@www.lunatech.com:8080/file;p=1?q=2#third - \___/ \_/ \___/ \______________/ \__/\_______/ \_/ \___/ - | | | | | | \_/ | | - Scheme User Password Host Port Path | | Fragment - \_____________________________/ | Query - | Path parameter - Authority - *
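As a rough illustration of the split listed above (a hedged sketch, not the removed state-machine parser): an origin-form request target such as `/updates?queries=20#frag` can be separated into path, query and fragment with plain slicing. Scheme, authority and `;param` handling are deliberately left out here.

```d
import std.string : indexOf;
import std.typecons : tuple;

// Split an origin-form request target into path, query and fragment.
// A simplification of what the removed HttpURI state machine tracks.
auto splitTarget(string target)
{
    string path = target, query, fragment;

    auto hash = path.indexOf('#');
    if (hash >= 0) { fragment = path[hash + 1 .. $]; path = path[0 .. hash]; }

    auto qm = path.indexOf('?');
    if (qm >= 0) { query = path[qm + 1 .. $]; path = path[0 .. qm]; }

    return tuple!("path", "query", "fragment")(path, query, fragment);
}

unittest
{
    auto t = splitTarget("/updates?queries=20#frag");
    assert(t.path == "/updates" && t.query == "queries=20" && t.fragment == "frag");
}
```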

- * Any parameters will be returned from {@link #getPath()}, but are excluded - * from the return value of {@link #getDecodedPath()}. If there are multiple - * parameters, the {@link #getParam()} method returns only the last one. - * - * See_Also: - * https://stackoverflow.com/questions/1634271/url-encoding-the-space-character-or-20 - * https://web.archive.org/web/20151218094722/http://blog.lunatech.com/2009/02/03/what-every-web-developer-must-know-about-url-encoding - */ -class HttpURI { - private enum State { - START, HOST_OR_PATH, SCHEME_OR_PATH, HOST, IPV6, PORT, PATH, PARAM, QUERY, FRAGMENT, ASTERISK - } - - private string _scheme; - private string _user; - private string _host; - private int _port; - private string _path; - private string _param; - private string _query; - private string _fragment; - - string _uri; - string _decodedPath; - - /** - * Construct a normalized URI. Port is not set if it is the default port. - * - * @param scheme - * the URI scheme - * @param host - * the URI hose - * @param port - * the URI port - * @param path - * the URI path - * @param param - * the URI param - * @param query - * the URI query - * @param fragment - * the URI fragment - * @return the normalized URI - */ - static HttpURI createHttpURI(string scheme, string host, int port, string path, string param, string query, - string fragment) { - if (port == 80 && (scheme == "http")) - port = 0; - if (port == 443 && (scheme == "https")) - port = 0; - return new HttpURI(scheme, host, port, path, param, query, fragment); - } - - this() { - } - - this(string scheme, string host, int port, string path, string param, string query, string fragment) { - _scheme = scheme; - _host = host; - _port = port; - _path = path; - _param = param; - _query = query; - _fragment = fragment; - } - - this(HttpURI uri) { - this(uri._scheme, uri._host, uri._port, uri._path, uri._param, uri._query, uri._fragment); - _uri = uri._uri; - } - - this(string uri) { - _port = -1; - parse(State.START, uri); - } - - // this(URI uri) { - // _uri = null; - - // _scheme = uri.getScheme(); - // _host = uri.getHost(); - // if (_host == null && uri.getRawSchemeSpecificPart().startsWith("//")) - // _host = ""; - // _port = uri.getPort(); - // _user = uri.getUserInfo(); - // _path = uri.getRawPath(); - - // _decodedPath = uri.getPath(); - // if (_decodedPath != null) { - // int p = _decodedPath.lastIndexOf(';'); - // if (p >= 0) - // _param = _decodedPath.substring(p + 1); - // } - // _query = uri.getRawQuery(); - // _fragment = uri.getFragment(); - - // _decodedPath = null; - // } - - this(string scheme, string host, int port, string pathQuery) { - _uri = null; - - _scheme = scheme; - _host = host; - _port = port; - - parse(State.PATH, pathQuery); - - } - - void parse(string uri) { - clear(); - _uri = uri; - parse(State.START, uri); - } - - /** - * Parse according to https://tools.ietf.org/html/rfc7230#section-5.3 - * - * @param method - * the request method - * @param uri - * the request uri - */ - void parseRequestTarget(string method, string uri) { - clear(); - _uri = uri; - - if (method == "CONNECT") - _path = uri; - else - parse(uri.startsWith("/") ? 
State.PATH : State.START, uri); - } - - // deprecated("") - // void parseConnect(string uri) { - // clear(); - // _uri = uri; - // _path = uri; - // } - - void parse(string uri, int offset, int length) { - clear(); - int end = offset + length; - _uri = uri.substring(offset, end); - parse(State.START, uri); - } - - private void parse(State state, string uri) { - bool encoded = false; - int end = cast(int)uri.length; - int mark = 0; - int path_mark = 0; - char last = '/'; - for (int i = 0; i < end; i++) { - char c = uri[i]; - - final switch (state) { - case State.START: { - switch (c) { - case '/': - mark = i; - state = State.HOST_OR_PATH; - break; - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - // assume empty path (if seen at start) - _path = ""; - mark = i + 1; - state = State.QUERY; - break; - case '#': - mark = i + 1; - state = State.FRAGMENT; - break; - case '*': - _path = "*"; - state = State.ASTERISK; - break; - - case '.': - path_mark = i; - state = State.PATH; - encoded = true; - break; - - default: - mark = i; - if (_scheme == null) - state = State.SCHEME_OR_PATH; - else { - path_mark = i; - state = State.PATH; - } - break; - } - - continue; - } - - case State.SCHEME_OR_PATH: { - switch (c) { - case ':': - // must have been a scheme - _scheme = uri.substring(mark, i); - // Start again with scheme set - state = State.START; - break; - - case '/': - // must have been in a path and still are - state = State.PATH; - break; - - case ';': - // must have been in a path - mark = i + 1; - state = State.PARAM; - break; - - case '?': - // must have been in a path - _path = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - - case '%': - // must have be in an encoded path - encoded = true; - state = State.PATH; - break; - - case '#': - // must have been in a path - _path = uri.substring(mark, i); - state = State.FRAGMENT; - break; - - default: - break; - } - continue; - } - - case State.HOST_OR_PATH: { - switch (c) { - case '/': - _host = ""; - mark = i + 1; - state = State.HOST; - break; - - case '@': - case ';': - case '?': - case '#': - // was a path, look again - i--; - path_mark = mark; - state = State.PATH; - break; - - case '.': - // it is a path - encoded = true; - path_mark = mark; - state = State.PATH; - break; - - default: - // it is a path - path_mark = mark; - state = State.PATH; - } - continue; - } - - case State.HOST: { - switch (c) { - case '/': - _host = uri.substring(mark, i); - path_mark = mark = i; - state = State.PATH; - break; - case ':': - if (i > mark) - _host = uri.substring(mark, i); - mark = i + 1; - state = State.PORT; - break; - case '@': - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - _user = uri.substring(mark, i); - mark = i + 1; - break; - - case '[': - state = State.IPV6; - break; - - default: - break; - } - break; - } - - case State.IPV6: { - switch (c) { - case '/': - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - case ']': - c = uri.charAt(++i); - _host = uri.substring(mark, i); - if (c == ':') { - mark = i + 1; - state = State.PORT; - } else { - path_mark = mark = i; - state = State.PATH; - } - break; - - default: - break; - } - - break; - } - - case State.PORT: { - if (c == '@') { - if (_user != null) - throw new IllegalArgumentException("Bad authority"); - // It wasn't a port, but a password! 
- _user = _host ~ ":" ~ uri.substring(mark, i); - mark = i + 1; - state = State.HOST; - } else if (c == '/') { - // _port = TypeUtils.parseInt(uri, mark, i - mark, 10); - _port = to!int(uri[mark .. i], 10); - path_mark = mark = i; - state = State.PATH; - } - break; - } - - case State.PATH: { - switch (c) { - case ';': - mark = i + 1; - state = State.PARAM; - break; - case '?': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '%': - encoded = true; - break; - case '.': - if ('/' == last) - encoded = true; - break; - - default: - break; - } - break; - } - - case State.PARAM: { - switch (c) { - case '?': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.QUERY; - break; - case '#': - _path = uri.substring(path_mark, i); - _param = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - break; - case '/': - encoded = true; - // ignore internal params - state = State.PATH; - break; - case ';': - // multiple parameters - mark = i + 1; - break; - - default: - break; - } - break; - } - - case State.QUERY: { - if (c == '#') { - _query = uri.substring(mark, i); - mark = i + 1; - state = State.FRAGMENT; - } - break; - } - - case State.ASTERISK: { - throw new IllegalArgumentException("Bad character '*'"); - } - - case State.FRAGMENT: { - _fragment = uri.substring(mark, end); - i = end; - break; - } - } - last = c; - } - - final switch (state) { - case State.START: - break; - case State.SCHEME_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST_OR_PATH: - _path = uri.substring(mark, end); - break; - - case State.HOST: - if (end > mark) - _host = uri.substring(mark, end); - break; - - case State.IPV6: - throw new IllegalArgumentException("No closing ']' for ipv6 in " ~ uri); - - case State.PORT: - // _port = TypeUtils.parseInt(uri, mark, end - mark, 10); - _port = to!int(uri[mark .. end], 10); - break; - - case State.ASTERISK: - break; - - case State.FRAGMENT: - _fragment = uri.substring(mark, end); - break; - - case State.PARAM: - _path = uri.substring(path_mark, end); - _param = uri.substring(mark, end); - break; - - case State.PATH: - _path = uri.substring(path_mark, end); - break; - - case State.QUERY: - _query = uri.substring(mark, end); - break; - } - - if (!encoded) { - if (_param == null) - _decodedPath = _path; - else - _decodedPath = _path[0 .. _path.length - _param.length - 1]; - } - } - - string getScheme() { - return _scheme; - } - - string getHost() { - // Return null for empty host to retain compatibility with java.net.URI - if (_host != null && _host.length == 0) - return null; - return _host; - } - - int getPort() { - return _port; - } - - /** - * The parsed Path. - * - * @return the path as parsed on valid URI. null for invalid URI. 
- */ - string getPath() { - return _path; - } - - string getDecodedPath() { - if (_decodedPath.empty && !_path.empty) - _decodedPath = URIUtils.canonicalPath(URIUtils.decodePath(_path)); - return _decodedPath; - } - - string getParam() { - return _param; - } - - string getQuery() { - return _query; - } - - bool hasQuery() { - return _query != null && _query.length > 0; - } - - string getFragment() { - return _fragment; - } - - void decodeQueryTo(MultiMap!string parameters, string encoding = StandardCharsets.UTF_8) { - if (_query == _fragment) - return; - - UrlEncoded.decodeTo(_query, parameters, encoding); - } - - void clear() { - _uri = null; - - _scheme = null; - _host = null; - _port = -1; - _path = null; - _param = null; - _query = null; - _fragment = null; - - _decodedPath = null; - } - - bool isAbsolute() { - return _scheme != null && _scheme.length > 0; - } - - override - string toString() { - if (_uri is null) { - StringBuilder ot = new StringBuilder(); - - if (_scheme != null) - ot.append(_scheme).append(':'); - - if (_host != null) { - ot.append("//"); - if (_user != null) - ot.append(_user).append('@'); - ot.append(_host); - } - - if (_port > 0) - ot.append(':').append(_port); - - if (_path != null) - ot.append(_path); - - if (_query != null) - ot.append('?').append(_query); - - if (_fragment != null) - ot.append('#').append(_fragment); - - if (ot.length > 0) - _uri = ot.toString(); - else - _uri = ""; - } - return _uri; - } - - bool equals(Object o) { - if (o is this) - return true; - if (!(typeid(o) == typeid(HttpURI))) - return false; - return toString().equals(o.toString()); - } - - void setScheme(string scheme) { - _scheme = scheme; - _uri = null; - } - - /** - * @param host - * the host - * @param port - * the port - */ - void setAuthority(string host, int port) { - _host = host; - _port = port; - _uri = null; - } - - /** - * @param path - * the path - */ - void setPath(string path) { - _uri = null; - _path = path; - _decodedPath = null; - } - - /** - * @param path - * the decoded path - */ - // void setDecodedPath(string path) { - // _uri = null; - // _path = URIUtils.encodePath(path); - // _decodedPath = path; - // } - - void setPathQuery(string path) { - _uri = null; - _path = null; - _decodedPath = null; - _param = null; - _fragment = null; - if (path != null) - parse(State.PATH, path); - } - - void setQuery(string query) { - _query = query; - _uri = null; - } - - // URI toURI() { - // return new URI(_scheme, null, _host, _port, _path, _query == null ? null : UrlEncoded.decodestring(_query), - // _fragment); - // } - - string getPathQuery() { - if (_query == null) - return _path; - return _path ~ "?" ~ _query; - } - - bool hasAuthority() { - return _host != null; - } - - string getAuthority() { - if (_port > 0) - return _host ~ ":" ~ to!string(_port); - return _host; - } - - string getUser() { - return _user; - } - -} - - -/** - * Parse an authority string into Host and Port - *

- * Parse a string in the form "host:port", handling IPv4 and IPv6 hosts

- * - */ -class URIUtils -{ - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters - */ - static string decodePath(string path) { - return decodePath(path, 0, cast(int)path.length); - } - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of UTF-8 path - */ - static string decodePath(string path, int offset, int length) { - try { - StringBuilder builder = null; - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - //builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - //builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException("Bad URI % encoding"); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - - break; - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } catch (Exception e) { - // System.err.println(path.substring(offset, offset + length) + " " + e); - error(e.toString); - return decodeISO88591Path(path, offset, length); - } - } - - - /* ------------------------------------------------------------ */ - /* Decode a URI path and strip parameters of ISO-8859-1 path - */ - private static string decodeISO88591Path(string path, int offset, int length) { - StringBuilder builder = null; - int end = offset + length; - for (int i = offset; i < end; i++) { - char c = path[i]; - switch (c) { - case '%': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - if ((i + 2) < end) { - char u = path.charAt(i + 1); - if (u == 'u') { - // TODO this is wrong. This is a codepoint not a char - // builder.append(cast(char) (0xffff & TypeUtils.parseInt(path, i + 2, 4, 16))); - i += 5; - } else { - //builder.append(cast(byte) (0xff & (TypeUtils.convertHexDigit(u) * 16 + TypeUtils.convertHexDigit(path.charAt(i + 2))))); - i += 2; - } - } else { - throw new IllegalArgumentException(""); - } - - break; - - case ';': - if (builder is null) { - builder = new StringBuilder(path.length); - builder.append(path, offset, i - offset); - } - while (++i < end) { - if (path[i] == '/') { - builder.append('/'); - break; - } - } - break; - - - default: - if (builder !is null) - builder.append(c); - break; - } - } - - if (builder !is null) - return builder.toString(); - if (offset == 0 && length == path.length) - return path; - return path.substring(offset, end); - } - - /* ------------------------------------------------------------ */ - - /** - * Convert a decoded path to a canonical form. - *

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.

- * - * @param path the path to convert, decoded, with path separators '/' and no queries. - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalPath(string path) { - - warningf("canonicalPath ..............."); - if (path.empty) - return path; - - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - break; - - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a cananonical form. - *
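A compact sketch of the same dot-segment folding, under the contract stated above ("." and ".." are factored out; null is returned when ".." climbs above the root). This is a hedged illustration, not the removed Jetty-derived routine: it also drops empty segments, which `canonicalPath` leaves to `compactPath`.

```d
import std.algorithm : splitter;
import std.array : join;

// Fold "." and ".." segments out of a decoded path.
// Returns null when ".." would traverse above the root.
string canonicalize(string path)
{
    string[] stack;
    foreach (seg; path.splitter('/'))
    {
        if (seg == "." || seg.length == 0)
            continue;                      // skip no-op and empty segments
        else if (seg == "..")
        {
            if (stack.length == 0)
                return null;               // traversal above root
            stack = stack[0 .. $ - 1];     // pop the previous segment
        }
        else
            stack ~= seg;
    }
    return "/" ~ stack.join("/");
}

unittest
{
    assert(canonicalize("/a/b/../c/./d") == "/a/c/d");
    assert(canonicalize("/../x") is null);
}
```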

- * All instances of "." and ".." are factored out.
- *
- * Null is returned if the path tries to .. above its root.

- * - * @param path the path to convert (expects URI/URL form, encoded, and with path separators '/') - * @return the canonical path, or null if path traversal above root. - */ - static string canonicalEncodedPath(string path) { - if (path.empty) - return path; - bool slash = true; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '/': - slash = true; - break; - - case '.': - if (slash) - break loop; - slash = false; - break; - - case '?': - return path; - - default: - slash = false; - } - - i++; - } - - if (i == end) - return path; - - StringBuilder canonical = new StringBuilder(path.length); - canonical.append(path, 0, i); - - int dots = 1; - i++; - while (i <= end) { - char c = i < end ? path[i] : '\0'; - switch (c) { - case '\0': - case '/': - case '?': - switch (dots) { - case 0: - if (c != '\0') - canonical.append(c); - break; - - case 1: - if (c == '?') - canonical.append(c); - break; - - case 2: - if (canonical.length < 2) - return null; - canonical.setLength(canonical.length - 1); - canonical.setLength(canonical.lastIndexOf("/") + 1); - if (c == '?') - canonical.append(c); - break; - default: - while (dots-- > 0) - canonical.append('.'); - if (c != '\0') - canonical.append(c); - } - - slash = true; - dots = 0; - break; - - case '.': - if (dots > 0) - dots++; - else if (slash) - dots = 1; - else - canonical.append('.'); - slash = false; - break; - - default: - while (dots-- > 0) - canonical.append('.'); - canonical.append(c); - dots = 0; - slash = false; - } - - i++; - } - return canonical.toString(); - } - - - - /* ------------------------------------------------------------ */ - - /** - * Convert a path to a compact form. - * All instances of "//" and "///" etc. are factored out to single "/" - * - * @param path the path to compact - * @return the compacted path - */ - static string compactPath(string path) { - if (path == null || path.length == 0) - return path; - int state = 0; - int end = cast(int)path.length; - int i = 0; - - loop: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - return path; - case '/': - state++; - if (state == 2) - break loop; - break; - default: - state = 0; - } - i++; - } - - if (state < 2) - return path; - - StringBuilder buf = new StringBuilder(path.length); - buf.append(path, 0, i); - - loop2: - while (i < end) { - char c = path[i]; - switch (c) { - case '?': - buf.append(path, i, end); - break loop2; - case '/': - if (state++ == 0) - buf.append(c); - break; - default: - state = 0; - buf.append(c); - } - i++; - } - - return buf.toString(); - } - - /* ------------------------------------------------------------ */ - - /** - * @param uri URI - * @return True if the uri has a scheme - */ - static bool hasScheme(string uri) { - for (int i = 0; i < uri.length; i++) { - char c = uri[i]; - if (c == ':') - return true; - if (!(c >= 'a' && c <= 'z' || - c >= 'A' && c <= 'Z' || - (i > 0 && (c >= '0' && c <= '9' || - c == '.' 
|| - c == '+' || - c == '-')) - )) - break; - } - return false; - } -} diff --git a/frameworks/D/hunt/pico/http/Parser.d b/frameworks/D/hunt/pico/http/Parser.d deleted file mode 100644 index 8bb8ea6acb2..00000000000 --- a/frameworks/D/hunt/pico/http/Parser.d +++ /dev/null @@ -1,203 +0,0 @@ -/// Minimalistic low-overhead wrapper for nodejs/http-parser -/// Used for benchmarks with simple server -module http.Parser; - - - -import http.Common; - -import hunt.logging.ConsoleLogger; -import std.conv; -import std.range.primitives; -import core.stdc.string; - -import std.experimental.allocator; - -/* contains name and value of a header (name == NULL if is a continuing line - * of a multiline header */ -struct phr_header { - const char *name; - size_t name_len; - const char *value; - size_t value_len; -} - -/* returns number of bytes consumed if successful, -2 if request is partial, - * -1 if failed */ -extern (C) pure @nogc nothrow int phr_parse_request(const char *buf, size_t len, const char **method, - size_t *method_len, const char **path, size_t *path_len, - int *minor_version, phr_header *headers, size_t *num_headers, size_t last_len); - -/* ditto */ -extern (C) pure @nogc nothrow int phr_parse_response(const char *_buf, size_t len, int *minor_version, - int *status, const char **msg, size_t *msg_len, - phr_header *headers, size_t *num_headers, size_t last_len); - -/* ditto */ -extern (C) pure @nogc nothrow int phr_parse_headers(const char *buf, size_t len, - phr_header *headers, size_t *num_headers, size_t last_len); - -/* should be zero-filled before start */ -struct phr_chunked_decoder { - size_t bytes_left_in_chunk; /* number of bytes left in current chunk */ - char consume_trailer; /* if trailing headers should be consumed */ - char _hex_count; - char _state; -} - -/* the function rewrites the buffer given as (buf, bufsz) removing the chunked- - * encoding headers. When the function returns without an error, bufsz is - * updated to the length of the decoded data available. Applications should - * repeatedly call the function while it returns -2 (incomplete) every time - * supplying newly arrived data. If the end of the chunked-encoded data is - * found, the function returns a non-negative number indicating the number of - * octets left undecoded at the tail of the supplied buffer. Returns -1 on - * error. 
- */ -extern (C) pure @nogc nothrow ptrdiff_t phr_decode_chunked(phr_chunked_decoder *decoder, char *buf, size_t *bufsz); - -/* returns if the chunked decoder is in middle of chunked data */ -extern (C) pure @nogc nothrow int phr_decode_chunked_is_in_data(phr_chunked_decoder *decoder); - - -// =========== Public interface starts here ============= - -public: - -class HttpException : Exception { - HttpError error; - - pure @nogc nothrow this(HttpError error, string file = __FILE__, - size_t line = __LINE__, Throwable nextInChain = null) { - this.error = error; - super("Http exception", file, line, nextInChain); - } -} - -struct HttpParser(Interceptor) { - -private { - Interceptor interceptor; - Throwable failure; - phr_header[50] _headers; - char *_method; - char *path; - - int minor_version; - size_t buflen = 0, prevbuflen = 0, method_len, path_len, num_headers; -} - - - alias interceptor this; - - this(Interceptor interceptor) { - this.interceptor = interceptor; - } - - @property bool status() pure @safe nothrow { - return failure is null; - } - - string uri(bool canCopy=false)() { - static if(canCopy) { - return cast(string)path[0..path_len].dup; - } else { - return cast(string)path[0..path_len]; - } - } - - @property HttpMethod method() { - string s = cast(string)_method[0..method_len]; - return to!HttpMethod(s); - } - - - HttpHeader[] headers(bool canCopy=false)() { - HttpHeader[] hs = new HttpHeader[num_headers]; - //HttpHeader[] hs = theAllocator.make!(HttpHeader[num_headers]); - for(int i; i 0) { - /* successfully parsed the request */ - onMessageComplete(); - - if(pret < chunk.length) { - debug infof("try to parse next request"); - pret += doexecute(chunk[pret .. $]); // try to parse next http request data - } - - debug infof("pret=%d", pret); - return pret; - } else if(pret == -2) { - debug warning("parsing incomplete"); - num_headers = 0; - debug infof("pret=%d, chunk=%d", pret, chunk.length); - return 0; - } - - warning("wrong data format"); - num_headers = 0; - failure = new HttpException(HttpError.UNKNOWN); - throw failure; - } - - void onMessageComplete() { - // interceptor.onHeadersComplete(); - debug { - tracef("method is %s", _method[0..method_len]); - tracef("path is %s", path[0..path_len]); - tracef("HTTP version is 1.%d", minor_version); - foreach(ref phr_header h; _headers[0..num_headers]) { - tracef("Header: %s = %s", h.name[0..h.name_len], h.value[0..h.value_len]); - } - } - interceptor.onMessageComplete(); - } -} - -auto httpParser(Interceptor)(Interceptor interceptor) { - return HttpParser!Interceptor(interceptor); -} diff --git a/frameworks/D/hunt/pico/http/Processor.d b/frameworks/D/hunt/pico/http/Processor.d deleted file mode 100644 index dd82fbb7c8f..00000000000 --- a/frameworks/D/hunt/pico/http/Processor.d +++ /dev/null @@ -1,239 +0,0 @@ -/// An example "HTTP server" with poor usability but sensible performance -/// -module http.Processor; - - - -import std.conv; -import std.array, std.exception, std.format, std.algorithm.mutation, std.socket; -import core.stdc.stdlib; -import core.thread, core.atomic; -import http.Parser; - -import hunt.collection.ByteBuffer; -import http.Common; -import hunt.logging; -import hunt.io; -import hunt.util.DateTime; -import std.array; -import std.string; -import core.stdc.string; -import core.stdc.stdlib; -import std.stdio; -import hunt.io.IoError; -import std.experimental.allocator; -private alias Parser = HttpParser!HttpProcessor; - -void * keepAliveValue; -void * nokeepAliveValue; -long index1; -long index2; -long length1; 
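For reference, driving `phr_parse_request` directly looks roughly like the sketch below. The declarations mirror the extern(C) bindings above; libpicohttpparser still has to be linked in, and the 64-header capacity is illustrative only.

```d
// Same C binding as declared in Parser.d above (picohttpparser).
struct phr_header
{
    const(char)* name;
    size_t name_len;
    const(char)* value;
    size_t value_len;
}

extern (C) int phr_parse_request(const(char)* buf, size_t len,
        const(char)** method, size_t* method_len,
        const(char)** path, size_t* path_len,
        int* minor_version, phr_header* headers, size_t* num_headers,
        size_t last_len);

// Parse one request; return >0 = bytes consumed, -2 = partial, -1 = parse error.
void parseOnce(const(ubyte)[] data)
{
    const(char)* method, path;
    size_t methodLen, pathLen;
    int minorVersion;
    phr_header[64] headers;              // capacity chosen arbitrarily for the sketch
    size_t numHeaders = headers.length;  // in: capacity, out: headers actually parsed

    int ret = phr_parse_request(cast(const(char)*) data.ptr, data.length,
            &method, &methodLen, &path, &pathLen,
            &minorVersion, headers.ptr, &numHeaders, 0 /* last_len */);

    if (ret > 0)
    {
        // method[0 .. methodLen] and path[0 .. pathLen] point into `data`,
        // e.g. "GET" and "/plaintext"; headers[0 .. numHeaders] hold the fields.
    }
}
```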
-long length2; - - -void * keepAliveJson; -void * nokeepAliveJson; -long index3; -long index4; -long length3; -long length4; - - -enum RET -{ - TEXT, - JSON, - DEF -} - - -static this() -{ - index1 = keepAliveResponseData.indexOf("Date:") + 6; - index2 = nokeepAliveResponseData.indexOf("Date:") + 6; - length1 = keepAliveResponseData.length; - length2 = nokeepAliveResponseData.length; - keepAliveValue = malloc(length1); - nokeepAliveValue = malloc(length2); - memcpy(keepAliveValue , (cast(ubyte[])keepAliveResponseData).ptr, length1); - memcpy(nokeepAliveValue , (cast(ubyte[])nokeepAliveResponseData).ptr, length2); - - - index3 = keepAliveJsonDate.indexOf("Date:") + 6; - index4 = nokeepAliveJsonDate.indexOf("Date:") + 6; - length3 = keepAliveJsonDate.length; - length4 = nokeepAliveJsonDate.length; - keepAliveJson = malloc(length3); - nokeepAliveJson = malloc(length4); - memcpy(keepAliveJson , (cast(ubyte[])keepAliveJsonDate).ptr, length3); - memcpy(nokeepAliveJson , (cast(ubyte[])nokeepAliveJsonDate).ptr, length4); -} - -struct HttpRequest { - private Parser* parser; - - HttpHeader[] headers(bool canCopy=false)() @property { - return parser.headers!canCopy(); - } - - HttpMethod method() @property { - return parser.method(); - } - - string uri(bool canCopy=false)() @property { - return parser.uri!(canCopy)(); - } -} - -//version(NO_HTTPPARSER) { -enum string ResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: Keep-Alive\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; -//} - -enum string keepAliveResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: Keep-Alive\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; -enum string nokeepAliveResponseData = "HTTP/1.1 200 OK\r\nContent-Length: 13\r\nConnection: close\r\nContent-Type: text/plain\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\nHello, World!"; - -enum string keepAliveJsonDate = "HTTP/1.1 200 OK\r\nContent-Length: 27\r\nConnection: Keep-Alive\r\nContent-Type: application/json; charset=UTF-8\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\n{\"message\":\"Hello, World!\"}"; -enum string nokeepAliveJsonDate = "HTTP/1.1 200 OK\r\nContent-Length: 27\r\nConnection: close\r\nContent-Type: application/json; charset=UTF-8\r\nServer: Hunt/1.0\r\nDate: Wed, 17 Apr 2013 12:00:00 GMT\r\n\r\n{\"message\":\"Hello, World!\"}"; - -abstract class HttpProcessor { - -package: - HttpHeader[] headers; // buffer for headers - Parser parser; - HttpRequest request; - bool serving; - - -public: - TcpStream client; - - this(TcpStream sock) { - serving = true; - client = sock; - headers = new HttpHeader[1]; - //headers = theAllocator.makeArray!(HttpHeader)(1); - parser = httpParser(this); - request.parser = &parser; - //index1 = keepAliveResponseData.indexOf("Date:") + 6; - //length1 = keepAliveResponseData.length; - //length2 = nokeepAliveResponseData.length; - //keepAliveValue = malloc(length1); - //nokeepAliveValue = malloc(length2); - //memcpy(keepAliveValue , (cast(ubyte[])keepAliveResponseData).ptr, length1); - //memcpy(nokeepAliveValue , (cast(ubyte[])nokeepAliveResponseData).ptr, length2); - } - - void run() { - client.received((ByteBuffer buffer) { - version(NO_HTTPPARSER) { - client.write(cast(ubyte[])ResponseData); - } else { - try { - int len = parser.execute(cast(ubyte[]) buffer.getRemaining()); - buffer.position(buffer.position() + len); - } catch(Exception ex) { - 
buffer.clear(); // drop all the wrong data - //respondWith(ex.msg, 500); - } - } - }) - .closed(() { - // notifyClientClosed(); - }) - .error((IoError msg) { - warning("Error: ", msg.errorMsg()); - }) - .start(); - } - - protected void notifyClientClosed() { - debug tracef("The connection[%s] is closed", client.remoteAddress()); - } - - void respondWith(RET type, uint status, HttpHeader[] headers...) { - switch(type) - { - case RET.TEXT: - { - if (parser.shouldKeepAlive) - { - memcpy(keepAliveValue + index1 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - //memcpy(keepAliveValue + index1 , (cast(ubyte[])("Wed, 17 Apr 2013 12:00:00 GMT")).ptr, 29); - client.write(cast(ubyte[]) keepAliveValue[0 .. length1]); - }else - { - memcpy(nokeepAliveValue + index2 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - //memcpy(nokeepAliveValue + index2 , (cast(ubyte[])("Wed, 17 Apr 2013 12:00:00 GMT")).ptr, 29); - client.write(cast(ubyte[]) nokeepAliveValue[0 .. length2]); - } - break; - } - case RET.JSON: - { - if (parser.shouldKeepAlive) - { - memcpy(keepAliveJson + index3 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - //memcpy(keepAliveValue + index1 , (cast(ubyte[])("Wed, 17 Apr 2013 12:00:00 GMT")).ptr, 29); - client.write(cast(ubyte[]) keepAliveJson[0 .. length3]); - }else - { - memcpy(nokeepAliveJson + index4 , (cast(ubyte[])(DateTimeHelper.getDateAsGMT())).ptr, 29); - //memcpy(nokeepAliveValue + index2 , (cast(ubyte[])("Wed, 17 Apr 2013 12:00:00 GMT")).ptr, 29); - client.write(cast(ubyte[]) nokeepAliveJson[0 .. length4]); - } - break; - } - default: - { - - } - } - - //return respondWith(cast(const(ubyte)[]) _body, status, headers); - } - - //void respondWith(const(ubyte)[] _body, uint status, HttpHeader[] headers...) { - // outBuf.clear(); - // formattedWrite(outBuf, "HTTP/1.1 %s OK\r\n", status); - // outBuf.put("Server: Hunt/1.0\r\n"); - // - // formattedWrite(outBuf, "Date: %s\r\n", DateTimeHelper.getDateAsGMT()); - // if (!parser.shouldKeepAlive) - // outBuf.put("Connection: close\r\n"); - // foreach (ref hdr; headers) { - // outBuf.put(hdr.name); - // outBuf.put(": "); - // outBuf.put(hdr.value); - // outBuf.put("\r\n"); - // } - // formattedWrite(outBuf, "Content-Length: %d\r\n\r\n", _body.length); - // outBuf.put(cast(string) _body); - // client.write(cast(ubyte[]) outBuf.data); // TODO: short-writes are quite possible - //} - - void onChunk(ref HttpRequest req, const(ubyte)[] chunk) { - // TODO: Tasks pending completion - 5/16/2019, 5:40:18 PM - // - } - - void onComplete(ref HttpRequest req); - - - final int onBody(Parser* parser, const(ubyte)[] chunk) { - onChunk(request, chunk); - return 0; - } - - final int onMessageComplete() { - try { - onComplete(request); - } catch(Exception ex) { - //respondWith(ex.msg, 500); - } - if (!parser.shouldKeepAlive) - serving = false; - return 0; - } - -} diff --git a/frameworks/D/hunt/pico/http/Server.d b/frameworks/D/hunt/pico/http/Server.d deleted file mode 100644 index 9f36c6f39d4..00000000000 --- a/frameworks/D/hunt/pico/http/Server.d +++ /dev/null @@ -1,133 +0,0 @@ -module http.Server; - - - -import hunt.event; -import hunt.io; -import hunt.logging.ConsoleLogger; -import hunt.system.Memory : totalCPUs; -import hunt.util.DateTime; - -import std.array; -import std.conv; -import std.json; -import std.socket; -import std.string; -import std.stdio; - -import http.Parser; -import http.Processor; -import hunt.io.channel.Common; - -shared static this() { - DateTimeHelper.startClock(); -} - - - -import hunt.io.channel; 
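The processor above never formats a response at request time: complete keep-alive and close variants are pre-rendered once, and only the 29-byte GMT date is memcpy'd over the `Date:` value before each write. Below is a self-contained sketch of that idea; `CannedResponse` and `render` are illustrative names, not part of hunt.

```d
import core.stdc.string : memcpy;
import std.string : indexOf;

// Pre-render a response once, then patch only the 29-byte GMT date per send.
struct CannedResponse
{
    ubyte[] bytes;      // full response: status line, headers and body
    size_t dateOffset;  // where the date value starts

    this(string response)
    {
        bytes = cast(ubyte[]) response.dup;
        // Assumes the template contains a "Date: " header, as the ones above do.
        dateOffset = cast(size_t) response.indexOf("Date:") + 6;
    }

    // gmtDate must be an IMF-fixdate, e.g. "Wed, 17 Apr 2013 12:00:00 GMT" (29 chars).
    const(ubyte)[] render(string gmtDate)
    {
        assert(gmtDate.length == 29);
        memcpy(bytes.ptr + dateOffset, gmtDate.ptr, 29);
        return bytes;
    }
}
```

Per request the handler would then write something like `canned.render(DateTimeHelper.getDateAsGMT())` to the socket, which is what the `memcpy` calls into `keepAliveValue`/`nokeepAliveValue` above achieve.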
-import std.experimental.allocator; -/** -*/ -abstract class AbstractTcpServer { - protected EventLoopGroup _group = null; - protected bool _isStarted = false; - protected Address _address; - protected int _workersCount; - TcpStreamOptions _tcpStreamoption; - - this(Address address, int thread = (totalCPUs - 1), int workersCount = 0) { - this._address = address; - _tcpStreamoption = TcpStreamOptions.create(); - _tcpStreamoption.bufferSize = 1024 * 4; - _tcpStreamoption.isKeepalive = false; - _group = new EventLoopGroup(cast(uint) thread); - //_group = theAllocator.make!EventLoopGroup(cast(uint) thread*2); - this._workersCount = workersCount; - //defaultPoolThreads(thread); - } - - @property Address bindingAddress() { - return _address; - } - - void start() { - if (_isStarted) - return; - _isStarted = true; - - Socket server = new TcpSocket(); - //Socket server = theAllocator.make!TcpSocket; - server.setOption(SocketOptionLevel.SOCKET, SocketOption.REUSEADDR, true); - server.bind(new InternetAddress("0.0.0.0", 8080)); - //server.bind( theAllocator.make!(InternetAddress("0.0.0.0", 8080))); - server.listen(8192); - - trace("Launching mini-http server"); - debug { - _group.start(_tcpStreamoption.bufferSize); - } else { - _group.start(_tcpStreamoption.bufferSize); - } - - if (_workersCount) { - defaultPoolThreads = _workersCount; - workerPool(); // Initilize worker poll - } - writefln("worker count: %d", _workersCount); - writefln("IO thread: %d", _group.size); - - while (true) { - try { - version (HUNT_DEBUG) - trace("Waiting for server.accept()"); - - Socket socket = server.accept(); - version (HUNT_DEBUG) { - infof("new connection from %s, fd=%d", - socket.remoteAddress.toString(), socket.handle()); - } - // EventLoop loop = _group.nextLoop(); - EventLoop loop = _group.nextLoop(socket.handle); - TcpStream stream = theAllocator.make!TcpStream(loop, socket, _tcpStreamoption); - //TcpStream stream = new TcpStream(loop, socket, _tcpStreamoption); - onConnectionAccepted(stream); - } catch (Exception e) { - warningf("Failure on accepting %s", e); - break; - } - } - _isStarted = false; - } - - protected void onConnectionAccepted(TcpStream client); - - void stop() { - if (!_isStarted) - return; - _isStarted = false; - _group.stop(); - } -} - -alias ProcessorCreater = HttpProcessor delegate(TcpStream client); - -/** -*/ -class HttpServer(T) : AbstractTcpServer if (is(T : HttpProcessor)) { - - this(string ip, ushort port, int thread = (totalCPUs - 1)) { - super(theAllocator.make!InternetAddress(ip, port), thread); - } - - this(Address address, int thread = (totalCPUs - 1)) { - super(address, thread); - } - - override protected void onConnectionAccepted(TcpStream client) { - //HttpProcessor httpProcessor = new T(client); - HttpProcessor httpProcessor = theAllocator.make!T(client); - httpProcessor.run(); - } - -} diff --git a/frameworks/D/hunt/pico/http/UrlEncoded.d b/frameworks/D/hunt/pico/http/UrlEncoded.d deleted file mode 100644 index cea3d1f9a45..00000000000 --- a/frameworks/D/hunt/pico/http/UrlEncoded.d +++ /dev/null @@ -1,362 +0,0 @@ -module http.UrlEncoded; - - - -import hunt.collection.List; -import hunt.collection.MultiMap; -import hunt.collection.StringBuffer; -import hunt.Exceptions; -import hunt.logging; -import hunt.text.Charset; -import hunt.text.Common; -import hunt.text.StringBuilder; -import hunt.util.TypeUtils; - -import std.conv; -import std.array; - - -/** - * Handles coding of MIME "x-www-form-urlencoded". - *

- * This class handles the encoding and decoding for either the query string of a
- * URL or the _content of a POST HTTP request.
- *
- * Notes
- *
- * The UTF-8 charset is assumed, unless otherwise defined by either passing a
- * parameter or setting the "org.hunt.utils.UrlEncoding.charset" System
- * property.
- *
- * The hashtable either contains string single values, vectors of string or
- * arrays of Strings.
- *
- * This class is only partially synchronised. In particular, simple get
- * operations are not protected from concurrent updates.

- * - * @see java.net.URLEncoder - */ -class UrlEncoded : MultiMap!string { - - enum string ENCODING = StandardCharsets.UTF_8; - - - this() { - } - - this(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query) { - decodeTo(query, this, ENCODING); - } - - void decode(string query, string charset) { - decodeTo(query, this, charset); - } - - /** - * Encode MultiMap with % encoding for UTF8 sequences. - * - * @return the MultiMap as a string with % encoding - */ - string encode() { - return encode(ENCODING, false); - } - - /** - * Encode MultiMap with % encoding for arbitrary string sequences. - * - * @param charset the charset to use for encoding - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset) { - return encode(charset, false); - } - - /** - * Encode MultiMap with % encoding. - * - * @param charset the charset to encode with - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings - */ - string encode(string charset, bool equalsForNullValue) { - return encode(this, charset, equalsForNullValue); - } - - /** - * Encode MultiMap with % encoding. - * - * @param map the map to encode - * @param charset the charset to use for encoding (uses default encoding if null) - * @param equalsForNullValue if True, then an '=' is always used, even - * for parameters without a value. e.g. "blah?a=&b=&c=". - * @return the MultiMap as a string encoded with % encodings. - */ - static string encode(MultiMap!string map, string charset, bool equalsForNullValue) { - if (charset is null) - charset = ENCODING; - - StringBuilder result = new StringBuilder(128); - bool delim = false; - foreach(string key, List!string list; map) - { - int s = list.size(); - - if (delim) { - result.append('&'); - } - - if (s == 0) { - result.append(encodeString(key, charset)); - if (equalsForNullValue) - result.append('='); - } else { - for (int i = 0; i < s; i++) { - if (i > 0) - result.append('&'); - string val = list.get(i); - result.append(encodeString(key, charset)); - - if (val != null) { - if (val.length > 0) { - result.append('='); - result.append(encodeString(val, charset)); - } else if (equalsForNullValue) - result.append('='); - } else if (equalsForNullValue) - result.append('='); - } - } - delim = true; - } - return result.toString(); - } - - /** - * Decoded parameters to Map. - * - * @param content the string containing the encoded parameters - * @param map the MultiMap to put parsed query parameters into - * @param charset the charset to use for decoding - */ - static void decodeTo(string content, MultiMap!string map, string charset = ENCODING) { - if (charset.empty) - charset = ENCODING; - - synchronized (map) { - string key = null; - string value = null; - int mark = -1; - bool encoded = false; - for (int i = 0; i < content.length; i++) { - char c = content[i]; - switch (c) { - case '&': - int l = i - mark - 1; - value = l == 0 ? "" : - (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1, i)); - mark = i; - encoded = false; - if (key != null) { - map.add(key, value); - } else if (value != null && value.length > 0) { - map.add(value, ""); - } - key = null; - value = null; - break; - case '=': - if (key != null) - break; - key = encoded ? 
decodeString(content, mark + 1, i - mark - 1) : content.substring(mark + 1, i); - mark = i; - encoded = false; - break; - case '+': - encoded = true; - break; - case '%': - encoded = true; - break; - default: break; - } - } - - int contentLen = cast(int)content.length; - - if (key != null) { - int l = contentLen - mark - 1; - value = l == 0 ? "" : (encoded ? decodeString(content, mark + 1, l) : content.substring(mark + 1)); - version(HUNT_DEBUG) tracef("key=%s, value=%s", key, value); - map.add(key, value); - } else if (mark < contentLen) { - version(HUNT_DEBUG) tracef("empty value: content=%s, key=%s", content, key); - key = encoded - ? decodeString(content, mark + 1, contentLen - mark - 1, charset) - : content.substring(mark + 1); - if (!key.empty) { - map.add(key, ""); - } - } else { - warningf("No key found."); - } - } - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @return the decoded string - */ - static string decodeString(string encoded) { - return decodeString(encoded, 0, cast(int)encoded.length); - } - - /** - * Decode string with % encoding. - * This method makes the assumption that the majority of calls - * will need no decoding. - * - * @param encoded the encoded string to decode - * @param offset the offset in the encoded string to decode from - * @param length the length of characters in the encoded string to decode - * @param charset the charset to use for decoding - * @return the decoded string - */ - static string decodeString(string encoded, int offset, int length, string charset = ENCODING) { - StringBuffer buffer = null; - warningf("decodeString ..............."); - for (int i = 0; i < length; i++) { - char c = encoded.charAt(offset + i); - if (c < 0 || c > 0xff) { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i + 1); - } else - buffer.append(c); - } else if (c == '+') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - buffer.append(' '); - } else if (c == '%') { - if (buffer is null) { - buffer = new StringBuffer(length); - buffer.append(encoded, offset, offset + i); - } - - byte[] ba = new byte[length]; - int n = 0; - while (c >= 0 && c <= 0xff) { - if (c == '%') { - if (i + 2 < length) { - int o = offset + i + 1; - i += 3; - // ba[n] = cast(byte) TypeUtils.parseInt(encoded, o, 2, 16); - n++; - } else { - ba[n++] = cast(byte) '?'; - i = length; - } - } else if (c == '+') { - ba[n++] = cast(byte) ' '; - i++; - } else { - ba[n++] = cast(byte) c; - i++; - } - - if (i >= length) - break; - c = encoded.charAt(offset + i); - } - - i--; - buffer.append(cast(string)(ba[0 .. n])); - - } else if (buffer !is null) - buffer.append(c); - } - - if (buffer is null) { - if (offset == 0 && encoded.length == length) - return encoded; - return encoded.substring(offset, offset + length); - } - - return buffer.toString(); - } - - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @return encoded string. - */ - static string encodeString(string string) { - return encodeString(string, ENCODING); - } - - /** - * Perform URL encoding. - * - * @param string the string to encode - * @param charset the charset to use for encoding - * @return encoded string. 
- */ - static string encodeString(string str, string charset) { - if (charset is null) - charset = ENCODING; - byte[] bytes = cast(byte[])str; - // bytes = string.getBytes(charset); - warningf("encodeString ..............."); - int len = cast(int)bytes.length; - byte[] encoded = new byte[bytes.length * 3]; - int n = 0; - bool noEncode = true; - - for (int i = 0; i < len; i++) { - byte b = bytes[i]; - - if (b == ' ') { - noEncode = false; - encoded[n++] = cast(byte) '+'; - } else if (b >= 'a' && b <= 'z' || - b >= 'A' && b <= 'Z' || - b >= '0' && b <= '9') { - encoded[n++] = b; - } else { - noEncode = false; - encoded[n++] = cast(byte) '%'; - byte nibble = cast(byte) ((b & 0xf0) >> 4); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - nibble = cast(byte) (b & 0xf); - if (nibble >= 10) - encoded[n++] = cast(byte) ('A' + nibble - 10); - else - encoded[n++] = cast(byte) ('0' + nibble); - } - } - - if (noEncode) - return str; - - return cast(string)(encoded[0 .. n]); - } -} diff --git a/frameworks/Dart/dart2/README.md b/frameworks/Dart/dart2/README.md index d5c281055e0..6ade1488b34 100644 --- a/frameworks/Dart/dart2/README.md +++ b/frameworks/Dart/dart2/README.md @@ -9,7 +9,7 @@ The tests were run with: -- [Dart v2.12](https://dart.dev/) +- [Dart v2.17.6](https://dart.dev/) ## Test URLs diff --git a/frameworks/Dart/dart2/analysis_options.yaml b/frameworks/Dart/dart2/analysis_options.yaml index 81c61e2a2ef..e5046b65dcf 100644 --- a/frameworks/Dart/dart2/analysis_options.yaml +++ b/frameworks/Dart/dart2/analysis_options.yaml @@ -1,7 +1,8 @@ -include: package:pedantic/analysis_options.yaml +include: package:lints/recommended.yaml analyzer: - strong-mode: - implicit-casts: false + language: + strict-casts: true + linter: rules: - avoid_dynamic_calls diff --git a/frameworks/Dart/dart2/dart2.dockerfile b/frameworks/Dart/dart2/dart2.dockerfile index bb89fefd48f..65c4abc556e 100644 --- a/frameworks/Dart/dart2/dart2.dockerfile +++ b/frameworks/Dart/dart2/dart2.dockerfile @@ -1,10 +1,10 @@ -FROM google/dart:2.12 +FROM dart:2.17.6 WORKDIR /dart_app COPY pubspec.yaml pubspec.yaml COPY server.dart server.dart -RUN pub upgrade +RUN dart pub upgrade EXPOSE 8080 diff --git a/frameworks/Dart/dart2/pubspec.yaml b/frameworks/Dart/dart2/pubspec.yaml index d5206676a25..83ff6025006 100644 --- a/frameworks/Dart/dart2/pubspec.yaml +++ b/frameworks/Dart/dart2/pubspec.yaml @@ -1,7 +1,7 @@ name: dartbenchmark description: A benchmark of dart environment: - sdk: '>=2.12.0 <3.0.0' + sdk: '>=2.17.0 <3.0.0' dev_dependencies: - pedantic: ^1.0.0 + lints: ^2.0.0 diff --git a/frameworks/Elixir/plug/elixir-plug-ecto.dockerfile b/frameworks/Elixir/plug/elixir-plug-ecto.dockerfile index 564ae886424..c06fbfdd283 100644 --- a/frameworks/Elixir/plug/elixir-plug-ecto.dockerfile +++ b/frameworks/Elixir/plug/elixir-plug-ecto.dockerfile @@ -1,8 +1,25 @@ -FROM elixir:1.9.4 as builder +FROM erlang:25 AS builder RUN apt-get update -y && \ apt-get install -y libicu-dev +# elixir expects utf8. 
+ENV ELIXIR_VERSION="v1.13.4" \ + LANG=C.UTF-8 + +RUN set -xe \ + && ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/archive/${ELIXIR_VERSION}.tar.gz" \ + && ELIXIR_DOWNLOAD_SHA256="95daf2dd3052e6ca7d4d849457eaaba09de52d65ca38d6933c65bc1cdf6b8579" \ + && curl -fSL -o elixir-src.tar.gz $ELIXIR_DOWNLOAD_URL \ + && echo "$ELIXIR_DOWNLOAD_SHA256 elixir-src.tar.gz" | sha256sum -c - \ + && mkdir -p /usr/local/src/elixir \ + && tar -xzC /usr/local/src/elixir --strip-components=1 -f elixir-src.tar.gz \ + && rm elixir-src.tar.gz \ + && cd /usr/local/src/elixir \ + && make install clean \ + && find /usr/local/src/elixir/ -type f -not -regex "/usr/local/src/elixir/lib/[^\/]*/lib.*" -exec rm -rf {} + \ + && find /usr/local/src/elixir/ -type d -depth -empty -delete + ENV MIX_ENV=prod \ LANG=C.UTF-8 @@ -20,7 +37,7 @@ RUN mix deps.get RUN mix deps.compile RUN mix release -FROM debian:buster-slim AS app +FROM debian:bullseye-slim AS app RUN apt-get update -y && \ apt-get install -y openssl libicu-dev diff --git a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/fortune.ex b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/fortune.ex index 089cfade1c4..7613887541e 100644 --- a/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/fortune.ex +++ b/frameworks/Elixir/plug/lib/framework_benchmarks/handlers/fortune.ex @@ -36,10 +36,7 @@ defmodule FrameworkBenchmarks.Handlers.Fortune do fortunes = fortunes - |> Enum.sort(fn %{message: first}, %{message: second} - when is_binary(first) and is_binary(second) -> - :ucol.compare(first, second) != 1 - end) + |> Enum.sort(fn f1, f2 -> f1.message < f2.message end) conn |> Plug.Conn.put_resp_content_type("text/html") diff --git a/frameworks/Elixir/plug/mix.exs b/frameworks/Elixir/plug/mix.exs index 6d8549db0cb..b1213d8fd10 100644 --- a/frameworks/Elixir/plug/mix.exs +++ b/frameworks/Elixir/plug/mix.exs @@ -4,8 +4,8 @@ defmodule FrameworkBenchmarks.MixProject do def project do [ app: :framework_benchmarks, - version: "0.1.0", - elixir: "~> 1.9", + version: "1.1.0", + elixir: "~> 1.13", start_permanent: Mix.env() == :prod, deps: deps() ] @@ -22,13 +22,12 @@ defmodule FrameworkBenchmarks.MixProject do # Run "mix help deps" to learn about dependencies. 
defp deps do [ - {:plug_cowboy, "~> 2.0"}, - {:eljiffy, "~> 1.3.0"}, - {:ecto_sql, "~> 3.0"}, - {:postgrex, ">= 0.0.0"}, - {:cachex, "~> 3.2"}, - {:phoenix_html, "~> 2.13"}, - {:ucol, "~> 2.0"} + {:plug_cowboy, "~> 2.5"}, + {:eljiffy, "~> 1.3"}, + {:ecto_sql, "~> 3.8"}, + {:postgrex, "~> 0.16.3"}, + {:cachex, "~> 3.4"}, + {:phoenix_html, "~> 3.2"} ] end end diff --git a/frameworks/Elixir/plug/mix.lock b/frameworks/Elixir/plug/mix.lock index 6c835a3db77..0a44bc74b01 100644 --- a/frameworks/Elixir/plug/mix.lock +++ b/frameworks/Elixir/plug/mix.lock @@ -1,25 +1,26 @@ %{ - "cachex": {:hex, :cachex, "3.2.0", "a596476c781b0646e6cb5cd9751af2e2974c3e0d5498a8cab71807618b74fe2f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm"}, - "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"}, - "cowboy": {:hex, :cowboy, "2.7.0", "91ed100138a764355f43316b1d23d7ff6bdb0de4ea618cb5d8677c93a7a2f115", [:rebar3], [{:cowlib, "~> 2.8.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"}, - "cowlib": {:hex, :cowlib, "2.8.0", "fd0ff1787db84ac415b8211573e9a30a3ebe71b5cbff7f720089972b2319c8a4", [:rebar3], [], "hexpm"}, - "db_connection": {:hex, :db_connection, "2.2.0", "e923e88887cd60f9891fd324ac5e0290954511d090553c415fbf54be4c57ee63", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"}, - "decimal": {:hex, :decimal, "1.8.1", "a4ef3f5f3428bdbc0d35374029ffcf4ede8533536fa79896dd450168d9acdf3c", [:mix], [], "hexpm"}, - "ecto": {:hex, :ecto, "3.3.1", "82ab74298065bf0c64ca299f6c6785e68ea5d6b980883ee80b044499df35aba1", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, - "ecto_sql": {:hex, :ecto_sql, "3.3.2", "92804e0de69bb63e621273c3492252cb08a29475c05d40eeb6f41ad2d483cfd3", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"}, - "eljiffy": {:hex, :eljiffy, "1.3.0", "7e584be454c5ec3fc3ae472eedb4cb2185e9ed6cd863df383ef601de3f3b27fd", [:mix], [{:jiffy, "~> 1.0", [hex: :jiffy, repo: "hexpm", optional: false]}], "hexpm"}, - "eternal": {:hex, :eternal, "1.2.1", "d5b6b2499ba876c57be2581b5b999ee9bdf861c647401066d3eeed111d096bc4", [:mix], [], "hexpm"}, - "jiffy": {:hex, :jiffy, "1.0.1", "4f25639772ca41202f41ba9c8f6ca0933554283dd4742c90651e03471c55e341", [:rebar3], [], "hexpm"}, - "jumper": {:hex, :jumper, "1.0.1", "3c00542ef1a83532b72269fab9f0f0c82bf23a35e27d278bfd9ed0865cecabff", [:mix], [], "hexpm"}, - "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"}, - "phoenix_html": {:hex, :phoenix_html, "2.13.3", "850e292ff6e204257f5f9c4c54a8cb1f6fbc16ed53d360c2b780a3d0ba333867", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, - "plug": {:hex, :plug, 
"1.8.3", "12d5f9796dc72e8ac9614e94bda5e51c4c028d0d428e9297650d09e15a684478", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"}, - "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, - "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"}, - "postgrex": {:hex, :postgrex, "0.15.3", "5806baa8a19a68c4d07c7a624ccdb9b57e89cbc573f1b98099e3741214746ae4", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, - "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"}, - "sleeplocks": {:hex, :sleeplocks, "1.1.1", "3d462a0639a6ef36cc75d6038b7393ae537ab394641beb59830a1b8271faeed3", [:rebar3], [], "hexpm"}, - "telemetry": {:hex, :telemetry, "0.4.1", "ae2718484892448a24470e6aa341bc847c3277bfb8d4e9289f7474d752c09c7f", [:rebar3], [], "hexpm"}, - "ucol": {:hex, :ucol, "2.0.0", "64f9589d682dac6ca59252e1222e22697784f74addd0b88c5e34d53d267356bb", [:rebar3], [], "hexpm"}, - "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm"}, + "cachex": {:hex, :cachex, "3.4.0", "868b2959ea4aeb328c6b60ff66c8d5123c083466ad3c33d3d8b5f142e13101fb", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "370123b1ab4fba4d2965fb18f87fd758325709787c8c5fce35b3fe80645ccbe5"}, + "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, + "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"}, + "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, + "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"}, + "db_connection": {:hex, :db_connection, "2.4.2", "f92e79aff2375299a16bcb069a14ee8615c3414863a6fef93156aee8e86c2ff3", [:mix], [{:connection, "~> 1.0", [hex: 
:connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4fe53ca91b99f55ea249693a0229356a08f4d1a7931d8ffa79289b145fe83668"}, + "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, + "ecto": {:hex, :ecto, "3.8.4", "e06b8b87e62b27fea17fd2ff6041572ddd10339fd16cdf58446e402c6c90a74b", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f9244288b8d42db40515463a008cf3f4e0e564bb9c249fe87bf28a6d79fe82d4"}, + "ecto_sql": {:hex, :ecto_sql, "3.8.3", "a7d22c624202546a39d615ed7a6b784580391e65723f2d24f65941b4dd73d471", [:mix], [{:db_connection, "~> 2.5 or ~> 2.4.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.8.4", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "348cb17fb9e6daf6f251a87049eafcb57805e2892e5e6a0f5dea0985d367329b"}, + "eljiffy": {:hex, :eljiffy, "1.3.0", "7e584be454c5ec3fc3ae472eedb4cb2185e9ed6cd863df383ef601de3f3b27fd", [:mix], [{:jiffy, "~> 1.0", [hex: :jiffy, repo: "hexpm", optional: false]}], "hexpm", "90420512d60fb45bc9c09221b4d89cc539c9bfefc1c62f24cb3e2cb13acf2215"}, + "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"}, + "jiffy": {:hex, :jiffy, "1.1.1", "aca10f47aa91697bf24ab9582c74e00e8e95474c7ef9f76d4f1a338d0f5de21b", [:rebar3], [], "hexpm", "62e1f0581c3c19c33a725c781dfa88410d8bff1bbafc3885a2552286b4785c4c"}, + "jumper": {:hex, :jumper, "1.0.1", "3c00542ef1a83532b72269fab9f0f0c82bf23a35e27d278bfd9ed0865cecabff", [:mix], [], "hexpm", "318c59078ac220e966d27af3646026db9b5a5e6703cb2aa3e26bcfaba65b7433"}, + "mime": {:hex, :mime, "2.0.2", "0b9e1a4c840eafb68d820b0e2158ef5c49385d17fb36855ac6e7e087d4b1dcc5", [:mix], [], "hexpm", "e6a3f76b4c277739e36c2e21a2c640778ba4c3846189d5ab19f97f126df5f9b7"}, + "phoenix_html": {:hex, :phoenix_html, "3.2.0", "1c1219d4b6cb22ac72f12f73dc5fad6c7563104d083f711c3fcd8551a1f4ae11", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "36ec97ba56d25c0136ef1992c37957e4246b649d620958a1f9fa86165f8bc54f"}, + "plug": {:hex, :plug, "1.13.6", "187beb6b67c6cec50503e940f0434ea4692b19384d47e5fdfd701e93cadb4cc2", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "02b9c6b9955bce92c829f31d6284bf53c591ca63c4fb9ff81dfd0418667a34ff"}, + "plug_cowboy": {:hex, :plug_cowboy, "2.5.2", "62894ccd601cf9597e2c23911ff12798a8a18d237e9739f58a6b04e4988899fe", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, 
{:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ea6e87f774c8608d60c8d34022a7d073bd7680a0a013f049fc62bf35efea1044"}, + "plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"}, + "postgrex": {:hex, :postgrex, "0.16.3", "fac79a81a9a234b11c44235a4494d8565303fa4b9147acf57e48978a074971db", [:mix], [{:connection, "~> 1.1", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "aeaae1d2d1322da4e5fe90d241b0a564ce03a3add09d7270fb85362166194590"}, + "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, + "sleeplocks": {:hex, :sleeplocks, "1.1.1", "3d462a0639a6ef36cc75d6038b7393ae537ab394641beb59830a1b8271faeed3", [:rebar3], [], "hexpm", "84ee37aeff4d0d92b290fff986d6a95ac5eedf9b383fadfd1d88e9b84a1c02e1"}, + "telemetry": {:hex, :telemetry, "1.1.0", "a589817034a27eab11144ad24d5c0f9fab1f58173274b1e9bae7074af9cbee51", [:rebar3], [], "hexpm", "b727b2a1f75614774cff2d7565b64d0dfa5bd52ba517f16543e6fc7efcc0df48"}, + "ucol": {:hex, :ucol, "2.0.0", "64f9589d682dac6ca59252e1222e22697784f74addd0b88c5e34d53d267356bb", [:rebar3], [], "hexpm", "b544b88ce034d1d1ab58e093744cbded9a1e8b05006870b4d3865d6cd5066a21"}, + "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"}, } diff --git a/frameworks/Erlang/elli/benchmark_config.json b/frameworks/Erlang/elli/benchmark_config.json index 475387c8066..e376c23a843 100644 --- a/frameworks/Erlang/elli/benchmark_config.json +++ b/frameworks/Erlang/elli/benchmark_config.json @@ -9,7 +9,7 @@ "approach": "Stripped", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "elli", "language": "Erlang", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Erlang/elli/rebar.config b/frameworks/Erlang/elli/rebar.config index a5c2cc124f5..93812115b54 100644 --- a/frameworks/Erlang/elli/rebar.config +++ b/frameworks/Erlang/elli/rebar.config @@ -2,5 +2,5 @@ [ {jiffy, ".*", {git, "https://github.com/davisp/jiffy.git", {tag, "0.15.0"}}}, {emysql, ".*", {git, "https://github.com/deadtrickster/Emysql.git", "52b802098322aad372198b9f5fa9ae9a4c758ad1"}}, - {elli, "", {git, "git://github.com/knutin/elli.git", {tag, "v1.0.5"}}} + {elli, "", {git, "https://github.com/knutin/elli.git", {tag, "v1.0.5"}}} ]}. 
diff --git a/frameworks/Erlang/mochiweb/benchmark_config.json b/frameworks/Erlang/mochiweb/benchmark_config.json index a6461bd359d..9f874995528 100644 --- a/frameworks/Erlang/mochiweb/benchmark_config.json +++ b/frameworks/Erlang/mochiweb/benchmark_config.json @@ -11,7 +11,7 @@ "approach": "Stripped", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "Mochiweb", "language": "Erlang", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Erlang/mochiweb/rebar.config b/frameworks/Erlang/mochiweb/rebar.config index bb3f4cba664..8ce3b55e013 100644 --- a/frameworks/Erlang/mochiweb/rebar.config +++ b/frameworks/Erlang/mochiweb/rebar.config @@ -1,9 +1,9 @@ %% -*- erlang -*- {deps, [ - {mochiweb, "2.9.0", {git, "git://github.com/mochi/mochiweb.git", {tag, "v2.9.0"}}}, - {jsonx, ".*", {git, "git://github.com/iskra/jsonx.git", "9c95948c6835827ed61a9506ae4a9aba61acf335"}}, - {emysql, ".*", {git, "git://github.com/deadtrickster/Emysql.git"}}, - {erlydtl, "0.11.1", {git, "git://github.com/erlydtl/erlydtl.git", {tag, "0.11.1"}}} + {mochiweb, "2.9.0", {git, "https://github.com/mochi/mochiweb.git", {tag, "v2.9.0"}}}, + {jsonx, ".*", {git, "https://github.com/iskra/jsonx.git", "9c95948c6835827ed61a9506ae4a9aba61acf335"}}, + {emysql, ".*", {git, "https://github.com/deadtrickster/Emysql.git"}}, + {erlydtl, "0.11.1", {git, "https://github.com/erlydtl/erlydtl.git", {tag, "0.11.1"}}} ]}. {erlydtl_opts, [ {doc_root, "priv/templates"}, diff --git a/frameworks/FSharp/falco/falco.dockerfile b/frameworks/FSharp/falco/falco.dockerfile index 600509fd2b8..f98693ff815 100644 --- a/frameworks/FSharp/falco/falco.dockerfile +++ b/frameworks/FSharp/falco/falco.dockerfile @@ -4,6 +4,11 @@ COPY src/App . RUN dotnet publish -c Release -o out FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +# Full PGO +ENV DOTNET_TieredPGO 1 +ENV DOTNET_TC_QuickJitForLoops 1 +ENV DOTNET_ReadyToRun 0 + ENV ASPNETCORE_URLS http://+:8080 WORKDIR /app COPY --from=build /app/out ./ diff --git a/frameworks/FSharp/falco/src/App/App.fsproj b/frameworks/FSharp/falco/src/App/App.fsproj index 080c925f30c..a620e9cfc3d 100644 --- a/frameworks/FSharp/falco/src/App/App.fsproj +++ b/frameworks/FSharp/falco/src/App/App.fsproj @@ -16,10 +16,10 @@ - + - + diff --git a/frameworks/FSharp/frank/frank.dockerfile b/frameworks/FSharp/frank/frank.dockerfile index 600509fd2b8..f98693ff815 100644 --- a/frameworks/FSharp/frank/frank.dockerfile +++ b/frameworks/FSharp/frank/frank.dockerfile @@ -4,6 +4,11 @@ COPY src/App . 
RUN dotnet publish -c Release -o out FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +# Full PGO +ENV DOTNET_TieredPGO 1 +ENV DOTNET_TC_QuickJitForLoops 1 +ENV DOTNET_ReadyToRun 0 + ENV ASPNETCORE_URLS http://+:8080 WORKDIR /app COPY --from=build /app/out ./ diff --git a/frameworks/FSharp/frank/src/App/App.fsproj b/frameworks/FSharp/frank/src/App/App.fsproj index ee8a4f573bf..a1131a2a4b3 100644 --- a/frameworks/FSharp/frank/src/App/App.fsproj +++ b/frameworks/FSharp/frank/src/App/App.fsproj @@ -9,13 +9,13 @@ - + - - - - - + + + + + diff --git a/frameworks/FSharp/giraffe/src/App/App.fsproj b/frameworks/FSharp/giraffe/src/App/App.fsproj index 5e69d82ccf6..055cd917908 100644 --- a/frameworks/FSharp/giraffe/src/App/App.fsproj +++ b/frameworks/FSharp/giraffe/src/App/App.fsproj @@ -6,10 +6,10 @@ - + - - + + diff --git a/frameworks/FSharp/suave/src/App/App.fsproj b/frameworks/FSharp/suave/src/App/App.fsproj index c1d76b80772..f38d4e7bbd6 100755 --- a/frameworks/FSharp/suave/src/App/App.fsproj +++ b/frameworks/FSharp/suave/src/App/App.fsproj @@ -12,7 +12,7 @@ - + diff --git a/frameworks/FSharp/zebra/src/App/App.fsproj b/frameworks/FSharp/zebra/src/App/App.fsproj index af7b5671f43..20e8047df01 100644 --- a/frameworks/FSharp/zebra/src/App/App.fsproj +++ b/frameworks/FSharp/zebra/src/App/App.fsproj @@ -9,10 +9,10 @@ - - + + - + diff --git a/frameworks/FSharp/zebra/zebra-simple.dockerfile b/frameworks/FSharp/zebra/zebra-simple.dockerfile index a5fefdab0cc..1369fda8e76 100644 --- a/frameworks/FSharp/zebra/zebra-simple.dockerfile +++ b/frameworks/FSharp/zebra/zebra-simple.dockerfile @@ -4,6 +4,11 @@ COPY src/App . RUN dotnet publish -c Release -o out FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +# Full PGO +ENV DOTNET_TieredPGO 1 +ENV DOTNET_TC_QuickJitForLoops 1 +ENV DOTNET_ReadyToRun 0 + ENV ASPNETCORE_URLS http://+:8080 WORKDIR /app COPY --from=build /app/out ./ diff --git a/frameworks/FSharp/zebra/zebra.dockerfile b/frameworks/FSharp/zebra/zebra.dockerfile index 600509fd2b8..f98693ff815 100644 --- a/frameworks/FSharp/zebra/zebra.dockerfile +++ b/frameworks/FSharp/zebra/zebra.dockerfile @@ -4,6 +4,11 @@ COPY src/App . RUN dotnet publish -c Release -o out FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS runtime +# Full PGO +ENV DOTNET_TieredPGO 1 +ENV DOTNET_TC_QuickJitForLoops 1 +ENV DOTNET_ReadyToRun 0 + ENV ASPNETCORE_URLS http://+:8080 WORKDIR /app COPY --from=build /app/out ./ diff --git a/frameworks/Go/aah/benchmark_config.json b/frameworks/Go/aah/benchmark_config.json index 7a8d539a28c..07adfe6369b 100644 --- a/frameworks/Go/aah/benchmark_config.json +++ b/frameworks/Go/aah/benchmark_config.json @@ -8,7 +8,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "aah", "language": "Go", "flavor": "None", "orm": "Raw", @@ -31,7 +31,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "aah", "language": "Go", "flavor": "None", "orm": "Raw", @@ -54,7 +54,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "Postgres", - "framework": "None", + "framework": "aah", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/beego/README.md b/frameworks/Go/beego/README.md deleted file mode 100644 index 12509b7abad..00000000000 --- a/frameworks/Go/beego/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# Beego Benchmarking Test - -This is the Beego portion of a [benchmarking test suite](../) comparing a variety of web development platforms. 
- -### JSON Encoding Test - -* [Beego JSON output](https://github.com/astaxie/beego/blob/master/docs/en/Quickstart.md#output-json-and-xml) - -## Versions - -* [Go 1.8](https://golang.org/) -* [Beego](https://github.com/astaxie/beego/) - -## Test URLs - -### JSON Encoding Test - - http://localhost:8080/json diff --git a/frameworks/Go/beego/beego-orm-mysql.dockerfile b/frameworks/Go/beego/beego-orm-mysql.dockerfile deleted file mode 100644 index 2176debd0f8..00000000000 --- a/frameworks/Go/beego/beego-orm-mysql.dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM golang:1.14 - -ADD ./ /beego -WORKDIR /beego - -RUN mkdir bin -ENV GOPATH /beego -ENV PATH ${GOPATH}/bin:${PATH} - -RUN curl -sL -o install_glide.sh https://glide.sh/get -RUN sh install_glide.sh - -RUN glide -v -WORKDIR src/hello-orm-mysql -RUN glide install - -RUN go build -o server main.go - -EXPOSE 8080 - -CMD ./server diff --git a/frameworks/Go/beego/beego.dockerfile b/frameworks/Go/beego/beego.dockerfile deleted file mode 100644 index 79e94628bd9..00000000000 --- a/frameworks/Go/beego/beego.dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM golang:1.14 - -ADD ./ /beego -WORKDIR /beego - -RUN mkdir bin -ENV GOPATH /beego -ENV PATH ${GOPATH}/bin:${PATH} - -RUN curl -sL -o install_glide.sh https://glide.sh/get -RUN sh install_glide.sh - -RUN glide -v -WORKDIR src/hello -RUN glide install - -RUN go build -o server main.go - -EXPOSE 8080 - -CMD ./server - diff --git a/frameworks/Go/beego/pkg/.gitkeep b/frameworks/Go/beego/pkg/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/frameworks/Go/beego/src/hello-orm-mysql/controllers/base_controller.go b/frameworks/Go/beego/src/hello-orm-mysql/controllers/base_controller.go deleted file mode 100644 index f5b826c07ec..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/controllers/base_controller.go +++ /dev/null @@ -1,27 +0,0 @@ -package controllers - -import "github.com/astaxie/beego" - -type Base struct { - beego.Controller -} - -const Server = "Server" -const Beego = "Beego" -const queries = "queries" -const json = "json" - -func (c *Base) Prepare() { - c.Ctx.Output.Header(Server, Beego) -} - -func (c *Base) getQueriesParam() int { - n, err := c.GetInt(queries) - if err != nil || n < 1 { - return 1 - } - if n > 500 { - return 500 - } - return n -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_controller.go b/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_controller.go deleted file mode 100644 index c8984ec4a40..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_controller.go +++ /dev/null @@ -1,27 +0,0 @@ -package controllers - -import ( - "log" - "math/rand" - - "hello-orm-mysql/models" - - "github.com/astaxie/beego/orm" -) - -const worldRowCount = 10000 - -type DBController struct { - Base -} - -func (c *DBController) Get() { - o := orm.NewOrm() - w := models.World{Id: uint16(rand.Intn(worldRowCount) + 1)} - err := o.Read(&w) - if err != nil { - log.Fatalf("Error read world row: %s", err.Error()) - } - c.Data[json] = &w - c.ServeJSON() -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_update_controller.go b/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_update_controller.go deleted file mode 100644 index 75b728f9b06..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/controllers/db_update_controller.go +++ /dev/null @@ -1,31 +0,0 @@ -package controllers - -import ( - "log" - "math/rand" - - "hello-orm-mysql/models" - - 
"github.com/astaxie/beego/orm" -) - -type DBUpdateController struct { - Base -} - -func (c *DBUpdateController) Get() { - n := c.getQueriesParam() - o := orm.NewOrm() - world := make([]models.World, n) - for i := 0; i < n; i++ { - if err := o.Raw(models.WorldSelect, rand.Intn(models.WorldRowCount)+1).QueryRow(&world[i].Id, &world[i].RandomNumber); err != nil { - log.Fatalf("Error scanning world row: %v", err) - } - world[i].RandomNumber = uint16(rand.Intn(models.WorldRowCount) + 1) - if _, err := o.Raw(models.WorldUpdate, world[i].RandomNumber, world[i].Id).Exec(); err != nil { - log.Fatalf("Error updating world row: %v", err) - } - } - c.Data[json] = &world - c.ServeJSON() -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/controllers/queries_controller.go b/frameworks/Go/beego/src/hello-orm-mysql/controllers/queries_controller.go deleted file mode 100644 index 973f5b7f52e..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/controllers/queries_controller.go +++ /dev/null @@ -1,13 +0,0 @@ -package controllers - -import "hello-orm-mysql/models" - -type QueriesController struct { - Base -} - -func (c *QueriesController) Get() { - n := c.getQueriesParam() - c.Data[json] = models.GetQueriesWorld(n) - c.ServeJSON() -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/glide.yaml b/frameworks/Go/beego/src/hello-orm-mysql/glide.yaml deleted file mode 100644 index 7e82fc71da6..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/glide.yaml +++ /dev/null @@ -1,8 +0,0 @@ -package: beego/src/hello-orm-mysql -import: -- package: github.com/astaxie/beego - version: ^1.8.0 - subpackages: - - orm -- package: github.com/go-sql-driver/mysql - version: ^1.3.0 diff --git a/frameworks/Go/beego/src/hello-orm-mysql/main.go b/frameworks/Go/beego/src/hello-orm-mysql/main.go deleted file mode 100644 index 2c4e11ccea5..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/main.go +++ /dev/null @@ -1,15 +0,0 @@ -package main - -import ( - "hello-orm-mysql/controllers" - - "github.com/astaxie/beego" -) - -func main() { - beego.BConfig.RunMode = "prod" - beego.Router("/db", &controllers.DBController{}) - beego.Router("/update", &controllers.DBUpdateController{}) - beego.Router("/queries", &controllers.QueriesController{}) - beego.Run() -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/models/init.go b/frameworks/Go/beego/src/hello-orm-mysql/models/init.go deleted file mode 100644 index 00ffea7508f..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/models/init.go +++ /dev/null @@ -1,18 +0,0 @@ -package models - -import ( - "github.com/astaxie/beego/orm" - _ "github.com/go-sql-driver/mysql" -) - -const ( - // Database - connectionString = "benchmarkdbuser:benchmarkdbpass@tcp(tfb-database:3306)/hello_world" - macIdleConnection = 30 - maxConnectionCount = 256 -) - -func init() { - orm.RegisterModel(new(World)) - orm.RegisterDataBase("default", "mysql", connectionString, macIdleConnection, maxConnectionCount) -} diff --git a/frameworks/Go/beego/src/hello-orm-mysql/models/world.go b/frameworks/Go/beego/src/hello-orm-mysql/models/world.go deleted file mode 100644 index b43cb9b754c..00000000000 --- a/frameworks/Go/beego/src/hello-orm-mysql/models/world.go +++ /dev/null @@ -1,31 +0,0 @@ -package models - -import ( - "log" - "math/rand" - - "github.com/astaxie/beego/orm" -) - -type World struct { - Id uint16 `orm:"pk" json:"id"` - RandomNumber uint16 `orm:"column(randomNumber)" json:"randomNumber"` -} - -const WorldSelect = "SELECT id, 
randomNumber FROM World WHERE id = ?" -const WorldUpdate = "UPDATE World SET randomNumber = ? WHERE id = ?" -const WorldRowCount = 10000 - -func GetQueriesWorld(queries int) *[]World { - o := orm.NewOrm() - ww := make([]World, queries) - for i := 0; i < queries; i++ { - err := o.Raw(WorldSelect, rand.Intn(WorldRowCount)+1).QueryRow(&ww[i]) - if err != nil { - log.Fatalf("Error scanning world row: %v", err) - } - ww[i].RandomNumber = uint16(rand.Intn(WorldRowCount) + 1) - _, err = o.Raw(WorldUpdate, ww[i].RandomNumber, ww[i].Id).Exec() - } - return &ww -} diff --git a/frameworks/Go/beego/src/hello/controllers/base_controller.go b/frameworks/Go/beego/src/hello/controllers/base_controller.go deleted file mode 100644 index ed90a684063..00000000000 --- a/frameworks/Go/beego/src/hello/controllers/base_controller.go +++ /dev/null @@ -1,15 +0,0 @@ -package controllers - -import "github.com/astaxie/beego" - -type Base struct { - beego.Controller -} - -const Server = "Server" -const Beego = "Beego" -const json = "json" - -func (c *Base) Prepare() { - c.Ctx.Output.Header(Server, Beego) -} diff --git a/frameworks/Go/beego/src/hello/controllers/json_controller.go b/frameworks/Go/beego/src/hello/controllers/json_controller.go deleted file mode 100644 index b999136ce18..00000000000 --- a/frameworks/Go/beego/src/hello/controllers/json_controller.go +++ /dev/null @@ -1,12 +0,0 @@ -package controllers - -import "hello/models" - -type JsonController struct { - Base -} - -func (c *JsonController) Get() { - c.Data[json] = &models.Message{Message: helloWorldString} - c.ServeJSON() -} diff --git a/frameworks/Go/beego/src/hello/controllers/plaintext_controller.go b/frameworks/Go/beego/src/hello/controllers/plaintext_controller.go deleted file mode 100644 index 7b7721ff648..00000000000 --- a/frameworks/Go/beego/src/hello/controllers/plaintext_controller.go +++ /dev/null @@ -1,16 +0,0 @@ -package controllers - -type PlaintextController struct { - Base -} - -const helloWorldString = "Hello, World!" 
- -var ( - helloWorldBytes = []byte(helloWorldString) -) - -func (c *PlaintextController) Get() { - c.Ctx.Output.Header("Content-Type", "text/plain") - c.Ctx.Output.Body(helloWorldBytes) -} diff --git a/frameworks/Go/beego/src/hello/glide.yaml b/frameworks/Go/beego/src/hello/glide.yaml deleted file mode 100644 index 237760ead1f..00000000000 --- a/frameworks/Go/beego/src/hello/glide.yaml +++ /dev/null @@ -1,8 +0,0 @@ -package: beego/src/hello -import: -- package: github.com/astaxie/beego - version: ^1.8.0 - subpackages: - - orm -- package: github.com/go-sql-driver/mysql - version: ^1.3.0 diff --git a/frameworks/Go/beego/src/hello/main.go b/frameworks/Go/beego/src/hello/main.go deleted file mode 100644 index 586b9691fdc..00000000000 --- a/frameworks/Go/beego/src/hello/main.go +++ /dev/null @@ -1,14 +0,0 @@ -package main - -import ( - "hello/controllers" - - "github.com/astaxie/beego" -) - -func main() { - beego.BConfig.RunMode = "prod" - beego.Router("/json", &controllers.JsonController{}) - beego.Router("/plaintext", &controllers.PlaintextController{}) - beego.Run() -} diff --git a/frameworks/Go/beego/src/hello/models/message.go b/frameworks/Go/beego/src/hello/models/message.go deleted file mode 100644 index 59992289f08..00000000000 --- a/frameworks/Go/beego/src/hello/models/message.go +++ /dev/null @@ -1,5 +0,0 @@ -package models - -type Message struct { - Message string `json:"message"` -} diff --git a/frameworks/Go/echo/benchmark_config.json b/frameworks/Go/echo/benchmark_config.json index 0a88cebeb81..64cdf83f307 100644 --- a/frameworks/Go/echo/benchmark_config.json +++ b/frameworks/Go/echo/benchmark_config.json @@ -12,7 +12,7 @@ "approach": "Realistic", "classification": "Micro", "database": "Postgres", - "framework": "None", + "framework": "echo", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/evio/benchmark_config.json b/frameworks/Go/evio/benchmark_config.json index 9e08d2d094c..6c4d3495911 100644 --- a/frameworks/Go/evio/benchmark_config.json +++ b/frameworks/Go/evio/benchmark_config.json @@ -7,7 +7,7 @@ "approach": "Realistic", "classification": "Platform", "database": "None", - "framework": "None", + "framework": "evio", "language": "Go", "flavor": "None", "orm": "Raw", @@ -25,7 +25,7 @@ "approach": "Realistic", "classification": "Platform", "database": "None", - "framework": "None", + "framework": "evio", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/falcore/README.md b/frameworks/Go/falcore/README.md deleted file mode 100644 index fd8c75c5825..00000000000 --- a/frameworks/Go/falcore/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# Go Falcore Benchmarking Test - -This is the go Falcore portion of a [benchmarking test suite](../) comparing a variety of web development platforms. -Falcore is an HTTP server written in Go that provides an alternate API and additional features compared to the Go standard -library HTTP server. 
- -### Source -* [All test source](src/framework_benchmarks/falcore.go) - -## Versions - -* [Go 1.1.1](http://golang.org/) - -## Test URLs - - http://localhost:8080/json - http://localhost:8080/db - http://localhost:8080/fortune - http://localhost:8080/queries - http://localhost:8080/update - http://localhost:8080/plaintext - diff --git a/frameworks/Go/falcore/benchmark_config.json b/frameworks/Go/falcore/benchmark_config.json deleted file mode 100644 index 0806b9ffe9c..00000000000 --- a/frameworks/Go/falcore/benchmark_config.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "framework": "falcore", - "tests": [{ - "default": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortune", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "Falcore", - "language": "Go", - "flavor": "None", - "orm": "Raw", - "platform": "None", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "Falcore", - "notes": "", - "versus": "go" - } - }] -} diff --git a/frameworks/Go/falcore/config.toml b/frameworks/Go/falcore/config.toml deleted file mode 100644 index b9291dc5465..00000000000 --- a/frameworks/Go/falcore/config.toml +++ /dev/null @@ -1,18 +0,0 @@ -[framework] -name = "falcore" - -[main] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/queries?queries=" -urls.fortune = "/fortune" -approach = "Realistic" -classification = "Micro" -database = "MySQL" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "None" -webserver = "None" -versus = "go" diff --git a/frameworks/Go/falcore/falcore.dockerfile b/frameworks/Go/falcore/falcore.dockerfile deleted file mode 100644 index 25f4e88e208..00000000000 --- a/frameworks/Go/falcore/falcore.dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM golang:1.14 - -ADD ./ /falcore -WORKDIR /falcore - -RUN mkdir bin -ENV GOPATH /falcore -ENV PATH ${GOPATH}/bin:${PATH} - -RUN go get ./... - -EXPOSE 8080 - -CMD go run src/framework_benchmarks/falcore.go diff --git a/frameworks/Go/falcore/pkg/.gitkeep b/frameworks/Go/falcore/pkg/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/frameworks/Go/falcore/src/framework_benchmarks/falcore.go b/frameworks/Go/falcore/src/framework_benchmarks/falcore.go deleted file mode 100644 index 36293d1fa1d..00000000000 --- a/frameworks/Go/falcore/src/framework_benchmarks/falcore.go +++ /dev/null @@ -1,291 +0,0 @@ -package main - -import ( - "database/sql" - "html/template" - "io" - "log" - "math/rand" - "net/http" - "sort" - "strconv" - "sync" - "time" - - "github.com/fitstar/falcore" - "github.com/fitstar/falcore/responder" - _ "github.com/go-sql-driver/mysql" -) - -type Message struct { - Message string `json:"message"` -} - -type World struct { - Id uint16 `json:"id"` - RandomNumber uint16 `json:"randomNumber"` -} - -type Fortune struct { - Id uint16 `json:"id"` - Message string `json:"message"` -} - -const ( - // Database - connectionString = "benchmarkdbuser:benchmarkdbpass@tcp(tfb-database:3306)/hello_world?collation=utf8mb4_bin" - worldSelect = "SELECT id, randomNumber FROM World WHERE id = ?" - worldUpdate = "UPDATE World SET randomNumber = ? WHERE id = ?" - fortuneSelect = "SELECT id, message FROM Fortune;" - worldRowCount = 10000 - maxConnectionCount = 256 - - helloWorldString = "Hello, World!" 
-) - -var ( - // Templates - tmpl = template.Must(template.ParseFiles("templates/layout.html", "templates/fortune.html")) - - // Database - worldStatement *sql.Stmt - fortuneStatement *sql.Stmt - updateStatement *sql.Stmt - - helloWorldBytes = []byte(helloWorldString) -) - -type stats struct { - Count time.Duration - Sum time.Duration -} - -// for profiling -var reqCount = 0 -var statMap = make(map[string]*stats) -var compCallbackMutex = &sync.Mutex{} - -func CompletionCallback(req *falcore.Request, res *http.Response) { - l := req.PipelineStageStats - compCallbackMutex.Lock() - incrStat(statMap, "overhead", req.Overhead) - incrStat(statMap, "tots", req.EndTime.Sub(req.StartTime)) - for e := l.Front(); e != nil; e = e.Next() { - pss, _ := e.Value.(*falcore.PipelineStageStat) - dur := pss.EndTime.Sub(pss.StartTime) - incrStat(statMap, pss.Name, dur) - } - reqCount++ - if reqCount%10000 == 0 { - for k, v := range statMap { - log.Printf("%v: %v\n", k, v.Sum/v.Count) - } - log.Println("") - } - compCallbackMutex.Unlock() -} - -func incrStat(statMap map[string]*stats, name string, dur time.Duration) { - if s, ok := statMap[name]; ok { - s.Count++ - s.Sum += dur - } else { - statMap[name] = &stats{1, dur} - } -} - -func main() { - db, err := sql.Open("mysql", connectionString) - if err != nil { - log.Fatalf("Error opening database: %v", err) - } - db.SetMaxIdleConns(maxConnectionCount) - worldStatement, err = db.Prepare(worldSelect) - if err != nil { - log.Fatal(err) - } - fortuneStatement, err = db.Prepare(fortuneSelect) - if err != nil { - log.Fatal(err) - } - updateStatement, err = db.Prepare(worldUpdate) - if err != nil { - log.Fatal(err) - } - pipeline := falcore.NewPipeline() - - pipeline.Upstream.PushBack(dbFilter) - pipeline.Upstream.PushBack(queriesFilter) - pipeline.Upstream.PushBack(jsonFilter) - pipeline.Upstream.PushBack(fortuneFilter) - pipeline.Upstream.PushBack(updateFilter) - pipeline.Upstream.PushBack(plaintextFilter) - - pipeline.Downstream.PushBack(requiredHeaders) - - /* - http.HandleFunc("/db", dbHandler) - http.HandleFunc("/queries", queriesHandler) - http.HandleFunc("/json", jsonHandler) - http.HandleFunc("/fortune", fortuneHandler) - http.HandleFunc("/update", updateHandler) - http.HandleFunc("/plaintext", plaintextHandler) - */ - server := falcore.NewServer(8080, pipeline) - // uncomment for printing internal peromance stats - //server.CompletionCallback = CompletionCallback - - if err := server.ListenAndServe(); err != nil { - log.Println("Could not start server:", err) - } - //if err := http.ListenAndServe(":8080", server); err != nil { - // log.Println("Could not start server:", err) - //} -} - -var requiredHeaders = falcore.NewResponseFilter(func(req *falcore.Request, res *http.Response) { - res.Header.Set("Server", "falcore") - res.Header.Set("Date", time.Now().Format(time.RFC1123)) -}) - -// Test 1: JSON serialization -var jsonFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - if req.HttpRequest.URL.Path == "/json" { - resp, _ := responder.JSONResponse(req.HttpRequest, 200, nil, &Message{helloWorldString}) - return resp - } - return nil -}) - -// Test 2: Single database query -var dbFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - if req.HttpRequest.URL.Path == "/db" { - var world World - err := worldStatement.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world.Id, &world.RandomNumber) - if err != nil { - log.Fatalf("Error scanning world row: %s", err.Error()) - } - - resp, _ := 
responder.JSONResponse(req.HttpRequest, 200, nil, &world) - return resp - } - return nil -}) - -// Test 3: Multiple database queries -var queriesFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - - if req.HttpRequest.URL.Path == "/queries" { - - n := 1 - - if nStr := req.HttpRequest.URL.Query().Get("queries"); len(nStr) > 0 { - n, _ = strconv.Atoi(nStr) // rvalue is 0 if nStr is non-number. - } - - // In the case of nStr is non-number string, n will be 0. So need to change n to 1. - if n == 0 { - n = 1 - } - - // In the case of nStr is number and its value is higher than 500, change n to 500 - if n > 500 { - n = 500 - } - - world := make([]World, n) - for i := 0; i < n; i++ { - err := worldStatement.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber) - if err != nil { - log.Fatalf("Error scanning world row: %s", err.Error()) - } - } - resp, _ := responder.JSONResponse(req.HttpRequest, 200, nil, &world) - return resp - } - return nil -}) - -// Test 4: Fortunes -var fortuneFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - if req.HttpRequest.URL.Path == "/fortune" { - rows, err := fortuneStatement.Query() - if err != nil { - log.Fatalf("Error preparing statement: %v", err) - } - - fortunes := make(Fortunes, 0, 16) - for rows.Next() { //Fetch rows - fortune := Fortune{} - if err := rows.Scan(&fortune.Id, &fortune.Message); err != nil { - log.Fatalf("Error scanning fortune row: %s", err.Error()) - } - fortunes = append(fortunes, &fortune) - } - fortunes = append(fortunes, &Fortune{Message: "Additional fortune added at request time."}) - - sort.Sort(ByMessage{fortunes}) - pipeReader, pipeWriter := io.Pipe() - // TODO maybe figure out err handling - go func() { - tmpl.Execute(pipeWriter, fortunes) - pipeWriter.Close() - }() - - textHtml := http.Header{"Content-Type": []string{"text/html;charset=utf-8"}} - return falcore.SimpleResponse(req.HttpRequest, 200, textHtml, -1, pipeReader) - } - return nil -}) - -// Test 5: Database updates -var updateFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - if req.HttpRequest.URL.Path == "/update" { - n := 1 - if nStr := req.HttpRequest.URL.Query().Get("queries"); len(nStr) > 0 { - n, _ = strconv.Atoi(nStr) - } - - if n <= 1 { - var world World - worldStatement.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world.Id, &world.RandomNumber) - world.RandomNumber = uint16(rand.Intn(worldRowCount) + 1) - updateStatement.Exec(world.RandomNumber, world.Id) - resp, _ := responder.JSONResponse(req.HttpRequest, 200, nil, &world) - return resp - } else { - world := make([]World, n) - for i := 0; i < n; i++ { - if err := worldStatement.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber); err != nil { - log.Fatalf("Error scanning world row: %s", err.Error()) - } - world[i].RandomNumber = uint16(rand.Intn(worldRowCount) + 1) - if _, err := updateStatement.Exec(world[i].RandomNumber, world[i].Id); err != nil { - log.Fatalf("Error updating world row: %s", err.Error()) - } - } - resp, _ := responder.JSONResponse(req.HttpRequest, 200, nil, world) - return resp - } - - } - return nil -}) - -// Test 6: Plaintext -var plaintextFilter = falcore.NewRequestFilter(func(req *falcore.Request) *http.Response { - if req.HttpRequest.URL.Path == "/plaintext" { - textPlain := http.Header{"Content-Type": []string{"text/plain"}} - return falcore.ByteResponse(req.HttpRequest, 200, textPlain, helloWorldBytes) - } - return nil -}) - -type Fortunes []*Fortune - 
-func (s Fortunes) Len() int { return len(s) } -func (s Fortunes) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -type ByMessage struct{ Fortunes } - -func (s ByMessage) Less(i, j int) bool { return s.Fortunes[i].Message < s.Fortunes[j].Message } diff --git a/frameworks/Go/falcore/templates/fortune.html b/frameworks/Go/falcore/templates/fortune.html deleted file mode 100644 index d6d0727d85a..00000000000 --- a/frameworks/Go/falcore/templates/fortune.html +++ /dev/null @@ -1,14 +0,0 @@ -{{define "content"}} - - - - - -{{range .}} - - - - -{{end}} -
-<tr><th>id</th><th>message</th></tr>
-<tr><td>{{.Id}}</td><td>{{.Message}}</td></tr>
-{{end}} diff --git a/frameworks/Go/falcore/templates/layout.html b/frameworks/Go/falcore/templates/layout.html deleted file mode 100644 index 557151668f1..00000000000 --- a/frameworks/Go/falcore/templates/layout.html +++ /dev/null @@ -1,9 +0,0 @@ - - - -Fortunes - - -{{template "content" .}} - - \ No newline at end of file diff --git a/frameworks/Go/fasthttp/src/go.mod b/frameworks/Go/fasthttp/src/go.mod index 13b85c04266..f4a662dc30f 100644 --- a/frameworks/Go/fasthttp/src/go.mod +++ b/frameworks/Go/fasthttp/src/go.mod @@ -5,7 +5,7 @@ go 1.17 require ( github.com/jackc/pgx/v4 v4.14.1 github.com/savsgio/gotils v0.0.0-20211223103454-d0aaa54c5899 - github.com/valyala/fasthttp v1.31.0 + github.com/valyala/fasthttp v1.34.0 github.com/valyala/quicktemplate v1.7.0 ) @@ -19,10 +19,10 @@ require ( github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect github.com/jackc/pgtype v1.9.1 // indirect github.com/jackc/puddle v1.2.1 // indirect - github.com/klauspost/compress v1.13.6 // indirect + github.com/klauspost/compress v1.15.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/tcplisten v1.0.0 // indirect - golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3 // indirect - golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e // indirect + golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect + golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 // indirect golang.org/x/text v0.3.7 // indirect ) diff --git a/frameworks/Go/fasthttp/src/go.sum b/frameworks/Go/fasthttp/src/go.sum index 82974bab7be..a94d48c1da1 100644 --- a/frameworks/Go/fasthttp/src/go.sum +++ b/frameworks/Go/fasthttp/src/go.sum @@ -20,6 +20,7 @@ github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRx github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= @@ -40,6 +41,7 @@ github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5W github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= @@ -72,8 +74,8 @@ github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dv 
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.15.0 h1:xqfchp4whNFxn5A4XFyyYtitiWI8Hy5EW59jEwcyL6U= +github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -118,8 +120,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.30.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= -github.com/valyala/fasthttp v1.31.0 h1:lrauRLII19afgCs2fnWRJ4M5IkV0lo2FqA61uGkNBfE= -github.com/valyala/fasthttp v1.31.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= +github.com/valyala/fasthttp v1.34.0 h1:d3AAQJ2DRcxJYHm7OXNXtXt2as1vMDfxeIcFvhmGGm4= +github.com/valyala/fasthttp v1.34.0/go.mod h1:epZA5N+7pY6ZaEKRmstzOuYJx9HI8DI1oaCGZpdH4h0= github.com/valyala/quicktemplate v1.7.0 h1:LUPTJmlVcb46OOUY3IeD9DojFpAVbsG+5WFTcjMJzCM= github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8= github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8= @@ -146,8 +148,8 @@ golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3 h1:0es+/5331RGQPcXlMfP+WrnIIS6dNnNRe0WB02W0F4M= -golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= +golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= @@ -157,6 +159,8 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -172,10 +176,12 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 h1:nhht2DYV/Sn3qOayu8lM+cU1ii9sTLUeBQwQQfUHtrs= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= diff --git a/frameworks/Go/gin/gin.dockerfile b/frameworks/Go/gin/gin.dockerfile index 1c63f9a04a4..d081fd7eca3 100644 --- a/frameworks/Go/gin/gin.dockerfile +++ b/frameworks/Go/gin/gin.dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.14 +FROM golang:1.18 ENV GO111MODULE=off diff --git a/frameworks/Go/gnet/benchmark_config.json b/frameworks/Go/gnet/benchmark_config.json index c24bcf5bf06..484ec41b8ff 100644 --- a/frameworks/Go/gnet/benchmark_config.json +++ b/frameworks/Go/gnet/benchmark_config.json @@ -7,7 +7,7 @@ "approach": "Realistic", "classification": "Platform", "database": "None", - "framework": "None", + "framework": "gnet", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/gnet/gnet.dockerfile b/frameworks/Go/gnet/gnet.dockerfile index 4f03465f58d..3725a74337a 100644 --- a/frameworks/Go/gnet/gnet.dockerfile +++ b/frameworks/Go/gnet/gnet.dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.14 +FROM golang:1.17 ENV GO111MODULE on @@ -6,9 +6,9 @@ WORKDIR /gnet COPY ./src /gnet -RUN go mod download +RUN go mod tidy -RUN go build -o app -gcflags="-l=4" -ldflags="-s -w" . +RUN go build -o app -tags=poll_opt -gcflags="-l=4" -ldflags="-s -w" . 
EXPOSE 8080 diff --git a/frameworks/Go/gnet/src/go.mod b/frameworks/Go/gnet/src/go.mod index 8374eb2dc34..c3dd02c9b20 100644 --- a/frameworks/Go/gnet/src/go.mod +++ b/frameworks/Go/gnet/src/go.mod @@ -1,5 +1,15 @@ module gnet -go 1.14 +go 1.17 -require github.com/panjf2000/gnet v1.0.1 +require ( + github.com/panjf2000/gnet/v2 v2.0.2 + go.uber.org/multierr v1.8.0 // indirect + golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5 // indirect +) + +require ( + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/zap v1.21.0 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect +) diff --git a/frameworks/Go/gnet/src/go.sum b/frameworks/Go/gnet/src/go.sum index bcaa288b24c..ab274af8d77 100644 --- a/frameworks/Go/gnet/src/go.sum +++ b/frameworks/Go/gnet/src/go.sum @@ -1,31 +1,76 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= +github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/libp2p/go-reuseport v0.0.1 h1:7PhkfH73VXfPJYKQ6JwS5I/eVcoyYi9IMNGc6FWpFLw= -github.com/libp2p/go-reuseport v0.0.1/go.mod h1:jn6RmB1ufnQwl0Q1f+YxAj8isJgDCQzaaxIFYDhcYEA= -github.com/panjf2000/ants/v2 v2.3.1 h1:9iOZHO5XlSO1Gs5K7x06uDFy8bkicWlhOKGh/TufAZg= -github.com/panjf2000/ants/v2 v2.3.1/go.mod h1:LtwNaBX6OeF5qRtQlaeGndalVwJlS2ueur7uwoAHbPA= -github.com/panjf2000/gnet v1.0.1 h1:IhaLkjtdtJax5N1uwRUnCIbkDV8bIeLTvReShNFw4AI= -github.com/panjf2000/gnet v1.0.1/go.mod h1:Ux2Nc2pRFNk57YpDhHaZ9jaB4taAiqMBkcAWe/mWxmI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/panjf2000/ants/v2 v2.4.8 h1:JgTbolX6K6RreZ4+bfctI0Ifs+3mrE5BIHudQxUDQ9k= +github.com/panjf2000/ants/v2 v2.4.8/go.mod h1:f6F0NZVFsGCp5A7QW/Zj/m92atWwOkY0OIhFxRNFr4A= +github.com/panjf2000/gnet/v2 v2.0.2 h1:XddTL/+z3T0P51cnt+m9f/ke6XYZGOau1clBTTgh6Ek= +github.com/panjf2000/gnet/v2 v2.0.2/go.mod h1:unWr2B4jF0DQPJH3GsXBGQiDcAamM6+Pf5FiK705kc4= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/smallnest/goframe v1.0.0 h1:ywsSz9P5BFiqn39w8iFDENTdqN44v+B5bp1PbCH+PVw= -github.com/smallnest/goframe v1.0.0/go.mod h1:Dy8560GXrB6w5OJnVBU71dJtSyINdnqHHe6atDaZX00= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -golang.org/x/sys v0.0.0-20190228124157-a34e9553db1e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= +go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= +go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= +go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220224120231-95c6836cb0e7/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5 h1:y/woIyUBFbpQGKS0u1aHF/40WUDnek3fPOyD08H5Vng= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= +gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo= gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/frameworks/Go/gnet/src/main.go b/frameworks/Go/gnet/src/main.go index 8d9c74257ce..88f1fba167c 100644 --- a/frameworks/Go/gnet/src/main.go +++ b/frameworks/Go/gnet/src/main.go @@ -2,62 +2,101 @@ package main import ( "bytes" + "errors" "flag" "fmt" "log" "runtime" + "sync/atomic" "time" - "github.com/panjf2000/gnet" + "github.com/panjf2000/gnet/v2" ) type httpServer struct { - *gnet.EventServer + gnet.BuiltinEventEngine + + addr string + multicore bool + eng gnet.Engine } type httpCodec struct { delimiter []byte + buf []byte } -func (hc *httpCodec) Encode(c gnet.Conn, buf []byte) (out []byte, err error) { - return buf, nil +func (hc *httpCodec) appendResponse() { + hc.buf = append(hc.buf, "HTTP/1.1 200 OK\r\nServer: gnet\r\nContent-Type: text/plain\r\nDate: "...) + //hc.buf = time.Now().AppendFormat(hc.buf, "Mon, 02 Jan 2006 15:04:05 GMT") + hc.buf = append(hc.buf, NowTimeFormat()...) 
+ hc.buf = append(hc.buf, "\r\nContent-Length: 13\r\n\r\nHello, World!"...) } -func (hc *httpCodec) Decode(c gnet.Conn) (out []byte, err error) { - buf := c.Read() - if buf == nil { - return +var errCRLFNotFound = errors.New("CRLF not found") + +func (hc *httpCodec) parse(data []byte) (int, error) { + if idx := bytes.Index(data, hc.delimiter); idx != -1 { + return idx + 4, nil } - c.ResetBuffer() + return -1, errCRLFNotFound +} + +func (hc *httpCodec) reset() { + hc.buf = hc.buf[:0] +} + +func (hs *httpServer) OnBoot(eng gnet.Engine) gnet.Action { + hs.eng = eng + log.Printf("echo server with multi-core=%t is listening on %s\n", hs.multicore, hs.addr) + return gnet.None +} - // process the pipeline - var i int +func (hs *httpServer) OnOpen(c gnet.Conn) ([]byte, gnet.Action) { + c.SetContext(&httpCodec{delimiter: []byte("\r\n\r\n")}) + return nil, gnet.None +} + +func (hs *httpServer) OnTraffic(c gnet.Conn) gnet.Action { + buf, _ := c.Next(-1) + hc := c.Context().(*httpCodec) pipeline: - if i = bytes.Index(buf, hc.delimiter); i != -1 { - out = append(out, "HTTP/1.1 200 OK\r\nServer: gnet\r\nContent-Type: text/plain\r\nDate: "...) - out = time.Now().AppendFormat(out, "Mon, 02 Jan 2006 15:04:05 GMT") - out = append(out, "\r\nContent-Length: 13\r\n\r\nHello, World!"...) - buf = buf[i+4:] + nextOffset, err := hc.parse(buf) + if err != nil { + goto response + } + hc.appendResponse() + buf = buf[nextOffset:] + if len(buf) > 0 { goto pipeline } - // request not ready, yet - return +response: + c.Write(hc.buf) + hc.reset() + return gnet.None +} + +var now atomic.Value + +func ticktock() { + now.Store(nowTimeFormat()) + for range time.Tick(time.Second) { + now.Store(nowTimeFormat()) + } } -func (hs *httpServer) OnInitComplete(srv gnet.Server) (action gnet.Action) { - log.Printf("HTTP server is listening on %s (multi-cores: %t, loops: %d)\n", - srv.Addr.String(), srv.Multicore, srv.NumEventLoop) - return +func NowTimeFormat() string { + return now.Load().(string) } -func (hs *httpServer) React(frame []byte, c gnet.Conn) (out []byte, action gnet.Action) { - // handle the request - out = frame - return +func nowTimeFormat() string { + return time.Now().Format("Mon, 02 Jan 2006 15:04:05 GMT") } func init() { runtime.GOMAXPROCS(runtime.NumCPU() * 2) + now.Store(nowTimeFormat()) + go ticktock() } func main() { @@ -69,9 +108,8 @@ func main() { flag.BoolVar(&multicore, "multicore", true, "multicore") flag.Parse() - http := new(httpServer) - hc := &httpCodec{delimiter: []byte("\r\n\r\n")} + hs := &httpServer{addr: fmt.Sprintf("tcp://:%d", port), multicore: multicore} // Start serving! 
- log.Fatal(gnet.Serve(http, fmt.Sprintf("tcp://:%d", port), gnet.WithMulticore(multicore), gnet.WithCodec(hc))) + log.Println("server exits:", gnet.Run(hs, hs.addr, gnet.WithMulticore(multicore))) } diff --git a/frameworks/Go/go-std/benchmark_config.json b/frameworks/Go/go-std/benchmark_config.json index ae371ee0ad4..863aff7d2bf 100644 --- a/frameworks/Go/go-std/benchmark_config.json +++ b/frameworks/Go/go-std/benchmark_config.json @@ -8,7 +8,7 @@ "approach": "Realistic", "classification": "Platform", "database": "None", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -29,7 +29,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -50,7 +50,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -68,7 +68,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -91,7 +91,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -112,7 +112,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -130,7 +130,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -151,7 +151,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -172,7 +172,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -193,7 +193,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MongoDB", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", @@ -214,7 +214,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MongoDB", - "framework": "None", + "framework": "go-std", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/goframe/benchmark_config.json b/frameworks/Go/goframe/benchmark_config.json index e4c020f9746..3b8ed0ca203 100644 --- a/frameworks/Go/goframe/benchmark_config.json +++ b/frameworks/Go/goframe/benchmark_config.json @@ -4,14 +4,15 @@ "default": { "json_url": "/json", "db_url": "/db", - "query_url": "/dbs?queries=", + "query_url": "/queries?queries=", + "cached_query_url": "/cached-worlds?queries=", "fortune_url": "/fortunes", - "update_url": "/update?queries=", + "update_url": "/updates?queries=", "plaintext_url": "/plaintext", "port": 8080, "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", + "classification": "Platform", + "database": "Postgres", "framework": "goframe", "language": "Go", "flavor": "None", diff --git a/frameworks/Go/goframe/config.toml b/frameworks/Go/goframe/config.toml index 8b3eef94a24..50bafa07533 100644 --- a/frameworks/Go/goframe/config.toml +++ b/frameworks/Go/goframe/config.toml @@ 
-5,12 +5,13 @@ name = "goframe" urls.plaintext = "/plaintext" urls.json = "/json" urls.db = "/db" -urls.query = "/dbs?queries=" -urls.update = "/update?queries=" +urls.query = "/queries?queries=" +urls.update = "/updates?queries=" urls.fortune = "/fortunes" +urls.cached_query = "/cached-worlds?queries=" approach = "Realistic" -classification = "Micro" -database = "MySQL" +classification = "Platform" +database = "Postgres" database_os = "Linux" os = "Linux" orm = "Raw" diff --git a/frameworks/Go/goframe/goframe.dockerfile b/frameworks/Go/goframe/goframe.dockerfile index 0f0e2208438..1059d4192ab 100644 --- a/frameworks/Go/goframe/goframe.dockerfile +++ b/frameworks/Go/goframe/goframe.dockerfile @@ -1,9 +1,12 @@ -FROM golang:1.15 +FROM golang:1.17 ADD ./src /goframe WORKDIR /goframe -RUN go build -o main main.go +RUN go get -u github.com/valyala/quicktemplate/qtc@v1.6.3 +RUN go mod tidy +RUN go generate ./template +RUN go build -ldflags="-s -w" -o app . EXPOSE 8080 -CMD ./main +CMD ./app diff --git a/frameworks/Go/goframe/src/go.mod b/frameworks/Go/goframe/src/go.mod index cd0d087c7a6..5c9083de74b 100644 --- a/frameworks/Go/goframe/src/go.mod +++ b/frameworks/Go/goframe/src/go.mod @@ -1,8 +1,10 @@ module goframe -go 1.15 +go 1.17 require ( - github.com/go-sql-driver/mysql v1.5.0 - github.com/gogf/gf latest -) + github.com/bytedance/sonic v1.1.0 + github.com/gogf/gf/v2 v2.0.0-rc3 + github.com/jackc/pgx/v4 v4.14.1 + github.com/valyala/quicktemplate v1.7.0 +) \ No newline at end of file diff --git a/frameworks/Go/goframe/src/go.sum b/frameworks/Go/goframe/src/go.sum index 4b329266e6c..b05a2f57fac 100644 --- a/frameworks/Go/goframe/src/go.sum +++ b/frameworks/Go/goframe/src/go.sum @@ -1,50 +1,337 @@ -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/clbanning/mxj v1.8.5-0.20200714211355-ff02cfb8ea28 h1:LdXxtjzvZYhhUaonAaAKArG3pyC67kGL3YY+6hGG8G4= -github.com/clbanning/mxj v1.8.5-0.20200714211355-ff02cfb8ea28/go.mod h1:BVjHeAH+rl9rs6f+QIpeRl0tfu10SXn1pUSa5PVGJng= +github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw= +github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/bytedance/sonic v1.1.0 h1:3GJdsQPOEN6UNeTvlvXyhKhCJV2c+QUNZDCl/BewYTI= +github.com/bytedance/sonic v1.1.0/go.mod h1:V973WhNhGmvHxW6nQmsHEfHaoU9F3zTF+93rH03hcUQ= +github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06 h1:1sDoSuDPWzhkdzNVxCxtIaKiAe96ESVPv8coGwc1gZ4= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/clbanning/mxj/v2 v2.5.5 h1:oT81vUeEiQQ/DcHbzSytRngP6Ky9O+L+0Bw0zSJag9E= +github.com/clbanning/mxj/v2 v2.5.5/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= 
+github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/gogf/gf v1.14.5 h1:y/4q8rfFtiuIL7PwNUhG8RyBWLlvR1pl44x7/EJeDMI= -github.com/gogf/gf v1.14.5/go.mod h1:s4b0tkBqHyEWAk/Hwm4hzUCbCbdIPeERxB2wmeBg11g= -github.com/gomodule/redigo v2.0.0+incompatible h1:K/R+8tc58AaqLkqG2Ol3Qk+DR/TlNuhuh457pBFPtt0= -github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= +github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/goccy/go-json v0.9.4 h1:L8MLKG2mvVXiQu07qB6hmfqeSYQdOnqPot2GhsIwIaI= +github.com/goccy/go-json v0.9.4/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= 
+github.com/gogf/gf/v2 v2.0.0-rc3 h1:FkmLFhgOCZnyr24H/Yj9V1psS7fJ79DtPuSz+l/kwsc= +github.com/gogf/gf/v2 v2.0.0-rc3/go.mod h1:apktt6TleWtCIwpz63vBqUnw8MX8gWKoZyxgDpXFtgM= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/gorilla/websocket v1.4.1 h1:q7AeDBpnBk8AogcD4DSag/Ukw/KV+YhzLj2bP5HvKCM= -github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/grokify/html-strip-tags-go v0.0.0-20190921062105-daaa06bf1aaf h1:wIOAyJMMen0ELGiFzlmqxdcV1yGbkyHBAB6PolcNbLA= -github.com/grokify/html-strip-tags-go v0.0.0-20190921062105-daaa06bf1aaf/go.mod h1:2Su6romC5/1VXOQMaWL2yb618ARB8iVo6/DR99A6d78= -github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grokify/html-strip-tags-go v0.0.1 h1:0fThFwLbW7P/kOiTBs03FsJSV9RM2M/Q/MOnCQxKMo0= +github.com/grokify/html-strip-tags-go v0.0.1/go.mod h1:2Su6romC5/1VXOQMaWL2yb618ARB8iVo6/DR99A6d78= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= 
+github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.10.1 h1:DzdIHIjG1AxGwoEEqS+mGsURyjt4enSmqzACXvVzOT8= +github.com/jackc/pgconn v1.10.1/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns= +github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= 
+github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.9.1 h1:MJc2s0MFS8C3ok1wQTdQxWuXQcB6+HwAm5x1CzW7mf0= +github.com/jackc/pgtype v1.9.1/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.14.1 h1:71oo1KAGI6mXhLiTMn6iDFcp3e7+zon/capWjl2OEFU= +github.com/jackc/pgx/v4 v4.14.1/go.mod h1:RgDuE4Z34o7XE92RpLsvFiOEfrAUT0Xt2KxvX73W06M= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.2.0 h1:DNDKdn/pDrWvDWyT2FYvpZVE81OAhWrjCv19I9n108Q= +github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= 
+github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/olekukonko/tablewriter v0.0.1 h1:b3iUnf1v+ppJiOfNX4yxxqfWKMQPZR5yoh8urCTFX88= -github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= +github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= +github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.3.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.13.0 h1:3TFY9yxOQShrvmjdM76K+jc66zJeT6D3/VFFYCGQf7M= +github.com/tidwall/gjson v1.13.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.4 h1:cuiLzLnaMeBhRmEv00Lpk3tkYrcxpmbU81tAY4Dw0tc= +github.com/tidwall/sjson v1.2.4/go.mod h1:098SZ494YoMWPmMO6ct4dcFnqxwj9r/gF0Etp19pSNM= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.30.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= +github.com/valyala/quicktemplate v1.7.0 h1:LUPTJmlVcb46OOUY3IeD9DojFpAVbsG+5WFTcjMJzCM= +github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8= +github.com/valyala/tcplisten v1.0.0/go.mod 
h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.opentelemetry.io/otel v1.0.0 h1:qTTn6x71GVBvoafHK/yaRUmFzI4LcONZD0/kXxl5PHI= +go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg= +go.opentelemetry.io/otel/sdk v1.0.0 h1:BNPMYUONPNbLneMttKSjQhOTlFLOD9U22HNG1KrIN2Y= +go.opentelemetry.io/otel/sdk v1.0.0/go.mod h1:PCrDHlSy5x1kjezSdL37PhbFUMjrsLRshJ2zCzeXwbM= +go.opentelemetry.io/otel/trace v1.0.0 h1:TSBr8GTEtKevYMG/2d21M989r5WJYVimhTHBKVEZuh4= +go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20200602114024-627f9648deb9 h1:pNX+40auqi2JqRfOP1akLGtYcn15TUbkhwuCO3foqqM= -golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod 
v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.8-0.20211105212822-18b340fc7af2 h1:GLw7MR8AfAG2GmGcmVgObFOHXYypgGjnGno25RDwn3Y= +golang.org/x/text v0.3.8-0.20211105212822-18b340fc7af2/go.mod h1:EFNZuWvGYxIRUEX+K8UmCFwYmZjqcrnq15ZuVldZkZ0= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= 
+golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/frameworks/Go/goframe/src/handler/db.go b/frameworks/Go/goframe/src/handler/db.go new file mode 100644 index 00000000000..86e76bd3d4a --- /dev/null 
+++ b/frameworks/Go/goframe/src/handler/db.go @@ -0,0 +1,45 @@ +package handler + +import ( + "context" + "fmt" + + "github.com/jackc/pgx/v4/pgxpool" +) + +const ( + dbHost = "tfb-database" + dbPort = 5432 + dbUser = "benchmarkdbuser" + dbPaswd = "benchmarkdbpass" + dbName = "hello_world" + + worldSelectSQL = "SELECT id, randomNumber FROM World WHERE id = $1" + worldSelectCacheSQL = "SELECT id, randomNumber FROM World LIMIT $1" + worldUpdateSQL = "UPDATE World SET randomNumber = $1 WHERE id = $2" + fortuneSelectSQL = "SELECT id, message FROM Fortune" +) + +var db *pgxpool.Pool + +func InitDB(maxConn int) error { + pgx, err := newPGX(maxConn) + if err != nil { + return err + } + + db = pgx + + return nil +} + +func CloseDB() { + db.Close() +} + +func newPGX(maxConn int) (*pgxpool.Pool, error) { + dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s pool_max_conns=%d", + dbHost, dbPort, dbUser, dbPaswd, dbName, maxConn, + ) + return pgxpool.Connect(context.Background(), dsn) +} diff --git a/frameworks/Go/goframe/src/handler/handler.go b/frameworks/Go/goframe/src/handler/handler.go new file mode 100644 index 00000000000..60b2e37d6f3 --- /dev/null +++ b/frameworks/Go/goframe/src/handler/handler.go @@ -0,0 +1,157 @@ +package handler + +import ( + "context" + "net/http" + "sort" + + "github.com/bytedance/sonic" + "github.com/jackc/pgx/v4" + + "goframe/template" +) + +const ( + helloWorldStr = "Hello, World!" + contentTypePlain = "text/plain; charset=utf-8" + contentTypeHtml = "text/html; charset=utf-8" + contentTypeJson = "application/json" +) + +var ( + ctx = context.Background() + helloWorldBytes = []byte(helloWorldStr) + worldsCache = &Worlds{W: make([]World, worldsCount)} +) + +// PopulateWorldsCache populates the worlds cache for the cache test. +func PopulateWorldsCache() { + rows, err := db.Query(ctx, worldSelectCacheSQL, len(worldsCache.W)) + if err != nil { + panic(err) + } + i := 0 + for rows.Next() { + w := &worldsCache.W[i] + if err = rows.Scan(&w.ID, &w.RandomNumber); err != nil { + panic(err) + } + i++ + } +} + +// JSON . Test 1: JSON serialization. +func JSON(w http.ResponseWriter, r *http.Request) { + message := acquireMessage() + message.Message = helloWorldStr + output, _ := sonic.Marshal(message) + w.Header().Set("Content-Type", contentTypeJson) + _, _ = w.Write(output) + releaseMessage(message) +} + +// DB . Test 2: Single database query. +func DB(w http.ResponseWriter, r *http.Request) { + world := acquireWorld() + _ = db.QueryRow(ctx, worldSelectSQL, randomWorldNum()).Scan(&world.ID, &world.RandomNumber) + output, _ := sonic.Marshal(world) + w.Header().Set("Content-Type", contentTypeJson) + _, _ = w.Write(output) + releaseWorld(world) +} + +// Queries . Test 3: Multiple database queries. +func Queries(w http.ResponseWriter, r *http.Request) { + var ( + queries = queriesParam(r) + worlds = acquireWorlds() + ) + worlds.W = worlds.W[:queries] + for i := 0; i < queries; i++ { + world := &worlds.W[i] + _ = db.QueryRow(ctx, worldSelectSQL, randomWorldNum()).Scan(&world.ID, &world.RandomNumber) + } + output, _ := sonic.Marshal(worlds.W) + w.Header().Set("Content-Type", contentTypeJson) + _, _ = w.Write(output) + releaseWorlds(worlds) +} + +// CachedWorlds . Test 4: Multiple cache queries. 
+func CachedWorlds(w http.ResponseWriter, r *http.Request) { + var ( + queries = queriesParam(r) + worlds = acquireWorlds() + ) + worlds.W = worlds.W[:queries] + for i := 0; i < queries; i++ { + worlds.W[i] = worldsCache.W[randomWorldNum()-1] + } + output, _ := sonic.Marshal(worlds.W) + w.Header().Set("Content-Type", contentTypeJson) + _, _ = w.Write(output) + releaseWorlds(worlds) +} + +// FortunesQuick . Test 5: Fortunes. +func FortunesQuick(w http.ResponseWriter, r *http.Request) { + var ( + fortune = template.AcquireFortune() + fortunes = template.AcquireFortunes() + ) + rows, _ := db.Query(ctx, fortuneSelectSQL) + for rows.Next() { + _ = rows.Scan(&fortune.ID, &fortune.Message) // nolint:errcheck + fortunes.F = append(fortunes.F, *fortune) + } + + fortune.ID = 0 + fortune.Message = "Additional fortune added at request time." + fortunes.F = append(fortunes.F, *fortune) + + sort.Slice(fortunes.F, func(i, j int) bool { + return fortunes.F[i].Message < fortunes.F[j].Message + }) + + w.Header().Set("Content-Type", contentTypeHtml) + template.WriteFortunePage(w, fortunes.F) + + template.ReleaseFortune(fortune) + template.ReleaseFortunes(fortunes) +} + +// Updates . Test 6: Database updates. +func Updates(w http.ResponseWriter, r *http.Request) { + var ( + queries = queriesParam(r) + worlds = acquireWorlds() + ) + worlds.W = worlds.W[:queries] + for i := 0; i < queries; i++ { + world := &worlds.W[i] + _ = db.QueryRow(ctx, worldSelectSQL, randomWorldNum()).Scan(&world.ID, &world.RandomNumber) + world.RandomNumber = int32(randomWorldNum()) + } + // against deadlocks + sort.Slice(worlds.W, func(i, j int) bool { + return worlds.W[i].ID < worlds.W[j].ID + }) + batch := new(pgx.Batch) + for i := 0; i < queries; i++ { + world := &worlds.W[i] + batch.Queue(worldUpdateSQL, world.RandomNumber, world.ID) + } + _ = db.SendBatch(ctx, batch).Close() + + output, _ := sonic.Marshal(worlds.W) + w.Header().Set("Content-Type", contentTypeJson) + _, _ = w.Write(output) + + releaseWorlds(worlds) +} + +// Plaintext . Test 7: Plaintext. 
+func Plaintext(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", contentTypePlain) + _, _ = w.Write(helloWorldBytes) +} diff --git a/frameworks/Go/goframe/src/handler/message.go b/frameworks/Go/goframe/src/handler/message.go new file mode 100644 index 00000000000..82cc639d258 --- /dev/null +++ b/frameworks/Go/goframe/src/handler/message.go @@ -0,0 +1,20 @@ +package handler + +import ( + "sync" +) + +var messagePool = sync.Pool{ + New: func() interface{} { + return new(Message) + }, +} + +func acquireMessage() *Message { + return messagePool.Get().(*Message) +} + +func releaseMessage(m *Message) { + m.Message = "" + messagePool.Put(m) +} diff --git a/frameworks/Go/goframe/src/handler/types.go b/frameworks/Go/goframe/src/handler/types.go new file mode 100644 index 00000000000..7e641e1c4d3 --- /dev/null +++ b/frameworks/Go/goframe/src/handler/types.go @@ -0,0 +1,14 @@ +package handler + +type Message struct { + Message string `json:"message"` +} + +type World struct { + ID int32 `json:"id"` + RandomNumber int32 `json:"randomnumber"` +} + +type Worlds struct { + W []World +} diff --git a/frameworks/Go/goframe/src/handler/utils.go b/frameworks/Go/goframe/src/handler/utils.go new file mode 100644 index 00000000000..723df22bb6c --- /dev/null +++ b/frameworks/Go/goframe/src/handler/utils.go @@ -0,0 +1,21 @@ +package handler + +import ( + "math/rand" + "net/http" + "strconv" +) + +func queriesParam(r *http.Request) int { + n, _ := strconv.Atoi(r.URL.Query().Get("queries")) + if n < 1 { + n = 1 + } else if n > maxWorlds { + n = maxWorlds + } + return n +} + +func randomWorldNum() int { + return rand.Intn(worldsCount) + 1 +} diff --git a/frameworks/Go/goframe/src/handler/world.go b/frameworks/Go/goframe/src/handler/world.go new file mode 100644 index 00000000000..4f362ab430a --- /dev/null +++ b/frameworks/Go/goframe/src/handler/world.go @@ -0,0 +1,44 @@ +package handler + +import ( + "sync" +) + +const ( + maxWorlds = 500 + worldsCount = 10000 +) + +var ( + worldPool = sync.Pool{ + New: func() interface{} { + return new(World) + }, + } + worldsPool = sync.Pool{ + New: func() interface{} { + return &Worlds{ + W: make([]World, 0, maxWorlds), + } + }, + } +) + +func acquireWorld() *World { + return worldPool.Get().(*World) +} + +func releaseWorld(w *World) { + w.ID = 0 + w.RandomNumber = 0 + worldPool.Put(w) +} + +func acquireWorlds() *Worlds { + return worldsPool.Get().(*Worlds) +} + +func releaseWorlds(w *Worlds) { + w.W = w.W[:0] + worldsPool.Put(w) +} diff --git a/frameworks/Go/goframe/src/main.go b/frameworks/Go/goframe/src/main.go index ce0313d920c..425b891f02d 100644 --- a/frameworks/Go/goframe/src/main.go +++ b/frameworks/Go/goframe/src/main.go @@ -1,198 +1,57 @@ package main import ( - "log" + "net/http" "runtime" - "sort" - "database/sql" - - _ "github.com/go-sql-driver/mysql" - - "github.com/gogf/gf/frame/g" - "github.com/gogf/gf/net/ghttp" - "github.com/gogf/gf/util/grand" -) - -type ( - World struct { - Id uint16 `json:"id"` - RandomNumber uint16 `json:"randomNumber"` - } - - Fortune struct { - Id uint16 `json:"id"` - Message string `json:"message"` - } - - Fortunes []*Fortune - - ByMessage struct { - Fortunes - } + "github.com/gogf/gf/v2/frame/g" + "goframe/handler" ) -func (s Fortunes) Len() int { - return len(s) -} - -func (s Fortunes) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s ByMessage) Less(i, j int) bool { - return s.Fortunes[i].Message < s.Fortunes[j].Message -} - const ( - worldSelect = "SELECT id, 
randomNumber FROM World WHERE id = ?" - worldUpdate = "UPDATE World SET randomNumber = ? WHERE id = ?" - fortuneSelect = "SELECT id, message FROM Fortune;" - worldRowCount = 10000 - maxConnectionCount = 256 -) - -var ( - worldStatement *sql.Stmt - fortuneStatement *sql.Stmt - updateStatement *sql.Stmt + routeJson = `/json` + routeDb = `/db` + routeQueries = `/queries` + routeCachedWorlds = `/cached-worlds` + routeFortunes = `/fortunes` + routeUpdates = `/updates` + routePlaintext = `/plaintext` ) -func init() { - runtime.GOMAXPROCS(runtime.NumCPU()) - db, err := sql.Open( - "mysql", - "benchmarkdbuser:benchmarkdbpass@tcp(tfb-database:3306)/hello_world", - ) - if err != nil { - log.Fatalf("Error opening database: %v", err) - } - db.SetMaxIdleConns(maxConnectionCount) - if worldStatement, err = db.Prepare(worldSelect); err != nil { - log.Fatal(err) - } - if fortuneStatement, err = db.Prepare(fortuneSelect); err != nil { - log.Fatal(err) - } - if updateStatement, err = db.Prepare(worldUpdate); err != nil { - log.Fatal(err) - } -} - func main() { - g.View().SetAutoEncode(true) - g.View().Parse("fortune.html", g.Map{"list":nil}) - s := g.Server() - s.Group("/", func(group *ghttp.RouterGroup) { - group.Middleware(func(r *ghttp.Request) { - r.Middleware.Next() - r.Response.Header().Set("Server", "GoFrame") - }) - group.GET("/db", handlerDb) - group.GET("/dbs", handlerDbs) - group.GET("/json", handlerJson) - group.GET("/update", handlerUpdate) - group.GET("/fortunes", handlerFortunes) - group.GET("/plaintext", handlerPlaintext) - }) - s.SetPort(8080) - s.Run() -} - -/// Test 1: JSON serialization -func handlerJson(r *ghttp.Request) { - r.Response.WriteJson(g.Map{ - "message": "Hello, World!", - }) -} - -/// Test 2: Single database query -func handlerDb(r *ghttp.Request) { - var world World - err := worldStatement.QueryRow(grand.Intn(worldRowCount)+1). - Scan( - &world.Id, - &world.RandomNumber, - ) - if err != nil { - r.Response.WriteStatusExit(500, err.Error()) + // Db settings. + if err := handler.InitDB(runtime.NumCPU() * 4); err != nil { + panic(err) } - r.Response.WriteJson(world) -} + defer handler.CloseDB() -/// Test 3: Multiple database queries -func handlerDbs(r *ghttp.Request) { - var ( - queries = parseQueries(r) - worlds = make([]World, queries) - ) - for i := 0; i < queries; i++ { - err := worldStatement.QueryRow(grand.Intn(worldRowCount)+1). - Scan( - &worlds[i].Id, - &worlds[i].RandomNumber, - ) - if err != nil { - r.Response.WriteStatusExit(500, err.Error()) - } - } - r.Response.WriteJson(worlds) -} + // Init and populate worlds cache. + handler.PopulateWorldsCache() -/// Test 4: Fortunes -func handlerFortunes(r *ghttp.Request) { - rows, err := fortuneStatement.Query() - if err != nil { - r.Response.WriteStatusExit(500, err.Error()) - } - fortunes := make(Fortunes, 0, 16) - for rows.Next() { - fortune := Fortune{} - if err := rows.Scan(&fortune.Id, &fortune.Message); err != nil { - r.Response.WriteStatusExit(500, err.Error()) + // Init http server and handler. 
+ s := g.Server() + s.SetHandler(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Server", "GoFrame") + switch r.URL.Path { + case routeJson: + handler.JSON(w, r) + case routeDb: + handler.DB(w, r) + case routeQueries: + handler.Queries(w, r) + case routeCachedWorlds: + handler.CachedWorlds(w, r) + case routeFortunes: + handler.FortunesQuick(w, r) + case routeUpdates: + handler.Updates(w, r) + case routePlaintext: + handler.Plaintext(w, r) + default: + w.WriteHeader(http.StatusNotFound) + _, _ = w.Write([]byte(http.StatusText(http.StatusNotFound))) } - fortunes = append(fortunes, &fortune) - } - fortunes = append(fortunes, &Fortune{Message: "Additional fortune added at request time."}) - sort.Sort(ByMessage{fortunes}) - - r.Response.WriteTpl("fortune.html", g.Map{ - "list": fortunes, }) -} - -/// Test 5: Database updates -func handlerUpdate(r *ghttp.Request) { - var ( - queries = parseQueries(r) - world = make([]World, queries) - ) - for i := 0; i < queries; i++ { - if err := worldStatement.QueryRow(grand.Intn(worldRowCount)+1).Scan( - &world[i].Id, - &world[i].RandomNumber, - ); err != nil { - r.Response.WriteStatusExit(500, err.Error()) - } - world[i].RandomNumber = uint16(grand.Intn(worldRowCount) + 1) - if _, err := updateStatement.Exec(world[i].RandomNumber, world[i].Id); err != nil { - r.Response.WriteStatusExit(500, err.Error()) - } - } - r.Response.WriteJson(world) -} - -/// Test 6: plaintext -func handlerPlaintext(r *ghttp.Request) { - r.Response.Write("Hello, World!") -} - -func parseQueries(r *ghttp.Request) int { - n := r.GetQueryInt("queries") - if n < 1 { - n = 1 - } else if n > 500 { - n = 500 - } - return n + s.SetPort(8080) + s.Run() } diff --git a/frameworks/Go/goframe/src/template/fortune.go b/frameworks/Go/goframe/src/template/fortune.go new file mode 100644 index 00000000000..8559807a081 --- /dev/null +++ b/frameworks/Go/goframe/src/template/fortune.go @@ -0,0 +1,45 @@ +package template + +import ( + "sync" +) + +//go:generate qtc + +var ( + fortunePool = sync.Pool{ + New: func() interface{} { + return new(Fortune) + }, + } + fortunesPool = sync.Pool{ + New: func() interface{} { + return &Fortunes{ + F: make([]Fortune, 0, 16), + } + }, + } +) + +// AcquireFortune returns new message from pool. +func AcquireFortune() *Fortune { + return fortunePool.Get().(*Fortune) +} + +// ReleaseFortune resets the message and return it to the pool. +func ReleaseFortune(f *Fortune) { + f.ID = 0 + f.Message = "" + fortunePool.Put(f) +} + +// AcquireFortunes returns new fortunes from pool. +func AcquireFortunes() *Fortunes { + return fortunesPool.Get().(*Fortunes) +} + +// ReleaseFortunes resets the fortunes and return it to the pool. +func ReleaseFortunes(f *Fortunes) { + f.F = f.F[:0] + fortunesPool.Put(f) +} diff --git a/frameworks/Go/goframe/src/template/fortune.html b/frameworks/Go/goframe/src/template/fortune.html deleted file mode 100644 index ffc1fd6e5cb..00000000000 --- a/frameworks/Go/goframe/src/template/fortune.html +++ /dev/null @@ -1,20 +0,0 @@ - - - -Fortunes - - - - - - - -{{range .list}} - - - - -{{end}} -
<th>id</th><th>message</th>
<td>{{.Id}}</td><td>{{.Message}}</td>
- - diff --git a/frameworks/Go/goframe/src/template/fortunes.qtpl b/frameworks/Go/goframe/src/template/fortunes.qtpl new file mode 100644 index 00000000000..d387990d4ae --- /dev/null +++ b/frameworks/Go/goframe/src/template/fortunes.qtpl @@ -0,0 +1,15 @@ +{% func FortunePage(rows []Fortune) %} + + +Fortunes + + + + +{% for _, r := range rows %} + +{% endfor %} +
<th>id</th><th>message</th>
<td>{%d int(r.ID) %}</td><td>{%s r.Message %}</td>
+ + +{% endfunc %} diff --git a/frameworks/Go/goframe/src/template/fortunes.qtpl.go b/frameworks/Go/goframe/src/template/fortunes.qtpl.go new file mode 100644 index 00000000000..451dd7b5bb4 --- /dev/null +++ b/frameworks/Go/goframe/src/template/fortunes.qtpl.go @@ -0,0 +1,81 @@ +// Code generated by qtc from "fortunes.qtpl". DO NOT EDIT. +// See https://github.com/valyala/quicktemplate for details. + +//line fortunes.qtpl:1 +package template + +//line fortunes.qtpl:1 +import ( + qtio422016 "io" + + qt422016 "github.com/valyala/quicktemplate" +) + +//line fortunes.qtpl:1 +var ( + _ = qtio422016.Copy + _ = qt422016.AcquireByteBuffer +) + +//line fortunes.qtpl:1 +func StreamFortunePage(qw422016 *qt422016.Writer, rows []Fortune) { +//line fortunes.qtpl:1 + qw422016.N().S(` + + +Fortunes + + + + +`) +//line fortunes.qtpl:9 + for _, r := range rows { +//line fortunes.qtpl:9 + qw422016.N().S(` + +`) +//line fortunes.qtpl:11 + } +//line fortunes.qtpl:11 + qw422016.N().S(` +
<th>id</th><th>message</th>
`) +//line fortunes.qtpl:10 + qw422016.N().D(int(r.ID)) +//line fortunes.qtpl:10 + qw422016.N().S(``) +//line fortunes.qtpl:10 + qw422016.E().S(r.Message) +//line fortunes.qtpl:10 + qw422016.N().S(`
+ + +`) +//line fortunes.qtpl:15 +} + +//line fortunes.qtpl:15 +func WriteFortunePage(qq422016 qtio422016.Writer, rows []Fortune) { +//line fortunes.qtpl:15 + qw422016 := qt422016.AcquireWriter(qq422016) +//line fortunes.qtpl:15 + StreamFortunePage(qw422016, rows) +//line fortunes.qtpl:15 + qt422016.ReleaseWriter(qw422016) +//line fortunes.qtpl:15 +} + +//line fortunes.qtpl:15 +func FortunePage(rows []Fortune) string { +//line fortunes.qtpl:15 + qb422016 := qt422016.AcquireByteBuffer() +//line fortunes.qtpl:15 + WriteFortunePage(qb422016, rows) +//line fortunes.qtpl:15 + qs422016 := string(qb422016.B) +//line fortunes.qtpl:15 + qt422016.ReleaseByteBuffer(qb422016) +//line fortunes.qtpl:15 + return qs422016 +//line fortunes.qtpl:15 +} diff --git a/frameworks/Go/goframe/src/template/types.go b/frameworks/Go/goframe/src/template/types.go new file mode 100644 index 00000000000..5893c218304 --- /dev/null +++ b/frameworks/Go/goframe/src/template/types.go @@ -0,0 +1,10 @@ +package template + +type Fortune struct { + ID int `json:"id,omitempty"` + Message string `json:"message,omitempty"` +} + +type Fortunes struct { + F []Fortune +} diff --git a/frameworks/Go/gramework/benchmark_config.json b/frameworks/Go/gramework/benchmark_config.json index f709e680bd2..0ee4fa7d75a 100644 --- a/frameworks/Go/gramework/benchmark_config.json +++ b/frameworks/Go/gramework/benchmark_config.json @@ -8,7 +8,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "gramework", "language": "Go", "flavor": "None", "orm": "Raw", diff --git a/frameworks/Go/revel/benchmark_config.json b/frameworks/Go/revel/benchmark_config.json index 3556ab75fe1..c2ca8e72d4a 100644 --- a/frameworks/Go/revel/benchmark_config.json +++ b/frameworks/Go/revel/benchmark_config.json @@ -8,7 +8,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "revel", "language": "Go", "flavor": "None", "orm": "Raw", @@ -28,7 +28,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "revel", "language": "Go", "flavor": "None", "orm": "Raw", @@ -50,7 +50,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "revel", "language": "Go", "flavor": "None", "orm": "Raw", @@ -72,7 +72,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "revel", "language": "Go", "flavor": "None", "orm": "micro", @@ -94,7 +94,7 @@ "approach": "Realistic", "classification": "Fullstack", "database": "MySQL", - "framework": "None", + "framework": "revel", "language": "Go", "flavor": "None", "orm": "micro", diff --git a/frameworks/Go/silverlining/README.md b/frameworks/Go/silverlining/README.md new file mode 100644 index 00000000000..be83f90c2f4 --- /dev/null +++ b/frameworks/Go/silverlining/README.md @@ -0,0 +1,8 @@ +# [silverlining](https://github.com/go-www/silverlining) (GoLang) Benchmarking Test + +Silverlining is a low-level HTTP Framework for Go Programming Language. 
+ +## Test URLs + + http://localhost:8080/plaintext + http://localhost:8080/json diff --git a/frameworks/Go/silverlining/benchmark_config.json b/frameworks/Go/silverlining/benchmark_config.json new file mode 100644 index 00000000000..1d8c6f82bac --- /dev/null +++ b/frameworks/Go/silverlining/benchmark_config.json @@ -0,0 +1,47 @@ +{ + "framework": "silverlining", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "None", + "framework": "silverlining", + "language": "Go", + "flavor": "None", + "orm": "Raw", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "silverlining", + "notes": "", + "versus": "go" + } + }, + { + "prefork": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "None", + "framework": "silverlining", + "language": "Go", + "flavor": "None", + "orm": "Raw", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "silverlining", + "notes": "", + "versus": "go" + } + } + ] +} diff --git a/frameworks/Go/beego/config.toml b/frameworks/Go/silverlining/config.toml similarity index 59% rename from frameworks/Go/beego/config.toml rename to frameworks/Go/silverlining/config.toml index b930344166f..f6753674f17 100644 --- a/frameworks/Go/beego/config.toml +++ b/frameworks/Go/silverlining/config.toml @@ -1,29 +1,28 @@ [framework] -name = "beego" +name = "silverlining" [main] urls.plaintext = "/plaintext" urls.json = "/json" approach = "Realistic" -classification = "Fullstack" +classification = "Platform" database = "None" database_os = "Linux" os = "Linux" -orm = "raw" +orm = "Raw" platform = "None" webserver = "None" versus = "go" -[orm-mysql] -urls.db = "/db" -urls.query = "/queries?queries=" -urls.update = "/update?queries=" +[prefork] +urls.plaintext = "/plaintext" +urls.json = "/json" approach = "Realistic" -classification = "Fullstack" -database = "MySQL" +classification = "Platform" +database = "None" database_os = "Linux" os = "Linux" -orm = "Micro" +orm = "Raw" platform = "None" webserver = "None" versus = "go" diff --git a/frameworks/Go/silverlining/silverlining-prefork.dockerfile b/frameworks/Go/silverlining/silverlining-prefork.dockerfile new file mode 100644 index 00000000000..45a8e72f199 --- /dev/null +++ b/frameworks/Go/silverlining/silverlining-prefork.dockerfile @@ -0,0 +1,11 @@ +FROM golang:1.18 + +WORKDIR /silverlining + +COPY ./src /silverlining + +RUN GOAMD64=v3 go build -ldflags="-s -w" -o app . + +EXPOSE 8080 + +CMD ./app -prefork diff --git a/frameworks/Go/silverlining/silverlining.dockerfile b/frameworks/Go/silverlining/silverlining.dockerfile new file mode 100644 index 00000000000..e8f5136d730 --- /dev/null +++ b/frameworks/Go/silverlining/silverlining.dockerfile @@ -0,0 +1,11 @@ +FROM golang:1.18 + +WORKDIR /silverlining + +COPY ./src /silverlining + +RUN GOAMD64=v3 go build -ldflags="-s -w" -o app . 
+ +EXPOSE 8080 + +CMD ./app diff --git a/frameworks/Go/silverlining/src/go.mod b/frameworks/Go/silverlining/src/go.mod new file mode 100644 index 00000000000..8fc66fea193 --- /dev/null +++ b/frameworks/Go/silverlining/src/go.mod @@ -0,0 +1,14 @@ +module silverlining/src + +go 1.18 + +require github.com/go-www/silverlining v1.1.1 + +require ( + github.com/gobwas/httphead v0.1.0 // indirect + github.com/gobwas/pool v0.2.1 // indirect + github.com/gobwas/ws v1.1.0 // indirect + github.com/goccy/go-json v0.9.6 // indirect + github.com/libp2p/go-reuseport v0.1.0 // indirect + golang.org/x/sys v0.0.0-20220325203850-36772127a21f // indirect +) diff --git a/frameworks/Go/silverlining/src/go.sum b/frameworks/Go/silverlining/src/go.sum new file mode 100644 index 00000000000..9daa20e725e --- /dev/null +++ b/frameworks/Go/silverlining/src/go.sum @@ -0,0 +1,26 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/go-www/silverlining v1.1.1 h1:PI+ggLQ7CLzG9NfqPRScnqwLwn5JmXcYkGtin1uYyD8= +github.com/go-www/silverlining v1.1.1/go.mod h1:FqUQC31dCT7m21Q8nA0b7n0rfLEkUGAzjXmY7AZBoqU= +github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.1.0 h1:7RFti/xnNkMJnrK7D1yQ/iCIB5OrrY/54/H930kIbHA= +github.com/gobwas/ws v1.1.0/go.mod h1:nzvNcVha5eUziGrbxFCo6qFIojQHjJV5cLYIbezhfL0= +github.com/goccy/go-json v0.9.6 h1:5/4CtRQdtsX0sal8fdVhTaiMN01Ri8BExZZ8iRmHQ6E= +github.com/goccy/go-json v0.9.6/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/libp2p/go-reuseport v0.1.0 h1:0ooKOx2iwyIkf339WCZ2HN3ujTDbkK0PjC7JVoP1AiM= +github.com/libp2p/go-reuseport v0.1.0/go.mod h1:bQVn9hmfcTaoo0c9v5pBhOarsU1eNOBZdaAd2hzXRKU= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/sys v0.0.0-20190228124157-a34e9553db1e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201207223542-d4d67f95c62d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220325203850-36772127a21f h1:TrmogKRsSOxRMJbLYGrB4SBbW+LJcEllYBLME5Zk5pU= +golang.org/x/sys v0.0.0-20220325203850-36772127a21f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/frameworks/Go/silverlining/src/main.go b/frameworks/Go/silverlining/src/main.go new file 
mode 100644 index 00000000000..2f60ca2c5be --- /dev/null +++ b/frameworks/Go/silverlining/src/main.go @@ -0,0 +1,58 @@ +package main + +import ( + "flag" + "log" + + "github.com/go-www/silverlining" +) + +var BindAddr string +var prefork *bool + +func init() { + flag.StringVar(&BindAddr, "bind", ":8080", "set bind host") + prefork = flag.Bool("prefork", false, "use prefork") + flag.Parse() +} + +func main() { + log.Printf("Listening on http://localhost%s", BindAddr) + + Handler := func(r *silverlining.Context) { + switch string(r.Path()) { + case "/plaintext": + r.ResponseHeaders().Set("Content-Type", "text/plain") + r.WriteFullBodyString(200, "Hello, World!") + case "/json": + type Message struct { + Message string `json:"message"` + } + msg := Message{Message: "Hello, World!"} + r.WriteJSON(200, msg) + default: + r.WriteFullBody(404, nil) + } + } + + var err error + if *prefork { + var id int + id, err = silverlining.PreforkChildID() + if err != nil { + log.Fatalln(err) + } + + if id == 0 { + log.Println("Starting prefork leader process") + } else { + log.Printf("Starting prefork replica process %d", id) + } + err = silverlining.ListenAndServePrefork(BindAddr, Handler) + } else { + err = silverlining.ListenAndServe(BindAddr, Handler) + } + if err != nil { + log.Fatalln(err) + } +} diff --git a/frameworks/Haskell/ihp/benchmark_config.json b/frameworks/Haskell/ihp/benchmark_config.json index aeba9e97aed..48dca2b8888 100755 --- a/frameworks/Haskell/ihp/benchmark_config.json +++ b/frameworks/Haskell/ihp/benchmark_config.json @@ -23,7 +23,8 @@ "database_os": "Linux", "display_name": "Integrated Haskell Platfrom (IHP)", "notes": "", - "versus": "None" + "versus": "None", + "tags": ["broken"] } } ] diff --git a/frameworks/Haskell/postgrest/postgrest.conf b/frameworks/Haskell/postgrest/postgrest.conf new file mode 100644 index 00000000000..688ef63d886 --- /dev/null +++ b/frameworks/Haskell/postgrest/postgrest.conf @@ -0,0 +1,19 @@ +db-uri = "$(PGRST_DB_URI)" +db-schema = "$(PGRST_DB_SCHEMA)" +db-anon-role = "$(PGRST_DB_ANON_ROLE)" +db-pool = "$(PGRST_DB_POOL)" +db-extra-search-path = "$(PGRST_DB_EXTRA_SEARCH_PATH)" + +server-host = "$(PGRST_SERVER_HOST)" +server-port = "$(PGRST_SERVER_PORT)" + +openapi-server-proxy-uri = "$(PGRST_OPENAPI_SERVER_PROXY_URI)" +jwt-secret = "$(PGRST_JWT_SECRET)" +secret-is-base64 = "$(PGRST_SECRET_IS_BASE64)" +jwt-aud = "$(PGRST_JWT_AUD)" +role-claim-key = "$(PGRST_ROLE_CLAIM_KEY)" + +max-rows = "$(PGRST_MAX_ROWS)" +pre-request = "$(PGRST_PRE_REQUEST)" +root-spec = "$(PGRST_ROOT_SPEC)" +raw-media-types = "$(PGRST_RAW_MEDIA_TYPES)" \ No newline at end of file diff --git a/frameworks/Haskell/postgrest/postgrest.dockerfile b/frameworks/Haskell/postgrest/postgrest.dockerfile index 3dc1df2ac28..fb5f224e9bd 100644 --- a/frameworks/Haskell/postgrest/postgrest.dockerfile +++ b/frameworks/Haskell/postgrest/postgrest.dockerfile @@ -1,10 +1,10 @@ -FROM postgrest/postgrest:latest +FROM postgrest/postgrest:latest AS builder FROM alpine RUN apk add postgresql-client bash -COPY --from=0 /usr/local/bin/postgrest /usr/local/bin/postgrest -COPY --from=0 /etc/postgrest.conf /etc/postgrest.conf +COPY --from=builder /bin/postgrest /usr/local/bin/postgrest +COPY postgrest.conf /etc/postgrest.conf ENV PGRST_DB_SCHEMA=public ENV PGRST_DB_ANON_ROLE= diff --git a/frameworks/Java/act/pom.xml b/frameworks/Java/act/pom.xml index a27f7409769..6d22ea17292 100644 --- a/frameworks/Java/act/pom.xml +++ b/frameworks/Java/act/pom.xml @@ -69,8 +69,8 @@ 1.8 - 5.1.47 - 42.3.3 + 8.0.28 + 42.4.1 
1.3.2 3.4 com.techempower.act.AppEntry diff --git a/frameworks/Java/activeweb/pom.xml b/frameworks/Java/activeweb/pom.xml index 9d3836a8b28..0669a0bc1f5 100644 --- a/frameworks/Java/activeweb/pom.xml +++ b/frameworks/Java/activeweb/pom.xml @@ -105,7 +105,7 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 org.slf4j @@ -120,7 +120,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.13.2.1 diff --git a/frameworks/Java/armeria/pom.xml b/frameworks/Java/armeria/pom.xml index 18b8c0b5057..5ea46f3eb54 100644 --- a/frameworks/Java/armeria/pom.xml +++ b/frameworks/Java/armeria/pom.xml @@ -16,7 +16,7 @@ 11 - 1.9.1 + 1.17.0 @@ -33,7 +33,7 @@ org.postgresql postgresql - 42.3.3 + 42.4.1 com.github.spullara.mustache.java diff --git a/frameworks/Java/bayou/pom.xml b/frameworks/Java/bayou/pom.xml index 057dbe2b7e7..495d94cf319 100644 --- a/frameworks/Java/bayou/pom.xml +++ b/frameworks/Java/bayou/pom.xml @@ -33,7 +33,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.13.2.1 diff --git a/frameworks/Java/blade/README.md b/frameworks/Java/blade/README.md index 06950482a68..5f56046f4ca 100644 --- a/frameworks/Java/blade/README.md +++ b/frameworks/Java/blade/README.md @@ -9,9 +9,9 @@ This is the Blade portion of a [benchmarking test suite](../) comparing a variet ## Infrastructure Software Versions -* [Blade 2.0.12.ALPHA](https://github.com/biezhi/blade) +* [Blade 2.1.2.RELEASE](https://github.com/lets-blade/blade) * [Java OpenJDK 1.8](http://openjdk.java.net/) -* [HikariCP 3.2.0](https://github.com/brettwooldridge/HikariCP) +* [HikariCP 4.0.3](https://github.com/brettwooldridge/HikariCP) ## Test URLs diff --git a/frameworks/Java/blade/pom.xml b/frameworks/Java/blade/pom.xml index 51d747a6f4e..c51e9cfc449 100644 --- a/frameworks/Java/blade/pom.xml +++ b/frameworks/Java/blade/pom.xml @@ -14,44 +14,31 @@ UTF-8 11 11 - 2.0.15.RELEASE - 4.1.48.Final - 0.2.6 - 3.4.2 - 8.0.18 - 0.1.3 - 2.1.10 + 2.1.2.RELEASE + 4.1.76.Final + 0.3.1 + 4.0.3 + 8.0.28 + 0.2.1 - com.bladejava - blade-mvc + com.hellokaton + blade-core ${blade.version} - io.github.biezhi + com.hellokaton anima ${anima.version} - com.bladejava + com.hellokaton blade-template-jetbrick ${blade-jetbrick.version} - - - com.github.subchen - jetbrick-template - - - - - - com.github.subchen - jetbrick-template - ${jetbrick-version} diff --git a/frameworks/Java/blade/src/main/java/hello/Application.java b/frameworks/Java/blade/src/main/java/hello/Application.java index e29a1aa0a10..7bedbbd2a61 100644 --- a/frameworks/Java/blade/src/main/java/hello/Application.java +++ b/frameworks/Java/blade/src/main/java/hello/Application.java @@ -1,9 +1,9 @@ package hello; -import com.blade.Blade; -import com.blade.mvc.Const; -import com.blade.mvc.RouteContext; -import com.blade.mvc.http.StringBody; +import com.hellokaton.blade.Blade; +import com.hellokaton.blade.mvc.RouteContext; +import com.hellokaton.blade.mvc.http.StringBody; +import com.hellokaton.blade.mvc.BladeConst; import hello.model.Fortune; import hello.model.Message; import hello.model.World; @@ -12,23 +12,22 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.stream.Stream; -import static io.github.biezhi.anima.Anima.select; -import static io.github.biezhi.anima.Anima.update; +import static com.hellokaton.anima.Anima.select; +import static com.hellokaton.anima.Anima.update; +import static com.hellokaton.blade.mvc.BladeConst.ENV_KEY_REQUEST_LOG; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.toList; /** 
* Blade Application * - * @author biezhi - * @date 2018/10/17 + * @author hellokaton + * @date 2022/5/10 */ public class Application { private static final StringBody PLAINTEXT = StringBody.of("Hello, World!"); private static final String JSON_CONTENT_TYPE = "application/json"; - private static final String SERVER_HEADER = "Server"; - private static final String SERVER_VALUE = "Blade-" + Const.VERSION; private static final String ADDITIONAL_FORTUNE = "Additional fortune added at request time."; private static final int DB_ROWS = 10000; @@ -54,22 +53,24 @@ private static List generateIdList(int size) { private static void db(RouteContext ctx) { World world = select().from(World.class).byId(generateId()); - ctx.json(world).contentType(JSON_CONTENT_TYPE).header(SERVER_HEADER, SERVER_VALUE); + ctx.contentType(JSON_CONTENT_TYPE) + .json(world); } private static void queries(RouteContext ctx) { - int queries = getQueries(ctx.fromString("queries", "1")); + int queries = getQueries(ctx.query("queries", "1")); List idList = generateIdList(queries); List worlds = idList.stream() .map(id -> select().from(World.class).byId(id)) .collect(toList()); - ctx.json(worlds).contentType(JSON_CONTENT_TYPE).header(SERVER_HEADER, SERVER_VALUE); + ctx.contentType(JSON_CONTENT_TYPE) + .json(worlds); } private static void updates(RouteContext ctx) { - int queries = getQueries(ctx.fromString("queries", "1")); + int queries = getQueries(ctx.query("queries", "1")); List idList = generateIdList(queries); @@ -77,7 +78,8 @@ private static void updates(RouteContext ctx) { .map(id -> select().from(World.class).byId(id)) .peek(Application::updateWorld).collect(toList()); - ctx.json(worlds).contentType(JSON_CONTENT_TYPE).header(SERVER_HEADER, SERVER_VALUE); + ctx.contentType(JSON_CONTENT_TYPE) + .json(worlds); } private static void updateWorld(World world) { @@ -98,21 +100,25 @@ private static void fortunes(RouteContext ctx) { fortunes.sort(comparing(Fortune::getMessage)); ctx.attribute("fortunes", fortunes); - ctx.header(SERVER_HEADER, SERVER_VALUE); ctx.render("fortunes.html"); } public static void main(String[] args) { - Blade.of() - .get("/json", ctx -> ctx.json(new Message()).contentType(JSON_CONTENT_TYPE) - .header(SERVER_HEADER, SERVER_VALUE)) - .get("/plaintext", ctx -> ctx.body(PLAINTEXT).contentType("text/plain") - .header(SERVER_HEADER, SERVER_VALUE)) + Blade.create() + .get("/json", ctx -> + ctx.contentType(JSON_CONTENT_TYPE) + .json(new Message()) + ) + .get("/plaintext", ctx -> + ctx.contentType("text/plain") + .body(PLAINTEXT) + ) .get("/db", Application::db) .get("/queries", Application::queries) .get("/updates", Application::updates) .get("/fortunes", Application::fortunes) .disableSession() + .setEnv(ENV_KEY_REQUEST_LOG, false) .start(Application.class, args); } diff --git a/frameworks/Java/blade/src/main/java/hello/Bootstrap.java b/frameworks/Java/blade/src/main/java/hello/Bootstrap.java index 18adfdf4022..1616f231912 100644 --- a/frameworks/Java/blade/src/main/java/hello/Bootstrap.java +++ b/frameworks/Java/blade/src/main/java/hello/Bootstrap.java @@ -1,18 +1,18 @@ package hello; -import com.blade.Blade; -import com.blade.ioc.annotation.Bean; -import com.blade.loader.BladeLoader; -import com.blade.mvc.view.template.JetbrickTemplateEngine; +import com.hellokaton.anima.Anima; +import com.hellokaton.blade.Blade; +import com.hellokaton.blade.ioc.annotation.Bean; +import com.hellokaton.blade.loader.BladeLoader; +import com.hellokaton.blade.template.JetbrickTemplateEngine; import com.zaxxer.hikari.HikariConfig; 
import com.zaxxer.hikari.HikariDataSource; -import io.github.biezhi.anima.Anima; /** * Application Bootstrap * - * @author biezhi - * @date 2018/10/17 + * @author hellokaton + * @date 2022/5/10 */ @Bean public class Bootstrap implements BladeLoader { @@ -25,12 +25,12 @@ public void load(Blade blade) { HikariConfig config = new HikariConfig(); - String url = blade.env("jdbc.url", ""); - String username = blade.env("jdbc.username", ""); - String password = blade.env("jdbc.password", ""); - String cachePrepStmts = blade.env("datasource.cachePrepStmts", "true"); - String prepStmtCacheSize = blade.env("datasource.prepStmtCacheSize", "250"); - String prepStmtCacheSqlLimit = blade.env("datasource.prepStmtCacheSqlLimit", "2048"); + String url = blade.getEnv("jdbc.url", ""); + String username = blade.getEnv("jdbc.username", ""); + String password = blade.getEnv("jdbc.password", ""); + String cachePrepStmts = blade.getEnv("datasource.cachePrepStmts", "true"); + String prepStmtCacheSize = blade.getEnv("datasource.prepStmtCacheSize", "250"); + String prepStmtCacheSqlLimit = blade.getEnv("datasource.prepStmtCacheSqlLimit", "2048"); config.setJdbcUrl(url); config.setUsername(username); diff --git a/frameworks/Java/blade/src/main/java/hello/model/Fortune.java b/frameworks/Java/blade/src/main/java/hello/model/Fortune.java index eb55b41d154..4a17bbffdc9 100644 --- a/frameworks/Java/blade/src/main/java/hello/model/Fortune.java +++ b/frameworks/Java/blade/src/main/java/hello/model/Fortune.java @@ -1,7 +1,8 @@ package hello.model; -import io.github.biezhi.anima.Model; -import io.github.biezhi.anima.annotation.Table; + +import com.hellokaton.anima.Model; +import com.hellokaton.anima.annotation.Table; @Table(name = "fortune") public class Fortune extends Model { diff --git a/frameworks/Java/blade/src/main/java/hello/model/World.java b/frameworks/Java/blade/src/main/java/hello/model/World.java index fc15d85cab1..b315a2cd9ef 100644 --- a/frameworks/Java/blade/src/main/java/hello/model/World.java +++ b/frameworks/Java/blade/src/main/java/hello/model/World.java @@ -1,8 +1,9 @@ package hello.model; -import io.github.biezhi.anima.Model; -import io.github.biezhi.anima.annotation.Column; -import io.github.biezhi.anima.annotation.Table; + +import com.hellokaton.anima.Model; +import com.hellokaton.anima.annotation.Column; +import com.hellokaton.anima.annotation.Table; /** * World model diff --git a/frameworks/Java/dropwizard/pom.xml b/frameworks/Java/dropwizard/pom.xml index 461f9a2a255..8fcbeb3c6b6 100644 --- a/frameworks/Java/dropwizard/pom.xml +++ b/frameworks/Java/dropwizard/pom.xml @@ -17,7 +17,7 @@ 1.3.12 1.1.1 2.3.0 - 8.0.18 + 8.0.28 2.9.4 42.3.3 3.8.0 diff --git a/frameworks/Java/grizzly/pom.xml b/frameworks/Java/grizzly/pom.xml index 16c6b8be004..9146b8e5e24 100644 --- a/frameworks/Java/grizzly/pom.xml +++ b/frameworks/Java/grizzly/pom.xml @@ -72,7 +72,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.13.2.1 diff --git a/frameworks/Java/hserver/README.md b/frameworks/Java/hserver/README.md index 9c56a406b3a..e7b3ed7ff3f 100644 --- a/frameworks/Java/hserver/README.md +++ b/frameworks/Java/hserver/README.md @@ -2,12 +2,15 @@ This is the HServer portion of a [benchmarking test suite](../) comparing a variety of web development platforms. 
### Tests -* [HServer JSON test source](src/main/java/com/test/hserver/controller/TestController.java) +* [JSON test source](src/main/java/com/test/hserver/controller/TestController.java) * [Plaintext test source](src/main/java/com/test/hserver/controller/TestController.java) +* [Data-Store test source](src/main/java/com/test/hserver/controller/TestController.java) +* [Data-Update test source](src/main/java/com/test/hserver/controller/TestController.java) +* [Fortunes test source](src/main/java/com/test/hserver/controller/TestController.java) ## Infrastructure Software Versions -* [HServer 2.9.73](https://gitee.com/HServer/HServer) +* [HServer](https://gitee.com/HServer/HServer) * [Java OpenJDK 1.8](http://openjdk.java.net/) ## Test URLs diff --git a/frameworks/Java/hserver/hserver.dockerfile b/frameworks/Java/hserver/hserver.dockerfile index db53ffec067..483485da30f 100644 --- a/frameworks/Java/hserver/hserver.dockerfile +++ b/frameworks/Java/hserver/hserver.dockerfile @@ -1,10 +1,10 @@ -FROM maven:3.6.1-jdk-11-slim as maven +FROM maven:3.6.3-openjdk-8-slim as maven WORKDIR /hserver COPY pom.xml pom.xml COPY src src RUN mvn package -FROM openjdk:11.0.3-jdk-slim +FROM openjdk:8u275-jdk-slim WORKDIR /hserver COPY --from=maven /hserver/target/hserver-1.0.jar app.jar diff --git a/frameworks/Java/hserver/pom.xml b/frameworks/Java/hserver/pom.xml index 15cb6825bf3..8699552d1ba 100644 --- a/frameworks/Java/hserver/pom.xml +++ b/frameworks/Java/hserver/pom.xml @@ -10,8 +10,8 @@ UTF-8 3.3.1 - 42.3.3 - 2.9.80 + 42.4.1 + 2.9.90 diff --git a/frameworks/Java/hserver/src/main/java/com/test/hserver/controller/TestController.java b/frameworks/Java/hserver/src/main/java/com/test/hserver/controller/TestController.java index 8a3923368cb..7ce11b120c1 100644 --- a/frameworks/Java/hserver/src/main/java/com/test/hserver/controller/TestController.java +++ b/frameworks/Java/hserver/src/main/java/com/test/hserver/controller/TestController.java @@ -32,13 +32,13 @@ public class TestController { @GET("/json") public Message json(HttpResponse response) { - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); return new Message(); } @GET("/plaintext") public String plaintext(HttpResponse response) { - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); return HELLO; } @@ -54,7 +54,7 @@ public void db(HttpResponse response) throws SQLException { } } } - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); response.sendJson(result); } @@ -72,7 +72,7 @@ public void queries(String queries,HttpResponse response) throws Exception { } } } - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); response.sendJson(result); } @@ -107,7 +107,7 @@ public void updates(String queries,HttpResponse response) throws Exception { statement.executeUpdate(); } } - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); response.sendJson(result); } @@ -125,7 +125,7 @@ public void fortunes(HttpResponse response) throws Exception { } fortunes.add(new Fortune(0, "Additional fortune added at request time.")); Collections.sort(fortunes); - response.setHeader("Date", DateUtil.getNow()); + response.setHeader("Date", DateUtil.getTime()); Map data=new HashMap<>(); data.put("data",fortunes); response.sendTemplate("fortunes.ftl",data); diff --git a/frameworks/Java/hserver/src/main/java/com/test/hserver/task/TimeAdd.java 
b/frameworks/Java/hserver/src/main/java/com/test/hserver/task/TimeAdd.java new file mode 100644 index 00000000000..159c7f1c33a --- /dev/null +++ b/frameworks/Java/hserver/src/main/java/com/test/hserver/task/TimeAdd.java @@ -0,0 +1,15 @@ +package com.test.hserver.task; + +import com.test.hserver.util.DateUtil; +import top.hserver.core.ioc.annotation.Bean; +import top.hserver.core.ioc.annotation.Task; + +@Bean +public class TimeAdd { + + @Task(name = "时间计算", time = "1000") + public void add() { + DateUtil.time = DateUtil.getNow(); + } + +} diff --git a/frameworks/Java/hserver/src/main/java/com/test/hserver/util/DateUtil.java b/frameworks/Java/hserver/src/main/java/com/test/hserver/util/DateUtil.java index 8676be9f543..0874b9bd9eb 100644 --- a/frameworks/Java/hserver/src/main/java/com/test/hserver/util/DateUtil.java +++ b/frameworks/Java/hserver/src/main/java/com/test/hserver/util/DateUtil.java @@ -14,5 +14,12 @@ public class DateUtil { public static String getNow() { return GMT_FMT.format(LocalDateTime.now().atZone(zoneId)); } - + public static String time; + public static String getTime(){ + if (time==null){ + time=getNow(); + return time; + } + return time; + } } diff --git a/frameworks/Java/hserver/src/main/resources/app.properties b/frameworks/Java/hserver/src/main/resources/app.properties index d1c63c77cb6..c2124bd4802 100644 --- a/frameworks/Java/hserver/src/main/resources/app.properties +++ b/frameworks/Java/hserver/src/main/resources/app.properties @@ -3,4 +3,6 @@ username= benchmarkdbuser password= benchmarkdbpass maximumPoolSize= 256 level=info - +businessPool=-1 +bossPool=2 +workerPool=4 diff --git a/frameworks/Java/httpserver/pom.xml b/frameworks/Java/httpserver/pom.xml index 7d6267a456c..08eec590fe3 100644 --- a/frameworks/Java/httpserver/pom.xml +++ b/frameworks/Java/httpserver/pom.xml @@ -18,7 +18,7 @@ com.fasterxml.jackson.core jackson-databind - 2.9.9 + 2.13.2.1 com.fasterxml.jackson.module @@ -29,7 +29,7 @@ org.postgresql postgresql - 42.3.3 + 42.4.1 com.zaxxer diff --git a/frameworks/Java/isocket-nio/.gitignore b/frameworks/Java/isocket-nio/.gitignore new file mode 100644 index 00000000000..2422d597b85 --- /dev/null +++ b/frameworks/Java/isocket-nio/.gitignore @@ -0,0 +1,8 @@ +.settings +.project +.classpath + +/logs +/target +.idea +*.iml \ No newline at end of file diff --git a/frameworks/Java/isocket-nio/isocket-nio.dockerfile b/frameworks/Java/isocket-nio/isocket-nio.dockerfile index 25c8d3dcd38..4ebf25f50e7 100644 --- a/frameworks/Java/isocket-nio/isocket-nio.dockerfile +++ b/frameworks/Java/isocket-nio/isocket-nio.dockerfile @@ -10,4 +10,4 @@ COPY --from=maven /isocket/target/isocket-nio-benchmark-1.0-jar-with-dependencie EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:+AggressiveOpts", "-cp", "app.jar", "cn.ibaijia.tfb.HttpBootstrap"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-cp", "app.jar", "cn.ibaijia.tfb.HttpBootstrap"] diff --git a/frameworks/Java/isocket-nio/pom.xml b/frameworks/Java/isocket-nio/pom.xml index 008a0bc8e4a..b2ec0b03ef1 100644 --- a/frameworks/Java/isocket-nio/pom.xml +++ b/frameworks/Java/isocket-nio/pom.xml @@ -10,8 +10,8 @@ 1.7.7 1.2.17 2.17.1 - 1.0.3-SNAPSHOT - 1.2.44 + 1.0.4-SNAPSHOT + 1.2.83 @@ -44,12 +44,16 @@ slf4j-api ${slf4j.version} - org.apache.logging.log4j log4j-api ${log4j2.version} + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + org.apache.logging.log4j log4j-core diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/Consts.java 
b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/Consts.java deleted file mode 100644 index 42f75863fb9..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/Consts.java +++ /dev/null @@ -1,12 +0,0 @@ -package cn.ibaijia.tfb; - -public class Consts { - - public static final byte[] TEXT_TYPE = "text/plain".getBytes(); - public static final byte[] JSON_TYPE = "application/json".getBytes(); - - public static final String URL_TEXT_PLAIN = "/plaintext"; - public static final String URL_JSON = "/json"; - public static final String URL_STATE = "/state"; - -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/DateUtil.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/DateUtil.java deleted file mode 100644 index c7be2b3d52c..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/DateUtil.java +++ /dev/null @@ -1,33 +0,0 @@ -package cn.ibaijia.tfb; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Locale; - -public class DateUtil { - - private static final SimpleDateFormat dateFormat = new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);//Fri, 09 Jul 2021 09:10:42 UTC - - private static byte[] date = ("\r\nDate:" + dateFormat.format(new Date())).getBytes(); - - public static byte[] getDate() { - return date; - } - - public static void start() { - new Thread(new Runnable() { - @Override - public void run() { - while (true) { - date = ("\r\nDate:" + dateFormat.format(new Date())).getBytes(); - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - - } - } - } - }).start(); - } - -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/HttpBootstrap.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/HttpBootstrap.java index 46baa377113..c3e37e51f8b 100644 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/HttpBootstrap.java +++ b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/HttpBootstrap.java @@ -2,9 +2,10 @@ import cn.ibaijia.isocket.Server; import cn.ibaijia.isocket.listener.SessionProcessErrorListener; +import cn.ibaijia.isocket.protocol.SimpleHttpProtocol; +import cn.ibaijia.isocket.protocol.http.DateUtil; import cn.ibaijia.isocket.session.Session; import cn.ibaijia.tfb.processor.PlanTextProcessor; -import cn.ibaijia.tfb.protocol.SimpleHttpProtocol; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -15,21 +16,15 @@ public class HttpBootstrap { private static final Logger logger = LoggerFactory.getLogger(HttpBootstrap.class); public static void main(String[] args) { - DateUtil.start(); Server server = new Server("0.0.0.0", 8080); server.addProtocol(new SimpleHttpProtocol()); server.setProcessor(new PlanTextProcessor()); - server.setSessionProcessErrorListener(new SessionProcessErrorListener() { - @Override - public void run(Session session, Object o, Throwable throwable) { - logger.error("session on process error.", throwable); - } - }); + server.setSessionProcessErrorListener((session, o, throwable) -> logger.error("session on process error.", throwable)); server.setUseDirectBuffer(true); server.setUsePool(true); - server.setPoolPageSize(32 * 1024); + server.setPoolSize(1 * 1024); server.setBuffSize(1 * 1024); - server.setBacklog(16 * 1024); + server.setBacklog(4 * 1024); server.start(); } diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/Header.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/Header.java deleted file mode 100644 index 
1a69f89cb6f..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/Header.java +++ /dev/null @@ -1,12 +0,0 @@ -package cn.ibaijia.tfb.http; - -public class Header { - - public String name; - public String value; - - public Header(String name, String value) { - this.name = name; - this.value = value; - } -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpEntity.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpEntity.java deleted file mode 100644 index 3ac9a6b4c34..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpEntity.java +++ /dev/null @@ -1,17 +0,0 @@ -package cn.ibaijia.tfb.http; - -public abstract class HttpEntity { - - public String charset = "UTF-8"; - - public abstract byte[] getHeader(byte[] name); - - public abstract byte[] getHeader(String name); - - public abstract void setHeader(byte[] name, byte[] value); - - public abstract void setContentType(String contentType); - - public abstract void setContentType(byte[] contentType); - -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpRequestEntity.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpRequestEntity.java deleted file mode 100644 index cde2624f10b..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpRequestEntity.java +++ /dev/null @@ -1,99 +0,0 @@ -package cn.ibaijia.tfb.http; - -import cn.ibaijia.tfb.Consts; - -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -/** - * @author longzl - */ -public class HttpRequestEntity extends HttpEntity { - - - public ByteBuffer bodyBuffer = null; - public boolean chunked = false; - public int contentLength = -1; - public int crNum = 0; - public int lfNum = 0; - public byte[] tmp; - - - public String method; - public String url; - public String protocol; - - /** - * 请求体 - */ - public String body; - private int count = 0; - /** - * 第一次 请求header时解析 第一行不要 - */ - private Map headers = new HashMap<>(8); - private byte[] contentType = Consts.TEXT_TYPE; - - @Override - public byte[] getHeader(byte[] name) { - for (Map.Entry entry : headers.entrySet()) { - if (Arrays.equals(entry.getKey(), name)) { - return entry.getValue(); - } - } - return null; - } - - @Override - public byte[] getHeader(String name) { - return getHeader(name.getBytes()); - } - - @Override - public void setHeader(byte[] name, byte[] value) { - count ++; - this.headers.put(name, value); - } - - public void printAllHeaders() { - for (Map.Entry entry : headers.entrySet()) { - System.out.println(count); - System.out.println(new String(entry.getKey()) + ":" + new String(entry.getValue())); - } - } - - @Override - public void setContentType(String contentType) { - this.contentType = contentType.getBytes(); - } - - @Override - public void setContentType(byte[] contentType) { - this.contentType = contentType; - } - - public void processBody() { - bodyBuffer.flip(); - byte[] bytes = new byte[bodyBuffer.remaining()]; - bodyBuffer.get(bytes); - body = new String(bytes); - } - - public boolean complete() { - if (contentLength < 1) { - return true; - } else { - return body != null; - } - } - - public boolean headerComplete() { - return this.crNum == 2 && this.lfNum == 2; - } - - public boolean isReadHeadLine() { - return this.protocol == null; - } -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpResponseEntity.java 
b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpResponseEntity.java deleted file mode 100644 index e0edb9c7bb5..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/http/HttpResponseEntity.java +++ /dev/null @@ -1,104 +0,0 @@ -package cn.ibaijia.tfb.http; - -import cn.ibaijia.tfb.Consts; -import cn.ibaijia.tfb.DateUtil; - -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * @author longzl - */ -public class HttpResponseEntity extends HttpEntity { - private static final byte[] PROTOCOL = "HTTP/1.1 ".getBytes(); - private static final byte[] STATUS_200 = "200 OK".getBytes(); - private static final byte[] CRLF = "\r\n".getBytes(); - private static final byte[] COLON = ":".getBytes(); - private static final byte[] SERVER_NAME = "\r\nServer:tfb\r\n".getBytes(); - private static final byte[] CONTENT_LENGTH_HEAD = "\r\nContent-Length:".getBytes(); - private static final byte[] CONTENT_TYPE_HEAD = "\r\nContent-Type:".getBytes(); - - private byte[] contentType = Consts.TEXT_TYPE; - /** - * 响应体 - */ - public String body; - - /** - * 请求头 或者 响应头 - */ - public Map headers = new HashMap<>(); - - @Override - public byte[] getHeader(byte[] name) { - for (Map.Entry entry : headers.entrySet()) { - if (Arrays.equals(entry.getKey(), name)) { - return entry.getValue(); - } - } - return null; - } - - @Override - public byte[] getHeader(String name) { - return getHeader(name.getBytes()); - } - - @Override - public void setHeader(byte[] name, byte[] value) { - headers.put(name, value); - } - - @Override - public void setContentType(String contentType) { - this.contentType = contentType.getBytes(); - } - - @Override - public void setContentType(byte[] contentType) { - this.contentType = contentType; - } - - public ByteBuffer toBuffer(ByteBuffer byteBuffer) { - byteBuffer.put(PROTOCOL); - byteBuffer.put(STATUS_200); - byteBuffer.put(DateUtil.getDate()); - byteBuffer.put(CONTENT_LENGTH_HEAD); - byteBuffer.put(String.valueOf(body.length()).getBytes()); - byteBuffer.put(CONTENT_TYPE_HEAD); - byteBuffer.put(contentType); - byteBuffer.put(SERVER_NAME); - for (Map.Entry header : headers.entrySet()) { - byteBuffer.put(header.getKey()); - byteBuffer.put(COLON); - byteBuffer.put(header.getValue()); - byteBuffer.put(CRLF); - } - byteBuffer.put(CRLF); - byteBuffer.put(body.getBytes()); - byteBuffer.flip(); - return byteBuffer; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - HttpResponseEntity that = (HttpResponseEntity) o; - return Objects.equals(body, that.body) && - Objects.equals(headers, that.headers); - } - - @Override - public int hashCode() { - - return Objects.hash(body, headers); - } -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/Message.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/Message.java index 410b0721237..0bd60ac0125 100644 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/Message.java +++ b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/Message.java @@ -1,5 +1,8 @@ package cn.ibaijia.tfb.processor; +/** + * @author longzl + */ public class Message { public String message; diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/PlanTextProcessor.java 
b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/PlanTextProcessor.java index f90048936cc..cc1d5f688a1 100644 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/PlanTextProcessor.java +++ b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/PlanTextProcessor.java @@ -1,11 +1,11 @@ package cn.ibaijia.tfb.processor; import cn.ibaijia.isocket.processor.Processor; +import cn.ibaijia.isocket.protocol.http.Consts; +import cn.ibaijia.isocket.protocol.http.HttpEntity; +import cn.ibaijia.isocket.protocol.http.HttpRequestEntity; +import cn.ibaijia.isocket.protocol.http.HttpResponseEntity; import cn.ibaijia.isocket.session.Session; -import cn.ibaijia.tfb.Consts; -import cn.ibaijia.tfb.http.HttpEntity; -import cn.ibaijia.tfb.http.HttpRequestEntity; -import cn.ibaijia.tfb.http.HttpResponseEntity; import com.alibaba.fastjson.JSON; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -20,7 +20,6 @@ public class PlanTextProcessor implements Processor { public boolean process(final Session session, final HttpEntity httpEntity) { HttpRequestEntity httpRequestEntity = (HttpRequestEntity) httpEntity; String url = httpRequestEntity.url; - logger.trace("url:{}", url); if (Consts.URL_TEXT_PLAIN.equals(url)) { HttpResponseEntity httpResponseEntity = new HttpResponseEntity(); httpResponseEntity.setContentType(Consts.TEXT_TYPE); @@ -31,12 +30,6 @@ public boolean process(final Session session, final HttpEntity httpEntity) { httpResponseEntity.setContentType(Consts.JSON_TYPE); httpResponseEntity.body = JSON.toJSONString(new Message("Hello, World!")); session.write(httpResponseEntity); - } else if (Consts.URL_STATE.equals(url)) { - HttpResponseEntity httpResponseEntity = new HttpResponseEntity(); - httpResponseEntity.setContentType(Consts.JSON_TYPE); - State state = new State(); - httpResponseEntity.body = JSON.toJSONString(state); - session.write(httpResponseEntity); } else { HttpResponseEntity httpResponseEntity = new HttpResponseEntity(); httpResponseEntity.setContentType(Consts.TEXT_TYPE); diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/State.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/State.java deleted file mode 100644 index 9896d0e1f31..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/processor/State.java +++ /dev/null @@ -1,8 +0,0 @@ -package cn.ibaijia.tfb.processor; - -public class State { - -// public int sessionCount; -// public BufferState bufferState; - -} diff --git a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/protocol/SimpleHttpProtocol.java b/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/protocol/SimpleHttpProtocol.java deleted file mode 100644 index 7b5fc372895..00000000000 --- a/frameworks/Java/isocket-nio/src/main/java/cn/ibaijia/tfb/protocol/SimpleHttpProtocol.java +++ /dev/null @@ -1,163 +0,0 @@ -package cn.ibaijia.tfb.protocol; - -import cn.ibaijia.isocket.protocol.Protocol; -import cn.ibaijia.isocket.session.Session; -import cn.ibaijia.tfb.http.HttpEntity; -import cn.ibaijia.tfb.http.HttpRequestEntity; -import cn.ibaijia.tfb.http.HttpResponseEntity; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.ByteBuffer; -import java.util.Arrays; - -/** - * @author longzl - */ -public class SimpleHttpProtocol implements Protocol { - - private static final Logger logger = LoggerFactory.getLogger(SimpleHttpProtocol.class); - /** - * CR13 \r - * LF10 \n - * SPACE0 \SP - * COLON : - */ - private static 
final byte CR13 = (byte) 13; - private static final byte LF10 = (byte) 10; - private static final byte SPACE0 = (byte) 32; - private static final byte COLON = (byte) 58; - - private static final String httpEntityKey = "httpEntity"; - - /** - * 解析HTTP请求 - * - * @param byteBuffer - * @param session - * @return - */ - @Override - public HttpEntity decode(ByteBuffer byteBuffer, Session session) { - HttpRequestEntity httpEntity = (HttpRequestEntity) session.getAttribute(httpEntityKey); - if (httpEntity == null) { - httpEntity = new HttpRequestEntity(); - session.setAttribute(httpEntityKey, httpEntity); - } - //解析header - if (!httpEntity.headerComplete() && byteBuffer.hasRemaining()) { - readHeader(byteBuffer, httpEntity); - } - - if (httpEntity.headerComplete()) { - if (httpEntity.complete()) { - session.setAttribute(httpEntityKey, null); - return httpEntity; - } - // 解析request body - if (httpEntity.bodyBuffer != null && byteBuffer.hasRemaining()) { - readBody(byteBuffer, httpEntity); - } - } - return null; - } - - private void readHeader(ByteBuffer byteBuffer, HttpRequestEntity httpEntity) { - try { - ByteBuffer buf = byteBuffer.duplicate(); - int startPos = 0; - int endPos = 0; - while (byteBuffer.hasRemaining()) { - byte b = byteBuffer.get(); - endPos++; - if (b == CR13) { - httpEntity.crNum++; - } else if (b == LF10) { - httpEntity.lfNum++; - } else { - httpEntity.crNum = 0; - httpEntity.lfNum = 0; - } - - if (httpEntity.headerComplete()) { - return; - } - - if (httpEntity.isReadHeadLine()) { - if (b == SPACE0) { - int len = endPos - startPos - 1; - byte[] bytes = new byte[len]; - buf.get(bytes, 0, len); - startPos = endPos; - buf.position(startPos); - if (httpEntity.method == null) { - httpEntity.method = new String(bytes); - } else if (httpEntity.url == null) { - httpEntity.url = new String(bytes); - } - } else if (httpEntity.crNum == 1 && httpEntity.lfNum == 1) { - int len = endPos - startPos - 2; - byte[] bytes = new byte[len]; - buf.get(bytes, 0, len); - startPos = endPos; - buf.position(startPos); - httpEntity.protocol = new String(bytes); - } - } else { - if (b == COLON && httpEntity.tmp == null) { - int len = endPos - startPos - 1; - byte[] bytes = new byte[len]; - buf.get(bytes, 0, len); - startPos = endPos; - buf.position(startPos); - httpEntity.tmp = bytes; - } else if (httpEntity.crNum == 1 && httpEntity.lfNum == 1) { - int len = endPos - startPos - 2; - byte[] bytes = new byte[len]; - buf.get(bytes, 0, len); - startPos = endPos; - buf.position(startPos); - httpEntity.setHeader(httpEntity.tmp, bytes); - httpEntity.tmp = null; -// if (Arrays.equals(CONTENT_LENGTH, httpEntity.tmp)) { -// httpEntity.contentLength = (value == null ? 
0 : Integer.valueOf(value)); -// httpEntity.bodyBuffer = ByteBuffer.allocate(httpEntity.contentLength);//TODO can pooling -// } -// if (Arrays.equals(CHUNKED, httpEntity.tmp)) { -// httpEntity.chunked = true; -// throw new RuntimeException("not support chunked"); -// } - } - } - } - } catch (Exception e) { - logger.error("readHeader error.", e); - } - } - - private void readBody(ByteBuffer byteBuffer, HttpRequestEntity httpEntity) { - try { - if (httpEntity.bodyBuffer.hasRemaining()) { - if (byteBuffer.remaining() <= httpEntity.bodyBuffer.remaining()) { - httpEntity.bodyBuffer.put(byteBuffer); - } else { - byte[] bytes = new byte[httpEntity.bodyBuffer.remaining()]; - byteBuffer.get(bytes); - httpEntity.bodyBuffer.put(bytes); - } - } - if (!httpEntity.bodyBuffer.hasRemaining()) { - httpEntity.processBody(); - } - } catch (Exception e) { - logger.error("readHeader error.", e); - } - } - - @Override - public ByteBuffer encode(HttpEntity httpEntity, Session session) { - ByteBuffer byteBuffer = session.getHandler().getPooledByteBuff().get(); - HttpResponseEntity httpResponseEntity = (HttpResponseEntity) httpEntity; - return httpResponseEntity.toBuffer(byteBuffer); - } -} diff --git a/frameworks/Java/isocket-nio/src/main/resources/log4j2.xml b/frameworks/Java/isocket-nio/src/main/resources/log4j2.xml index 4a318937d6e..dfdb29dd766 100644 --- a/frameworks/Java/isocket-nio/src/main/resources/log4j2.xml +++ b/frameworks/Java/isocket-nio/src/main/resources/log4j2.xml @@ -1,5 +1,5 @@ - + isocket-nio-tfb @@ -25,9 +25,7 @@ - - - + diff --git a/frameworks/Java/javalin/pom.xml b/frameworks/Java/javalin/pom.xml index fa5e27e1a07..103c453254f 100644 --- a/frameworks/Java/javalin/pom.xml +++ b/frameworks/Java/javalin/pom.xml @@ -13,10 +13,10 @@ 11 3.13.3 1.7.30 - 2.12.0 + 2.12.6.1 4.0.2 - 42.3.3 - 4.2.0 + 42.4.1 + 4.2.1 3.1.5 diff --git a/frameworks/Java/jetty/pom.xml b/frameworks/Java/jetty/pom.xml index c860b1d4bf4..79112565a05 100644 --- a/frameworks/Java/jetty/pom.xml +++ b/frameworks/Java/jetty/pom.xml @@ -11,7 +11,7 @@ UTF-8 11 11 - 9.4.41.v20210516 + 10.0.10 hello.handler.HelloWebServer diff --git a/frameworks/Java/jlhttp/pom.xml b/frameworks/Java/jlhttp/pom.xml index 4c5a3e6fe07..aa491b6d940 100644 --- a/frameworks/Java/jlhttp/pom.xml +++ b/frameworks/Java/jlhttp/pom.xml @@ -24,7 +24,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.12.6.1 com.fasterxml.jackson.module @@ -35,7 +35,7 @@ org.postgresql postgresql - 42.3.3 + 42.4.1 com.zaxxer diff --git a/frameworks/Java/jooby/pom.xml b/frameworks/Java/jooby/pom.xml index 74e32d97818..8839413f957 100644 --- a/frameworks/Java/jooby/pom.xml +++ b/frameworks/Java/jooby/pom.xml @@ -12,9 +12,9 @@ 2.9.5 - 4.1.49.Final + 4.1.77.Final 1.9.5 - 42.3.3 + 42.4.1 UTF-8 11 11 diff --git a/frameworks/Java/light-java/pom.xml b/frameworks/Java/light-java/pom.xml index f65103ef13d..499d698332b 100644 --- a/frameworks/Java/light-java/pom.xml +++ b/frameworks/Java/light-java/pom.xml @@ -25,10 +25,10 @@ 11 2.0.1 1.2.3 - 2.1.6.Final + 2.2.19.Final 3.3.1 - 8.0.18 - 42.3.3 + 8.0.28 + 42.4.1 1.8.4 0.9.6 3.8.0 diff --git a/frameworks/Java/magician-io/README.md b/frameworks/Java/magician-io/README.md index 59dafa64d82..569e6fe7707 100644 --- a/frameworks/Java/magician-io/README.md +++ b/frameworks/Java/magician-io/README.md @@ -5,7 +5,7 @@ This is Magician's official website address[http://magician-io.com](http://magic ## Versions - Java OpenJDK 1.8 -- Martian 3.2.13 +- Magician 2.0.3 ##Test URLs ### JSON Encoding Test diff --git 
a/frameworks/Java/magician-io/pom.xml b/frameworks/Java/magician-io/pom.xml index 829aa2f0999..f7e98bfe5c0 100644 --- a/frameworks/Java/magician-io/pom.xml +++ b/frameworks/Java/magician-io/pom.xml @@ -18,13 +18,13 @@ com.github.yuyenews Magician - 1.1.15 + 2.0.3 com.alibaba fastjson - 1.2.76 + 2.0.8 diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java b/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java index 213db26ff33..d71d56b74c6 100644 --- a/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java @@ -1,13 +1,11 @@ package com.test.io; -import com.test.io.handler.JsonHandler; -import com.test.io.handler.TextHandler; import io.magician.Magician; -import io.magician.common.event.EventGroup; +import io.magician.common.config.MagicianConfig; +import io.magician.network.HttpServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.concurrent.Executors; public class Start { @@ -16,13 +14,17 @@ public class Start { public static void main(String[] args) { try { - EventGroup ioEventGroup = new EventGroup(1, Executors.newCachedThreadPool()); - EventGroup workerEventGroup = new EventGroup(4, Executors.newCachedThreadPool()); + MagicianConfig magicianConfig = new MagicianConfig(); + magicianConfig.setNumberOfPorts(1); + magicianConfig.setBossThreads(3); + magicianConfig.setWorkThreads(5); - Magician.createTCPServer(ioEventGroup, workerEventGroup) - .handler("/json", new JsonHandler()) - .handler("/plaintext", new TextHandler()) - .bind(8080, 10000); + + HttpServer httpServer = Magician.createHttp() + .scan("com.test.io") + .setConfig(magicianConfig); + + httpServer.bind(8080); } catch (Exception e){ logger.error("启动服务出现异常", e); diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java index 50345dcb2a4..e12a3582d6b 100644 --- a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java @@ -2,27 +2,35 @@ import com.alibaba.fastjson.JSON; import com.test.io.vo.MessageVO; -import io.magician.tcp.codec.impl.http.request.MagicianRequest; -import io.magician.tcp.handler.MagicianHandler; +import io.magician.application.request.MagicianRequest; +import io.magician.application.request.MagicianResponse; +import io.magician.common.annotation.HttpHandler; +import io.magician.network.handler.HttpBaseHandler; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; -public class JsonHandler implements MagicianHandler { +@HttpHandler(path = "/json") +public class JsonHandler implements HttpBaseHandler { private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); + @Override - public void request(MagicianRequest magicianRequest) { + public void request(MagicianRequest magicianRequest, MagicianResponse magicianResponse) { String str = simpleDateFormat.format(new Date()); MessageVO messageVO = new MessageVO(); messageVO.setMessage("Hello, World!"); - magicianRequest.getResponse() - .setResponseHeader("Server","magician") - .setResponseHeader("Date", str) - .sendJson(200, JSON.toJSONString(messageVO)); + try { + magicianResponse + .setResponseHeader("Server","magician") + .setResponseHeader("Date", str) + .sendJson(JSON.toJSONString(messageVO)); + } catch (Exception e) { + throw new 
RuntimeException(e); + } } } diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java index 6b5640c8b47..e6f7ef84431 100644 --- a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java @@ -1,23 +1,31 @@ package com.test.io.handler; -import io.magician.tcp.codec.impl.http.request.MagicianRequest; -import io.magician.tcp.handler.MagicianHandler; +import io.magician.application.request.MagicianRequest; +import io.magician.application.request.MagicianResponse; +import io.magician.common.annotation.HttpHandler; +import io.magician.network.handler.HttpBaseHandler; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; -public class TextHandler implements MagicianHandler { +@HttpHandler(path = "plaintext") +public class TextHandler implements HttpBaseHandler { private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); + @Override - public void request(MagicianRequest magicianRequest) { + public void request(MagicianRequest magicianRequest, MagicianResponse magicianResponse) { String str = simpleDateFormat.format(new Date()); - magicianRequest.getResponse() - .setResponseHeader("Server","magician") - .setResponseHeader("Date", str) - .sendText(200, "Hello, World!"); + try { + magicianRequest.getResponse() + .setResponseHeader("Server","magician") + .setResponseHeader("Date", str) + .sendText("Hello, World!"); + } catch (Exception e) { + throw new RuntimeException(e); + } } } diff --git a/frameworks/Java/microhttp/README.md b/frameworks/Java/microhttp/README.md new file mode 100755 index 00000000000..845ab9e38d2 --- /dev/null +++ b/frameworks/Java/microhttp/README.md @@ -0,0 +1,24 @@ +# Microhttp Benchmarking Test + +### Test Type Implementation Source Code + +* [JSON](src/main/java/hello/HelloWebServer.java) +* [PLAINTEXT](src/main/java/hello/HelloWebServer.java) +* [DB](src/main/java/db/DbWebServer.java) + +## Important Libraries + +The tests were run with: +* [OpenJDK 17.0.2](http://openjdk.java.net/) +* [Microhttp 0.8](https://github.com/ebarlas/microhttp) +* [Jackson 2.13.3](https://github.com/FasterXML/jackson) +* [MySQL Connector 8.0.29](https://github.com/mysql/mysql-connector-j) + +## Test URLs +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext \ No newline at end of file diff --git a/frameworks/Java/microhttp/benchmark_config.json b/frameworks/Java/microhttp/benchmark_config.json new file mode 100755 index 00000000000..d5c6e73490f --- /dev/null +++ b/frameworks/Java/microhttp/benchmark_config.json @@ -0,0 +1,44 @@ +{ + "framework": "microhttp", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "None", + "framework": "microhttp", + "language": "Java", + "flavor": "None", + "orm": "None", + "platform": "Microhttp", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "microhttp", + "notes": "", + "versus": "None" + }, + "mysql": { + "db_url": "/db", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "MySQL", + "framework": "microhttp", + "language": "Java", + "flavor": "None", + "orm": "Raw", + "platform": "Microhttp", + "webserver": "None", + 
"os": "Linux", + "database_os": "Linux", + "display_name": "microhttp", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Java/microhttp/microhttp-mysql.dockerfile b/frameworks/Java/microhttp/microhttp-mysql.dockerfile new file mode 100644 index 00000000000..f6babf1e21a --- /dev/null +++ b/frameworks/Java/microhttp/microhttp-mysql.dockerfile @@ -0,0 +1,13 @@ +FROM maven:3.8.4-openjdk-17-slim as maven +WORKDIR /microhttp +COPY pom.xml pom.xml +COPY src src +RUN mvn compile assembly:single -q + +FROM openjdk:17.0.2 +WORKDIR /microhttp +COPY --from=maven /microhttp/target/microhttp-example-0.1-jar-with-dependencies.jar app.jar + +EXPOSE 8080 + +CMD ["java", "-cp", "app.jar", "db.DbWebServer"] diff --git a/frameworks/Java/microhttp/microhttp.dockerfile b/frameworks/Java/microhttp/microhttp.dockerfile new file mode 100644 index 00000000000..9609c8e4eda --- /dev/null +++ b/frameworks/Java/microhttp/microhttp.dockerfile @@ -0,0 +1,13 @@ +FROM maven:3.8.4-openjdk-17-slim as maven +WORKDIR /microhttp +COPY pom.xml pom.xml +COPY src src +RUN mvn compile assembly:single -q + +FROM openjdk:17.0.2 +WORKDIR /microhttp +COPY --from=maven /microhttp/target/microhttp-example-0.1-jar-with-dependencies.jar app.jar + +EXPOSE 8080 + +CMD ["java", "-jar", "app.jar"] diff --git a/frameworks/Java/microhttp/pom.xml b/frameworks/Java/microhttp/pom.xml new file mode 100644 index 00000000000..a7e88bc5207 --- /dev/null +++ b/frameworks/Java/microhttp/pom.xml @@ -0,0 +1,87 @@ + + + 4.0.0 + + com.techempower + microhttp-example + 0.1 + + + 17 + 17 + + + jar + + + + org.microhttp + microhttp + 0.8 + + + com.fasterxml.jackson.core + jackson-core + 2.13.3 + + + com.fasterxml.jackson.core + jackson-databind + 2.13.3 + + + mysql + mysql-connector-java + 8.0.29 + + + org.junit.jupiter + junit-jupiter + 5.8.2 + test + + + + + + + true + org.apache.maven.plugins + maven-compiler-plugin + 3.10.0 + + false + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M5 + + + maven-assembly-plugin + + + + hello.HelloWebServer + + + + jar-with-dependencies + + + + + make-assembly + package + + single + + + + + + + diff --git a/frameworks/Java/microhttp/src/main/java/db/DbConnection.java b/frameworks/Java/microhttp/src/main/java/db/DbConnection.java new file mode 100644 index 00000000000..d2441b2a07b --- /dev/null +++ b/frameworks/Java/microhttp/src/main/java/db/DbConnection.java @@ -0,0 +1,58 @@ +package db; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Duration; +import java.util.Properties; + +public class DbConnection { + + private static final Properties PROPERTIES = getProperties(); + private static final String URL = "jdbc:mysql://tfb-database:3306/hello_world"; + private static final String SELECT = "select id, randomNumber from world where id = ?"; + + private static Properties getProperties() { + Properties properties = new Properties(); + properties.put("user", "benchmarkdbuser"); + properties.put("password", "benchmarkdbpass"); + properties.put("useSSL", "false"); + return properties; + } + + private final Connection connection; + private final PreparedStatement statement; + + private long lastUse; + + public DbConnection() throws SQLException { + this.connection = DriverManager.getConnection(URL, PROPERTIES); + this.statement = connection.prepareStatement(SELECT); + this.lastUse = System.nanoTime(); + } + + public WorldRow executeQuery(int id) throws SQLException { 
+ lastUse = System.nanoTime(); + statement.setInt(1, id); + try (ResultSet rs = statement.executeQuery()) { + rs.next(); + return new WorldRow(rs.getInt(1), rs.getInt(2)); + } + } + + public boolean isIdle(Duration maxIdle) { + return System.nanoTime() - lastUse > maxIdle.toNanos(); + } + + public void close() { + try { + statement.close(); + } catch (SQLException ignore) {} + try { + connection.close(); + } catch (SQLException ignore) {} + } + +} diff --git a/frameworks/Java/microhttp/src/main/java/db/DbConnectionPool.java b/frameworks/Java/microhttp/src/main/java/db/DbConnectionPool.java new file mode 100644 index 00000000000..f821571a889 --- /dev/null +++ b/frameworks/Java/microhttp/src/main/java/db/DbConnectionPool.java @@ -0,0 +1,83 @@ +package db; + +import java.sql.SQLException; +import java.time.Duration; +import java.util.concurrent.ConcurrentLinkedQueue; + +public class DbConnectionPool { + + private final ConcurrentLinkedQueue connections; + + private final int initialSize; + private final Duration maxIdle; + + public DbConnectionPool(int initialSize, Duration maxIdle) { + this.connections = new ConcurrentLinkedQueue<>(); + this.initialSize = initialSize; + this.maxIdle = maxIdle; + } + + public void start() { + startDaemon(this::initialize, "db-initialize"); + startDaemon(this::keepAlive, "db-keep-alive"); + } + + private static void startDaemon(Runnable task, String name) { + Thread t = new Thread(task, name); + t.setDaemon(true); + t.start(); + } + + public WorldRow executeQuery(int id) throws SQLException { + DbConnection connection = connections.poll(); + if (connection == null) { + connection = new DbConnection(); + } + + try { + WorldRow result = connection.executeQuery(id); + connections.add(connection); + return result; + } catch (SQLException e) { + connection.close(); + throw e; + } + } + + private void initialize() { + for (int i = 0; i < initialSize; i++) { + try { + connections.add(new DbConnection()); + } catch (SQLException e) { + break; + } + } + } + + private void keepAlive() { + while (true) { + DbConnection connection; + while ((connection = connections.peek()) != null && connection.isIdle(maxIdle)) { + rotateConnection(); // probabilistic - may rotate next connection in line, that's okay + } + try { + Thread.sleep(1_000); // wait a moment for next idle check + } catch (InterruptedException e) { + break; + } + } + } + + private void rotateConnection() { + DbConnection connection = connections.poll(); // take from front of queue + if (connection != null) { + try { + connection.executeQuery(1); + connections.add(connection); // rotate to back of queue + } catch (SQLException ignore) { + connection.close(); + } + } + } + +} diff --git a/frameworks/Java/microhttp/src/main/java/db/DbWebServer.java b/frameworks/Java/microhttp/src/main/java/db/DbWebServer.java new file mode 100644 index 00000000000..ba75dea87c0 --- /dev/null +++ b/frameworks/Java/microhttp/src/main/java/db/DbWebServer.java @@ -0,0 +1,138 @@ +package db; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.microhttp.EventLoop; +import org.microhttp.Header; +import org.microhttp.LogEntry; +import org.microhttp.Logger; +import org.microhttp.Options; +import org.microhttp.Request; +import org.microhttp.Response; + +import java.io.IOException; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; +import 
java.util.concurrent.ThreadLocalRandom; +import java.util.function.Consumer; + +public class DbWebServer { + + static final String SERVER = "microhttp"; + + static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.RFC_1123_DATE_TIME.withZone(ZoneOffset.UTC); + + static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + final int port; + final DbConnectionPool connectionPool; + final Executor executor; + + volatile String date = DATE_FORMATTER.format(Instant.now()); + + DbWebServer(int port) { + this.port = port; + this.connectionPool = new DbConnectionPool(32, Duration.ofSeconds(30)); + this.executor = Executors.newFixedThreadPool(256); + } + + void start() throws IOException, InterruptedException { + connectionPool.start(); + startDateUpdater(); + Options options = new Options() + .withHost(null) // wildcard any-address binding + .withPort(port) + .withReuseAddr(true) + .withReusePort(true) + .withAcceptLength(8_192) + .withMaxRequestSize(1_024 * 1_024) + .withReadBufferSize(1_024 * 64) + .withResolution(Duration.ofMillis(1_000)) + .withRequestTimeout(Duration.ofSeconds(90)); + EventLoop eventLoop = new EventLoop(options, new DisabledLogger(), this::handle); + eventLoop.start(); + eventLoop.join(); + } + + void startDateUpdater() { + Thread thread = new Thread(this::runDateUpdater); + thread.setDaemon(true); + thread.setPriority(Thread.MIN_PRIORITY); + thread.start(); + } + + void runDateUpdater() { + while (true) { + try { + Thread.sleep(1_000); + } catch (InterruptedException e) { + return; + } + date = DATE_FORMATTER.format(Instant.now()); + } + } + + void handle(Request request, Consumer callback) { + if (request.uri().equals("/db")) { + executor.execute(() -> handleDbQuery(callback)); + } else { + List
<Header> headers = List.of( + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(404, "Not Found", headers, new byte[0])); + } + } + + void handleDbQuery(Consumer<Response> callback) { + try { + WorldRow row = connectionPool.executeQuery(1 + ThreadLocalRandom.current().nextInt(10_000)); + List<Header>
headers = List.of( + new Header("Content-Type", "application/json"), + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(200, "OK", headers, jsonBody(row))); + } catch (Exception e) { + List<Header>
headers = List.of( + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(500, "Internal Server Error", headers, new byte[0])); + } + } + + static byte[] jsonBody(WorldRow row) { + try { + return OBJECT_MAPPER.writeValueAsBytes(row); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void main(String[] args) throws IOException, InterruptedException { + int port = args.length > 0 + ? Integer.parseInt(args[0]) + : 8080; + new DbWebServer(port).start(); + } + + static class DisabledLogger implements Logger { + @Override + public boolean enabled() { + return false; + } + + @Override + public void log(LogEntry... logEntries) { + + } + + @Override + public void log(Exception e, LogEntry... logEntries) { + + } + } + +} diff --git a/frameworks/Java/microhttp/src/main/java/db/WorldRow.java b/frameworks/Java/microhttp/src/main/java/db/WorldRow.java new file mode 100644 index 00000000000..fb4a9436bcc --- /dev/null +++ b/frameworks/Java/microhttp/src/main/java/db/WorldRow.java @@ -0,0 +1,3 @@ +package db; + +public record WorldRow(int id, int randomNumber) {} diff --git a/frameworks/Java/microhttp/src/main/java/hello/HelloWebServer.java b/frameworks/Java/microhttp/src/main/java/hello/HelloWebServer.java new file mode 100644 index 00000000000..dfba893d411 --- /dev/null +++ b/frameworks/Java/microhttp/src/main/java/hello/HelloWebServer.java @@ -0,0 +1,131 @@ +package hello; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.microhttp.EventLoop; +import org.microhttp.Header; +import org.microhttp.LogEntry; +import org.microhttp.Logger; +import org.microhttp.Options; +import org.microhttp.Request; +import org.microhttp.Response; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.function.Consumer; + +public class HelloWebServer { + + static final String MESSAGE = "Hello, World!"; + static final byte[] TEXT_BYTES = MESSAGE.getBytes(StandardCharsets.UTF_8); + + static final String SERVER = "microhttp"; + + static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.RFC_1123_DATE_TIME.withZone(ZoneOffset.UTC); + + static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + final int port; + + volatile String date = DATE_FORMATTER.format(Instant.now()); + + record JsonMessage(String message) { + } + + HelloWebServer(int port) { + this.port = port; + } + + void start() throws IOException, InterruptedException { + startUpdater(); + Options options = new Options() + .withHost(null) // wildcard any-address binding + .withPort(port) + .withReuseAddr(true) + .withReusePort(true) + .withAcceptLength(8_192) + .withMaxRequestSize(1_024 * 1_024) + .withReadBufferSize(1_024 * 64) + .withResolution(Duration.ofMillis(1_000)) + .withRequestTimeout(Duration.ofSeconds(90)); + EventLoop eventLoop = new EventLoop(options, new DisabledLogger(), this::handle); + eventLoop.start(); + eventLoop.join(); + } + + void startUpdater() { + Thread thread = new Thread(this::runDateUpdater); + thread.setDaemon(true); + thread.setPriority(Thread.MIN_PRIORITY); + thread.start(); + } + + void runDateUpdater() { + while (true) { + try { + Thread.sleep(1_000); + } catch (InterruptedException e) { + return; + } + date = DATE_FORMATTER.format(Instant.now()); + } + } + + void handle(Request request, Consumer callback) { + if 
(request.uri().equals("/plaintext")) { + List<Header> headers = List.of( + new Header("Content-Type", "text/plain"), + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(200, "OK", headers, TEXT_BYTES)); + } else if (request.uri().equals("/json")) { + List<Header>
headers = List.of( + new Header("Content-Type", "application/json"), + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(200, "OK", headers, jsonBody())); + } else { + List<Header>
headers = List.of( + new Header("Date", date), + new Header("Server", SERVER)); + callback.accept(new Response(404, "Not Found", headers, new byte[0])); + } + } + + static byte[] jsonBody() { + try { + return OBJECT_MAPPER.writeValueAsBytes(new JsonMessage(MESSAGE)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void main(String[] args) throws IOException, InterruptedException { + int port = args.length > 0 + ? Integer.parseInt(args[0]) + : 8080; + new HelloWebServer(port).start(); + } + + static class DisabledLogger implements Logger { + @Override + public boolean enabled() { + return false; + } + + @Override + public void log(LogEntry... logEntries) { + + } + + @Override + public void log(Exception e, LogEntry... logEntries) { + + } + } + +} diff --git a/frameworks/Java/microhttp/src/test/java/hello/HelloWebServerTest.java b/frameworks/Java/microhttp/src/test/java/hello/HelloWebServerTest.java new file mode 100644 index 00000000000..cd23d578662 --- /dev/null +++ b/frameworks/Java/microhttp/src/test/java/hello/HelloWebServerTest.java @@ -0,0 +1,64 @@ +package hello; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; + +class HelloWebServerTest { + + @Test + void plainTextAndJson() throws IOException, InterruptedException { + HelloWebServer server = new HelloWebServer(8080); + Runnable task = () -> { + try { + server.start(); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + Thread thread = new Thread(task); + thread.setDaemon(true); + thread.start(); + HttpClient client = HttpClient.newBuilder() + .version(HttpClient.Version.HTTP_1_1) + .build(); + verifyPlainText(client); + verifyJson(client); + verifyOther(client); + } + + static void verifyPlainText(HttpClient client) throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .GET() + .uri(URI.create("http://localhost:8080/plaintext")) + .build(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + Assertions.assertEquals(200, response.statusCode()); + Assertions.assertEquals("Hello, World!", response.body()); + } + + static void verifyJson(HttpClient client) throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .GET() + .uri(URI.create("http://localhost:8080/json")) + .build(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + Assertions.assertEquals(200, response.statusCode()); + Assertions.assertEquals("{\"message\":\"Hello, World!\"}", response.body()); + } + + static void verifyOther(HttpClient client) throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .GET() + .uri(URI.create("http://localhost:8080/unknown")) + .build(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + Assertions.assertEquals(404, response.statusCode()); + } + +} diff --git a/frameworks/Java/minijax/pom.xml b/frameworks/Java/minijax/pom.xml index 3dbeaff940c..25025d7df30 100644 --- a/frameworks/Java/minijax/pom.xml +++ b/frameworks/Java/minijax/pom.xml @@ -14,7 +14,7 @@ 2.7.4 2.2.1 0.3.14 - 8.0.18 + 8.0.28 3.1.0 diff --git a/frameworks/Java/nanohttpd/pom.xml b/frameworks/Java/nanohttpd/pom.xml index e76fd5c1fab..9649728f3f6 100644 --- a/frameworks/Java/nanohttpd/pom.xml +++ 
b/frameworks/Java/nanohttpd/pom.xml @@ -23,7 +23,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.12.6.1 com.fasterxml.jackson.module diff --git a/frameworks/Java/netty/pom.xml b/frameworks/Java/netty/pom.xml index d5069091ebf..cdfccdb596f 100644 --- a/frameworks/Java/netty/pom.xml +++ b/frameworks/Java/netty/pom.xml @@ -11,7 +11,7 @@ 11 11 - 4.1.71.Final + 4.1.77.Final jar diff --git a/frameworks/Java/ninja-standalone/pom.xml b/frameworks/Java/ninja-standalone/pom.xml index 8b416348713..596d0883038 100644 --- a/frameworks/Java/ninja-standalone/pom.xml +++ b/frameworks/Java/ninja-standalone/pom.xml @@ -23,7 +23,7 @@ 6.0.20.Final 2.3.0 9.4.18.v20190429 - 8.0.18 + 8.0.28 6.5.0 2.0.2 diff --git a/frameworks/Java/proteus/pom.xml b/frameworks/Java/proteus/pom.xml index ad14cb706f1..e5c05fddc36 100644 --- a/frameworks/Java/proteus/pom.xml +++ b/frameworks/Java/proteus/pom.xml @@ -247,12 +247,12 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 org.postgresql postgresql - 42.3.3 + 42.4.1 diff --git a/frameworks/Java/quarkus/README.md b/frameworks/Java/quarkus/README.md index 899ca69a984..becf37c699c 100644 --- a/frameworks/Java/quarkus/README.md +++ b/frameworks/Java/quarkus/README.md @@ -4,19 +4,16 @@ This is the Quarkus portion of a [benchmarking test suite](../) comparing a vari ## Implementations -There are currently 6 repository implementations: +There are currently 3 implementations: - RESTEasy and Hibernate ORM - RESTEasy Reactive and Hibernate ORM - RESTEasy Reactive and Hibernate Reactive -- RESTEasy Reactive and Vert.x PG Client -- Reactive Routes and Hibernate Reactive -- Reactive Routes and Vert.x PG Client ## Versions * [Java OpenJDK 11](http://openjdk.java.net/) -* [Quarkus 1.11.0.Beta1](https://quarkus.io) +* [Quarkus 2.9.1.Final](https://quarkus.io) ## Test URLs diff --git a/frameworks/Java/quarkus/benchmark_config.json b/frameworks/Java/quarkus/benchmark_config.json index faed66a8183..fc0f5a8bdc9 100644 --- a/frameworks/Java/quarkus/benchmark_config.json +++ b/frameworks/Java/quarkus/benchmark_config.json @@ -18,79 +18,10 @@ "flavor": "None", "orm": "Full", "platform": "JAX-RS", - "webserver": "Undertow", + "webserver": "Vert.x", "os": "Linux", "database_os": "Linux", - "display_name": "Quarkus + RESTEasy + Hibernate ORM", - "notes": "", - "versus": "Netty" - }, - "reactive-routes-hibernate-reactive": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "update_url": "/updates?queries=", - "fortune_url": "/fortunes", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "fullstack", - "database": "Postgres", - "framework": "Quarkus", - "language": "Java", - "flavor": "None", - "orm": "Full", - "platform": "Vertx-Web", - "webserver": "Vertx", - "os": "Linux", - "database_os": "Linux", - "display_name": "Quarkus + Reactive Routes + Hibernate Reactive", - "notes": "", - "versus": "Netty" - }, - "reactive-routes-pgclient": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "update_url": "/updates?queries=", - "fortune_url": "/fortunes", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "fullstack", - "database": "Postgres", - "framework": "Quarkus", - "language": "Java", - "flavor": "None", - "orm": "Micro", - "platform": "Vertx-Web", - "webserver": "Vertx", - "os": "Linux", - "database_os": "Linux", - "display_name": "Quarkus + Reactive Routes + PgClient", - "notes": "", - "versus": "Netty" - }, - 
"resteasy-reactive-pgclient": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "update_url": "/updates?queries=", - "fortune_url": "/fortunes", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "fullstack", - "database": "Postgres", - "framework": "Quarkus", - "language": "Java", - "flavor": "None", - "orm": "Micro", - "platform": "RESTEasy Reactive", - "webserver": "Vertx", - "os": "Linux", - "database_os": "Linux", - "display_name": "Quarkus RESTEasy Reactive + PgClient", + "display_name": "quarkus [RESTEasy, Hibernate ORM]", "notes": "", "versus": "Netty" }, @@ -108,12 +39,12 @@ "framework": "Quarkus", "language": "Java", "flavor": "None", - "orm": "Micro", + "orm": "Full", "platform": "RESTEasy Reactive", - "webserver": "Vertx", + "webserver": "Vert.x", "os": "Linux", "database_os": "Linux", - "display_name": "Quarkus RESTEasy Reactive + Hibernate", + "display_name": "quarkus [RESTEasy Reactive, Hibernate ORM]", "notes": "", "versus": "Netty" }, @@ -131,12 +62,12 @@ "framework": "Quarkus", "language": "Java", "flavor": "None", - "orm": "Micro", + "orm": "Full", "platform": "RESTEasy Reactive", - "webserver": "Vertx", + "webserver": "Vert.x", "os": "Linux", "database_os": "Linux", - "display_name": "Quarkus RESTEasy Reactive + Hibernate Reactive", + "display_name": "quarkus [RESTEasy Reactive, Hibernate Reactive]", "notes": "", "versus": "Netty" } diff --git a/frameworks/Java/quarkus/config.toml b/frameworks/Java/quarkus/config.toml index e80bec69dac..4ad3ba27425 100644 --- a/frameworks/Java/quarkus/config.toml +++ b/frameworks/Java/quarkus/config.toml @@ -1,23 +1,6 @@ [framework] name = "quarkus" -[resteasy-reactive-pgclient] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/queries?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortunes" -approach = "Realistic" -classification = "fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Micro" -platform = "RESTEasy Reactive" -webserver = "Vertx" -versus = "Netty" - [main] urls.plaintext = "/plaintext" urls.json = "/json" @@ -35,40 +18,6 @@ platform = "JAX-RS" webserver = "Undertow" versus = "Netty" -[reactive-routes-pgclient] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/queries?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortunes" -approach = "Realistic" -classification = "fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Micro" -platform = "Vertx-Web" -webserver = "Vertx" -versus = "Netty" - -[reactive-routes-hibernate-reactive] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/queries?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortunes" -approach = "Realistic" -classification = "fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Full" -platform = "Vertx-Web" -webserver = "Vertx" -versus = "Netty" - [resteasy-reactive-hibernate] urls.plaintext = "/plaintext" urls.json = "/json" @@ -81,7 +30,7 @@ classification = "fullstack" database = "Postgres" database_os = "Linux" os = "Linux" -orm = "Micro" +orm = "Full" platform = "RESTEasy Reactive" webserver = "Vertx" versus = "Netty" @@ -98,7 +47,7 @@ classification = "fullstack" database = "Postgres" database_os = "Linux" os = "Linux" -orm = "Micro" +orm = "Full" platform = "RESTEasy Reactive" webserver = "Vertx" versus = "Netty" diff --git a/frameworks/Java/quarkus/pom.xml 
b/frameworks/Java/quarkus/pom.xml index 792344c0fb9..1cecec065dc 100644 --- a/frameworks/Java/quarkus/pom.xml +++ b/frameworks/Java/quarkus/pom.xml @@ -8,7 +8,7 @@ pom - 1.11.0.Beta1 + 2.9.1.Final UTF-8 11 11 @@ -16,41 +16,29 @@ resteasy-hibernate - reactive-routes-hibernate-reactive - reactive-routes-pgclient - resteasy-reactive-pgclient resteasy-reactive-hibernate resteasy-reactive-hibernate-reactive + quarkus-benchmark-common - io.quarkus - quarkus-bom - ${quarkus.version} - pom - import + org.hibernate.reactive + hibernate-reactive-core + 1.1.6.Final - io.quarkus.benchmark - base + io.quarkus + quarkus-benchmark-common ${project.version} - io.vertx - vertx-pg-client - 3.9.0 - - - io.vertx - vertx-sql-client - 3.9.0 - - - io.vertx - vertx-web - 3.9.3 + io.quarkus + quarkus-bom + ${quarkus.version} + pom + import diff --git a/frameworks/Java/quarkus/quarkus-benchmark-common/pom.xml b/frameworks/Java/quarkus/quarkus-benchmark-common/pom.xml new file mode 100644 index 00000000000..70a0737e352 --- /dev/null +++ b/frameworks/Java/quarkus/quarkus-benchmark-common/pom.xml @@ -0,0 +1,37 @@ + + + + benchmark + io.quarkus + 1.0-SNAPSHOT + + 4.0.0 + + quarkus-benchmark-common + + + + org.junit.jupiter + junit-jupiter + test + + + io.netty + netty-common + + + + + skip.tests.by.default + + true + + + true + + + + + \ No newline at end of file diff --git a/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/LocalRandom.java b/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/LocalRandom.java new file mode 100644 index 00000000000..4ce5fd06ddc --- /dev/null +++ b/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/LocalRandom.java @@ -0,0 +1,21 @@ +package io.quarkus.benchmark.utils; + +public interface LocalRandom { + + /** + * @return an Integer representing a random number in the space expected by the benchmark: [1-10000]. + */ + Integer getNextRandom(); + + /** + * Also according to benchmark requirements, except that in this special case + * of the update test we need to ensure we'll actually generate an update operation: + * for this we need to generate a random number between 1 to 10000, but different + * from the current field value. + * + * @param exclusion + * @return an Integer representing a random number in the space expected by the benchmark: [1-10000], + * but always excluding the one matching exclusion. + */ + Integer getNextRandomExcluding(int exclusion); +} diff --git a/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/Randomizer.java b/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/Randomizer.java new file mode 100644 index 00000000000..8b73b509675 --- /dev/null +++ b/frameworks/Java/quarkus/quarkus-benchmark-common/src/main/java/io/quarkus/benchmark/utils/Randomizer.java @@ -0,0 +1,78 @@ +package io.quarkus.benchmark.utils; + +import io.netty.util.concurrent.FastThreadLocal; + +import java.util.ArrayList; + +/** + * The rules of the benchmark frequently require randomly generated numbers + * in the range from 1 to 10000. + * Often multiple numbers are needed, and in this case we need to avoid duplicates + * because otherwise the ORM optimisations will invalidate our operations + * (Hibernate ORM will skip unnecessary operations but this is specifically disallowed, + * and it's not possible to disable this behaviour in ORM as it's an intrinsic + * aspect of correctness of an ORM). 
+ * Because of this twist in the rules, we're better off writing a custom helper + * than making vanilla use of the Java platform randomizer. + */ +public final class Randomizer { + + static final short MIN_OF_RANGE = 1; + static final short MAX_OF_RANGE = 10000; + static final short RANGE_SPACE = MAX_OF_RANGE - MIN_OF_RANGE + 1; + private final static Integer[] randomSequenceBoxed = initRange(); + + private static final FastThreadLocal localRandom = new FastThreadLocal<>() { + @Override + protected io.quarkus.benchmark.utils.Randomizer.ThreadlocalizedRandomizer initialValue() { + return new ThreadlocalizedRandomizer(); + } + }; + + public static LocalRandom current() { + return localRandom.get(); + } + + private static Integer[] initRange() { + ArrayList boxedSequence = new java.util.ArrayList<>(MAX_OF_RANGE); + short value = MIN_OF_RANGE; + for (int i = 0; i < MAX_OF_RANGE; i++) { + boxedSequence.add(Integer.valueOf(value++)); + } + java.util.Collections.shuffle(boxedSequence); + return boxedSequence.toArray(new Integer[0]); + } + + private static final class ThreadlocalizedRandomizer implements LocalRandom { + + private final java.util.concurrent.ThreadLocalRandom random; + private int currentIndex; + + private ThreadlocalizedRandomizer() { + this.random = java.util.concurrent.ThreadLocalRandom.current(); + this.currentIndex = random.nextInt(RANGE_SPACE); + } + + @Override + public Integer getNextRandom() { + currentIndex++; + if (currentIndex == MAX_OF_RANGE) { + currentIndex = 0; + } + return randomSequenceBoxed[currentIndex]; + } + + @Override + public Integer getNextRandomExcluding(int exclusion) { + final Integer nextRandom = getNextRandom(); + if (nextRandom.intValue() == exclusion) { + //Since it's a sequence of shuffled unique numbers, and this is consumed by a single thread, + //we know at this stage the next try will be different for sure. 
+ return getNextRandom(); + } + return nextRandom; + } + + } + +} diff --git a/frameworks/Java/quarkus/quarkus-benchmark-common/src/test/java/io/quarkus/benchmark/utils/LocalRandomTest.java b/frameworks/Java/quarkus/quarkus-benchmark-common/src/test/java/io/quarkus/benchmark/utils/LocalRandomTest.java new file mode 100644 index 00000000000..ac556a8cefd --- /dev/null +++ b/frameworks/Java/quarkus/quarkus-benchmark-common/src/test/java/io/quarkus/benchmark/utils/LocalRandomTest.java @@ -0,0 +1,62 @@ +package io.quarkus.benchmark.utils; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class LocalRandomTest { + + /** + * Testing {@link LocalRandom#getNextRandom()} + */ + @Test + public void fullCoverage() { + int[] results = new int[Randomizer.MAX_OF_RANGE + 1]; + final LocalRandom random = Randomizer.current(); + for (int i = 0; i < Randomizer.RANGE_SPACE; i++) { + final int value = random.getNextRandom().intValue(); + results[value]++; + } + for (int i = 0; i < Randomizer.MIN_OF_RANGE; i++) { + assertEquals(0, results[i]); + } + for (int i = Randomizer.MIN_OF_RANGE; i < Randomizer.MAX_OF_RANGE + 1; i++) { + assertEquals(1, results[i]); + } + for (int i = 0; i < Randomizer.RANGE_SPACE; i++) { + final int value = random.getNextRandom().intValue(); + results[value]++; + } + for (int i = Randomizer.MIN_OF_RANGE; i < Randomizer.MAX_OF_RANGE + 1; i++) { + assertEquals(2, results[i]); + } + } + + /** + * Testing {@link LocalRandom#getNextRandomExcluding(int)} + */ + @Test + public void fullCoverageExcept() { + final int except = 37; + int[] results = new int[Randomizer.MAX_OF_RANGE + 1]; + final LocalRandom random = Randomizer.current(); + for (int i = 0; i < Randomizer.RANGE_SPACE - 1; i++) { + final int value = random.getNextRandomExcluding(except).intValue(); + results[value]++; + } + for (int i = 0; i < Randomizer.MIN_OF_RANGE; i++) { + assertEquals(0, results[i]); + } + for (int i = Randomizer.MIN_OF_RANGE; i < Randomizer.MAX_OF_RANGE + 1; i++) { + assertEquals((i == except ? 0 : 1), results[i]); + } + for (int i = 0; i < Randomizer.RANGE_SPACE - 1; i++) { + final int value = random.getNextRandomExcluding(except).intValue(); + results[value]++; + } + for (int i = Randomizer.MIN_OF_RANGE; i < Randomizer.MAX_OF_RANGE + 1; i++) { + assertEquals((i == except ? 
0 : 2), results[i]); + } + } + +} \ No newline at end of file diff --git a/frameworks/Java/quarkus/quarkus-reactive-routes-hibernate-reactive.dockerfile b/frameworks/Java/quarkus/quarkus-reactive-routes-hibernate-reactive.dockerfile deleted file mode 100644 index 6a89bc7a166..00000000000 --- a/frameworks/Java/quarkus/quarkus-reactive-routes-hibernate-reactive.dockerfile +++ /dev/null @@ -1,31 +0,0 @@ -FROM maven:3.6.3-jdk-11-slim as maven -WORKDIR /quarkus -ENV MODULE=reactive-routes-hibernate-reactive - -COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml - -# Uncomment to test pre-release quarkus -#RUN mkdir -p /root/.m2/repository/io -#COPY m2-quarkus /root/.m2/repository/io/quarkus - -WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q -WORKDIR /quarkus - -COPY $MODULE/src $MODULE/src - -WORKDIR /quarkus/$MODULE -RUN mvn package -q -WORKDIR /quarkus - -FROM openjdk:11.0.6-jdk-slim -WORKDIR /quarkus -ENV MODULE=reactive-routes-hibernate-reactive - -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] diff --git a/frameworks/Java/quarkus/quarkus-reactive-routes-pgclient.dockerfile b/frameworks/Java/quarkus/quarkus-reactive-routes-pgclient.dockerfile deleted file mode 100644 index ec5b6dee0c0..00000000000 --- a/frameworks/Java/quarkus/quarkus-reactive-routes-pgclient.dockerfile +++ /dev/null @@ -1,31 +0,0 @@ -FROM maven:3.6.3-jdk-11-slim as maven -WORKDIR /quarkus -ENV MODULE=reactive-routes-pgclient - -COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml - -# Uncomment to test pre-release quarkus -#RUN mkdir -p /root/.m2/repository/io -#COPY m2-quarkus /root/.m2/repository/io/quarkus - -WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q -WORKDIR /quarkus - -COPY $MODULE/src $MODULE/src - -WORKDIR /quarkus/$MODULE -RUN mvn package -q -WORKDIR /quarkus - -FROM openjdk:11.0.6-jdk-slim -WORKDIR /quarkus -ENV MODULE=reactive-routes-pgclient - -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] diff --git a/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate-reactive.dockerfile b/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate-reactive.dockerfile index 1637cc7bcc3..3aaeda182bc 100644 --- a/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate-reactive.dockerfile +++ 
b/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate-reactive.dockerfile @@ -1,31 +1,39 @@ -FROM maven:3.6.3-jdk-11-slim as maven +FROM docker.io/maven:3.8.4-eclipse-temurin-11 as maven WORKDIR /quarkus ENV MODULE=resteasy-reactive-hibernate-reactive COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml +COPY quarkus-benchmark-common quarkus-benchmark-common/ +COPY resteasy-hibernate resteasy-hibernate/ +COPY resteasy-reactive-hibernate resteasy-reactive-hibernate/ +COPY resteasy-reactive-hibernate-reactive resteasy-reactive-hibernate-reactive/ # Uncomment to test pre-release quarkus #RUN mkdir -p /root/.m2/repository/io #COPY m2-quarkus /root/.m2/repository/io/quarkus +WORKDIR /quarkus +RUN mvn -DskipTests install -pl :benchmark,:quarkus-benchmark-common -B -q + WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q +RUN mvn dependency:go-offline -B -q WORKDIR /quarkus COPY $MODULE/src $MODULE/src WORKDIR /quarkus/$MODULE -RUN mvn package -q +RUN mvn package -B -q WORKDIR /quarkus -FROM openjdk:11.0.6-jdk-slim +FROM docker.io/eclipse-temurin:11-jdk WORKDIR /quarkus ENV MODULE=resteasy-reactive-hibernate-reactive -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/lib/ lib +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/app/ app +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus/ quarkus +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus-run.jar quarkus-run.jar +COPY run_quarkus.sh run_quarkus.sh EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] +ENTRYPOINT "./run_quarkus.sh" diff --git a/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate.dockerfile b/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate.dockerfile index 6f69cd68d95..df3a345e216 100644 --- a/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate.dockerfile +++ b/frameworks/Java/quarkus/quarkus-resteasy-reactive-hibernate.dockerfile @@ -1,31 +1,39 @@ -FROM maven:3.6.3-jdk-11-slim as maven +FROM docker.io/maven:3.8.4-eclipse-temurin-11 as maven WORKDIR /quarkus ENV MODULE=resteasy-reactive-hibernate COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml +COPY quarkus-benchmark-common quarkus-benchmark-common/ +COPY resteasy-hibernate resteasy-hibernate/ +COPY resteasy-reactive-hibernate resteasy-reactive-hibernate/ +COPY resteasy-reactive-hibernate-reactive resteasy-reactive-hibernate-reactive/ # Uncomment to test pre-release quarkus #RUN mkdir -p /root/.m2/repository/io #COPY m2-quarkus /root/.m2/repository/io/quarkus +WORKDIR /quarkus +RUN mvn -DskipTests install -pl :benchmark,:quarkus-benchmark-common -B -q + WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q +RUN mvn dependency:go-offline -B -q WORKDIR /quarkus COPY $MODULE/src $MODULE/src WORKDIR /quarkus/$MODULE -RUN mvn package -q +RUN mvn package -B -q WORKDIR /quarkus -FROM openjdk:11.0.6-jdk-slim +FROM docker.io/eclipse-temurin:11-jdk WORKDIR 
/quarkus ENV MODULE=resteasy-reactive-hibernate -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/lib/ lib +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/app/ app +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus/ quarkus +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus-run.jar quarkus-run.jar +COPY run_quarkus.sh run_quarkus.sh EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] +ENTRYPOINT "./run_quarkus.sh" diff --git a/frameworks/Java/quarkus/quarkus-resteasy-reactive-pgclient.dockerfile b/frameworks/Java/quarkus/quarkus-resteasy-reactive-pgclient.dockerfile deleted file mode 100644 index 0fe3fdca5da..00000000000 --- a/frameworks/Java/quarkus/quarkus-resteasy-reactive-pgclient.dockerfile +++ /dev/null @@ -1,31 +0,0 @@ -FROM maven:3.6.3-jdk-11-slim as maven -WORKDIR /quarkus -ENV MODULE=resteasy-reactive-pgclient - -COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml - -# Uncomment to test pre-release quarkus -#RUN mkdir -p /root/.m2/repository/io -#COPY m2-quarkus /root/.m2/repository/io/quarkus - -WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q -WORKDIR /quarkus - -COPY $MODULE/src $MODULE/src - -WORKDIR /quarkus/$MODULE -RUN mvn package -q -WORKDIR /quarkus - -FROM openjdk:11.0.6-jdk-slim -WORKDIR /quarkus -ENV MODULE=resteasy-reactive-pgclient - -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] diff --git a/frameworks/Java/quarkus/quarkus.dockerfile b/frameworks/Java/quarkus/quarkus.dockerfile index e130c373552..2f9697c7c18 100644 --- a/frameworks/Java/quarkus/quarkus.dockerfile +++ b/frameworks/Java/quarkus/quarkus.dockerfile @@ -1,31 +1,39 @@ -FROM maven:3.6.3-jdk-11-slim as maven +FROM docker.io/maven:3.8.4-eclipse-temurin-11 as maven WORKDIR /quarkus ENV MODULE=resteasy-hibernate COPY pom.xml pom.xml -COPY $MODULE/pom.xml $MODULE/pom.xml +COPY quarkus-benchmark-common quarkus-benchmark-common/ +COPY resteasy-hibernate resteasy-hibernate/ +COPY resteasy-reactive-hibernate resteasy-reactive-hibernate/ +COPY resteasy-reactive-hibernate-reactive resteasy-reactive-hibernate-reactive/ # Uncomment to test pre-release quarkus #RUN mkdir -p /root/.m2/repository/io #COPY m2-quarkus /root/.m2/repository/io/quarkus +WORKDIR /quarkus +RUN mvn -DskipTests install -pl 
:benchmark,:quarkus-benchmark-common -B -q + WORKDIR /quarkus/$MODULE -RUN mvn dependency:go-offline -q +RUN mvn dependency:go-offline -B -q WORKDIR /quarkus COPY $MODULE/src $MODULE/src WORKDIR /quarkus/$MODULE -RUN mvn package -q +RUN mvn package -B -q WORKDIR /quarkus -FROM openjdk:11.0.6-jdk-slim +FROM docker.io/eclipse-temurin:11-jdk WORKDIR /quarkus ENV MODULE=resteasy-hibernate -COPY --from=maven /quarkus/$MODULE/target/lib lib -COPY --from=maven /quarkus/$MODULE/target/$MODULE-1.0-SNAPSHOT-runner.jar app.jar +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/lib/ lib +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/app/ app +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus/ quarkus +COPY --from=maven /quarkus/$MODULE/target/quarkus-app/quarkus-run.jar quarkus-run.jar +COPY run_quarkus.sh run_quarkus.sh EXPOSE 8080 - -CMD ["java", "-server", "-XX:-UseBiasedLocking", "-XX:+UseStringDeduplication", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Djava.lang.Integer.IntegerCache.high=10000", "-Dvertx.disableHttpHeadersValidation=true", "-Dvertx.disableMetrics=true", "-Dvertx.disableH2c=true", "-Dvertx.disableWebsockets=true", "-Dvertx.flashPolicyHandler=false", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-Dhibernate.allow_update_outside_transaction=true", "-Djboss.threads.eqe.statistics=false", "-jar", "app.jar"] +ENTRYPOINT "./run_quarkus.sh" diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/pom.xml b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/pom.xml deleted file mode 100644 index c955632ae41..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/pom.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - 4.0.0 - - - io.quarkus - benchmark - 1.0-SNAPSHOT - ../ - - - io.quarkus.benchmark - reactive-routes-hibernate-reactive - - - - io.quarkus - quarkus-reactive-pg-client - - - io.quarkus - quarkus-scheduler - - - io.quarkus - quarkus-hibernate-reactive - - - io.quarkus - quarkus-vertx-web - - - com.github.spullara.mustache.java - compiler - 0.9.6 - - - io.vertx - vertx-pg-client - - - io.vertx - vertx-sql-client - - - diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java deleted file mode 100644 index c46279cc783..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ /dev/null @@ -1,28 +0,0 @@ -package io.quarkus.benchmark.filter; - -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; - -import javax.inject.Singleton; - -import io.quarkus.scheduler.Scheduled; -import io.quarkus.vertx.web.RouteFilter; -import io.vertx.ext.web.RoutingContext; - -@Singleton -public class ServerHeaderFilter { - - private String date; - - @Scheduled(every="1s") - void increment() { - date = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now()); - } - - @RouteFilter(100) - void myFilter(RoutingContext rc) { - rc.response().putHeader( "Server", "Quarkus"); - rc.response().putHeader( "Date", date); - rc.next(); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/Fortune.java 
b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/Fortune.java deleted file mode 100644 index 5228080b02b..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/Fortune.java +++ /dev/null @@ -1,56 +0,0 @@ -package io.quarkus.benchmark.model; - -import org.hibernate.annotations.Immutable; - -import java.util.Objects; - -import javax.persistence.Entity; -import javax.persistence.Id; - -@Entity -@Immutable -public class Fortune { - - @Id - private int id; - private String message; - - public Fortune() {} - - public Fortune(int id, String message) { - this.id = id; - this.message = message; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - Fortune fortune = (Fortune) o; - return id == fortune.id && - Objects.equals(message, fortune.message); - } - - @Override - public int hashCode() { - return Objects.hash(id, message); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/World.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/World.java deleted file mode 100644 index a9a2acc4a82..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/model/World.java +++ /dev/null @@ -1,29 +0,0 @@ -package io.quarkus.benchmark.model; - -import javax.persistence.Entity; -import javax.persistence.Id; - -@Entity -public class World { - - @Id - private int id; - private int randomNumber; - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getRandomNumber() { - return randomNumber; - } - - public void setRandomNumber(int randomNumber) { - this.randomNumber = randomNumber; - } - -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java deleted file mode 100644 index 8cc1e2e9a01..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java +++ /dev/null @@ -1,19 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.function.Function; - -import javax.inject.Inject; - -import org.hibernate.reactive.mutiny.Mutiny; - -import io.smallrye.mutiny.Uni; - -public class BaseRepository { - @Inject - protected Mutiny.SessionFactory sf; - - public Uni inSession(Function> work){ - return sf.withSession(session -> work.apply(session)); - } - -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java deleted file mode 100644 index a34254a0825..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java +++ 
/dev/null @@ -1,25 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.List; - -import javax.enterprise.context.ApplicationScoped; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Root; - -import io.quarkus.benchmark.model.Fortune; -import io.smallrye.mutiny.Uni; - -@ApplicationScoped -public class FortuneRepository extends BaseRepository { - - public Uni> findAll() { - return inSession(s -> { - CriteriaBuilder criteriaBuilder = sf.getCriteriaBuilder(); - CriteriaQuery fortuneQuery = criteriaBuilder.createQuery(Fortune.class); - Root from = fortuneQuery.from(Fortune.class); - fortuneQuery.select(from); - return s.createQuery(fortuneQuery).getResultList(); - }); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java deleted file mode 100644 index bc236b0e4ca..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; - -import javax.inject.Singleton; - -import org.hibernate.reactive.mutiny.Mutiny; -import org.hibernate.reactive.mutiny.Mutiny.Session; - -import io.quarkus.benchmark.model.World; -import io.smallrye.mutiny.Uni; - - -@Singleton -public class WorldRepository extends BaseRepository { - - /** - * This method is not required (nor specified) by the benchmark rules, - * but is quite handy to seed a local database and be able to experiment - * with the app locally. 
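For local experimentation the seeding route defined further down in this file ("createdata") only needs to be hit once after startup. A minimal standalone way to do that from Java is sketched below; it assumes the default localhost:8080 binding these images expose, and the class name and URL are illustrative only, not part of the benchmark sources:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Illustrative helper: seeds a local database by calling the /createdata route once.
// Host and port are assumptions (EXPOSE 8080 in the dockerfiles, localhost for a dev run).
public class SeedLocalDb {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:8080/createdata")).GET().build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}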
- */ - public Uni createData() { - return inSession(s -> { - final ThreadLocalRandom random = ThreadLocalRandom.current(); - int MAX = 10000; - Uni[] unis = new Uni[MAX]; - for (int i=0; i null); - } - return Uni.combine().all().unis(unis).combinedWith(l -> null) - .flatMap(v -> s.flush()) - .map(v -> null); - }); - } - - public Uni find(int id) { - return inSession(session -> singleFind(session, id)); - } - - public Uni> update(Mutiny.Session s, Collection worlds) { - return s.flush() - .map(v -> worlds); - } - - public Uni> find(Session s, Set ids) { - //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs as one single operation - ArrayList> l = new ArrayList<>(ids.size()); - for (Integer id : ids) { - l.add(singleFind(s, id)); - } - return Uni.combine().all().unis(l).combinedWith(list -> (List)list); - } - - private static Uni singleFind(final Mutiny.Session ss, final Integer id) { - return ss.find(World.class, id); - } - -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/BaseResource.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/BaseResource.java deleted file mode 100644 index b9ba205a368..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/BaseResource.java +++ /dev/null @@ -1,34 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.io.PrintWriter; -import java.io.StringWriter; - -import javax.inject.Inject; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -import io.vertx.ext.web.RoutingContext; - -public abstract class BaseResource { - - @Inject - ObjectMapper mapper; - - void sendJson(RoutingContext rc, Object value) { - try { - rc.response().putHeader("Content-Type", "application/json"); - rc.response().end(mapper.writeValueAsString(value)); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - Void handleFail(RoutingContext rc, Throwable t) { - var sw = new StringWriter(); - t.printStackTrace(new PrintWriter(sw)); - rc.response().setStatusCode(500).end(sw.toString()); - return null; - } - -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java deleted file mode 100644 index 70ccb75946b..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ /dev/null @@ -1,130 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import org.hibernate.FlushMode; -import org.hibernate.reactive.mutiny.Mutiny; - -import io.quarkus.benchmark.model.World; -import io.quarkus.benchmark.repository.WorldRepository; -import io.quarkus.vertx.web.Route; -import io.smallrye.mutiny.Uni; -import io.vertx.ext.web.RoutingContext; - - -@ApplicationScoped -public class DbResource extends BaseResource { - - @Inject - WorldRepository worldRepository; - - @Route(path = "db") - public void db(RoutingContext rc) { - randomWorld().subscribe().with(world -> sendJson(rc, world), - t 
-> handleFail(rc, t)); - } - - @Route(path = "queries") - public void queries(RoutingContext rc) { - var queries = rc.request().getParam("queries"); - worldRepository.inSession(session -> randomWorldForRead(session, parseQueryCount(queries))) - .subscribe().with(list -> sendJson(rc, list), - t -> handleFail(rc, t)); - } - - //Rules: https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Framework-Tests-Overview#database-updates - //N.B. the benchmark seems to be designed to get in deadlocks when using a "safe pattern" of updating - // the entity within the same transaction as the one which read it. - // We therefore need to do a "read then write" while relinquishing the transaction between the two operations, as - // all other tested frameworks seem to do. - @Route(path = "updates") - public void updates(RoutingContext rc) { - var queries = rc.request().getParam("queries"); - worldRepository.inSession(session -> { - // FIXME: not supported - // session.setJdbcBatchSize(worlds.size()); - session.setFlushMode(FlushMode.MANUAL); - - var worlds = randomWorldForRead(session, parseQueryCount(queries)); - return worlds.flatMap(worldsCollection -> { - worldsCollection.forEach( w -> { - //Read the one field, as required by the following rule: - // # vi. At least the randomNumber field must be read from the database result set. - final int previousRead = w.getRandomNumber(); - //Update it, but make sure to exclude the current number as Hibernate optimisations would have us "fail" - //the verification: - w.setRandomNumber(randomWorldNumber(previousRead)); - } ); - - return worldRepository.update(session, worldsCollection); - }); - }).subscribe().with(list -> sendJson(rc, list), - t -> handleFail(rc, t)); - } - - private Uni> randomWorldForRead(Mutiny.Session session, int count) { - Set ids = new HashSet<>(count); - int counter = 0; - while (counter < count) { - counter += ids.add(Integer.valueOf(randomWorldNumber())) ? 1 : 0; - } - return worldRepository.find(session, ids); - } - - @Route(path = "createdata") - public void createData(RoutingContext rc) { - worldRepository.createData().subscribe().with(v -> rc.response().end("Data created"), - t -> handleFail(rc, t)); - } - - private Uni randomWorld() { - int i = randomWorldNumber(); - return worldRepository.find(i); - } - - private int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } - - /** - * Also according to benchmark requirements, except that in this special case - * of the update test we need to ensure we'll actually generate an update operation: - * for this we need to generate a random number between 1 to 10000, but different - * from the current field value. - * @param previousRead - * @return - */ - private int randomWorldNumber(final int previousRead) { - //conceptually split the random space in those before previousRead, - //and those after: this approach makes sure to not affect the random characteristics. - final int trueRandom = ThreadLocalRandom.current().nextInt(9999) + 2; - if (trueRandom<=previousRead) { - //all figures equal or before the current field read need to be shifted back by one - //so to avoid hitting the same number while not affecting the distribution. - return trueRandom - 1; - } - else { - //Those after are generated by taking the generated value 2...10000 as is. 
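To make the shift described in the comment above concrete: for previousRead = 5000 the generated value 2..10000 maps to 1..4999 whenever it is less than or equal to 5000, and stays 5001..10000 otherwise, so every value except 5000 remains reachable with equal probability. A minimal standalone check of that property, mirroring the deleted method rather than replacing it (illustrative only):

import java.util.concurrent.ThreadLocalRandom;

// Standalone illustration of the "shifted random" trick described in the comments above.
public class ShiftedRandomCheck {
    static int randomWorldNumber(int previousRead) {
        // 2..10000, then shift the lower half down by one so previousRead can never be produced
        int trueRandom = ThreadLocalRandom.current().nextInt(9999) + 2;
        return trueRandom <= previousRead ? trueRandom - 1 : trueRandom;
    }

    public static void main(String[] args) {
        int previousRead = 5000;
        for (int i = 0; i < 1_000_000; i++) {
            int n = randomWorldNumber(previousRead);
            if (n < 1 || n > 10000 || n == previousRead) {
                throw new AssertionError("unexpected value: " + n);
            }
        }
        System.out.println("all samples fell in [1,10000] and never equalled " + previousRead);
    }
}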
- return trueRandom; - } - } - - private int parseQueryCount(String textValue) { - if (textValue == null) { - return 1; - } - int parsedValue; - try { - parsedValue = Integer.parseInt(textValue); - } catch (NumberFormatException e) { - return 1; - } - return Math.min(500, Math.max(1, parsedValue)); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java deleted file mode 100644 index d430ef31122..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ /dev/null @@ -1,47 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.io.StringWriter; -import java.util.Collections; -import java.util.Comparator; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; -import io.quarkus.vertx.web.Route; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class FortuneResource extends BaseResource { - - @Inject - FortuneRepository repository; - - private final Mustache template; - private Comparator fortuneComparator; - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } - - @Route(path = "fortunes") - public void fortunes(RoutingContext rc) { - repository.findAll() - .subscribe().with( fortunes -> { - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(fortuneComparator); - StringWriter writer = new StringWriter(); - template.execute(writer, Collections.singletonMap("fortunes", fortunes)); - rc.response().putHeader("Content-Type", "text/html;charset=UTF-8"); - rc.response().end(writer.toString()); - }, - t -> handleFail(rc, t)); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java deleted file mode 100644 index 9d932a13639..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.util.Collections; - -import javax.enterprise.context.ApplicationScoped; - -import io.quarkus.vertx.web.Route; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class JsonResource extends BaseResource { - - private static final String MESSAGE = "message"; - private static final String HELLO = "Hello, World!"; - - @Route(path = "json") - public void json(RoutingContext rc) { - sendJson(rc, Collections.singletonMap( MESSAGE, HELLO )); - } -} - diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java deleted file 
mode 100644 index c8649b551c2..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.quarkus.benchmark.resource; - -import javax.enterprise.context.ApplicationScoped; - -import io.quarkus.vertx.web.Route; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class PlaintextResource { - private static final String HELLO = "Hello, World!"; - - @Route(path = "plaintext") - public void plaintext(RoutingContext rc) { - rc.response().putHeader("Content-Type", "text/plain"); - rc.response().end(HELLO); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/application.properties b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/application.properties deleted file mode 100644 index 18e5ae2367b..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/application.properties +++ /dev/null @@ -1,20 +0,0 @@ -quarkus.datasource.db-kind=postgresql -quarkus.datasource.username=benchmarkdbuser -quarkus.datasource.password=benchmarkdbpass -#quarkus.datasource.max-size=64 - -# Reactive config -quarkus.datasource.reactive=true -quarkus.datasource.reactive.url=postgresql://tfb-database:5432/hello_world -%dev.quarkus.datasource.reactive.url=postgresql://localhost:5432/hello_world - -quarkus.datasource.reactive.thread-local=true -quarkus.datasource.reactive.cache-prepared-statements=true -quarkus.datasource.reactive.max-size=4 - -#quarkus.vertx.storage=false - -quarkus.log.console.enable=true -quarkus.log.console.level=INFO -quarkus.log.file.enable=false -quarkus.log.level=INFO \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/fortunes.mustache b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
-<th>id</th> -<th>message</th>
-<td>{{id}}</td> -<td>{{message}}</td>
- - diff --git a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/start-app.sh b/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/start-app.sh deleted file mode 100755 index 01b3e22ea2c..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-hibernate-reactive/start-app.sh +++ /dev/null @@ -1,2 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=vertx-reactive:postgresql://localhost:5432/hello_world -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/pgclient-1.0-SNAPSHOT-runner.jar - diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/pom.xml b/frameworks/Java/quarkus/reactive-routes-pgclient/pom.xml deleted file mode 100644 index d9425b9128d..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - 4.0.0 - - - io.quarkus - benchmark - 1.0-SNAPSHOT - ../ - - - io.quarkus.benchmark - reactive-routes-pgclient - - - - io.quarkus - quarkus-scheduler - - - io.quarkus - quarkus-reactive-pg-client - - - io.quarkus - quarkus-vertx-web - - - io.vertx - vertx-web-templ-rocker - - - io.netty - netty-transport-native-epoll - linux-x86_64 - - - com.github.spullara.mustache.java - compiler - 0.9.6 - - - diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java deleted file mode 100644 index aa7c7fda4f9..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ /dev/null @@ -1,33 +0,0 @@ -package io.quarkus.benchmark.filter; - -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; - -import javax.inject.Singleton; - -import io.quarkus.scheduler.Scheduled; -import io.quarkus.vertx.web.RouteFilter; -import io.vertx.core.http.HttpHeaders; -import io.vertx.ext.web.RoutingContext; - -@Singleton -public class ServerHeaderFilter { - - private static final CharSequence SERVER_HEADER_NAME = HttpHeaders.createOptimized("Server"); - private static final CharSequence SERVER_HEADER_VALUE = HttpHeaders.createOptimized("Quarkus"); - private static final CharSequence DATE_HEADER_NAME = HttpHeaders.createOptimized("Date"); - - private CharSequence date; - - @Scheduled(every="1s") - void increment() { - date = HttpHeaders.createOptimized(DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now())); - } - - @RouteFilter(100) - void myFilter(RoutingContext rc) { - rc.response().putHeader( SERVER_HEADER_NAME, SERVER_HEADER_VALUE); - rc.response().putHeader( DATE_HEADER_NAME, date); - rc.next(); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java deleted file mode 100644 index 0e32970c9db..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java +++ /dev/null @@ -1,48 +0,0 @@ -package 
io.quarkus.benchmark.model; - -import java.util.Objects; - -public class Fortune { - - private int id; - private String message; - - public Fortune() {} - - public Fortune(int id, String message) { - this.id = id; - this.message = message; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - Fortune fortune = (Fortune) o; - return id == fortune.id && - Objects.equals(message, fortune.message); - } - - @Override - public int hashCode() { - return Objects.hash(id, message); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/World.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/World.java deleted file mode 100644 index 0d5205ae361..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/model/World.java +++ /dev/null @@ -1,35 +0,0 @@ -package io.quarkus.benchmark.model; - -public class World implements Comparable{ - - private int id; - private int randomNumber; - - public World() {} - - public World(int id, int randomNumber) { - this.id = id; - this.randomNumber = randomNumber; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getRandomNumber() { - return randomNumber; - } - - public void setRandomNumber(int randomNumber) { - this.randomNumber = randomNumber; - } - - @Override - public int compareTo(World o) { - return Integer.compare(id, o.id); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java deleted file mode 100644 index 4415583fa1d..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.ArrayList; -import java.util.List; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import io.quarkus.benchmark.model.Fortune; -import io.smallrye.mutiny.Uni; -import io.vertx.mutiny.sqlclient.Row; - -@ApplicationScoped -public class FortuneRepository { - - @Inject - PgClients clients; - - public Uni> findAll() { - return clients.getClient().preparedQuery("SELECT * FROM Fortune" ) - .execute() - .map(rowset -> { - List ret = new ArrayList<>(rowset.size()+1); - for(Row r : rowset) { - ret.add(new Fortune(r.getInteger("id"), r.getString("message"))); - } - return ret; - }); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java deleted file mode 100644 index ef99489cb26..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java +++ /dev/null @@ -1,56 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.regex.Matcher; -import 
java.util.regex.Pattern; - -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; -import javax.inject.Inject; - -import org.eclipse.microprofile.config.inject.ConfigProperty; - -import io.vertx.mutiny.core.Vertx; -import io.vertx.mutiny.pgclient.PgPool; -import io.vertx.pgclient.PgConnectOptions; -import io.vertx.sqlclient.PoolOptions; - -@ApplicationScoped -public class PgClientFactory { - - // vertx-reactive:postgresql://tfb-database:5432/hello_world - private static final String PG_URI_MATCHER = "vertx-reactive:postgresql://([-a-zA-Z]+):([0-9]+)/(.*)"; - - @ConfigProperty(name = "quarkus.datasource.url") - String url; - - @ConfigProperty(name = "quarkus.datasource.username") - String user; - - @ConfigProperty(name = "quarkus.datasource.password") - String pass; - - @Inject - Vertx vertx; - - @Produces - @ApplicationScoped - public PgClients pgClients() { - return new PgClients(this); - } - - - PgPool sqlClient(int size) { - PoolOptions options = new PoolOptions(); - PgConnectOptions connectOptions = new PgConnectOptions(); - Matcher matcher = Pattern.compile(PG_URI_MATCHER).matcher(url); - matcher.matches(); - connectOptions.setDatabase(matcher.group(3)); - connectOptions.setHost(matcher.group(1)); - connectOptions.setPort(Integer.parseInt(matcher.group(2))); - connectOptions.setUser(user); - connectOptions.setPassword(pass); - connectOptions.setCachePreparedStatements(true); - options.setMaxSize(size); - return PgPool.pool(vertx, connectOptions, options); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java deleted file mode 100644 index 871e9b75a6e..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java +++ /dev/null @@ -1,38 +0,0 @@ -package io.quarkus.benchmark.repository; - -import io.vertx.mutiny.pgclient.PgPool; -import io.vertx.mutiny.sqlclient.SqlClient; - -class PgClients { - private static final int POOL_SIZE = 4; - - private ThreadLocal sqlClient = new ThreadLocal<>(); - private ThreadLocal pool = new ThreadLocal<>(); - private PgClientFactory pgClientFactory; - - // for ArC - public PgClients() { - } - - public PgClients(PgClientFactory pgClientFactory) { - this.pgClientFactory = pgClientFactory; - } - - SqlClient getClient() { - SqlClient ret = sqlClient.get(); - if(ret == null) { - ret = pgClientFactory.sqlClient(1); - sqlClient.set(ret); - } - return ret; - } - - synchronized PgPool getPool() { - PgPool ret = pool.get(); - if(ret == null) { - ret = pgClientFactory.sqlClient(POOL_SIZE); - pool.set(ret); - } - return ret; - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java deleted file mode 100644 index 2d9c71d1259..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ /dev/null @@ -1,40 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import io.quarkus.benchmark.model.World; 
-import io.smallrye.mutiny.Uni; -import io.vertx.mutiny.sqlclient.Row; -import io.vertx.mutiny.sqlclient.Tuple; - -@ApplicationScoped -public class WorldRepository { - - @Inject - PgClients clients; - - public Uni find(int id) { - return clients.getClient().preparedQuery("SELECT id, randomNumber FROM World WHERE id = $1") - .execute(Tuple.of(id)) - .map(rowset -> { - Row row = rowset.iterator().next(); - return new World(row.getInteger(0), row.getInteger(1)); - }); - } - - public Uni update(World[] worlds) { - Arrays.sort(worlds); - List args = new ArrayList<>(worlds.length); - for (World world : worlds) { - args.add(Tuple.of(world.getId(), world.getRandomNumber())); - } - return clients.getPool().preparedQuery("UPDATE World SET randomNumber = $2 WHERE id = $1") - .executeBatch(args) - .map(v -> null); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/BaseResource.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/BaseResource.java deleted file mode 100644 index 22e9965f777..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/BaseResource.java +++ /dev/null @@ -1,20 +0,0 @@ -package io.quarkus.benchmark.resource; - -import io.vertx.core.http.HttpHeaders; -import io.vertx.core.json.Json; -import io.vertx.ext.web.RoutingContext; - -public abstract class BaseResource { - - void sendJson(RoutingContext rc, Object value) { - rc.response() - .putHeader(HttpHeaders.CONTENT_TYPE, "application/json") - .end(Json.encodeToBuffer(value)); - } - - Void handleFail(RoutingContext rc, Throwable t) { - rc.response().setStatusCode(500).end(t.toString()); - return null; - } - -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java deleted file mode 100644 index 6333ea49331..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ /dev/null @@ -1,85 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.util.Arrays; -import java.util.concurrent.ThreadLocalRandom; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import io.quarkus.benchmark.model.World; -import io.quarkus.benchmark.repository.WorldRepository; -import io.quarkus.vertx.web.Route; -import io.smallrye.mutiny.Uni; -import io.smallrye.mutiny.groups.UniAndGroupIterable; -import io.vertx.ext.web.RoutingContext; - - -@ApplicationScoped -public class DbResource extends BaseResource { - - @Inject - WorldRepository worldRepository; - - @Route(path = "db") - public void db(RoutingContext rc) { - randomWorld().subscribe().with(world -> sendJson(rc, world), - t -> handleFail(rc, t)); - } - - @Route(path = "queries") - public void queries(RoutingContext rc) { - var queries = rc.request().getParam("queries"); - var worlds = new Uni[parseQueryCount(queries)]; - var ret = new World[worlds.length]; - Arrays.setAll(worlds, i -> { - return randomWorld().map(w -> ret[i] = w); - }); - - Uni.combine().all().unis(worlds) - .combinedWith(v -> Arrays.asList(ret)) - .subscribe().with(list -> sendJson(rc, list), - t -> handleFail(rc, t)); - } - - @Route(path = "updates") - public void updates(RoutingContext rc) { - var queries = rc.request().getParam("queries"); - var worlds = new Uni[parseQueryCount(queries)]; - 
var ret = new World[worlds.length]; - Arrays.setAll(worlds, i -> { - return randomWorld().map(w -> { - w.setRandomNumber(randomWorldNumber()); - ret[i] = w; - return w; - }); - }); - - Uni.combine().all().unis(worlds) - .combinedWith(v -> null) - .flatMap(v -> worldRepository.update(ret)) - .map(v -> Arrays.asList(ret)) - .subscribe().with(list -> sendJson(rc, list), - t -> handleFail(rc, t)); - } - - private Uni randomWorld() { - return worldRepository.find(randomWorldNumber()); - } - - private int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } - - private int parseQueryCount(String textValue) { - if (textValue == null) { - return 1; - } - int parsedValue; - try { - parsedValue = Integer.parseInt(textValue); - } catch (NumberFormatException e) { - return 1; - } - return Math.min(500, Math.max(1, parsedValue)); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java deleted file mode 100644 index acaf98b5487..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ /dev/null @@ -1,49 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.io.StringWriter; -import java.util.Collections; -import java.util.Comparator; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; -import io.quarkus.vertx.web.Route; -import io.vertx.core.http.HttpHeaders; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class FortuneResource extends BaseResource { - - @Inject - FortuneRepository repository; - private Mustache template; - private Comparator fortuneComparator; - - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } - - @Route(path = "fortunes") - public void fortunes(RoutingContext rc) { - repository.findAll() - .subscribe().with(fortunes -> { - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(fortuneComparator); - StringWriter writer = new StringWriter(); - template.execute(writer, Collections.singletonMap("fortunes", fortunes)); - rc.response() - .putHeader(HttpHeaders.CONTENT_TYPE, "text/html; charset=UTF-8") - .end(writer.toString()); - }, - t -> handleFail(rc, t)); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java deleted file mode 100644 index f6eb41e21e6..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java +++ /dev/null @@ -1,18 +0,0 @@ -package io.quarkus.benchmark.resource; - -import javax.enterprise.context.ApplicationScoped; - -import io.quarkus.vertx.web.Route; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class JsonResource extends BaseResource { - - private static 
final String HELLO = "Hello, World!"; - - @Route(path = "json") - public void json(RoutingContext rc) { - sendJson(rc, new Message(HELLO)); - } -} - diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java deleted file mode 100644 index 73f0ffefb8a..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.quarkus.benchmark.resource; - -public class Message { - private final String message; - - public Message(String message) { - this.message = message; - } - - public String getMessage() { - return message; - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java deleted file mode 100644 index 3e6291c98f6..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.quarkus.benchmark.resource; - -import javax.enterprise.context.ApplicationScoped; - -import io.quarkus.vertx.web.Route; -import io.vertx.core.buffer.Buffer; -import io.vertx.core.http.HttpHeaders; -import io.vertx.ext.web.RoutingContext; - -@ApplicationScoped -public class PlaintextResource { - private static final String HELLO_WORLD = "Hello, world!"; - private static final Buffer HELLO_WORLD_BUFFER = Buffer.factory.directBuffer(HELLO_WORLD, "UTF-8"); - - private static final CharSequence CONTENT_TYPE_HEADER_NAME = HttpHeaders.createOptimized("Content-Type"); - private static final CharSequence CONTENT_TYPE_HEADER_VALUE = HttpHeaders.createOptimized("text/plain"); - - @Route(path = "plaintext") - public void plaintext(RoutingContext rc) { - rc.response().putHeader(CONTENT_TYPE_HEADER_NAME, CONTENT_TYPE_HEADER_VALUE); - rc.response().end(HELLO_WORLD_BUFFER); - } -} diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/application.properties b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/application.properties deleted file mode 100644 index 4d99cfbc4ec..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/application.properties +++ /dev/null @@ -1,9 +0,0 @@ -quarkus.datasource.url=vertx-reactive:postgresql://tfb-database:5432/hello_world -quarkus.datasource.username=benchmarkdbuser -quarkus.datasource.password=benchmarkdbpass -quarkus.datasource.reactive.max-size=64 -quarkus.log.console.enable=true -quarkus.log.console.level=INFO -quarkus.log.file.enable=false -quarkus.log.level=INFO -quarkus.vertx.prefer-native-transport=true \ No newline at end of file diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/fortunes.mustache b/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
-<th>id</th> -<th>message</th>
-<td>{{id}}</td> -<td>{{message}}</td>
- - diff --git a/frameworks/Java/quarkus/reactive-routes-pgclient/start-app.sh b/frameworks/Java/quarkus/reactive-routes-pgclient/start-app.sh deleted file mode 100755 index 01b3e22ea2c..00000000000 --- a/frameworks/Java/quarkus/reactive-routes-pgclient/start-app.sh +++ /dev/null @@ -1,2 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=vertx-reactive:postgresql://localhost:5432/hello_world -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/pgclient-1.0-SNAPSHOT-runner.jar - diff --git a/frameworks/Java/quarkus/resteasy-hibernate/pom.xml b/frameworks/Java/quarkus/resteasy-hibernate/pom.xml index d1dbf82f15b..863cc6f8803 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/pom.xml +++ b/frameworks/Java/quarkus/resteasy-hibernate/pom.xml @@ -6,13 +6,16 @@ io.quarkus benchmark 1.0-SNAPSHOT - ../ io.quarkus.benchmark resteasy-hibernate + + io.quarkus + quarkus-benchmark-common + io.quarkus quarkus-hibernate-orm @@ -34,11 +37,38 @@ quarkus-jdbc-postgresql - com.github.spullara.mustache.java - compiler - 0.9.6 + com.fizzed + rocker-compiler + 1.3.0 + + + io.netty + netty-transport-native-epoll + linux-x86_64 - + + + + com.fizzed + rocker-maven-plugin + 1.3.0 + + + generate-rocker-templates + generate-sources + + generate + + + ${project.basedir}/src/main/resources + true + + + + + + + diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java index 7043dd284e2..cb0998a760b 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java @@ -3,6 +3,7 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import javax.annotation.PostConstruct; import javax.inject.Singleton; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerResponseContext; @@ -16,7 +17,12 @@ @Provider public class ServerHeaderFilter implements ContainerResponseFilter { - private String date; + private volatile String date; + + @PostConstruct + public void init() { + date = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now()); + } @Scheduled(every="1s") void increment() { diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java index ee05796d892..bf0aa4636e0 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java @@ -47,7 +47,6 @@ public World findSingleAndStateless(int id) { } } - @Transactional public void updateAll(Collection worlds) { try (Session s = sf.openSession()) { s.setJdbcBatchSize(worlds.size()); diff --git 
a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java index 08ec68484cb..dd416526c61 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java @@ -30,9 +30,7 @@ public class DbResource { @GET @Path("/db") public World db() { - World world = randomWorldForRead(); - if (world==null) throw new IllegalStateException( "No data found in DB. Did you seed the database? Make sure to invoke /createdata once." ); - return world; + return worldRepository.findSingleAndStateless(randomWorldNumber()); } @GET @@ -72,10 +70,6 @@ public String createData() { return "OK"; } - private World randomWorldForRead() { - return worldRepository.findSingleAndStateless(randomWorldNumber()); - } - private Collection randomWorldForRead(int count) { Set ids = new HashSet<>(count); int counter = 0; diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java index fe71803f631..3ce9c06cafb 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java @@ -1,10 +1,9 @@ package io.quarkus.benchmark.resource; -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; +import com.fizzed.rocker.Rocker; +import com.fizzed.rocker.RockerOutput; +import io.quarkus.benchmark.model.Fortune; +import io.quarkus.benchmark.repository.FortuneRepository; import javax.inject.Inject; import javax.inject.Singleton; @@ -13,31 +12,23 @@ import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; @Singleton @Path("/") -@Produces(MediaType.TEXT_HTML+"; charset=UTF-8") +@Produces(MediaType.TEXT_HTML + "; charset=UTF-8") @Consumes(MediaType.APPLICATION_JSON) public class FortuneResource { @Inject FortuneRepository repository; - private final Mustache template; - private final Comparator fortuneComparator; - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } + private static final String FORTUNES_MAP_KEY = "fortunes"; + private static final String FORTUNES_TEMPLATE_FILENAME = "Fortunes.rocker.html"; + private static final Comparator fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); @GET @Path("/fortunes") @@ -46,9 +37,10 @@ public String fortunes() { fortunes.add(new Fortune(0, "Additional fortune added at request time.")); fortunes.sort(fortuneComparator); - StringWriter writer = new StringWriter(); - template.execute(writer, 
Collections.singletonMap("fortunes", fortunes)); + RockerOutput output = Rocker.template(FORTUNES_TEMPLATE_FILENAME) + .bind(Collections.singletonMap(FORTUNES_MAP_KEY, fortunes)) + .render(); - return writer.toString(); + return output.toString(); } } diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/Fortunes.rocker.html b/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/Fortunes.rocker.html new file mode 100644 index 00000000000..c3cf547f319 --- /dev/null +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/Fortunes.rocker.html @@ -0,0 +1,21 @@ +@import java.util.* +@import io.quarkus.benchmark.model.* +@args(List fortunes) + + +Fortunes + + + + + + + @for ((ForIterator i, Fortune fortune) : fortunes) { + + + + + } +
+<th>id</th> +<th>message</th>
+<td>@fortune.getId()</td> +<td>@fortune.getMessage()</td>
+ + diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/application.properties b/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/application.properties index 756e8f174d3..ea312cdef10 100644 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/application.properties +++ b/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/application.properties @@ -8,9 +8,9 @@ quarkus.datasource.jdbc.url=jdbc:postgresql://tfb-database:5432/hello_world?logg quarkus.datasource.jdbc.driver=org.postgresql.Driver quarkus.datasource.jdbc.transactions=disabled quarkus.datasource.jdbc.detect-statement-leaks=false -quarkus.datasource.jdbc.max-size=64 +quarkus.datasource.jdbc.max-size=512 quarkus.datasource.jdbc.min-size=16 -quarkus.datasource.jdbc.initial-size=64 +quarkus.datasource.jdbc.initial-size=512 quarkus.log.console.enable=true quarkus.log.console.level=INFO @@ -20,11 +20,18 @@ quarkus.log.level=INFO # Fully disable Hibernate ORM statistics gathering:: quarkus.log.category."org.hibernate.engine.internal.StatisticalLoggingSessionEventListener".level=WARN +# Explicitly disable 2LC as it's not used: +quarkus.hibernate-orm.second-level-caching-enabled=false + # To create the schema: %dev.quarkus.hibernate-orm.database.generation=drop-and-create %dev.quarkus.hibernate-orm.sql-load-script=import.sql quarkus.hibernate-orm.database.generation=validate quarkus.hibernate-orm.log.sql=false +mp.context.ThreadContext.propagated=None +mp.context.ThreadContext.cleared=None +mp.context.ThreadContext.unchanged=Remaining + diff --git a/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/fortunes.mustache b/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/resteasy-hibernate/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
-<th>id</th> -<th>message</th>
-<td>{{id}}</td> -<td>{{message}}</td>
- - diff --git a/frameworks/Java/quarkus/resteasy-hibernate/start-app.sh b/frameworks/Java/quarkus/resteasy-hibernate/start-app.sh deleted file mode 100755 index f224e71fb26..00000000000 --- a/frameworks/Java/quarkus/resteasy-hibernate/start-app.sh +++ /dev/null @@ -1 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=jdbc:postgresql://localhost:5432/hello_world?loggerLevel=OFF\&disableColumnSanitiser=true\&assumeMinServerVersion=12\&sslmode=disable -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/hibernate-1.0-SNAPSHOT-runner.jar diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/pom.xml b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/pom.xml index 77a1e4ba881..d0b6b2a25f9 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/pom.xml +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/pom.xml @@ -6,13 +6,16 @@ io.quarkus benchmark 1.0-SNAPSHOT - ../ io.quarkus.benchmark resteasy-reactive-hibernate-reactive + + io.quarkus + quarkus-benchmark-common + io.quarkus quarkus-scheduler @@ -42,10 +45,32 @@ netty-transport-native-epoll linux-x86_64 - - com.github.spullara.mustache.java - compiler - 0.9.6 - + + + + + com.fizzed + rocker-maven-plugin + 1.3.0 + + + generate-rocker-templates + generate-sources + + generate + + + ${project.basedir}/src/main/resources + true + true + false + + + + + + + + diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java index d19fe291e53..7537384fbfd 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java @@ -3,8 +3,10 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import javax.annotation.PostConstruct; import javax.inject.Singleton; +import io.vertx.core.MultiMap; import org.jboss.resteasy.reactive.server.ServerResponseFilter; import io.quarkus.scheduler.Scheduled; @@ -18,7 +20,12 @@ public class ServerHeaderFilter { private static final CharSequence SERVER_HEADER_VALUE = HttpHeaders.createOptimized("Quarkus"); private static final CharSequence DATE_HEADER_NAME = HttpHeaders.createOptimized("Date"); - private CharSequence date; + private volatile CharSequence date; + + @PostConstruct + public void init() { + date = HttpHeaders.createOptimized(DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now())); + } @Scheduled(every="1s") void increment() { @@ -27,7 +34,8 @@ void increment() { @ServerResponseFilter public void filter(HttpServerResponse response) { - response.putHeader(SERVER_HEADER_NAME, SERVER_HEADER_VALUE); - response.putHeader(DATE_HEADER_NAME, date); + MultiMap headers = response.headers(); + headers.add(SERVER_HEADER_NAME, SERVER_HEADER_VALUE); + headers.add(DATE_HEADER_NAME, date); } } \ No newline at end of file diff --git 
a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java index 8cc1e2e9a01..f171c3c9f1d 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/BaseRepository.java @@ -9,11 +9,16 @@ import io.smallrye.mutiny.Uni; public class BaseRepository { + @Inject protected Mutiny.SessionFactory sf; - public Uni inSession(Function> work){ - return sf.withSession(session -> work.apply(session)); + public Uni inSession(Function> work) { + return sf.openSession().chain( session -> work.apply( session ).eventually( session::close ) ); + } + + public Uni inStatelessSession(Function> work) { + return sf.openStatelessSession().chain( session -> work.apply( session ).eventually( session::close ) ); } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java index a34254a0825..33c9673a061 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java @@ -3,9 +3,6 @@ import java.util.List; import javax.enterprise.context.ApplicationScoped; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Root; import io.quarkus.benchmark.model.Fortune; import io.smallrye.mutiny.Uni; @@ -14,12 +11,9 @@ public class FortuneRepository extends BaseRepository { public Uni> findAll() { - return inSession(s -> { - CriteriaBuilder criteriaBuilder = sf.getCriteriaBuilder(); - CriteriaQuery fortuneQuery = criteriaBuilder.createQuery(Fortune.class); - Root from = fortuneQuery.from(Fortune.class); - fortuneQuery.select(from); - return s.createQuery(fortuneQuery).getResultList(); - }); + return inStatelessSession( + session -> session.createQuery("SELECT F FROM Fortune F", Fortune.class).getResultList() + ); } + } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java index bc236b0e4ca..1733bbee963 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java @@ -1,20 +1,17 @@ package io.quarkus.benchmark.repository; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; import javax.inject.Singleton; +import io.quarkus.benchmark.utils.LocalRandom; +import io.quarkus.benchmark.utils.Randomizer; import org.hibernate.reactive.mutiny.Mutiny; -import org.hibernate.reactive.mutiny.Mutiny.Session; import 
io.quarkus.benchmark.model.World; import io.smallrye.mutiny.Uni; - @Singleton public class WorldRepository extends BaseRepository { @@ -25,13 +22,13 @@ public class WorldRepository extends BaseRepository { */ public Uni createData() { return inSession(s -> { - final ThreadLocalRandom random = ThreadLocalRandom.current(); + final LocalRandom random = Randomizer.current(); int MAX = 10000; Uni[] unis = new Uni[MAX]; - for (int i=0; i null); } return Uni.combine().all().unis(unis).combinedWith(l -> null) @@ -40,26 +37,45 @@ public Uni createData() { }); } - public Uni find(int id) { - return inSession(session -> singleFind(session, id)); + public Uni> update(Mutiny.Session session, List worlds) { + return session + .setBatchSize(worlds.size()) + .flush() + .map(v -> worlds); } - public Uni> update(Mutiny.Session s, Collection worlds) { - return s.flush() - .map(v -> worlds); - } + public Uni> findStateless(int count) { + return inStatelessSession(session -> findStateless(session, count)); + } + + private Uni> findStateless(Mutiny.StatelessSession s, int count) { + //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs + // as one single operation as Hibernate is too smart and will switch to use batched loads automatically. + // Hence, use this awkward alternative: + final LocalRandom localRandom = Randomizer.current(); + List> l = new ArrayList<>(count); + for (int i = 0; i < count; i++) { + l.add(s.get(World.class, localRandom.getNextRandom())); + } + return Uni.join().all(l).andFailFast(); + } - public Uni> find(Session s, Set ids) { - //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs as one single operation - ArrayList> l = new ArrayList<>(ids.size()); - for (Integer id : ids) { - l.add(singleFind(s, id)); + public Uni> findManaged(Mutiny.Session s, int count) { + final List worlds = new ArrayList<>(count); + //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs + // as one single operation as Hibernate is too smart and will switch to use batched loads. 
+ // But also, we can't use "Uni#join" as we did in the above method as managed entities shouldn't use pipelining - + // so we also have to avoid Mutiny optimising things by establishing an explicit chain: + final LocalRandom localRandom = Randomizer.current(); + Uni loopRoot = Uni.createFrom().voidItem(); + for (int i = 0; i < count; i++) { + loopRoot = loopRoot.chain(() -> s.find(World.class, localRandom.getNextRandom()).invoke(word -> worlds.add(word)).replaceWithVoid()); } - return Uni.combine().all().unis(l).combinedWith(list -> (List)list); + return loopRoot.map(v -> worlds); } - private static Uni singleFind(final Mutiny.Session ss, final Integer id) { - return ss.find(World.class, id); + public Uni findStateless() { + return inStatelessSession(session -> session.get(World.class, Randomizer.current().getNextRandom())); } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java index 4f5316fd95f..1d81de25131 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/DbResource.java @@ -1,9 +1,6 @@ package io.quarkus.benchmark.resource; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; +import java.util.List; import javax.inject.Inject; import javax.ws.rs.GET; @@ -12,12 +9,16 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; +import io.quarkus.benchmark.utils.LocalRandom; +import io.quarkus.benchmark.utils.Randomizer; import org.hibernate.FlushMode; import org.hibernate.reactive.mutiny.Mutiny; import io.quarkus.benchmark.model.World; import io.quarkus.benchmark.repository.WorldRepository; +import io.smallrye.context.api.CurrentThreadContext; import io.smallrye.mutiny.Uni; +import org.eclipse.microprofile.context.ThreadContext; @Produces(MediaType.APPLICATION_JSON) @Path("/") @@ -28,80 +29,55 @@ public class DbResource { @GET @Path("db") + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) public Uni db() { - return randomWorld(); + return worldRepository.findStateless(); } @GET @Path("queries") - public Uni> queries(@QueryParam("queries") String queries) { - return worldRepository.inSession(session -> randomWorldForRead(session, parseQueryCount(queries))); + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + public Uni> queries(@QueryParam("queries") String queries) { + final int queryCount = parseQueryCount(queries); + return worldRepository.findStateless(queryCount); } - private Uni> randomWorldForRead(Mutiny.Session session, int count) { - Set ids = new HashSet<>(count); - int counter = 0; - while (counter < count) { - counter += ids.add(Integer.valueOf(randomWorldNumber())) ? 
1 : 0; - } - return worldRepository.find(session, ids); + @GET + @Path("createData") + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + public Uni createData() { + return worldRepository.createData(); + } + + private Uni> randomWorldsForWrite(Mutiny.Session session, int count) { + return worldRepository.findManaged(session, count); } @GET @Path("updates") - public Uni> updates(@QueryParam("queries") String queries) { + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + public Uni> updates(@QueryParam("queries") String queries) { return worldRepository.inSession(session -> { - // FIXME: not supported - // session.setJdbcBatchSize(worlds.size()); + session.setFlushMode(FlushMode.MANUAL); - var worlds = randomWorldForRead(session, parseQueryCount(queries)); + Uni> worlds = randomWorldsForWrite(session, parseQueryCount(queries)); return worlds.flatMap(worldsCollection -> { + final LocalRandom localRandom = Randomizer.current(); worldsCollection.forEach( w -> { //Read the one field, as required by the following rule: // # vi. At least the randomNumber field must be read from the database result set. final int previousRead = w.getRandomNumber(); //Update it, but make sure to exclude the current number as Hibernate optimisations would have us "fail" //the verification: - w.setRandomNumber(randomWorldNumber(previousRead)); + w.setRandomNumber(localRandom.getNextRandomExcluding(previousRead)); } ); - + return worldRepository.update(session, worldsCollection); }); }); } - private Uni randomWorld() { - return worldRepository.find(randomWorldNumber()); - } - - private int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } - - /** - * Also according to benchmark requirements, except that in this special case - * of the update test we need to ensure we'll actually generate an update operation: - * for this we need to generate a random number between 1 to 10000, but different - * from the current field value. - * @param previousRead - * @return - */ - private int randomWorldNumber(final int previousRead) { - //conceptually split the random space in those before previousRead, - //and those after: this approach makes sure to not affect the random characteristics. - final int trueRandom = ThreadLocalRandom.current().nextInt(9999) + 2; - if (trueRandom<=previousRead) { - //all figures equal or before the current field read need to be shifted back by one - //so to avoid hitting the same number while not affecting the distribution. - return trueRandom - 1; - } - else { - //Those after are generated by taking the generated value 2...10000 as is. 
- return trueRandom; - } - } - private int parseQueryCount(String textValue) { if (textValue == null) { return 1; diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java index 4b23849cb2e..1d92d04a0e0 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java @@ -1,47 +1,38 @@ package io.quarkus.benchmark.resource; -import java.io.StringWriter; -import java.util.Collections; -import java.util.Comparator; +import io.quarkus.benchmark.model.Fortune; +import io.quarkus.benchmark.repository.FortuneRepository; +import io.smallrye.context.api.CurrentThreadContext; +import io.smallrye.mutiny.Uni; +import io.vertx.core.buffer.Buffer; +import io.vertx.ext.web.templ.rocker.impl.VertxBufferOutput; +import org.eclipse.microprofile.context.ThreadContext; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; -import io.smallrye.mutiny.Uni; +import java.util.Comparator; @Path("/fortunes") public class FortuneResource { @Inject FortuneRepository repository; - private Mustache template; - private Comparator fortuneComparator; - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } + private static final Comparator fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); @Produces("text/html; charset=UTF-8") @GET - public Uni fortunes() { + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + public Uni fortunes() { return repository.findAll() .map(fortunes -> { fortunes.add(new Fortune(0, "Additional fortune added at request time.")); fortunes.sort(fortuneComparator); - StringWriter writer = new StringWriter(); - template.execute(writer, Collections.singletonMap("fortunes", fortunes)); - return writer.toString(); + return views.Fortunes.template(fortunes) + .render(VertxBufferOutput.FACTORY) + .getBuffer(); }); } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java index c1cd3a3eb14..7e49ce06dbf 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/JsonResource.java @@ -5,6 +5,10 @@ import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; +import io.smallrye.common.annotation.NonBlocking; +import io.smallrye.context.api.CurrentThreadContext; +import org.eclipse.microprofile.context.ThreadContext; + @Path("/json") public class JsonResource { @@ -12,7 +16,10 @@ public class 
JsonResource { @Produces(MediaType.APPLICATION_JSON) @GET + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + @NonBlocking public Message json() { + // https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Framework-Tests-Overview#json-serialization return new Message(HELLO); } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java index 58094ad21ef..a651b7914e0 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java @@ -1,19 +1,34 @@ package io.quarkus.benchmark.resource; +import java.nio.charset.StandardCharsets; + import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.smallrye.common.annotation.NonBlocking; +import io.smallrye.context.api.CurrentThreadContext; import io.vertx.core.buffer.Buffer; +import org.eclipse.microprofile.context.ThreadContext; @Path("/plaintext") public class PlaintextResource { private static final String HELLO_WORLD = "Hello, world!"; - private static final Buffer HELLO_WORLD_BUFFER = Buffer.factory.directBuffer(HELLO_WORLD, "UTF-8"); + private static final Buffer HELLO_WORLD_BUFFER; + + static { + ByteBuf nettyBuffer = ByteBufAllocator.DEFAULT.directBuffer(); + nettyBuffer.writeBytes(HELLO_WORLD.getBytes(StandardCharsets.UTF_8)); + HELLO_WORLD_BUFFER = Buffer.buffer(nettyBuffer); + } @Produces(MediaType.TEXT_PLAIN) @GET + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + @NonBlocking public Buffer plaintext() { return HELLO_WORLD_BUFFER; } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/application.properties b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/application.properties index 18e5ae2367b..eb6418b7ee2 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/application.properties +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/application.properties @@ -8,13 +8,22 @@ quarkus.datasource.reactive=true quarkus.datasource.reactive.url=postgresql://tfb-database:5432/hello_world %dev.quarkus.datasource.reactive.url=postgresql://localhost:5432/hello_world -quarkus.datasource.reactive.thread-local=true quarkus.datasource.reactive.cache-prepared-statements=true -quarkus.datasource.reactive.max-size=4 +quarkus.datasource.reactive.max-size=512 +quarkus.datasource.reactive.postgresql.pipelining-limit=100000 + +# Explicitly disable 2LC as it's not used: +quarkus.hibernate-orm.second-level-caching-enabled=false #quarkus.vertx.storage=false quarkus.log.console.enable=true quarkus.log.console.level=INFO quarkus.log.file.enable=false -quarkus.log.level=INFO \ No newline at end of file +quarkus.log.level=INFO + +quarkus.vertx.prefer-native-transport=true + +mp.context.ThreadContext.propagated=None +mp.context.ThreadContext.cleared=None +mp.context.ThreadContext.unchanged=Remaining diff --git 
a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/fortunes.mustache b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
- <th>id</th>
- <th>message</th>
- <td>{{id}}</td>
- <td>{{message}}</td>
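
The hunks above and below swap the runtime-compiled Mustache template for a Rocker template: rocker-maven-plugin (added in the pom.xml hunk earlier) generates a views.Fortunes class at build time, and FortuneResource renders it straight into a Vert.x Buffer. A minimal sketch of that call path, assuming the Fortune model's getters as used elsewhere in this patch:

    import io.quarkus.benchmark.model.Fortune;
    import io.vertx.core.buffer.Buffer;
    import io.vertx.ext.web.templ.rocker.impl.VertxBufferOutput;

    import java.util.List;

    final class FortunesRenderSketch {

        // Render the build-time-generated Rocker view straight into a Vert.x Buffer,
        // skipping the StringWriter round-trip the old Mustache path needed.
        static Buffer render(List<Fortune> fortunes) {
            return views.Fortunes.template(fortunes)
                    .render(VertxBufferOutput.FACTORY)
                    .getBuffer();
        }
    }

Rendering through VertxBufferOutput.FACTORY hands RESTEasy Reactive a ready-made Buffer, which is why the endpoint's return type changes from String/Uni<String> to Buffer/Uni<Buffer>.
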
- - diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/views/Fortunes.rocker.html b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/views/Fortunes.rocker.html new file mode 100644 index 00000000000..cfa4f8341e2 --- /dev/null +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/src/main/resources/views/Fortunes.rocker.html @@ -0,0 +1,8 @@ +@import java.util.* +@import io.quarkus.benchmark.model.* +@args(List fortunes) +Fortunes +@for ((ForIterator i, Fortune fortune) : fortunes) { + +} +
+<tr><th>id</th><th>message</th></tr>
+<tr><td>@fortune.getId()</td><td>@fortune.getMessage()</td></tr>
\ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/start-app.sh b/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/start-app.sh deleted file mode 100755 index 01b3e22ea2c..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate-reactive/start-app.sh +++ /dev/null @@ -1,2 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=vertx-reactive:postgresql://localhost:5432/hello_world -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/pgclient-1.0-SNAPSHOT-runner.jar - diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/pom.xml b/frameworks/Java/quarkus/resteasy-reactive-hibernate/pom.xml index 2336a8ee4c5..e54d6d1273f 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/pom.xml +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/pom.xml @@ -6,13 +6,16 @@ io.quarkus benchmark 1.0-SNAPSHOT - ../ io.quarkus.benchmark resteasy-reactive-hibernate + + io.quarkus + quarkus-benchmark-common + io.quarkus quarkus-hibernate-orm @@ -34,11 +37,40 @@ quarkus-jdbc-postgresql - com.github.spullara.mustache.java - compiler - 0.9.6 + io.vertx + vertx-web-templ-rocker + + + io.netty + netty-transport-native-epoll + linux-x86_64 - + + + + com.fizzed + rocker-maven-plugin + 1.3.0 + + + generate-rocker-templates + generate-sources + + generate + + + ${project.basedir}/src/main/resources + true + true + false + + + + + + + + diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java index 7043dd284e2..cb0998a760b 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java @@ -3,6 +3,7 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import javax.annotation.PostConstruct; import javax.inject.Singleton; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerResponseContext; @@ -16,7 +17,12 @@ @Provider public class ServerHeaderFilter implements ContainerResponseFilter { - private String date; + private volatile String date; + + @PostConstruct + public void init() { + date = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now()); + } @Scheduled(every="1s") void increment() { diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java index 9b845ee39e9..8a9c4d77c25 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java @@ -4,10 +4,6 @@ import javax.inject.Inject; import 
javax.inject.Singleton; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Root; - import org.hibernate.SessionFactory; import org.hibernate.StatelessSession; @@ -21,11 +17,7 @@ public class FortuneRepository { public List findAllStateless() { try (StatelessSession s = sf.openStatelessSession()) { - CriteriaBuilder criteriaBuilder = sf.getCriteriaBuilder(); - CriteriaQuery fortuneQuery = criteriaBuilder.createQuery(Fortune.class); - Root from = fortuneQuery.from(Fortune.class); - fortuneQuery.select(from); - return s.createQuery(fortuneQuery).getResultList(); + return s.createQuery("SELECT F FROM Fortune F", Fortune.class).getResultList(); } } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java index ee05796d892..6053a7d718c 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java @@ -1,10 +1,5 @@ package io.quarkus.benchmark.repository; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; - import javax.inject.Inject; import javax.inject.Singleton; import javax.transaction.Transactional; @@ -15,7 +10,8 @@ import org.hibernate.StatelessSession; import io.quarkus.benchmark.model.World; - +import io.quarkus.benchmark.utils.LocalRandom; +import io.quarkus.benchmark.utils.Randomizer; @Singleton public class WorldRepository { @@ -31,47 +27,53 @@ public class WorldRepository { @Transactional public void createData() { try (StatelessSession statelessSession = sf.openStatelessSession()) { - final ThreadLocalRandom random = ThreadLocalRandom.current(); + final LocalRandom random = Randomizer.current(); for (int i=1; i<=10000; i++) { final World world = new World(); world.setId(i); - world.setRandomNumber(1 + random.nextInt(10000)); + world.setRandomNumber(random.getNextRandom()); statelessSession.insert(world); } } } - public World findSingleAndStateless(int id) { + public World loadSingleWorldById(Integer id) { try (StatelessSession ss = sf.openStatelessSession()) { - return singleStatelessWorldLoad(ss,id); + return (World) ss.get(World.class, id); } } - @Transactional - public void updateAll(Collection worlds) { - try (Session s = sf.openSession()) { - s.setJdbcBatchSize(worlds.size()); - s.setHibernateFlushMode(FlushMode.MANUAL); - for (World w : worlds) { - s.update(w); + public World[] loadNWorlds(final int count) { + final World[] list = new World[count]; + final LocalRandom random = Randomizer.current(); + try (StatelessSession ss = sf.openStatelessSession()) { + //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs as one single operation + for (int i=0;i findReadonly(Set ids) { - try (StatelessSession s = sf.openStatelessSession()) { - //The rules require individual load: we can't use the Hibernate feature which allows load by multiple IDs as one single operation - ArrayList l = new ArrayList<>(ids.size()); - for (Integer id : ids) { - l.add(singleStatelessWorldLoad(s,id)); + public World[] updateNWorlds(final int count) { + //We're again forced to use the "individual load" pattern 
by the rules: + final World[] list = loadNWorlds(count); + final LocalRandom random = Randomizer.current(); + try (Session s = sf.openSession()) { + s.setJdbcBatchSize(count); + s.setHibernateFlushMode(FlushMode.MANUAL); + for (World w : list) { + //Read the one field, as required by the following rule: + // # vi. At least the randomNumber field must be read from the database result set. + final int previousRead = w.getRandomNumber(); + //Update it, but make sure to exclude the current number as Hibernate optimisations would otherwise + //skip the write operation: + w.setRandomNumber(random.getNextRandomExcluding(previousRead)); + s.update(w); } - return l; + s.flush(); } - } - - private static World singleStatelessWorldLoad(final StatelessSession ss, final Integer id) { - return (World) ss.get(World.class, id); + return list; } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java index 0354cc3aadd..29d0eec279a 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/DbResource.java @@ -1,10 +1,5 @@ package io.quarkus.benchmark.resource; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; - import javax.inject.Inject; import javax.inject.Singleton; import javax.ws.rs.Consumes; @@ -16,8 +11,7 @@ import io.quarkus.benchmark.model.World; import io.quarkus.benchmark.repository.WorldRepository; -import io.smallrye.common.annotation.Blocking; - +import io.quarkus.benchmark.utils.Randomizer; @Singleton @Path("/") @@ -28,25 +22,19 @@ public class DbResource { @Inject WorldRepository worldRepository; - @Blocking @GET @Path("/db") public World db() { - World world = randomWorldForRead(); - if (world==null) throw new IllegalStateException( "No data found in DB. Did you seed the database? Make sure to invoke /createdata once." ); - return world; + return worldRepository.loadSingleWorldById(Randomizer.current().getNextRandom()); } - @Blocking @GET @Path("/queries") public World[] queries(@QueryParam("queries") String queries) { final int count = parseQueryCount(queries); - World[] worlds = randomWorldForRead(count).toArray(new World[0]); - return worlds; + return worldRepository.loadNWorlds(count); } - @Blocking @GET @Path("/updates") //Rules: https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Framework-Tests-Overview#database-updates @@ -56,20 +44,9 @@ public World[] queries(@QueryParam("queries") String queries) { // all other tested frameworks seem to do. public World[] updates(@QueryParam("queries") String queries) { final int count = parseQueryCount(queries); - final Collection worlds = randomWorldForRead(count); - worlds.forEach( w -> { - //Read the one field, as required by the following rule: - // # vi. At least the randomNumber field must be read from the database result set. 
- final int previousRead = w.getRandomNumber(); - //Update it, but make sure to exclude the current number as Hibernate optimisations would have us "fail" - //the verification: - w.setRandomNumber(randomWorldNumber(previousRead)); - } ); - worldRepository.updateAll(worlds); - return worlds.toArray(new World[0]); + return worldRepository.updateNWorlds(count); } - @Blocking @GET @Path( "/createdata" ) public String createData() { @@ -77,52 +54,7 @@ public String createData() { return "OK"; } - private World randomWorldForRead() { - return worldRepository.findSingleAndStateless(randomWorldNumber()); - } - - private Collection randomWorldForRead(int count) { - Set ids = new HashSet<>(count); - int counter = 0; - while (counter < count) { - counter += ids.add(Integer.valueOf(randomWorldNumber())) ? 1 : 0; - } - return worldRepository.findReadonly(ids); - } - - /** - * According to benchmark requirements - * @return returns a number from 1 to 10000 - */ - private int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } - - - /** - * Also according to benchmark requirements, except that in this special case - * of the update test we need to ensure we'll actually generate an update operation: - * for this we need to generate a random number between 1 to 10000, but different - * from the current field value. - * @param previousRead - * @return - */ - private int randomWorldNumber(final int previousRead) { - //conceptually split the random space in those before previousRead, - //and those after: this approach makes sure to not affect the random characteristics. - final int trueRandom = ThreadLocalRandom.current().nextInt(9999) + 2; - if (trueRandom<=previousRead) { - //all figures equal or before the current field read need to be shifted back by one - //so to avoid hitting the same number while not affecting the distribution. - return trueRandom - 1; - } - else { - //Those after are generated by taking the generated value 2...10000 as is. 
- return trueRandom; - } - } - - private int parseQueryCount(String textValue) { + private int parseQueryCount(final String textValue) { if (textValue == null) { return 1; } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java index 2724a301a9d..902c37aa58b 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java @@ -1,10 +1,9 @@ package io.quarkus.benchmark.resource; -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; +import io.quarkus.benchmark.model.Fortune; +import io.quarkus.benchmark.repository.FortuneRepository; +import io.vertx.core.buffer.Buffer; +import io.vertx.ext.web.templ.rocker.impl.VertxBufferOutput; import javax.inject.Inject; import javax.inject.Singleton; @@ -13,14 +12,8 @@ import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; -import io.smallrye.common.annotation.Blocking; +import java.util.Comparator; +import java.util.List; @Singleton @Path("/") @@ -31,26 +24,17 @@ public class FortuneResource { @Inject FortuneRepository repository; - private final Mustache template; - private final Comparator fortuneComparator; - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } + private static final Comparator fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - @Blocking @GET @Path("/fortunes") - public String fortunes() { - List fortunes = new ArrayList<>(repository.findAllStateless()); + public Buffer fortunes() { + List fortunes = repository.findAllStateless(); fortunes.add(new Fortune(0, "Additional fortune added at request time.")); fortunes.sort(fortuneComparator); - - StringWriter writer = new StringWriter(); - template.execute(writer, Collections.singletonMap("fortunes", fortunes)); - - return writer.toString(); + return views.Fortunes.template(fortunes) + .render(VertxBufferOutput.FACTORY) + .getBuffer(); } + } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/JsonResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/JsonResource.java index c8bbb155545..f1ac7a22eb8 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/JsonResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/JsonResource.java @@ -1,5 +1,7 @@ package io.quarkus.benchmark.resource; +import io.smallrye.common.annotation.NonBlocking; + import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; @@ -11,6 +13,7 @@ public class JsonResource { @GET @Produces(MediaType.APPLICATION_JSON) + @NonBlocking public Message json() { 
return new Message(HELLO); } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java index 8acadbfeb7e..4dc3aa1b847 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java @@ -1,17 +1,34 @@ package io.quarkus.benchmark.resource; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.smallrye.common.annotation.NonBlocking; +import io.smallrye.context.api.CurrentThreadContext; +import io.vertx.core.buffer.Buffer; +import org.eclipse.microprofile.context.ThreadContext; + import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; +import java.nio.charset.StandardCharsets; @Path("/plaintext") public class PlaintextResource { - private static final String HELLO = "Hello, World!"; + private static final String HELLO_WORLD = "Hello, world!"; + private static final Buffer HELLO_WORLD_BUFFER; + + static { + ByteBuf nettyBuffer = ByteBufAllocator.DEFAULT.directBuffer(); + nettyBuffer.writeBytes(HELLO_WORLD.getBytes(StandardCharsets.UTF_8)); + HELLO_WORLD_BUFFER = Buffer.buffer(nettyBuffer); + } - @GET @Produces(MediaType.TEXT_PLAIN) - public String plaintext() { - return HELLO; + @GET + @CurrentThreadContext(propagated = {}, cleared = {}, unchanged = ThreadContext.ALL_REMAINING) + @NonBlocking + public Buffer plaintext() { + return HELLO_WORLD_BUFFER; } } diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/application.properties b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/application.properties index 756e8f174d3..a4fd992259c 100644 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/application.properties +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/application.properties @@ -8,9 +8,9 @@ quarkus.datasource.jdbc.url=jdbc:postgresql://tfb-database:5432/hello_world?logg quarkus.datasource.jdbc.driver=org.postgresql.Driver quarkus.datasource.jdbc.transactions=disabled quarkus.datasource.jdbc.detect-statement-leaks=false -quarkus.datasource.jdbc.max-size=64 +quarkus.datasource.jdbc.max-size=512 quarkus.datasource.jdbc.min-size=16 -quarkus.datasource.jdbc.initial-size=64 +quarkus.datasource.jdbc.initial-size=512 quarkus.log.console.enable=true quarkus.log.console.level=INFO @@ -19,6 +19,8 @@ quarkus.log.level=INFO # Fully disable Hibernate ORM statistics gathering:: quarkus.log.category."org.hibernate.engine.internal.StatisticalLoggingSessionEventListener".level=WARN +# Explicitly disable 2LC as it's not used: +quarkus.hibernate-orm.second-level-caching-enabled=false # To create the schema: %dev.quarkus.hibernate-orm.database.generation=drop-and-create @@ -26,5 +28,8 @@ quarkus.log.category."org.hibernate.engine.internal.StatisticalLoggingSessionEve quarkus.hibernate-orm.database.generation=validate quarkus.hibernate-orm.log.sql=false +mp.context.ThreadContext.propagated=None +mp.context.ThreadContext.cleared=None +mp.context.ThreadContext.unchanged=Remaining diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/fortunes.mustache 
b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
- <th>id</th>
- <th>message</th>
- <td>{{id}}</td>
- <td>{{message}}</td>
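
Both ServerHeaderFilter variants touched by this patch follow the same Date-header pattern: the RFC 1123 string is rebuilt once per second by a @Scheduled method and published through a volatile field seeded in @PostConstruct, so the per-request filter only copies a precomputed value. A standalone sketch of that pattern, with the Quarkus annotations noted in comments since this class is illustrative only:

    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    final class CachedDateHeader {

        // Seeded eagerly (the filters do this from @PostConstruct) so the very first
        // requests after startup never observe a null Date header.
        private volatile String date = now();

        // The filters refresh this from @Scheduled(every = "1s"); request threads
        // never format dates themselves.
        void refresh() {
            date = now();
        }

        String current() {
            return date;
        }

        private static String now() {
            return DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now());
        }
    }

The volatile field plus eager initialisation is exactly what the patch adds; the once-per-second refresh itself was already there.
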
- - diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/views/Fortunes.rocker.html b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/views/Fortunes.rocker.html new file mode 100644 index 00000000000..cfa4f8341e2 --- /dev/null +++ b/frameworks/Java/quarkus/resteasy-reactive-hibernate/src/main/resources/views/Fortunes.rocker.html @@ -0,0 +1,8 @@ +@import java.util.* +@import io.quarkus.benchmark.model.* +@args(List fortunes) +Fortunes +@for ((ForIterator i, Fortune fortune) : fortunes) { + +} +
+<tr><th>id</th><th>message</th></tr>
+<tr><td>@fortune.getId()</td><td>@fortune.getMessage()</td></tr>
\ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-hibernate/start-app.sh b/frameworks/Java/quarkus/resteasy-reactive-hibernate/start-app.sh deleted file mode 100755 index f224e71fb26..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-hibernate/start-app.sh +++ /dev/null @@ -1 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=jdbc:postgresql://localhost:5432/hello_world?loggerLevel=OFF\&disableColumnSanitiser=true\&assumeMinServerVersion=12\&sslmode=disable -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/hibernate-1.0-SNAPSHOT-runner.jar diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/.factorypath b/frameworks/Java/quarkus/resteasy-reactive-pgclient/.factorypath deleted file mode 100644 index 268ec46be2b..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/.factorypath +++ /dev/null @@ -1,117 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/pom.xml b/frameworks/Java/quarkus/resteasy-reactive-pgclient/pom.xml deleted file mode 100644 index eca074a80a1..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/pom.xml +++ /dev/null @@ -1,47 +0,0 @@ - - - 4.0.0 - - - io.quarkus - benchmark - 1.0-SNAPSHOT - ../ - - - io.quarkus.benchmark - resteasy-reactive-pgclient - - - - io.quarkus - quarkus-scheduler - - - io.quarkus - quarkus-reactive-pg-client - - - io.quarkus - quarkus-resteasy-reactive - - - io.quarkus - quarkus-resteasy-reactive-jackson - - - io.vertx - vertx-web-templ-rocker - - - io.netty - netty-transport-native-epoll - linux-x86_64 - - - com.github.spullara.mustache.java - compiler - 0.9.6 - - - diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java deleted file mode 100644 index d19fe291e53..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/filter/ServerHeaderFilter.java +++ /dev/null @@ -1,33 +0,0 @@ -package io.quarkus.benchmark.filter; - -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; - -import javax.inject.Singleton; - -import org.jboss.resteasy.reactive.server.ServerResponseFilter; - -import io.quarkus.scheduler.Scheduled; -import io.vertx.core.http.HttpHeaders; -import io.vertx.core.http.HttpServerResponse; - -@Singleton -public class ServerHeaderFilter { - - private static final CharSequence SERVER_HEADER_NAME = HttpHeaders.createOptimized("Server"); - private static final CharSequence SERVER_HEADER_VALUE = HttpHeaders.createOptimized("Quarkus"); - private static final CharSequence DATE_HEADER_NAME = HttpHeaders.createOptimized("Date"); - - private CharSequence date; - - @Scheduled(every="1s") - void increment() { - date = 
HttpHeaders.createOptimized(DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now())); - } - - @ServerResponseFilter - public void filter(HttpServerResponse response) { - response.putHeader(SERVER_HEADER_NAME, SERVER_HEADER_VALUE); - response.putHeader(DATE_HEADER_NAME, date); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java deleted file mode 100644 index 0e32970c9db..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/Fortune.java +++ /dev/null @@ -1,48 +0,0 @@ -package io.quarkus.benchmark.model; - -import java.util.Objects; - -public class Fortune { - - private int id; - private String message; - - public Fortune() {} - - public Fortune(int id, String message) { - this.id = id; - this.message = message; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - Fortune fortune = (Fortune) o; - return id == fortune.id && - Objects.equals(message, fortune.message); - } - - @Override - public int hashCode() { - return Objects.hash(id, message); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/World.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/World.java deleted file mode 100644 index 0d5205ae361..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/model/World.java +++ /dev/null @@ -1,35 +0,0 @@ -package io.quarkus.benchmark.model; - -public class World implements Comparable{ - - private int id; - private int randomNumber; - - public World() {} - - public World(int id, int randomNumber) { - this.id = id; - this.randomNumber = randomNumber; - } - - public int getId() { - return id; - } - - public void setId(int id) { - this.id = id; - } - - public int getRandomNumber() { - return randomNumber; - } - - public void setRandomNumber(int randomNumber) { - this.randomNumber = randomNumber; - } - - @Override - public int compareTo(World o) { - return Integer.compare(id, o.id); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java deleted file mode 100644 index 4415583fa1d..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/FortuneRepository.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.ArrayList; -import java.util.List; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import io.quarkus.benchmark.model.Fortune; -import io.smallrye.mutiny.Uni; -import io.vertx.mutiny.sqlclient.Row; - -@ApplicationScoped -public class FortuneRepository { - - @Inject - PgClients clients; - - public Uni> findAll() { - return 
clients.getClient().preparedQuery("SELECT * FROM Fortune" ) - .execute() - .map(rowset -> { - List ret = new ArrayList<>(rowset.size()+1); - for(Row r : rowset) { - ret.add(new Fortune(r.getInteger("id"), r.getString("message"))); - } - return ret; - }); - } -} diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java deleted file mode 100644 index ef99489cb26..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClientFactory.java +++ /dev/null @@ -1,56 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; -import javax.inject.Inject; - -import org.eclipse.microprofile.config.inject.ConfigProperty; - -import io.vertx.mutiny.core.Vertx; -import io.vertx.mutiny.pgclient.PgPool; -import io.vertx.pgclient.PgConnectOptions; -import io.vertx.sqlclient.PoolOptions; - -@ApplicationScoped -public class PgClientFactory { - - // vertx-reactive:postgresql://tfb-database:5432/hello_world - private static final String PG_URI_MATCHER = "vertx-reactive:postgresql://([-a-zA-Z]+):([0-9]+)/(.*)"; - - @ConfigProperty(name = "quarkus.datasource.url") - String url; - - @ConfigProperty(name = "quarkus.datasource.username") - String user; - - @ConfigProperty(name = "quarkus.datasource.password") - String pass; - - @Inject - Vertx vertx; - - @Produces - @ApplicationScoped - public PgClients pgClients() { - return new PgClients(this); - } - - - PgPool sqlClient(int size) { - PoolOptions options = new PoolOptions(); - PgConnectOptions connectOptions = new PgConnectOptions(); - Matcher matcher = Pattern.compile(PG_URI_MATCHER).matcher(url); - matcher.matches(); - connectOptions.setDatabase(matcher.group(3)); - connectOptions.setHost(matcher.group(1)); - connectOptions.setPort(Integer.parseInt(matcher.group(2))); - connectOptions.setUser(user); - connectOptions.setPassword(pass); - connectOptions.setCachePreparedStatements(true); - options.setMaxSize(size); - return PgPool.pool(vertx, connectOptions, options); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java deleted file mode 100644 index 871e9b75a6e..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/PgClients.java +++ /dev/null @@ -1,38 +0,0 @@ -package io.quarkus.benchmark.repository; - -import io.vertx.mutiny.pgclient.PgPool; -import io.vertx.mutiny.sqlclient.SqlClient; - -class PgClients { - private static final int POOL_SIZE = 4; - - private ThreadLocal sqlClient = new ThreadLocal<>(); - private ThreadLocal pool = new ThreadLocal<>(); - private PgClientFactory pgClientFactory; - - // for ArC - public PgClients() { - } - - public PgClients(PgClientFactory pgClientFactory) { - this.pgClientFactory = pgClientFactory; - } - - SqlClient getClient() { - SqlClient ret = sqlClient.get(); - if(ret == null) { - ret = pgClientFactory.sqlClient(1); - sqlClient.set(ret); - } - return ret; - } - - synchronized PgPool getPool() { - PgPool ret = pool.get(); - if(ret == null) { - ret = 
pgClientFactory.sqlClient(POOL_SIZE); - pool.set(ret); - } - return ret; - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java deleted file mode 100644 index 2d9c71d1259..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/repository/WorldRepository.java +++ /dev/null @@ -1,40 +0,0 @@ -package io.quarkus.benchmark.repository; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - -import io.quarkus.benchmark.model.World; -import io.smallrye.mutiny.Uni; -import io.vertx.mutiny.sqlclient.Row; -import io.vertx.mutiny.sqlclient.Tuple; - -@ApplicationScoped -public class WorldRepository { - - @Inject - PgClients clients; - - public Uni find(int id) { - return clients.getClient().preparedQuery("SELECT id, randomNumber FROM World WHERE id = $1") - .execute(Tuple.of(id)) - .map(rowset -> { - Row row = rowset.iterator().next(); - return new World(row.getInteger(0), row.getInteger(1)); - }); - } - - public Uni update(World[] worlds) { - Arrays.sort(worlds); - List args = new ArrayList<>(worlds.length); - for (World world : worlds) { - args.add(Tuple.of(world.getId(), world.getRandomNumber())); - } - return clients.getPool().preparedQuery("UPDATE World SET randomNumber = $2 WHERE id = $1") - .executeBatch(args) - .map(v -> null); - } -} diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java deleted file mode 100644 index fe87893d095..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/DbResource.java +++ /dev/null @@ -1,83 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.ThreadLocalRandom; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - -import io.quarkus.benchmark.model.World; -import io.quarkus.benchmark.repository.WorldRepository; -import io.smallrye.mutiny.Uni; - -@Produces(MediaType.APPLICATION_JSON) -@Path("/") -public class DbResource { - - @Inject - WorldRepository worldRepository; - - @GET - @Path("db") - public Uni db() { - return randomWorld(); - } - - @GET - @Path("queries") - public Uni> queries(@QueryParam("queries") String queries) { - var worlds = new Uni[parseQueryCount(queries)]; - var ret = new World[worlds.length]; - Arrays.setAll(worlds, i -> { - return randomWorld().map(w -> ret[i] = w); - }); - - return Uni.combine().all().unis(worlds) - .combinedWith(v -> Arrays.asList(ret)); - } - - @GET - @Path("updates") - public Uni> updates(@QueryParam("queries") String queries) { - var worlds = new Uni[parseQueryCount(queries)]; - var ret = new World[worlds.length]; - Arrays.setAll(worlds, i -> { - return randomWorld().map(w -> { - w.setRandomNumber(randomWorldNumber()); - ret[i] = w; - return w; - }); - }); - - return Uni.combine().all().unis(worlds) - .combinedWith(v -> null) - .flatMap(v -> worldRepository.update(ret)) - .map(v -> 
Arrays.asList(ret)); - } - - private Uni randomWorld() { - return worldRepository.find(randomWorldNumber()); - } - - private int randomWorldNumber() { - return 1 + ThreadLocalRandom.current().nextInt(10000); - } - - private int parseQueryCount(String textValue) { - if (textValue == null) { - return 1; - } - int parsedValue; - try { - parsedValue = Integer.parseInt(textValue); - } catch (NumberFormatException e) { - return 1; - } - return Math.min(500, Math.max(1, parsedValue)); - } -} \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java deleted file mode 100644 index 4b23849cb2e..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/FortuneResource.java +++ /dev/null @@ -1,47 +0,0 @@ -package io.quarkus.benchmark.resource; - -import java.io.StringWriter; -import java.util.Collections; -import java.util.Comparator; - -import javax.inject.Inject; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; - -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; - -import io.quarkus.benchmark.model.Fortune; -import io.quarkus.benchmark.repository.FortuneRepository; -import io.smallrye.mutiny.Uni; - -@Path("/fortunes") -public class FortuneResource { - - @Inject - FortuneRepository repository; - private Mustache template; - private Comparator fortuneComparator; - - - public FortuneResource() { - MustacheFactory mf = new DefaultMustacheFactory(); - template = mf.compile("fortunes.mustache"); - fortuneComparator = Comparator.comparing(fortune -> fortune.getMessage()); - } - - @Produces("text/html; charset=UTF-8") - @GET - public Uni fortunes() { - return repository.findAll() - .map(fortunes -> { - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(fortuneComparator); - StringWriter writer = new StringWriter(); - template.execute(writer, Collections.singletonMap("fortunes", fortunes)); - return writer.toString(); - }); - } -} diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java deleted file mode 100644 index c1cd3a3eb14..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/JsonResource.java +++ /dev/null @@ -1,19 +0,0 @@ -package io.quarkus.benchmark.resource; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - -@Path("/json") -public class JsonResource { - - private static final String HELLO = "Hello, World!"; - - @Produces(MediaType.APPLICATION_JSON) - @GET - public Message json() { - return new Message(HELLO); - } -} - diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java deleted file mode 100644 index 73f0ffefb8a..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/Message.java +++ /dev/null @@ -1,13 +0,0 @@ -package 
io.quarkus.benchmark.resource; - -public class Message { - private final String message; - - public Message(String message) { - this.message = message; - } - - public String getMessage() { - return message; - } -} diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java deleted file mode 100644 index 58094ad21ef..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/java/io/quarkus/benchmark/resource/PlaintextResource.java +++ /dev/null @@ -1,20 +0,0 @@ -package io.quarkus.benchmark.resource; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - -import io.vertx.core.buffer.Buffer; - -@Path("/plaintext") -public class PlaintextResource { - private static final String HELLO_WORLD = "Hello, world!"; - private static final Buffer HELLO_WORLD_BUFFER = Buffer.factory.directBuffer(HELLO_WORLD, "UTF-8"); - - @Produces(MediaType.TEXT_PLAIN) - @GET - public Buffer plaintext() { - return HELLO_WORLD_BUFFER; - } -} diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/application.properties b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/application.properties deleted file mode 100644 index 0a1d6083f99..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/application.properties +++ /dev/null @@ -1,10 +0,0 @@ -quarkus.datasource.url=vertx-reactive:postgresql://tfb-database:5432/hello_world -%dev.quarkus.datasource.url=vertx-reactive:postgresql://localhost:5432/hello_world -quarkus.datasource.username=benchmarkdbuser -quarkus.datasource.password=benchmarkdbpass -quarkus.datasource.reactive.max-size=64 -quarkus.log.console.enable=true -quarkus.log.console.level=INFO -quarkus.log.file.enable=false -quarkus.log.level=INFO -quarkus.vertx.prefer-native-transport=true \ No newline at end of file diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/fortunes.mustache b/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/fortunes.mustache deleted file mode 100644 index f9664a72eee..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/src/main/resources/fortunes.mustache +++ /dev/null @@ -1,20 +0,0 @@ - - - - Fortunes - - - - - - - - {{#fortunes}} - - - - - {{/fortunes}} -
- <th>id</th>
- <th>message</th>
- <td>{{id}}</td>
- <td>{{message}}</td>
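
The update paths above all rely on the same random-number trick that the deleted randomWorldNumber(previousRead) helpers spelled out and that LocalRandom.getNextRandomExcluding(previousRead) presumably now encapsulates: draw from the 9999 values 2..10000 and shift everything at or below the previously read value down by one, so the result stays uniform over 1..10000 yet can never equal the value just read (which would let Hibernate skip the UPDATE and fail verification). A self-contained sketch using only ThreadLocalRandom; the class and method names are illustrative:

    import java.util.concurrent.ThreadLocalRandom;

    final class RandomWorldNumbers {

        // Uniform over 1..10000, as the benchmark rules require.
        static int next() {
            return 1 + ThreadLocalRandom.current().nextInt(10000);
        }

        // Uniform over 1..10000 excluding previousRead: pick from 2..10000 (9999 values)
        // and shift values <= previousRead down by one, leaving the distribution intact.
        static int nextExcluding(int previousRead) {
            int trueRandom = ThreadLocalRandom.current().nextInt(9999) + 2;
            return trueRandom <= previousRead ? trueRandom - 1 : trueRandom;
        }
    }
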
- - diff --git a/frameworks/Java/quarkus/resteasy-reactive-pgclient/start-app.sh b/frameworks/Java/quarkus/resteasy-reactive-pgclient/start-app.sh deleted file mode 100755 index 01b3e22ea2c..00000000000 --- a/frameworks/Java/quarkus/resteasy-reactive-pgclient/start-app.sh +++ /dev/null @@ -1,2 +0,0 @@ -java -XX:+FlightRecorder -XX:+UseParallelGC -Dquarkus.datasource.url=vertx-reactive:postgresql://localhost:5432/hello_world -Dquarkus.http.host=127.0.0.1 -Djava.lang.Integer.IntegerCache.high=10000 -Dvertx.disableHttpHeadersValidation=true -Dvertx.disableMetrics=true -Dvertx.disableH2c=true -Dvertx.disableWebsockets=true -Dvertx.flashPolicyHandler=false -Dvertx.threadChecks=false -Dvertx.disableContextTimings=true -Dvertx.disableTCCL=true -Dhibernate.allow_update_outside_transaction=true -Djboss.threads.eqe.statistics=false -jar target/pgclient-1.0-SNAPSHOT-runner.jar - diff --git a/frameworks/Java/quarkus/run_quarkus.sh b/frameworks/Java/quarkus/run_quarkus.sh new file mode 100755 index 00000000000..893bf103cf6 --- /dev/null +++ b/frameworks/Java/quarkus/run_quarkus.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +# JFR: -XX:+FlightRecorder -XX:StartFlightRecording=duration=60s,filename=/quarkus/trace.jfr +# and use docker cp to read it + +# PROFILING: -XX:+UnlockDiagnosticVMOptions -XX:+DebugNonSafepoints + +# DEBUG: -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005 + +# Consider using -Dquarkus.http.io-threads=$((`grep --count ^processor /proc/cpuinfo`)) \ + +JAVA_OPTIONS="-server \ + -Dquarkus.vertx.prefer-native-transport=true \ + -XX:-StackTraceInThrowable \ + -Dquarkus.http.accept-backlog=-1 \ + -Dio.netty.buffer.checkBounds=false \ + -Dio.netty.buffer.checkAccessible=false \ + -Djava.util.logging.manager=org.jboss.logmanager.LogManager \ + -XX:-UseBiasedLocking \ + -XX:+UseStringDeduplication \ + -XX:+UseNUMA \ + -XX:+UseParallelGC \ + -Djava.lang.Integer.IntegerCache.high=10000 \ + -Dvertx.disableURIValidation=true \ + -Dvertx.disableHttpHeadersValidation=true \ + -Dvertx.disableMetrics=true \ + -Dvertx.disableH2c=true \ + -Dvertx.disableWebsockets=true \ + -Dvertx.flashPolicyHandler=false \ + -Dvertx.threadChecks=false \ + -Dvertx.disableContextTimings=true \ + -Dhibernate.allow_update_outside_transaction=true \ + -Dio.quarkus.vertx.core.runtime.context.VertxContextSafetyToggle.I_HAVE_CHECKED_EVERYTHING=true \ + -Djboss.threads.eqe.statistics=false \ + -Dmutiny.disableCallBackDecorators=true \ + $@" + +java $JAVA_OPTIONS -jar quarkus-run.jar diff --git a/frameworks/Java/rapidoid/pom.xml b/frameworks/Java/rapidoid/pom.xml index 79d57238b8a..eed7ef401d9 100644 --- a/frameworks/Java/rapidoid/pom.xml +++ b/frameworks/Java/rapidoid/pom.xml @@ -23,12 +23,12 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 org.postgresql postgresql - 42.3.3 + 42.4.1 com.zaxxer diff --git a/frameworks/Java/redkale/benchmark_config.json b/frameworks/Java/redkale/benchmark_config.json index cc2177a1842..465d9365ab3 100644 --- a/frameworks/Java/redkale/benchmark_config.json +++ b/frameworks/Java/redkale/benchmark_config.json @@ -5,6 +5,11 @@ "default": { "plaintext_url": "/plaintext", "json_url": "/json", + "db_url": "/db", + "query_url": "/queries?q=", + "fortune_url": "/fortunes", + "update_url": "/updates?q=", + "cached_query_url": "/cached-worlds?q=", "port": 8080, "approach": "Realistic", "classification": "Fullstack", @@ -27,7 +32,7 @@ "db_url": "/db", "query_url": "/queries?q=", "fortune_url": "/fortunes", - "update_url2": "/updates?q=", + "update_url": "/updates?q=", "cached_query_url": 
"/cached-worlds?q=", "port": 8080, "approach": "Realistic", @@ -51,7 +56,7 @@ "db_url": "/db", "query_url": "/queries?q=", "fortune_url": "/fortunes", - "update_url2": "/updates?q=", + "update_url": "/updates?q=", "cached_query_url": "/cached-worlds?q=", "port": 8080, "approach": "Realistic", @@ -69,54 +74,15 @@ "notes": "", "versus": "Redkale" }, - "cache": { - "cached_query_url": "/cached-worlds?q=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale", - "notes": "", - "versus": "Redkale" - }, - "postgres": { + "mysql": { "db_url": "/db", "query_url": "/queries?q=", "fortune_url": "/fortunes", - "update_url2": "/updates?q=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale-postgres", - "notes": "", - "versus": "Redkale" - }, - "mongodb": { - "db_url": "/db", - "query_url2": "/queries?q=", - "fortune_url": "/fortunes", - "update_url2": "/updates?q=", + "update_url": "/updates?q=", "port": 8080, "approach": "Realistic", "classification": "Fullstack", - "database": "Mongodb", + "database": "MySQL", "framework": "Redkale", "language": "Java", "flavor": "None", @@ -125,7 +91,7 @@ "webserver": "Redkale", "os": "Linux", "database_os": "Linux", - "display_name": "redkale-mongodb", + "display_name": "redkale-mysql", "notes": "", "versus": "Redkale" } diff --git a/frameworks/Java/redkale/conf/persistence-mongodb.xml b/frameworks/Java/redkale/conf/persistence-mongodb.xml deleted file mode 100644 index 5081624cc65..00000000000 --- a/frameworks/Java/redkale/conf/persistence-mongodb.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/frameworks/Java/redkale/conf/persistence-mysql.xml b/frameworks/Java/redkale/conf/persistence-mysql.xml deleted file mode 100644 index f2754f27ddc..00000000000 --- a/frameworks/Java/redkale/conf/persistence-mysql.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/frameworks/Java/redkale/conf/persistence.xml b/frameworks/Java/redkale/conf/persistence.xml deleted file mode 100644 index 291ecc0ae7c..00000000000 --- a/frameworks/Java/redkale/conf/persistence.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - ALL - - - - - - - - - ALL - - - - - - - - diff --git a/frameworks/Java/redkale/conf/source-mysql.properties b/frameworks/Java/redkale/conf/source-mysql.properties new file mode 100644 index 00000000000..7510be6f998 --- /dev/null +++ b/frameworks/Java/redkale/conf/source-mysql.properties @@ -0,0 +1,6 @@ + + +############ DataSource ############ +redkale.datasource[].url = jdbc:mysql://tfb-database:3306/hello_world?useSSL=false&rewriteBatchedStatements=true&serverTimezone=UTC&characterEncoding=utf8 +redkale.datasource[].user = benchmarkdbuser +redkale.datasource[].password = benchmarkdbpass diff --git a/frameworks/Java/redkale/conf/source.properties b/frameworks/Java/redkale/conf/source.properties new file mode 100644 index 00000000000..e8ae20ccb12 --- /dev/null +++ b/frameworks/Java/redkale/conf/source.properties @@ -0,0 +1,6 @@ + + +############ DataSource ############ 
+redkale.datasource[].url = jdbc:postgresql://tfb-database:5432/hello_world +redkale.datasource[].user = benchmarkdbuser +redkale.datasource[].password = benchmarkdbpass diff --git a/frameworks/Java/redkale/config.toml b/frameworks/Java/redkale/config.toml index 78eb7fed82c..38946edc91d 100644 --- a/frameworks/Java/redkale/config.toml +++ b/frameworks/Java/redkale/config.toml @@ -4,9 +4,14 @@ name = "redkale" [main] urls.plaintext = "/plaintext" urls.json = "/json" +urls.db = "/db" +urls.fortune = "/fortunes" +urls.query = "/queries?q=" +urls.update = "/updates?q=" +urls.cached_query = "/cached-worlds?q=" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "Postgres" database_os = "Linux" os = "Linux" orm = "Raw" @@ -20,7 +25,7 @@ urls.json = "/json" urls.db = "/db" urls.fortune = "/fortunes" urls.query = "/queries?q=" -urls.update2 = "/updates?q=" +urls.update = "/updates?q=" urls.cached_query = "/cached-worlds?q=" approach = "Realistic" classification = "Fullstack" @@ -38,19 +43,7 @@ urls.json = "/json" urls.db = "/db" urls.fortune = "/fortunes" urls.query = "/queries?q=" -urls.update2 = "/updates?q=" -urls.cached_query = "/cached-worlds?q=" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Redkale" -webserver = "Redkale" -versus = "Redkale" - -[cache] +urls.update = "/updates?q=" urls.cached_query = "/cached-worlds?q=" approach = "Realistic" classification = "Fullstack" @@ -62,29 +55,14 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[postgres] +[mysql] urls.db = "/db" urls.fortune = "/fortunes" urls.query = "/queries?q=" -urls.update2 = "/updates?q=" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Redkale" -webserver = "Redkale" -versus = "Redkale" - -[mongodb] -urls.db = "/db" -urls.fortune = "/fortunes" -urls.query2 = "/queries?q=" -urls.update2 = "/updates?q=" +urls.update = "/updates?q=" approach = "Realistic" classification = "Fullstack" -database = "Mongodb" +database = "MySQL" database_os = "Linux" os = "Linux" orm = "Raw" diff --git a/frameworks/Java/redkale/pom-mongodb.xml b/frameworks/Java/redkale/pom-mongodb.xml deleted file mode 100644 index dc297018b8d..00000000000 --- a/frameworks/Java/redkale/pom-mongodb.xml +++ /dev/null @@ -1,123 +0,0 @@ - - 4.0.0 - org.redkalex - redkale-benchmark - 1.0.0 - - - - org.redkale.boot.Application - 2.7.0-SNAPSHOT - 1.0.0 - UTF-8 - 11 - 11 - - - - - org.redkale - redkale - ${redkale.version} - - - - org.redkalex - redkale-plugins - ${redkale.version} - - - - org.mongodb - mongodb-driver-reactivestreams - 4.3.2 - - - - - - central - Central Repository - https://repo.maven.apache.org/maven2 - - - sonatype-nexus-snapshots - Sonatype Nexus Snapshots - https://oss.sonatype.org/content/repositories/snapshots - - - - - - central - Central Repository - https://repo.maven.apache.org/maven2 - - - sonatype-nexus-snapshots - Sonatype Nexus Snapshots - https://oss.sonatype.org/content/repositories/snapshots - - false - - - true - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.0 - - - - org.redkale.maven.plugins - redkale-maven-plugin - 1.1.0-SNAPSHOT - - - --allow-incomplete-classpath - --no-fallback - - - - - redkale-compile - process-classes - - compile - - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.2.0 - - - package - - shade - - - - - ${main.class} - - - - - - - - - - - \ No newline 
at end of file diff --git a/frameworks/Java/redkale/pom.xml b/frameworks/Java/redkale/pom.xml index c93ee011b2f..b69d1efd407 100644 --- a/frameworks/Java/redkale/pom.xml +++ b/frameworks/Java/redkale/pom.xml @@ -7,8 +7,7 @@ org.redkale.boot.Application - 2.7.0-SNAPSHOT - 1.0.0 + 2.8.0-SNAPSHOT UTF-8 11 11 @@ -66,16 +65,15 @@ org.apache.maven.plugins maven-compiler-plugin - 3.8.0 + 3.10.0 org.redkale.maven.plugins redkale-maven-plugin - 1.1.0-SNAPSHOT + 1.2.0-SNAPSHOT - --allow-incomplete-classpath --no-fallback @@ -93,7 +91,7 @@ org.apache.maven.plugins maven-shade-plugin - 3.2.0 + 3.3.0 package diff --git a/frameworks/Java/redkale/redkale-cache.dockerfile b/frameworks/Java/redkale/redkale-cache.dockerfile deleted file mode 100644 index d4c547cf714..00000000000 --- a/frameworks/Java/redkale/redkale-cache.dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM maven:3.8.4-openjdk-17-slim as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -RUN mvn package -q - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-DAPP_HOME=./", "-jar", "/redkale/target/redkale-benchmark-1.0.0.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale-graalvm.dockerfile b/frameworks/Java/redkale/redkale-graalvm.dockerfile index 087e2caeb8a..205bf8284da 100644 --- a/frameworks/Java/redkale/redkale-graalvm.dockerfile +++ b/frameworks/Java/redkale/redkale-graalvm.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.8.1-openjdk-17-slim as maven +FROM maven:3.8.6-openjdk-18-slim as maven WORKDIR /redkale COPY src src COPY conf conf @@ -6,7 +6,7 @@ COPY pom.xml pom.xml RUN mvn package -q -FROM ghcr.io/graalvm/graalvm-ce:21.3.0 +FROM instructure/graalvm-ce:22-java17 WORKDIR /redkale COPY conf conf COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar diff --git a/frameworks/Java/redkale/redkale-mongodb.dockerfile b/frameworks/Java/redkale/redkale-mongodb.dockerfile deleted file mode 100644 index 2379207dc1a..00000000000 --- a/frameworks/Java/redkale/redkale-mongodb.dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM maven:3.8.4-openjdk-17-slim as maven -WORKDIR /redkale -COPY src src -COPY conf conf -RUN rm conf/persistence.xml -RUN mv conf/persistence-mongodb.xml conf/persistence.xml -COPY pom-mongodb.xml pom.xml -RUN mvn package -q - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-DAPP_HOME=./", "-jar", "/redkale/target/redkale-benchmark-1.0.0.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale-mysql.dockerfile b/frameworks/Java/redkale/redkale-mysql.dockerfile index 0b0f91b6805..640a714796d 100644 --- a/frameworks/Java/redkale/redkale-mysql.dockerfile +++ b/frameworks/Java/redkale/redkale-mysql.dockerfile @@ -1,9 +1,9 @@ -FROM maven:3.8.4-openjdk-17-slim as maven +FROM maven:3.8.6-openjdk-18-slim as maven WORKDIR /redkale COPY src src COPY conf conf -RUN rm conf/persistence.xml -RUN mv conf/persistence-mysql.xml conf/persistence.xml +RUN rm conf/source.properties +RUN mv conf/source-mysql.properties conf/source.properties COPY pom.xml pom.xml RUN mvn package -q diff --git a/frameworks/Java/redkale/redkale-native.dockerfile b/frameworks/Java/redkale/redkale-native.dockerfile index d205cbbb5a8..f3ea7e3ce8b 100644 --- a/frameworks/Java/redkale/redkale-native.dockerfile +++ b/frameworks/Java/redkale/redkale-native.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.8.4-openjdk-17-slim as maven +FROM maven:3.8.6-openjdk-18-slim as 
maven WORKDIR /redkale COPY src src COPY conf conf @@ -6,7 +6,7 @@ COPY pom.xml pom.xml RUN mvn package -q -FROM ghcr.io/graalvm/graalvm-ce:21.3.0 +FROM ghcr.io/graalvm/graalvm-ce:ol8-java17-22.1.0-b1 RUN gu install native-image WORKDIR /redkale COPY conf conf diff --git a/frameworks/Java/redkale/redkale-postgres.dockerfile b/frameworks/Java/redkale/redkale-postgres.dockerfile deleted file mode 100644 index d4c547cf714..00000000000 --- a/frameworks/Java/redkale/redkale-postgres.dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM maven:3.8.4-openjdk-17-slim as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -RUN mvn package -q - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-DAPP_HOME=./", "-jar", "/redkale/target/redkale-benchmark-1.0.0.jar"] \ No newline at end of file diff --git a/frameworks/Java/redkale/redkale.dockerfile b/frameworks/Java/redkale/redkale.dockerfile index d4c547cf714..e178ec23f52 100644 --- a/frameworks/Java/redkale/redkale.dockerfile +++ b/frameworks/Java/redkale/redkale.dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.8.4-openjdk-17-slim as maven +FROM maven:3.8.6-openjdk-18-slim as maven WORKDIR /redkale COPY src src COPY conf conf diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java index 852a640b048..73f2f057294 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/BenchmarkService.java @@ -5,6 +5,7 @@ */ package org.redkalex.benchmark; +import java.util.*; import java.util.concurrent.*; import java.util.stream.*; import javax.annotation.Resource; @@ -37,7 +38,7 @@ public byte[] getHelloBytes() { @RestMapping(name = "json") public Message getHelloMessage() { - return Message.create("Hello, World!"); + return new Message("Hello, World!"); } @RestMapping(name = "db") @@ -46,25 +47,28 @@ public CompletableFuture findWorldAsync() { } @RestMapping(name = "queries") - public CompletableFuture queryWorldAsync(int q) { + public CompletableFuture> queryWorldAsync(int q) { int size = Math.min(500, Math.max(1, q)); IntStream ids = ThreadLocalRandom.current().ints(size, 1, 10001); - return source.findsAsync(World.class, ids.boxed()); + return source.findsListAsync(World.class, ids.boxed()); } @RestMapping(name = "updates") - public CompletableFuture updateWorldAsync(int q) { + public CompletableFuture> updateWorldAsync(int q) { int size = Math.min(500, Math.max(1, q)); IntStream ids = ThreadLocalRandom.current().ints(size, 1, 10001); int[] newNumbers = ThreadLocalRandom.current().ints(size, 1, 10001).toArray(); - return source.findsAsync(World.class, ids.boxed()).thenCompose(words -> source.updateAsync(World.setNewNumbers(words, newNumbers)).thenApply(v -> words)); + return source.findsListAsync(World.class, ids.boxed()) + .thenCompose(words -> source.updateAsync(World.setNewNumbers(words.toArray(new World[words.size()]), newNumbers)) + .thenApply(v -> words)); } @RestMapping(name = "fortunes") public CompletableFuture queryFortunes() { return source.queryListAsync(Fortune.class).thenApply(fortunes -> { fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - return HttpScope.refer("").attr("fortunes", Fortune.sort(fortunes)); + Collections.sort(fortunes); + return HttpScope.refer("").referObj(fortunes); }); } diff --git 
a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java index d297b141062..bd723918bae 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java @@ -5,7 +5,6 @@ */ package org.redkalex.benchmark; -import java.util.*; import javax.persistence.*; import org.redkale.convert.json.JsonConvert; @@ -29,11 +28,6 @@ public Fortune(int id, String message) { this.message = message; } - public static List sort(List fortunes) { - Collections.sort(fortunes); - return fortunes; - } - public int getId() { return id; } diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/FortuneRender.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/FortuneRender.java index c3f313b44e4..09a67689b84 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/FortuneRender.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/FortuneRender.java @@ -39,7 +39,7 @@ public void init(HttpContext context, AnyValue config) { public void renderTo(HttpRequest request, HttpResponse response, Convert convert, HttpScope scope) { ByteArray array = localByteArray.get().clear(); array.put(text1); - for (Fortune item : (List) scope.find("fortunes")) { + for (Fortune item : (List) scope.getReferObj()) { array.put(text2).put(String.valueOf(item.getId()).getBytes(StandardCharsets.UTF_8)) .put(text3).put(escape(item.getMessage()).toString().getBytes(StandardCharsets.UTF_8)).put(text4); } diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java index 38defd73570..d74d5eb1f67 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Message.java @@ -16,8 +16,6 @@ @Bean public final class Message { - private static final Message instance = new Message(); - @ConvertSmallString private String message; @@ -28,11 +26,6 @@ public Message(String message) { this.message = message; } - public static Message create(String str) { - instance.message = str; - return instance; - } - public String getMessage() { return message; } diff --git a/frameworks/Java/restexpress/pom.xml b/frameworks/Java/restexpress/pom.xml index 9f3b357cd5f..743f4cf5168 100644 --- a/frameworks/Java/restexpress/pom.xml +++ b/frameworks/Java/restexpress/pom.xml @@ -37,7 +37,7 @@ com.fasterxml.jackson.core jackson-databind - 2.9.9 + 2.12.6.1 com.fasterxml.jackson.core @@ -63,7 +63,7 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 diff --git a/frameworks/Java/servlet/pom.xml b/frameworks/Java/servlet/pom.xml index b870ee831b8..fd63f64978f 100644 --- a/frameworks/Java/servlet/pom.xml +++ b/frameworks/Java/servlet/pom.xml @@ -13,7 +13,7 @@ 11 11 1.2.3.Final - 2.10.0.pr1 + 2.13.2.1 src/main/webapp/WEB-INF/web.xml @@ -73,7 +73,7 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 @@ -108,7 +108,7 @@ org.postgresql postgresql - 42.3.3 + 42.4.1 diff --git a/frameworks/Java/servlet3/pom.xml b/frameworks/Java/servlet3/pom.xml index 8a67a365bc3..8ef17e8639d 100644 --- a/frameworks/Java/servlet3/pom.xml +++ b/frameworks/Java/servlet3/pom.xml @@ -80,7 +80,7 @@ com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.12.6.1 diff --git a/frameworks/Java/simple-server/pom.xml b/frameworks/Java/simple-server/pom.xml old mode 100755 
new mode 100644 index 6d61d58abfd..7567547cf74 --- a/frameworks/Java/simple-server/pom.xml +++ b/frameworks/Java/simple-server/pom.xml @@ -27,7 +27,7 @@ 1.18.4 - 2.10.0 + 2.17.1
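A note on the Redkale `BenchmarkService`/`Message` hunks above: the static `Message.create(...)` helper that reused one shared instance is replaced by `new Message("Hello, World!")` per request. Below is a minimal, self-contained sketch of why a single mutable response object is fragile once handlers run concurrently; the class and method names in the sketch are illustrative stand-ins, not the benchmark's actual classes.

```java
// Illustration only: a shared, mutable response object can be overwritten by a
// concurrent request before it is serialized; allocating per request avoids this.
public final class SharedMessageHazard {

    // Stand-in for the benchmark's Message class (not the real one).
    static final class Message {
        private volatile String message;
        void set(String m) { this.message = m; }
        String get() { return message; }
    }

    // Old-style pattern: one instance shared by every request.
    private static final Message SHARED = new Message();

    public static void main(String[] args) throws InterruptedException {
        Thread requestA = new Thread(() -> SHARED.set("Hello, World!"));
        Thread requestB = new Thread(() -> SHARED.set("Goodbye!"));
        requestA.start();
        requestB.start();
        requestA.join();
        requestB.join();
        // Whichever write lands last wins, so one request may end up serializing
        // the other request's payload. Constructing a fresh Message per request,
        // as the updated getHelloMessage() does, has no such race.
        System.out.println(SHARED.get());
    }
}
```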
diff --git a/frameworks/Java/smart-socket/pom.xml b/frameworks/Java/smart-socket/pom.xml index c22be4d342c..1c48ccf3ea4 100644 --- a/frameworks/Java/smart-socket/pom.xml +++ b/frameworks/Java/smart-socket/pom.xml @@ -10,7 +10,7 @@ UTF-8 11 11 - 2.11.0 + 2.17.1 0.1.7-SNAPSHOT 5.0.0 0.9.23 @@ -56,7 +56,7 @@ org.postgresql postgresql - 42.3.3 + 42.4.1 diff --git a/frameworks/Java/spark/pom.xml b/frameworks/Java/spark/pom.xml index f3dad72e365..4d1e718db13 100644 --- a/frameworks/Java/spark/pom.xml +++ b/frameworks/Java/spark/pom.xml @@ -14,8 +14,8 @@ 11 2.9.0 5.4.24.Final - 2.8.5 - 8.0.18 + 2.8.9 + 8.0.28 1.7.25 hello.web.SparkApplication diff --git a/frameworks/Java/spring-webflux/README.md b/frameworks/Java/spring-webflux/README.md index 3f16722f6d0..91e18bc9ceb 100755 --- a/frameworks/Java/spring-webflux/README.md +++ b/frameworks/Java/spring-webflux/README.md @@ -19,7 +19,7 @@ For mongoDB access, spring-data-mongodb with reactive support is used. See [Mong ### JSON Serialization Test -* [JSON test source](src/main/java/benchmark/web/WebfluxRouterr.java) +* [JSON test source](src/main/java/benchmark/web/WebfluxRouter.java) ### Database Query Test diff --git a/frameworks/Java/spring-webflux/pom.xml b/frameworks/Java/spring-webflux/pom.xml index 8c8db890adb..43b7bedcaef 100644 --- a/frameworks/Java/spring-webflux/pom.xml +++ b/frameworks/Java/spring-webflux/pom.xml @@ -21,7 +21,7 @@ 11 UTF-8 1.0.0.M2 - 42.3.3 + 42.4.1 0.11.4 0.2.4 1.0.0.M7 @@ -108,12 +108,12 @@ org.springframework spring-core - 5.2.0.M2 + 5.2.22.BUILD-SNAPSHOT org.springframework spring-beans - 5.2.0.M2 + 5.2.21.BUILD-SNAPSHOT diff --git a/frameworks/Java/spring/README.md b/frameworks/Java/spring/README.md index e97b9a28f6e..74d57566085 100644 --- a/frameworks/Java/spring/README.md +++ b/frameworks/Java/spring/README.md @@ -2,12 +2,10 @@ This is the Spring MVC portion of a [benchmarking test suite](../) comparing a variety of web development platforms. -An embedded tomcat is used for the web server, with nearly everything configured with default settings. +An embedded undertow is used for the web server, with nearly everything configured with default settings. The only thing changed is Hikari can use up to (2 * cores count) connections (the default is 10). See [About-Pool-Sizing](https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing) -Tomcat use a fixed thread pool that can grow up to 200 threads. - There are two implementations : * For postgresql access, JdbcTemplate is used. See [JdbcDbRepository](src/main/java/hello/JdbcDbRepository.java). * For mongoDB access, MongoTemplate is used. See [MongoDbRepository](src/main/java/hello/MongoDbRepository.java). @@ -38,8 +36,10 @@ There are two implementations : ## Versions -* [Java OpenJDK 11](http://openjdk.java.net/) -* [Spring boot 2.1.2](https://spring.io/projects/spring-boot) +* [OpenJDK Runtime Environment Temurin-11.0.16+8 (build 11.0.16+8)](https://adoptium.net/es/temurin/releases/?version=11) +* [Spring boot 2.6.9](https://spring.io/projects/spring-boot) + +The change to use OpenJDK Temurin is inspired in [whichjdk](https://whichjdk.com/) page advice. 
## Test URLs diff --git a/frameworks/Java/spring/pom.xml b/frameworks/Java/spring/pom.xml index d3ec1253041..8e5ab6b75fc 100644 --- a/frameworks/Java/spring/pom.xml +++ b/frameworks/Java/spring/pom.xml @@ -1,71 +1,72 @@ - - 4.0.0 + 4.0.0 - hello - hello-spring - 1.0-SNAPSHOT + hello + hello-spring + 1.0-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 2.3.1.RELEASE - + + org.springframework.boot + spring-boot-starter-parent + 2.6.9 + - - 11 - 11 - UTF-8 - 42.3.3 - + + 11 + 42.3.3 + - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-data-jpa - - - org.springframework.boot - spring-boot-starter-data-mongodb - - - org.springframework.boot - spring-boot-starter-jdbc - - - org.springframework.boot - spring-boot-starter-mustache - - - org.postgresql - postgresql - ${postgresql.version} - - + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + + + org.springframework.boot + spring-boot-starter-undertow + + + org.springframework.boot + spring-boot-starter-data-jpa + + + org.springframework.boot + spring-boot-starter-data-mongodb + + + org.springframework.boot + spring-boot-starter-mustache + - - - - org.springframework.boot - spring-boot-maven-plugin - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - false - - - - + + org.postgresql + postgresql + + - + + + + org.springframework.boot + spring-boot-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + + false + + + + + + \ No newline at end of file diff --git a/frameworks/Java/spring/spring-jpa.dockerfile b/frameworks/Java/spring/spring-jpa.dockerfile index 4370573b077..0fe6648fede 100644 --- a/frameworks/Java/spring/spring-jpa.dockerfile +++ b/frameworks/Java/spring/spring-jpa.dockerfile @@ -1,13 +1,34 @@ +FROM eclipse-temurin:11 as jre-build + +# Create a custom Java runtime +RUN $JAVA_HOME/bin/jlink \ + --add-modules ALL-MODULE-PATH \ + --strip-debug \ + --no-man-pages \ + --no-header-files \ + --compress=2 \ + --output /javaruntime + FROM maven:3.6.1-jdk-11-slim as maven +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH "${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN mvn -version WORKDIR /spring COPY src src COPY pom.xml pom.xml RUN mvn package -q -FROM openjdk:11.0.3-jdk-slim +FROM debian:buster-slim +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH "${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN java -version WORKDIR /spring COPY --from=maven /spring/target/hello-spring-1.0-SNAPSHOT.jar app.jar EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=jpa"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseG1GC", "-XX:+DisableExplicitGC", "-XX:+UseStringDeduplication", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=jpa"] diff --git a/frameworks/Java/spring/spring-mongo.dockerfile b/frameworks/Java/spring/spring-mongo.dockerfile index 8906279b133..02a6b172c5b 100644 --- a/frameworks/Java/spring/spring-mongo.dockerfile +++ b/frameworks/Java/spring/spring-mongo.dockerfile @@ -1,13 +1,34 @@ +FROM eclipse-temurin:11 as jre-build + +# Create a custom Java runtime +RUN $JAVA_HOME/bin/jlink \ + --add-modules ALL-MODULE-PATH \ + --strip-debug \ + --no-man-pages \ + --no-header-files \ + --compress=2 \ + --output /javaruntime + FROM maven:3.6.1-jdk-11-slim as maven +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH 
"${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN mvn -version WORKDIR /spring COPY src src COPY pom.xml pom.xml RUN mvn package -q -FROM openjdk:11.0.3-jdk-slim +FROM debian:buster-slim +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH "${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN java -version WORKDIR /spring COPY --from=maven /spring/target/hello-spring-1.0-SNAPSHOT.jar app.jar EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=mongo"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseG1GC", "-XX:+DisableExplicitGC", "-XX:+UseStringDeduplication", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=mongo"] diff --git a/frameworks/Java/spring/spring.dockerfile b/frameworks/Java/spring/spring.dockerfile index 61b2a1afed1..8cc3165ce89 100644 --- a/frameworks/Java/spring/spring.dockerfile +++ b/frameworks/Java/spring/spring.dockerfile @@ -1,13 +1,34 @@ +FROM eclipse-temurin:11 as jre-build + +# Create a custom Java runtime +RUN $JAVA_HOME/bin/jlink \ + --add-modules ALL-MODULE-PATH \ + --strip-debug \ + --no-man-pages \ + --no-header-files \ + --compress=2 \ + --output /javaruntime + FROM maven:3.6.1-jdk-11-slim as maven +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH "${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN mvn -version WORKDIR /spring COPY src src COPY pom.xml pom.xml RUN mvn package -q -FROM openjdk:11.0.3-jdk-slim +FROM debian:buster-slim +ENV JAVA_HOME=/opt/java/openjdk +ENV PATH "${JAVA_HOME}/bin:${PATH}" +COPY --from=jre-build /javaruntime $JAVA_HOME + +RUN java -version WORKDIR /spring COPY --from=maven /spring/target/hello-spring-1.0-SNAPSHOT.jar app.jar EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=jdbc"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseG1GC", "-XX:+DisableExplicitGC", "-XX:+UseStringDeduplication", "-Dlogging.level.root=OFF", "-jar", "app.jar", "--spring.profiles.active=jdbc"] diff --git a/frameworks/Java/spring/src/main/java/hello/App.java b/frameworks/Java/spring/src/main/java/hello/App.java index 6eb1be20f68..be67325d460 100644 --- a/frameworks/Java/spring/src/main/java/hello/App.java +++ b/frameworks/Java/spring/src/main/java/hello/App.java @@ -1,6 +1,7 @@ package hello; -import com.zaxxer.hikari.HikariDataSource; +import javax.sql.DataSource; + import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; @@ -8,21 +9,22 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Profile; -import javax.sql.DataSource; +import com.zaxxer.hikari.HikariDataSource; @SpringBootApplication(exclude = DataSourceAutoConfiguration.class) public class App { - public static void main(String[] args) { - SpringApplication.run(App.class, args); - } + public static void main(String[] args) { + SpringApplication.run(App.class, args); + } - @Bean - @Profile({"jdbc", "jpa"}) - public DataSource datasource(DataSourceProperties dataSourceProperties) { - HikariDataSource dataSource = dataSourceProperties.initializeDataSourceBuilder().type(HikariDataSource.class).build(); - dataSource.setMaximumPoolSize(Runtime.getRuntime().availableProcessors() * 2); + @Bean + @Profile({ "jdbc", "jpa" }) + public 
DataSource datasource(DataSourceProperties dataSourceProperties) { + HikariDataSource dataSource = dataSourceProperties.initializeDataSourceBuilder().type(HikariDataSource.class) + .build(); + dataSource.setMaximumPoolSize(Runtime.getRuntime().availableProcessors() * 2); - return dataSource; - } + return dataSource; + } } diff --git a/frameworks/Java/spring/src/main/java/hello/controller/HelloController.java b/frameworks/Java/spring/src/main/java/hello/controller/HelloController.java index 3e77af161a6..7e6d6586386 100644 --- a/frameworks/Java/spring/src/main/java/hello/controller/HelloController.java +++ b/frameworks/Java/spring/src/main/java/hello/controller/HelloController.java @@ -3,113 +3,119 @@ import static java.util.Comparator.comparing; import java.util.List; -import java.util.Map; import java.util.concurrent.ThreadLocalRandom; import java.util.stream.IntStream; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.ModelAttribute; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + import hello.model.Fortune; import hello.model.World; import hello.repository.DbRepository; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.ModelAttribute; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.ResponseBody; -@Controller +@RestController public final class HelloController { - @Autowired - private DbRepository dbRepository; - - @RequestMapping("/plaintext") - @ResponseBody - String plaintext() { - return "Hello, World!"; - } - - @RequestMapping("/json") - @ResponseBody - Map json() { - return Map.of("message", "Hello, World!"); - } - - @RequestMapping("/db") - @ResponseBody - World db() { - return dbRepository.getWorld(randomWorldNumber()); - } - - @RequestMapping("/queries") - @ResponseBody - World[] queries(@RequestParam String queries) { - return randomWorldNumbers() - .mapToObj(dbRepository::getWorld) - .limit(parseQueryCount(queries)) - .toArray(World[]::new); - } - - @RequestMapping("/updates") - @ResponseBody - World[] updates(@RequestParam String queries) { - return randomWorldNumbers() - .mapToObj(dbRepository::getWorld) - .map(world -> { - // Ensure that the new random number is not equal to the old one. - // That would cause the JPA-based implementation to avoid sending the - // UPDATE query to the database, which would violate the test - // requirements. 
- int newRandomNumber; - do { - newRandomNumber = randomWorldNumber(); - } while (newRandomNumber == world.randomnumber); - return dbRepository.updateWorld(world, newRandomNumber); - }) - .limit(parseQueryCount(queries)) - .toArray(World[]::new); - } - - @RequestMapping("/fortunes") - @ModelAttribute("fortunes") - List fortunes() { - var fortunes = dbRepository.fortunes(); - - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - fortunes.sort(comparing(fortune -> fortune.message)); - return fortunes; - } - - private static final int MIN_WORLD_NUMBER = 1; - private static final int MAX_WORLD_NUMBER_PLUS_ONE = 10_001; - - private static int randomWorldNumber() { - return ThreadLocalRandom - .current() - .nextInt(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE); - } - - private static IntStream randomWorldNumbers() { - return ThreadLocalRandom - .current() - .ints(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE) - // distinct() allows us to avoid using Hibernate's first-level cache in - // the JPA-based implementation. Using a cache like that would bypass - // querying the database, which would violate the test requirements. - .distinct(); - } - - private static int parseQueryCount(String textValue) { - if (textValue == null) { - return 1; - } - int parsedValue; - try { - parsedValue = Integer.parseInt(textValue); - } catch (NumberFormatException e) { - return 1; - } - return Math.min(500, Math.max(1, parsedValue)); - } - + private DbRepository dbRepository; + + public HelloController(DbRepository dbRepository) { + this.dbRepository = dbRepository; + } + + @GetMapping(value = "/plaintext", produces = MediaType.TEXT_PLAIN_VALUE) + String plaintext() { + return "Hello, World!"; + } + + @GetMapping("/json") + Message json() { + return new Message("Hello, World!"); + } + + @GetMapping("/db") + World db() { + return dbRepository.getWorld(randomWorldNumber()); + } + + @GetMapping("/queries") + World[] queries(@RequestParam(required = false) String queries) { + return randomWorldNumbers().mapToObj(dbRepository::getWorld).limit(parseQueryCount(queries)) + .toArray(World[]::new); + } + + @GetMapping("/updates") + World[] updates(@RequestParam(required = false) String queries) { + return randomWorldNumbers().mapToObj(dbRepository::getWorld).map(world -> { + // Ensure that the new random number is not equal to the old one. + // That would cause the JPA-based implementation to avoid sending the + // UPDATE query to the database, which would violate the test + // requirements. 
+ + // Locally the records doesn't exist, maybe in the yours is ok but we need to + // make this check + if (world == null) { + return null; + } + + int newRandomNumber; + do { + newRandomNumber = randomWorldNumber(); + } while (newRandomNumber == world.randomnumber); + + return dbRepository.updateWorld(world, newRandomNumber); + }).limit(parseQueryCount(queries)).toArray(World[]::new); + } + + @GetMapping("/fortunes") + @ModelAttribute("fortunes") + List fortunes() { + var fortunes = dbRepository.fortunes(); + + fortunes.add(new Fortune(0, "Additional fortune added at request time.")); + fortunes.sort(comparing(fortune -> fortune.message)); + return fortunes; + } + + private static final int MIN_WORLD_NUMBER = 1; + private static final int MAX_WORLD_NUMBER_PLUS_ONE = 10_001; + + private static int randomWorldNumber() { + return ThreadLocalRandom.current().nextInt(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE); + } + + private static IntStream randomWorldNumbers() { + return ThreadLocalRandom.current().ints(MIN_WORLD_NUMBER, MAX_WORLD_NUMBER_PLUS_ONE) + // distinct() allows us to avoid using Hibernate's first-level cache in + // the JPA-based implementation. Using a cache like that would bypass + // querying the database, which would violate the test requirements. + .distinct(); + } + + private static int parseQueryCount(String textValue) { + if (textValue == null) { + return 1; + } + int parsedValue; + try { + parsedValue = Integer.parseInt(textValue); + } catch (NumberFormatException e) { + return 1; + } + return Math.min(500, Math.max(1, parsedValue)); + } + + static class Message { + private final String message; + + public Message(String message) { + this.message = message; + } + + public String getMessage() { + return message; + } + } } diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java index dc93a68e06f..30dea98cc27 100644 --- a/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/jpa/FortuneRepository.java @@ -1,11 +1,12 @@ package hello.jpa; -import hello.model.Fortune; import org.springframework.context.annotation.Profile; import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Repository; + +import hello.model.Fortune; -@Component +@Repository @Profile("jpa") public interface FortuneRepository extends JpaRepository { } diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java index cd2009736c0..2b58841a035 100644 --- a/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/jpa/JpaDbRepository.java @@ -1,40 +1,38 @@ package hello.jpa; +import java.util.List; + +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Service; + import hello.model.Fortune; import hello.model.World; import hello.repository.DbRepository; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Profile; -import org.springframework.stereotype.Component; -import java.util.List; - -@Component +@Service @Profile("jpa") public class JpaDbRepository implements DbRepository { - private final Logger log = LoggerFactory.getLogger(getClass()); - private final WorldRepository worldRepository; - private final 
FortuneRepository fortuneRepository; - - public JpaDbRepository(WorldRepository worldRepository, FortuneRepository fortuneRepository) { - this.worldRepository = worldRepository; - this.fortuneRepository = fortuneRepository; - } - - @Override - public World getWorld(int id) { - return worldRepository.findById(id).orElse(null); - } - - @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return worldRepository.save(world); - } - - @Override - public List fortunes() { - return fortuneRepository.findAll(); - } + private final WorldRepository worldRepository; + private final FortuneRepository fortuneRepository; + + public JpaDbRepository(WorldRepository worldRepository, FortuneRepository fortuneRepository) { + this.worldRepository = worldRepository; + this.fortuneRepository = fortuneRepository; + } + + @Override + public World getWorld(int id) { + return worldRepository.findById(id).orElse(null); + } + + @Override + public World updateWorld(World world, int randomNumber) { + world.randomnumber = randomNumber; + return worldRepository.save(world); + } + + @Override + public List fortunes() { + return fortuneRepository.findAll(); + } } diff --git a/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java b/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java index c9240b15b74..70361aa40d6 100644 --- a/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/jpa/WorldRepository.java @@ -1,11 +1,12 @@ package hello.jpa; -import hello.model.World; import org.springframework.context.annotation.Profile; import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Repository; + +import hello.model.World; -@Component +@Repository @Profile("jpa") public interface WorldRepository extends JpaRepository { } diff --git a/frameworks/Java/spring/src/main/java/hello/model/Fortune.java b/frameworks/Java/spring/src/main/java/hello/model/Fortune.java index 817ef810503..fb76942f212 100644 --- a/frameworks/Java/spring/src/main/java/hello/model/Fortune.java +++ b/frameworks/Java/spring/src/main/java/hello/model/Fortune.java @@ -1,29 +1,33 @@ package hello.model; import javax.persistence.Entity; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; @Document @Entity public final class Fortune { - @Id - @javax.persistence.Id - public int id; - public String message; + @Id + @javax.persistence.Id + public int id; + @Field("message") + public String message; - protected Fortune() {} + protected Fortune() { + } - public Fortune(int id, String message) { - this.id = id; - this.message = message; - } + public Fortune(int id, String message) { + this.id = id; + this.message = message; + } - public int getId() { - return id; - } + public int getId() { + return id; + } - public String getMessage() { - return message; - } + public String getMessage() { + return message; + } } \ No newline at end of file diff --git a/frameworks/Java/spring/src/main/java/hello/model/World.java b/frameworks/Java/spring/src/main/java/hello/model/World.java index 38d716be8ac..1edbbe07148 100644 --- a/frameworks/Java/spring/src/main/java/hello/model/World.java +++ b/frameworks/Java/spring/src/main/java/hello/model/World.java @@ -1,6 +1,7 @@ package hello.model; import javax.persistence.Entity; + import 
org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; @@ -9,16 +10,17 @@ @Entity public final class World { - @Id - @javax.persistence.Id - public int id; - @Field("randomNumber") - public int randomnumber; + @Id + @javax.persistence.Id + public int id; + @Field("randomNumber") + public int randomnumber; - protected World() {} + protected World() { + } - public World(int id, int randomnumber) { - this.id = id; - this.randomnumber = randomnumber; - } + public World(int id, int randomnumber) { + this.id = id; + this.randomnumber = randomnumber; + } } \ No newline at end of file diff --git a/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java index 2ef754035fa..5cfa8c7d5c3 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/DbRepository.java @@ -1,14 +1,14 @@ package hello.repository; +import java.util.List; + import hello.model.Fortune; import hello.model.World; -import java.util.List; - public interface DbRepository { - World getWorld(int id); + World getWorld(int id); - World updateWorld(World world, int randomNumber); + World updateWorld(World world, int randomNumber); - List fortunes(); + List fortunes(); } diff --git a/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java index d8ebc0089df..4b9009a6ef7 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/JdbcDbRepository.java @@ -1,52 +1,52 @@ package hello.repository; -import hello.model.Fortune; -import hello.model.World; +import java.util.List; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Profile; +import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Repository; -import java.util.List; +import hello.model.Fortune; +import hello.model.World; -@Component +@Repository @Profile("jdbc") public class JdbcDbRepository implements DbRepository { - private final Logger log = LoggerFactory.getLogger(getClass()); - private final JdbcTemplate jdbcTemplate; - - public JdbcDbRepository(JdbcTemplate jdbcTemplate) { - this.jdbcTemplate = jdbcTemplate; - } - - @Override - public World getWorld(int id) { - log.debug("getWorld({})", id); - return jdbcTemplate.queryForObject( - "SELECT * FROM world WHERE id = ?", - (rs, rn) -> new World(rs.getInt("id"), rs.getInt("randomnumber")), - id); - } - - private World updateWorld(World world) { - jdbcTemplate.update( - "UPDATE world SET randomnumber = ? 
WHERE id = ?", - world.randomnumber, - world.id); - return world; - } - - @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return updateWorld(world); - } - - @Override - public List fortunes() { - return jdbcTemplate.query( - "SELECT * FROM fortune", - (rs, rn) -> new Fortune(rs.getInt("id"), rs.getString("message"))); - } + private final Logger log = LoggerFactory.getLogger(getClass()); + private final JdbcTemplate jdbcTemplate; + + public JdbcDbRepository(JdbcTemplate jdbcTemplate) { + this.jdbcTemplate = jdbcTemplate; + } + + @Override + public World getWorld(int id) { + log.debug("getWorld({})", id); + try { + return jdbcTemplate.queryForObject("SELECT * FROM world WHERE id = ?", + (rs, rn) -> new World(rs.getInt("id"), rs.getInt("randomnumber")), id); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + private World updateWorld(World world) { + jdbcTemplate.update("UPDATE world SET randomnumber = ? WHERE id = ?", world.randomnumber, world.id); + return world; + } + + @Override + public World updateWorld(World world, int randomNumber) { + world.randomnumber = randomNumber; + return updateWorld(world); + } + + @Override + public List fortunes() { + return jdbcTemplate.query("SELECT * FROM fortune", + (rs, rn) -> new Fortune(rs.getInt("id"), rs.getString("message"))); + } } diff --git a/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java b/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java index d64d8781f40..3f3814a47ce 100644 --- a/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java +++ b/frameworks/Java/spring/src/main/java/hello/repository/MongoDbRepository.java @@ -1,41 +1,40 @@ package hello.repository; -import hello.model.Fortune; -import hello.model.World; +import java.util.List; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Profile; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.stereotype.Component; +import org.springframework.stereotype.Repository; -import java.util.List; +import hello.model.Fortune; +import hello.model.World; -@Component +@Repository @Profile("mongo") public class MongoDbRepository implements DbRepository { - private final Logger log = LoggerFactory.getLogger(getClass()); - private final MongoTemplate mongoTemplate; - - public MongoDbRepository(MongoTemplate mongoTemplate) { - this.mongoTemplate = mongoTemplate; - } - - - @Override - public World getWorld(int id) { - log.debug("getWorld({})", id); - return mongoTemplate.findById(id, World.class); - } - - - @Override - public World updateWorld(World world, int randomNumber) { - world.randomnumber = randomNumber; - return mongoTemplate.save(world); - } - - @Override - public List fortunes() { - return mongoTemplate.findAll(Fortune.class); - } + private final Logger log = LoggerFactory.getLogger(getClass()); + private final MongoTemplate mongoTemplate; + + public MongoDbRepository(MongoTemplate mongoTemplate) { + this.mongoTemplate = mongoTemplate; + } + + @Override + public World getWorld(int id) { + log.debug("getWorld({})", id); + return mongoTemplate.findById(id, World.class); + } + + @Override + public World updateWorld(World world, int randomNumber) { + world.randomnumber = randomNumber; + return mongoTemplate.save(world); + } + + @Override + public List fortunes() { + return mongoTemplate.findAll(Fortune.class); + } } diff --git 
a/frameworks/Java/spring/src/main/resources/application.yml b/frameworks/Java/spring/src/main/resources/application.yml index 7e2d4610435..0b31099d10f 100644 --- a/frameworks/Java/spring/src/main/resources/application.yml +++ b/frameworks/Java/spring/src/main/resources/application.yml @@ -1,6 +1,8 @@ --- spring: - profiles: jdbc,jpa + config: + activate: + on-profile: jdbc,jpa datasource: url: jdbc:postgresql://${database.host}:${database.port}/${database.name} username: ${database.username} @@ -15,13 +17,18 @@ database: --- spring: - profiles: jpa + config: + activate: + on-profile: jpa jpa: database-platform: org.hibernate.dialect.PostgreSQLDialect + open-in-view: false --- spring: - profiles: mongo + config: + activate: + on-profile: mongo spring.data.mongodb: host: tfb-database diff --git a/frameworks/Java/tapestry/pom.xml b/frameworks/Java/tapestry/pom.xml index d10474a4369..9f4645bcd78 100644 --- a/frameworks/Java/tapestry/pom.xml +++ b/frameworks/Java/tapestry/pom.xml @@ -32,7 +32,7 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 @@ -95,7 +95,7 @@ of testing facilities designed for use with TestNG (http://testng.org/), so it's com.fasterxml.jackson.core jackson-databind - 2.10.0.pr1 + 2.13.2.1 org.glassfish.jaxb diff --git a/frameworks/Java/undertow-jersey/pom.xml b/frameworks/Java/undertow-jersey/pom.xml index 32f64db3102..ea116722be3 100644 --- a/frameworks/Java/undertow-jersey/pom.xml +++ b/frameworks/Java/undertow-jersey/pom.xml @@ -145,7 +145,7 @@ com.fasterxml.jackson.core jackson-databind - 2.9.9 + 2.13.2.1 com.fasterxml.jackson.module @@ -168,13 +168,13 @@ mysql mysql-connector-java - 8.0.18 + 8.0.28 io.undertow undertow-core - 2.1.6.Final + 2.2.19.Final diff --git a/frameworks/Java/undertow/pom.xml b/frameworks/Java/undertow/pom.xml index e7b2e97ef1f..7244e5c4253 100644 --- a/frameworks/Java/undertow/pom.xml +++ b/frameworks/Java/undertow/pom.xml @@ -11,21 +11,18 @@ 1.0-SNAPSHOT - 11 - 11 + 18 UTF-8 - 3.4.1 - 2.10.5.1 - 3.2.1 - 0.9.6 - 42.3.3 - 2.1.6.Final + 5.0.1 + 2.13.3 + 3.10.1 + 3.3.0 + 3.2.2 + 0.9.10 + 42.4.1 + 2.2.19.Final - - 3.6 - - io.undertow @@ -55,43 +52,48 @@ - app + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.10.1 + + + org.apache.maven.plugins - maven-shade-plugin - ${maven-shade-plugin.version} - - false - - - *:* - - module-info.class - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - + maven-dependency-plugin + ${maven-dependency-plugin.version} package - shade + copy-dependencies - - - - hello.HelloWebServer - - + runtime + ${project.build.directory}/lib + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + app + + + true + lib/ + hello.HelloWebServer + + + + diff --git a/frameworks/Java/undertow/src/main/java/hello/HelloWebServer.java b/frameworks/Java/undertow/src/main/java/hello/HelloWebServer.java index a2204f976bd..25358159394 100644 --- a/frameworks/Java/undertow/src/main/java/hello/HelloWebServer.java +++ b/frameworks/Java/undertow/src/main/java/hello/HelloWebServer.java @@ -54,11 +54,10 @@ static HttpHandler serverHeaderHandler(HttpHandler next) { } static HttpHandler pathHandler(Mode mode) { - switch (mode) { - case NO_DATABASE: return noDatabasePathHandler(); - case POSTGRESQL: return postgresqlPathHandler(); - } - throw new AssertionError(mode); + return switch (mode) { + case NO_DATABASE -> noDatabasePathHandler(); + case POSTGRESQL -> postgresqlPathHandler(); + }; } static HttpHandler noDatabasePathHandler() { @@ -267,19 +266,15 @@ static void sendJson(HttpServerExchange exchange, Object 
value) private static final ObjectMapper objectMapper = new ObjectMapper(); - public static final class Fortune { - public final int id; - public final String message; - - public Fortune(int id, String message) { - this.id = id; - this.message = Objects.requireNonNull(message); + public record Fortune(int id, String message) { + public Fortune { + Objects.requireNonNull(message); } } public static final class World { - public int id; - public int randomNumber; + public final int id; + public int randomNumber; // non-final, so this class can't be a record public World(int id, int randomNumber) { this.id = id; diff --git a/frameworks/Java/undertow/undertow-postgresql.dockerfile b/frameworks/Java/undertow/undertow-postgresql.dockerfile index 9a3ea787508..614124bb6ec 100644 --- a/frameworks/Java/undertow/undertow-postgresql.dockerfile +++ b/frameworks/Java/undertow/undertow-postgresql.dockerfile @@ -1,12 +1,13 @@ -FROM maven:3.6.1-jdk-11-slim as maven +FROM maven:3.8.6-openjdk-18 as maven WORKDIR /undertow -COPY pom.xml pom.xml +COPY pom.xml . COPY src src RUN mvn package -q -FROM openjdk:11.0.3-jdk-slim +FROM openjdk:18 WORKDIR /undertow -COPY --from=maven /undertow/target/app.jar app.jar +COPY --from=maven /undertow/target/lib lib +COPY --from=maven /undertow/target/app.jar . EXPOSE 8080 diff --git a/frameworks/Java/undertow/undertow.dockerfile b/frameworks/Java/undertow/undertow.dockerfile index a4e8ff0a383..61233d7feaa 100644 --- a/frameworks/Java/undertow/undertow.dockerfile +++ b/frameworks/Java/undertow/undertow.dockerfile @@ -1,12 +1,13 @@ -FROM maven:3.6.1-jdk-11-slim as maven +FROM maven:3.8.6-openjdk-18 as maven WORKDIR /undertow -COPY pom.xml pom.xml +COPY pom.xml . COPY src src RUN mvn package -q -FROM openjdk:11.0.3-jdk-slim +FROM openjdk:18 WORKDIR /undertow -COPY --from=maven /undertow/target/app.jar app.jar +COPY --from=maven /undertow/target/lib lib +COPY --from=maven /undertow/target/app.jar . EXPOSE 8080 diff --git a/frameworks/Java/vertx/pom.xml b/frameworks/Java/vertx/pom.xml old mode 100755 new mode 100644 index 97469e95e14..7204ec5f2b6 --- a/frameworks/Java/vertx/pom.xml +++ b/frameworks/Java/vertx/pom.xml @@ -11,7 +11,7 @@ vertx.App 4.1.4 - 2.11.4 + 2.13.3 4.1.67.Final @@ -26,6 +26,11 @@ vertx-pg-client ${stack.version} + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + com.fasterxml.jackson.core jackson-databind diff --git a/frameworks/Java/wicket/pom.xml b/frameworks/Java/wicket/pom.xml index 8c8473d6ab0..ec30df52f27 100644 --- a/frameworks/Java/wicket/pom.xml +++ b/frameworks/Java/wicket/pom.xml @@ -64,7 +64,7 @@ mysql mysql-connector-java - 8.0.27 + 8.0.28 diff --git a/frameworks/JavaScript/express/app.js b/frameworks/JavaScript/express/app.js index e7be9ca378b..c8614691135 100755 --- a/frameworks/JavaScript/express/app.js +++ b/frameworks/JavaScript/express/app.js @@ -9,14 +9,17 @@ const cluster = require('cluster'), const bodyParser = require('body-parser'); -if (cluster.isMaster) { +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + // Fork workers. 
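// (cluster.isPrimary is the Node.js 16+ replacement for the deprecated cluster.isMaster; the fork/exit handling itself is unchanged.)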
for (let i = 0; i < numCPUs; i++) { cluster.fork(); } - cluster.on('exit', (worker, code, signal) => - console.log('worker ' + worker.pid + ' died')); + cluster.on('exit', (worker, code, signal) => { + console.log(`worker ${worker.process.pid} died`); + }); } else { const app = module.exports = express(); diff --git a/frameworks/JavaScript/express/express-mongodb.dockerfile b/frameworks/JavaScript/express/express-mongodb.dockerfile index 26034ef1c0e..400275b655e 100644 --- a/frameworks/JavaScript/express/express-mongodb.dockerfile +++ b/frameworks/JavaScript/express/express-mongodb.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/JavaScript/express/express-mysql.dockerfile b/frameworks/JavaScript/express/express-mysql.dockerfile index efa6e91c8c8..eb95c7fb36b 100644 --- a/frameworks/JavaScript/express/express-mysql.dockerfile +++ b/frameworks/JavaScript/express/express-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/JavaScript/express/express-postgres.dockerfile b/frameworks/JavaScript/express/express-postgres.dockerfile index 3eabc16f7ef..7c1b04a5bff 100644 --- a/frameworks/JavaScript/express/express-postgres.dockerfile +++ b/frameworks/JavaScript/express/express-postgres.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/JavaScript/express/express.dockerfile b/frameworks/JavaScript/express/express.dockerfile index 57dc5bd3f72..e6963b43f71 100644 --- a/frameworks/JavaScript/express/express.dockerfile +++ b/frameworks/JavaScript/express/express.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/JavaScript/express/package.json b/frameworks/JavaScript/express/package.json index 209be5a6667..931b545704c 100644 --- a/frameworks/JavaScript/express/package.json +++ b/frameworks/JavaScript/express/package.json @@ -6,7 +6,7 @@ "body-parser": "1.19.0", "dateformat": "3.0.3", "escape-html": "1.0.3", - "express": "4.17.1", + "express": "4.17.3", "mongoose": "5.7.5", "mysql2": "2.2.5", "pg": "8.5.0", diff --git a/frameworks/JavaScript/fastify/app.js b/frameworks/JavaScript/fastify/app.js index f0edf160912..3c93710e83d 100755 --- a/frameworks/JavaScript/fastify/app.js +++ b/frameworks/JavaScript/fastify/app.js @@ -1,16 +1,16 @@ const cluster = require("cluster"); const numCPUs = require("os").cpus().length; -if (cluster.isMaster) { +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + // Fork workers. 
for (let i = 0; i < numCPUs; i++) { cluster.fork(); } - console.log("Master starting " + new Date().toISOString()); - - cluster.on("exit", () => { - process.exit(1); + cluster.on('exit', (worker, code, signal) => { + console.log(`worker ${worker.process.pid} died`); }); } else { // worker task diff --git a/frameworks/JavaScript/fastify/fastify-mysql.dockerfile b/frameworks/JavaScript/fastify/fastify-mysql.dockerfile index 0b636fb25bf..d87ab7ae260 100644 --- a/frameworks/JavaScript/fastify/fastify-mysql.dockerfile +++ b/frameworks/JavaScript/fastify/fastify-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.2 +FROM node:16.14.0 COPY ./ ./ diff --git a/frameworks/JavaScript/fastify/fastify-postgres.dockerfile b/frameworks/JavaScript/fastify/fastify-postgres.dockerfile index a85696f3459..6bbfffcd723 100644 --- a/frameworks/JavaScript/fastify/fastify-postgres.dockerfile +++ b/frameworks/JavaScript/fastify/fastify-postgres.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.2 +FROM node:16.14.0 COPY ./ ./ diff --git a/frameworks/JavaScript/fastify/fastify.dockerfile b/frameworks/JavaScript/fastify/fastify.dockerfile index a4b031e57e0..8ec90f0d0fb 100644 --- a/frameworks/JavaScript/fastify/fastify.dockerfile +++ b/frameworks/JavaScript/fastify/fastify.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.2 +FROM node:16.14.0 COPY ./ ./ diff --git a/frameworks/JavaScript/fastify/package.json b/frameworks/JavaScript/fastify/package.json index e89f4339c8b..42d2aa6c237 100644 --- a/frameworks/JavaScript/fastify/package.json +++ b/frameworks/JavaScript/fastify/package.json @@ -5,7 +5,7 @@ "main": "app.js", "private": true, "dependencies": { - "fastify": "3.12.0", + "fastify": "3.27.4", "handlebars": "4.7.6", "knex": "0.21.17", "mongodb": "3.5.9", diff --git a/frameworks/JavaScript/hapi/app.js b/frameworks/JavaScript/hapi/app.js index 54053cd46e9..cb84be2f7d0 100755 --- a/frameworks/JavaScript/hapi/app.js +++ b/frameworks/JavaScript/hapi/app.js @@ -1,15 +1,16 @@ const cluster = require('cluster'); const numCPUs = require('os').cpus().length; -if (cluster.isMaster) { +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + // Fork workers. 
for (let i = 0; i < numCPUs; i++) { cluster.fork(); } - console.log('Master starting ' + new Date().toISOString(" ")); cluster.on('exit', (worker, code, signal) => { - process.exit(1); + console.log(`worker ${worker.process.pid} died`); }); } else { // worker task diff --git a/frameworks/JavaScript/hapi/hapi-mysql.dockerfile b/frameworks/JavaScript/hapi/hapi-mysql.dockerfile index ad3373cb219..fbb35d2d373 100644 --- a/frameworks/JavaScript/hapi/hapi-mysql.dockerfile +++ b/frameworks/JavaScript/hapi/hapi-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.2-slim COPY ./ ./ diff --git a/frameworks/JavaScript/hapi/hapi-nginx.dockerfile b/frameworks/JavaScript/hapi/hapi-nginx.dockerfile index bc6cbbc5653..55a7891b60c 100644 --- a/frameworks/JavaScript/hapi/hapi-nginx.dockerfile +++ b/frameworks/JavaScript/hapi/hapi-nginx.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.2-slim RUN apt-get update RUN apt-get install nginx -y diff --git a/frameworks/JavaScript/hapi/hapi-postgres.dockerfile b/frameworks/JavaScript/hapi/hapi-postgres.dockerfile index 31c25f7f32d..d6c2d9130c4 100644 --- a/frameworks/JavaScript/hapi/hapi-postgres.dockerfile +++ b/frameworks/JavaScript/hapi/hapi-postgres.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.2-slim COPY ./ ./ diff --git a/frameworks/JavaScript/hapi/hapi.dockerfile b/frameworks/JavaScript/hapi/hapi.dockerfile index 980402384d5..2399af15060 100644 --- a/frameworks/JavaScript/hapi/hapi.dockerfile +++ b/frameworks/JavaScript/hapi/hapi.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.2-slim COPY ./ ./ diff --git a/frameworks/JavaScript/just/README.md b/frameworks/JavaScript/just/README.md index 2b177b78f35..3e60d351e7b 100644 --- a/frameworks/JavaScript/just/README.md +++ b/frameworks/JavaScript/just/README.md @@ -1,10 +1,10 @@ # [Just-JS](https://github.com/just-js) Benchmarking Test -This test benchmarks the [Just-JS](https://github.com/just-js) framework. Just-JS is an in progress javascript framework for x86_64 linux. +This test benchmarks the [Just-JS](https://github.com/just-js) framework. Author: Andrew Johnston -### Test Type Implementation Source Code +## Test Type Implementation Source Code * [JSON] techempower.js * [PLAINTEXT] techempower.js @@ -15,6 +15,7 @@ Author: Andrew Johnston * [FORTUNES] techempower.js ## Test URLs + ### JSON http://localhost:8080/json @@ -43,3 +44,18 @@ http://localhost:8080/fortunes http://localhost:8080/cached-world?q= +## Building the Docker Image +```bash +docker build -t techempower:latest -f just.dockerfile . 
+``` + +## Running the TFB Postgres Docker Container +```bash +## docker network create -d bridge tfb +docker run -p 5432:5432 -d --rm --name tfb-database --network tfb techempower/tfb.database.postgres:latest +``` + +## Running the Docker Container +```bash +docker run -p 8080:8080 -d --rm --name tfb-server --network tfb techempower:latest +``` \ No newline at end of file diff --git a/frameworks/JavaScript/just/benchmark_config.json b/frameworks/JavaScript/just/benchmark_config.json index 225ad0297cf..3b195272644 100644 --- a/frameworks/JavaScript/just/benchmark_config.json +++ b/frameworks/JavaScript/just/benchmark_config.json @@ -25,7 +25,7 @@ "database_os": "Linux", "display_name": "just-js", "notes": "", - "versus": "nodejs" + "versus": "" } } ] diff --git a/frameworks/JavaScript/just/config.toml b/frameworks/JavaScript/just/config.toml deleted file mode 100644 index a153d1dea7c..00000000000 --- a/frameworks/JavaScript/just/config.toml +++ /dev/null @@ -1,20 +0,0 @@ -[framework] -name = "just" - -[main] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/query?q=" -urls.update = "/update?q=" -urls.fortune = "/fortunes" -urls.cached_query = "/cached-world?q=" -approach = "Realistic" -classification = "Platform" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "just-js" -webserver = "None" -versus = "nodejs" diff --git a/frameworks/JavaScript/just/fortunes.html b/frameworks/JavaScript/just/fortunes.html new file mode 100644 index 00000000000..a995209001a --- /dev/null +++ b/frameworks/JavaScript/just/fortunes.html @@ -0,0 +1,18 @@ + + +Fortunes + + + + + + + {{#each this}} + + + + + {{/each}} +
+      <tr><th>id</th><th>message</th></tr>
+      <tr><td>{{id}}</td><td>{{message}}</td></tr>
+ + diff --git a/frameworks/JavaScript/just/just.dockerfile b/frameworks/JavaScript/just/just.dockerfile index 12b6e79b237..9410a4d6c9b 100644 --- a/frameworks/JavaScript/just/just.dockerfile +++ b/frameworks/JavaScript/just/just.dockerfile @@ -1,27 +1,20 @@ -FROM debian:stretch-slim AS builder +FROM debian:buster-slim AS pre-build RUN apt update +RUN apt upgrade -y RUN apt install -y g++ curl make tar gzip libfindbin-libs-perl -RUN curl -L -o 0.0.2.tar.gz -L https://github.com/just-js/just/archive/0.0.2.tar.gz -RUN tar -zxvf 0.0.2.tar.gz -WORKDIR /just-0.0.2 -RUN make runtime -RUN curl -L -o modules.tar.gz https://github.com/just-js/modules/archive/0.0.3.tar.gz -RUN tar -zxvf modules.tar.gz -RUN mv modules-0.0.3 modules -RUN JUST_HOME=$(pwd) make -C modules/picohttp/ deps http.so -RUN JUST_HOME=$(pwd) make -C modules/html/ html.so -FROM debian:stretch-slim +FROM pre-build AS builder +WORKDIR /build +RUN sh -c "$(curl -sSL https://raw.githubusercontent.com/just-js/just/0.1.8/install.sh)" +RUN make -C just install +ENV JUST_HOME=/build/just +ENV JUST_TARGET=/build/just WORKDIR /app -RUN mkdir -p /app/lib -COPY lib/stringify.js lib/connection.js lib/dns.js lib/http.js lib/lookup.js lib/pg.js lib/stats.js lib/tcp.js lib/md5.js lib/monitor.js ./lib/ -COPY techempower.js spawn.js ./ -COPY --from=builder /just-0.0.2/just /bin/just -COPY --from=builder /just-0.0.2/modules/picohttp/http.so ./ -COPY --from=builder /just-0.0.2/modules/html/html.so ./ -ENV LD_LIBRARY_PATH=/app -ENV PGPOOL=1 +COPY techempower.js util.js tfb.config.js ./ +RUN just build --clean --cleanall --static techempower.js -EXPOSE 8080 - -CMD ["just", "spawn.js", "techempower.js"] +FROM gcr.io/distroless/static:latest +WORKDIR /app +COPY --from=builder /app/techempower /app/techempower +COPY fortunes.html /app/fortunes.html +CMD ["./techempower"] diff --git a/frameworks/JavaScript/just/lib/connection.js b/frameworks/JavaScript/just/lib/connection.js deleted file mode 100644 index 6abb6c4475d..00000000000 --- a/frameworks/JavaScript/just/lib/connection.js +++ /dev/null @@ -1,360 +0,0 @@ -const { lookup } = require('lookup.js') -const { createClient } = require('tcp.js') -const { md5AuthMessage, syncMessage, startupMessage, createParser, getPGError, constants } = require('pg.js') -const { html } = just.library('html.so', 'html') - -const { - AuthenticationOk, - ErrorResponse, - RowDescription, - CommandComplete, - ParseComplete, - NoData, - ReadyForQuery -} = constants.messageTypes - -const { INT4OID } = constants.fieldTypes - -function getMessageName (type) { - const code = String.fromCharCode(type) - let name = '' - Object.keys(constants.messageTypes).some(key => { - if (constants.messageTypes[key] === type) { - name = key - return true - } - }) - return { type, code, name } -} - -function setupSocket (sock, config) { - function compile (query, onComplete) { - const buf = new ArrayBuffer(4096) - const dv = new DataView(buf) - let len = 0 - const fun = { - dv, - size: 0, - described: false, - buffer: new ArrayBuffer(65536), - messages: { - prepare: { start: 0, len: 0 }, - bind: { start: 0, len: 0 }, - exec: { start: 0, len: 0 }, - describe: { start: 0, len: 0 }, - flush: { start: 0, len: 0 }, - sync: { start: 0, len: 0 } - }, - paramStart: 0 - } - fun.buffer.offset = 0 - const { name, sql, params = [], formats = [], fields = [], portal = '', maxRows = 0 } = query - fun.call = (onComplete, syncIt = true, flushIt = false) => { - let off = fun.paramStart - // 32 bit integers only for now - for (let i = 0; i < 
params.length; i++) { - off += 4 - dv.setUint32(off, params[i]) - off += 4 - } - const { bind, exec, flush, sync } = fun.messages - off = bind.start - let len = 0 - if (flushIt) { - len = flush.start + flush.len - off - } else if (syncIt) { - len = sync.start + sync.len - off - } else { - len = exec.start + exec.len - off - } - const r = sock.write(buf, len, off) - if (r < len) { - just.error('short write') - } - callbacks.push(onComplete) - } - fun.append = (onComplete, syncIt = true, flushIt = false) => { - let off = fun.paramStart - // 32 bit integers only for now - for (let i = 0; i < params.length; i++) { - off += 4 - dv.setUint32(off, params[i]) - off += 4 - } - const { bind, exec, flush, sync } = fun.messages - off = bind.start - let len = 0 - if (flushIt) { - len = flush.start + flush.len - off - } else if (syncIt) { - len = sync.start + sync.len - off - } else { - len = exec.start + exec.len - off - } - fun.buffer.offset += fun.buffer.copyFrom(buf, fun.buffer.offset, len, off) - callbacks.push(onComplete) - } - fun.send = () => { - const r = sock.write(fun.buffer, fun.buffer.offset, 0) - if (r < len) { - just.error('short write') - } - fun.buffer.offset = 0 - } - fun.bind = (flushIt = true, onComplete) => { - const { bind, flush } = fun.messages - sock.write(buf, bind.len, bind.start) - if (flushIt) { - sock.write(buf, flush.len, flush.start) - } - callbacks.push(onComplete) - } - fun.exec = (flushIt = true, onComplete) => { - const { exec, flush } = fun.messages - sock.write(buf, exec.len, exec.start) - if (flushIt) { - sock.write(buf, flush.len, flush.start) - } - callbacks.push(onComplete) - } - fun.prepare = (flushIt = true, onComplete) => { - const { prepare, flush } = fun.messages - sock.write(buf, prepare.len, prepare.start) - if (flushIt) { - sock.write(buf, flush.len, flush.start) - } - callbacks.push(onComplete) - } - fun.describe = (flushIt = true, onComplete) => { - const { describe, flush } = fun.messages - sock.write(buf, describe.len, describe.start) - if (flushIt) { - sock.write(buf, flush.len, flush.start) - } - callbacks.push(onComplete) - } - let off = 0 - // Prepare Message - fun.messages.prepare.start = off - len = 1 + 4 + sql.length + 1 + name.length + 1 + 2 + (formats.length * 4) - dv.setUint8(off++, 80) // 'P' - dv.setUint32(off, len - 1) - off += 4 - off += buf.writeString(name, off) - dv.setUint8(off++, 0) - off += buf.writeString(sql, off) - dv.setUint8(off++, 0) - dv.setUint16(off, formats.length) - off += 2 - for (let i = 0; i < formats.length; i++) { - dv.setUint32(off, formats[i].oid) - off += 4 - } - fun.messages.prepare.len = off - fun.messages.prepare.start - // Describe Message - fun.messages.describe.start = off - len = 7 + name.length - dv.setUint8(off++, 68) // 'D' - dv.setUint32(off, len - 1) - off += 4 - dv.setUint8(off++, 83) // 'S' - off += buf.writeString(name, off) - dv.setUint8(off++, 0) - fun.messages.describe.len = off - fun.messages.describe.start - - // Bind Message - fun.messages.bind.start = off - dv.setUint8(off++, 66) // 'B' - off += 4 // length - will be filled in later - if (portal.length) { - off += buf.writeString(portal, off) - dv.setUint8(off++, 0) - off += buf.writeString(name, off) - dv.setUint8(off++, 0) - } else { - dv.setUint8(off++, 0) - off += buf.writeString(name, off) - dv.setUint8(off++, 0) - } - dv.setUint16(off, formats.length || 0) - off += 2 - for (let i = 0; i < formats.length; i++) { - dv.setUint16(off, formats[i].format) - off += 2 - } - dv.setUint16(off, params.length || 0) - off += 2 - fun.paramStart = 
off - for (let i = 0; i < params.length; i++) { - if ((formats[i] || formats[0]).format === 1) { - dv.setUint32(off, 4) - off += 4 - dv.setUint32(off, params[i]) - off += 4 - } else { - const paramString = params[i].toString() - dv.setUint32(off, paramString.length) - off += 4 - off += buf.writeString(paramString, off) - } - } - dv.setUint16(off, fields.length) - off += 2 - for (let i = 0; i < fields.length; i++) { - dv.setUint16(off, fields[i].format) - off += 2 - } - fun.messages.bind.len = off - fun.messages.bind.start - dv.setUint32(fun.messages.bind.start + 1, fun.messages.bind.len - 1) - // Exec Message - fun.messages.exec.start = off - len = 6 + portal.length + 4 - dv.setUint8(off++, 69) // 'E' - dv.setUint32(off, len - 1) - off += 4 - if (portal.length) { - off += buf.writeString(portal, off) - } - dv.setUint8(off++, 0) - dv.setUint32(off, maxRows) - off += 4 - fun.messages.exec.len = off - fun.messages.exec.start - // Sync Message - fun.messages.sync.start = off - dv.setUint8(off++, 83) // 'S' - dv.setUint32(off, 4) - off += 4 - fun.messages.sync.len = off - fun.messages.sync.start - // Flush Message - fun.messages.flush.start = off - dv.setUint8(off++, 72) // 'H' - dv.setUint32(off, 4) - off += 4 - fun.messages.flush.len = off - fun.messages.flush.start - fun.size = off - fun.buf = buf.slice(0, off) - Object.assign(query, fun) - let readString = just.sys.readString - if (query.htmlEscape) { - readString = html.escape - } - query.getRows = () => { - const { buf, dv } = parser - const { fields } = query - const { start, rows } = parser.query - let off = start - const result = [] - let i = 0 - let j = 0 - let row - for (i = 0; i < rows; i++) { - off += 5 - const cols = dv.getUint16(off) - off += 2 - row = Array(cols) - result.push(row) - for (j = 0; j < cols; j++) { - len = dv.getUint32(off) - const { oid, format } = (fields[j] || fields[0]) - off += 4 - if (format === 0) { // Non-Binary - if (oid === INT4OID) { - row[j] = parseInt(buf.readString(len, off), 10) - } else { - row[j] = readString(buf, len, off) - } - } else { - if (oid === INT4OID) { - row[j] = dv.getInt32(off) - } else { - row[j] = buf.slice(off, off + len) - } - } - off += len - } - } - return result - } - query.getResult = () => parser.getResult() - if (!onComplete) return query - fun.prepare(true, err => { - if (err) return onComplete(err) - fun.describe(true, err => { - if (err) return onComplete(err) - onComplete() - }) - }) - return query - } - - function start (onStart) { - callbacks.push(onStart) - sock.write(startupMessage(config)) - } - - function authenticate (onAuthenticate) { - callbacks.push(onAuthenticate) - sock.write(md5AuthMessage({ user, pass, salt: parser.salt })) - } - - function onMessage () { - const { type } = parser - if (type === CommandComplete) { - callbacks.shift()() - return - } - if (type === ReadyForQuery) { - if (!sock.authenticated) { - sock.authenticated = true - callbacks.shift()() - } - return - } - if (type === ErrorResponse) { - callbacks.shift()(new Error(getPGError(parser.errors))) - return - } - if (type === AuthenticationOk || type === ParseComplete || type === RowDescription || type === NoData) callbacks.shift()() - } - - const buf = new ArrayBuffer(64 * 1024) - sock.authenticated = false - const parser = sock.parser = createParser(buf) - const callbacks = [] - const { user, pass } = config - parser.onMessage = onMessage - sock.authenticate = authenticate - sock.sync = () => sock.write(syncMessage()) - sock.start = start - sock.compile = compile - sock.onData = bytes => 
parser.parse(bytes) - sock.onClose = () => { - just.error('pg socket closed') - } - sock.getParams = () => parser.parameters - sock.size = () => callbacks.length - sock.query = parser.query - sock.buffer = buf - return sock -} - -function connect (config, onPGConnect) { - lookup(config.hostname, (err, ip) => { - if (err) { - onPGConnect(err) - return - } - config.address = ip - const sock = createClient(config.address, config.port) - sock.onClose = () => { - just.error('pg socket closed') - } - sock.onConnect = err => { - onPGConnect(err, setupSocket(sock, config)) - return sock.buffer - } - sock.connect() - }) -} - -module.exports = { connect, constants, getMessageName } diff --git a/frameworks/JavaScript/just/lib/dns.js b/frameworks/JavaScript/just/lib/dns.js deleted file mode 100644 index 42b87b0f7ec..00000000000 --- a/frameworks/JavaScript/just/lib/dns.js +++ /dev/null @@ -1,193 +0,0 @@ -const opcode = { - QUERY: 0, - IQUERY: 1, - STATUS: 2 -} - -const qtype = { - A: 1, - NS: 2, - MD: 3, - MF: 4, - CNAME: 5, - SOA: 6, - MB: 7, - MG: 8, - MR: 9, - NULL: 10, - WKS: 11, - PTR: 12, - HINFO: 13, - MINFO: 14, - MX: 15, - TXT: 16, - // Additional - AXFR: 252, - MAILB: 253, - MAILA: 254, - ANY: 255 -} - -const qclass = { - IN: 1, - CS: 2, - CH: 3, - HS: 4, - ANY: 255 -} - -const rcode = { - NOERROR: 0, - FORMAT: 1, - SERVER: 2, - NAME: 3, - NOTIMPL: 4, - REFUSED: 5 -} - -const types = { opcode, qtype, qclass, rcode } - -function readName (offset, buf, view) { - let name = [] - let qnameSize = view.getUint8(offset++) - while (qnameSize) { - if ((qnameSize & 192) === 192) { - let off = (qnameSize - 192) << 8 - off += view.getUint8(offset++) - name = name.concat(readName(off, buf, view)) - qnameSize = 0 - } else { - name.push(buf.readString(qnameSize, offset)) - offset += qnameSize - qnameSize = view.getUint8(offset++) - } - } - return name -} - -const parse = (buf, len) => { - const bytes = new Uint8Array(buf) - const view = new DataView(buf) - const id = view.getUint16(0) - const flags = view.getUint16(2) - const QR = (flags >> 15) & 0b1 - const opCode = (flags >> 11) & 0b1111 - const AA = (flags >> 10) & 0b1 - const TC = (flags >> 9) & 0b1 - const RD = (flags >> 8) & 0b1 - const RA = (flags >> 7) & 0b1 - const Z = (flags >> 4) & 0b111 - const RCODE = flags & 0b1111 - const qcount = view.getUint16(4) - const ancount = view.getUint16(6) - const nscount = view.getUint16(8) - const arcount = view.getUint16(10) - const question = [] - const answer = [] - const authority = [] - const additional = [] - const start = 12 - let off = start - let i = off - let counter = qcount - while (counter--) { - let size = 0 - const sections = [] - while (bytes[i++]) size++ - if (size > 0) { - while (off - start < size) { - const qnameSize = view.getUint8(off++) - sections.push(buf.readString(qnameSize, off)) - off += qnameSize - } - } - off++ - const qtype = view.getUint16(off) - off += 2 - const qclass = view.getUint16(off) - off += 2 - question.push({ qtype, qclass, name: sections }) - } - counter = ancount - while (counter--) { - const next = view.getUint16(off) - let name - if ((0b1100000000000000 & next) === 0b1100000000000000) { - name = readName(next & 0b11111111111111, buf, view) - off += 2 - } else { - name = readName(off, buf, view) - off += name.length + (name.reduce((a, v) => a + v.length, 0)) + 1 - } - const qtype = view.getUint16(off) - off += 2 - const qclass = view.getUint16(off) - off += 2 - const ttl = view.getUint32(off) - off += 4 - const rdLength = view.getUint16(off) - off += 2 - if (qtype === 
5) { - const cname = readName(off, buf, view) - answer.push({ name, cname, qtype, qclass, ttl }) - } else if (qtype === 1) { - answer.push({ name, qtype, qclass, ttl, ip: bytes.slice(off, off + rdLength) }) - } - off += rdLength - } - return { bytes: bytes.slice(0, len), qcount, nscount, ancount, arcount, id, flags, QR, opCode, AA, TC, RD, RA, Z, RCODE, question, answer, authority, additional } -} - -const create = (domain, buf, id, qtype = 1, qclass = 1) => { - const view = new DataView(buf) - const bytes = new Uint8Array(buf) - view.setUint16(0, id) - view.setUint16(2, 0b0000000101000000) - view.setUint16(4, 1) - view.setUint16(6, 0) - view.setUint16(8, 0) - view.setUint16(10, 0) - let off = 12 - const parts = domain.split('.') - for (const part of parts) { - view.setUint8(off++, part.length) - buf.writeString(part, off) - off += part.length - } - bytes[off++] = 0 - view.setUint16(off, qtype) - off += 2 - view.setUint16(off, qclass) - off += 2 - return off -} - -const qtypes = {} -Object.keys(types.qtype).forEach(k => { - qtypes[types.qtype[k]] = k -}) -const qclasses = {} -Object.keys(types.qclass).forEach(k => { - qclasses[types.qclass[k]] = k -}) -const opcodes = {} -Object.keys(types.opcode).forEach(k => { - opcodes[types.opcode[k]] = k -}) -const rcodes = {} -Object.keys(types.rcode).forEach(k => { - rcodes[types.rcode[k]] = k -}) - -function getFlags (message) { - const flags = [] - if (message.QR) flags.push('qr') - if (message.AA) flags.push('aa') - if (message.TC) flags.push('tc') - if (message.RD) flags.push('rd') - if (message.RA) flags.push('ra') - if (message.Z) flags.push('z') - return flags.join(' ') -} - -module.exports = { getFlags, create, parse, types, qtypes, qclasses, opcodes, rcodes } diff --git a/frameworks/JavaScript/just/lib/http.js b/frameworks/JavaScript/just/lib/http.js deleted file mode 100644 index 28104b8c740..00000000000 --- a/frameworks/JavaScript/just/lib/http.js +++ /dev/null @@ -1,38 +0,0 @@ -const { http } = just.library('http.so', 'http') -const { parseRequests, getRequests, getUrl } = http - -const free = [] - -function createParser (buffer) { - if (free.length) { - const parser = free.shift() - parser.buffer.offset = 0 - return parser - } - const answer = [0] - const parser = { buffer } - function parse (bytes, off = 0) { - const count = parseRequests(buffer, buffer.offset + bytes, off, answer) - if (count > 0) { - parser.onRequests(count) - } - if (answer[0] > 0) { - const start = buffer.offset + bytes - answer[0] - const len = answer[0] - if (start > buffer.offset) { - buffer.copyFrom(buffer, 0, len, start) - } - buffer.offset = len - return - } - buffer.offset = 0 - } - buffer.offset = 0 - parser.parse = parse - parser.get = count => getRequests(count) - parser.url = index => getUrl(index) - parser.free = () => free.push(parser) - return parser -} - -module.exports = { createParser } diff --git a/frameworks/JavaScript/just/lib/lookup.js b/frameworks/JavaScript/just/lib/lookup.js deleted file mode 100644 index 5aa78d55dd5..00000000000 --- a/frameworks/JavaScript/just/lib/lookup.js +++ /dev/null @@ -1,117 +0,0 @@ -const { create, parse } = require('dns.js') -const { udp, net } = just -const { loop } = just.factory -const { readFile, isFile } = require('fs') - -const dnsServer = just.env().DNS_SERVER || '127.0.0.11' - -function parseLine (line) { - const parts = line.split(/\s+/) - const [address, ...hosts] = parts - return { address, hosts } -} - -const rxipv4 = /\d+\.\d+\.\d+\.\d+/ -const rxComment = /(\s+)?#.+/ -const rxName = 
/nameserver\s+(.+)/ - -function readHosts () { - const ipv4 = {} - const ipv6 = {} - const fileName = '/etc/hosts' - if (!isFile(fileName)) { - just.error(`${fileName} not found`) - return { ipv4, ipv6 } - } - const hosts = readFile(fileName) - const lines = hosts.split('\n').filter(line => line.trim()) - for (const line of lines) { - if (line.match(rxComment)) continue - const { address, hosts } = parseLine(line) - if (address.match(rxipv4)) { - for (const host of hosts) { - ipv4[host] = address - } - } else { - for (const host of hosts) { - ipv6[host] = address - } - } - } - return { ipv4, ipv6 } -} - -function lookupHosts (hostname) { - const { ipv4 } = readHosts() - return ipv4[hostname] -} - -function readResolv () { - const fileName = '/etc/resolv.conf' - const results = [] - if (!isFile(fileName)) { - just.error(`${fileName} not found`) - return results - } - const resolv = readFile(fileName) - const lines = resolv.split('\n').filter(line => line.trim()) - for (const line of lines) { - const match = line.match(rxName) - if (match && match.length > 1) { - const [, ip] = match - if (ip.match(rxipv4)) { - results.push(ip) - } - } - } - return results -} - -function lookup (query = 'www.google.com', onRecord = () => {}, address = dnsServer, port = 53, buf = new ArrayBuffer(65536)) { - const ip = lookupHosts(query) - if (ip) { - onRecord(null, ip) - return - } - const ips = readResolv() - if (ips.length) { - address = ips[0] - } - const fd = net.socket(net.AF_INET, net.SOCK_DGRAM | net.SOCK_NONBLOCK, 0) - net.bind(fd, address, port) - loop.add(fd, (fd, event) => { - just.clearTimeout(timer) - const answer = [] - const len = udp.recvmsg(fd, buf, answer) - if (len <= 0) { - onRecord(new Error('Bad Message Length')) - return - } - const message = parse(buf, len) - if (!message.answer.length) { - onRecord(new Error(`Address Not Found for ${query}`)) - return - } - const { ip } = message.answer[0] - const result = `${ip[0]}.${ip[1]}.${ip[2]}.${ip[3]}` - loop.remove(fd) - net.close(fd) - onRecord(null, result) - }) - const len = create(query, buf, 1) - const rc = udp.sendmsg(fd, buf, address, port, len) - if (rc === -1) { - const errno = just.sys.errno() - onRecord(new Error(`Error sending ${query} to ${address}: ${just.sys.strerror(errno)} (${errno})`)) - loop.remove(fd) - net.close(fd) - return - } - const timer = just.setTimeout(() => { - onRecord(new Error(`Request timed out for ${query} at ${address}`)) - loop.remove(fd) - net.close(fd) - }, 1000) -} - -module.exports = { lookup } diff --git a/frameworks/JavaScript/just/lib/md5.js b/frameworks/JavaScript/just/lib/md5.js deleted file mode 100644 index 9a779ea84a9..00000000000 --- a/frameworks/JavaScript/just/lib/md5.js +++ /dev/null @@ -1,683 +0,0 @@ -/** - * [js-md5]{@link https://github.com/emn178/js-md5} - * - * @namespace md5 - * @version 0.7.3 - * @author Chen, Yi-Cyuan [emn178@gmail.com] - * @copyright Chen, Yi-Cyuan 2014-2017 - * @license MIT - */ -(function () { - 'use strict'; - - var ERROR = 'input is invalid type'; - var WINDOW = typeof window === 'object'; - var root = WINDOW ? 
window : {}; - if (root.JS_MD5_NO_WINDOW) { - WINDOW = false; - } - var WEB_WORKER = !WINDOW && typeof self === 'object'; - var NODE_JS = !root.JS_MD5_NO_NODE_JS && typeof process === 'object' && process.versions && process.versions.node; - if (NODE_JS) { - root = global; - } else if (WEB_WORKER) { - root = self; - } - var COMMON_JS = !root.JS_MD5_NO_COMMON_JS && typeof module === 'object' && module.exports; - var AMD = typeof define === 'function' && define.amd; - var ARRAY_BUFFER = !root.JS_MD5_NO_ARRAY_BUFFER && typeof ArrayBuffer !== 'undefined'; - var HEX_CHARS = '0123456789abcdef'.split(''); - var EXTRA = [128, 32768, 8388608, -2147483648]; - var SHIFT = [0, 8, 16, 24]; - var OUTPUT_TYPES = ['hex', 'array', 'digest', 'buffer', 'arrayBuffer', 'base64']; - var BASE64_ENCODE_CHAR = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); - - var blocks = [], buffer8; - if (ARRAY_BUFFER) { - var buffer = new ArrayBuffer(68); - buffer8 = new Uint8Array(buffer); - blocks = new Uint32Array(buffer); - } - - if (root.JS_MD5_NO_NODE_JS || !Array.isArray) { - Array.isArray = function (obj) { - return Object.prototype.toString.call(obj) === '[object Array]'; - }; - } - - if (ARRAY_BUFFER && (root.JS_MD5_NO_ARRAY_BUFFER_IS_VIEW || !ArrayBuffer.isView)) { - ArrayBuffer.isView = function (obj) { - return typeof obj === 'object' && obj.buffer && obj.buffer.constructor === ArrayBuffer; - }; - } - - /** - * @method hex - * @memberof md5 - * @description Output hash as hex string - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {String} Hex string - * @example - * md5.hex('The quick brown fox jumps over the lazy dog'); - * // equal to - * md5('The quick brown fox jumps over the lazy dog'); - */ - /** - * @method digest - * @memberof md5 - * @description Output hash as bytes array - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {Array} Bytes array - * @example - * md5.digest('The quick brown fox jumps over the lazy dog'); - */ - /** - * @method array - * @memberof md5 - * @description Output hash as bytes array - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {Array} Bytes array - * @example - * md5.array('The quick brown fox jumps over the lazy dog'); - */ - /** - * @method arrayBuffer - * @memberof md5 - * @description Output hash as ArrayBuffer - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {ArrayBuffer} ArrayBuffer - * @example - * md5.arrayBuffer('The quick brown fox jumps over the lazy dog'); - */ - /** - * @method buffer - * @deprecated This maybe confuse with Buffer in node.js. Please use arrayBuffer instead. - * @memberof md5 - * @description Output hash as ArrayBuffer - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {ArrayBuffer} ArrayBuffer - * @example - * md5.buffer('The quick brown fox jumps over the lazy dog'); - */ - /** - * @method base64 - * @memberof md5 - * @description Output hash as base64 string - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {String} base64 string - * @example - * md5.base64('The quick brown fox jumps over the lazy dog'); - */ - var createOutputMethod = function (outputType) { - return function (message) { - return new Md5(true).update(message)[outputType](); - }; - }; - - /** - * @method create - * @memberof md5 - * @description Create Md5 object - * @returns {Md5} Md5 object. 
- * @example - * var hash = md5.create(); - */ - /** - * @method update - * @memberof md5 - * @description Create and update Md5 object - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {Md5} Md5 object. - * @example - * var hash = md5.update('The quick brown fox jumps over the lazy dog'); - * // equal to - * var hash = md5.create(); - * hash.update('The quick brown fox jumps over the lazy dog'); - */ - var createMethod = function () { - var method = createOutputMethod('hex'); - if (NODE_JS) { - method = nodeWrap(method); - } - method.create = function () { - return new Md5(); - }; - method.update = function (message) { - return method.create().update(message); - }; - for (var i = 0; i < OUTPUT_TYPES.length; ++i) { - var type = OUTPUT_TYPES[i]; - method[type] = createOutputMethod(type); - } - return method; - }; - - var nodeWrap = function (method) { - var crypto = eval("require('crypto')"); - var Buffer = eval("require('buffer').Buffer"); - var nodeMethod = function (message) { - if (typeof message === 'string') { - return crypto.createHash('md5').update(message, 'utf8').digest('hex'); - } else { - if (message === null || message === undefined) { - throw ERROR; - } else if (message.constructor === ArrayBuffer) { - message = new Uint8Array(message); - } - } - if (Array.isArray(message) || ArrayBuffer.isView(message) || - message.constructor === Buffer) { - return crypto.createHash('md5').update(new Buffer(message)).digest('hex'); - } else { - return method(message); - } - }; - return nodeMethod; - }; - - /** - * Md5 class - * @class Md5 - * @description This is internal class. - * @see {@link md5.create} - */ - function Md5(sharedMemory) { - if (sharedMemory) { - blocks[0] = blocks[16] = blocks[1] = blocks[2] = blocks[3] = - blocks[4] = blocks[5] = blocks[6] = blocks[7] = - blocks[8] = blocks[9] = blocks[10] = blocks[11] = - blocks[12] = blocks[13] = blocks[14] = blocks[15] = 0; - this.blocks = blocks; - this.buffer8 = buffer8; - } else { - if (ARRAY_BUFFER) { - var buffer = new ArrayBuffer(68); - this.buffer8 = new Uint8Array(buffer); - this.blocks = new Uint32Array(buffer); - } else { - this.blocks = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; - } - } - this.h0 = this.h1 = this.h2 = this.h3 = this.start = this.bytes = this.hBytes = 0; - this.finalized = this.hashed = false; - this.first = true; - } - - /** - * @method update - * @memberof Md5 - * @instance - * @description Update hash - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {Md5} Md5 object. 
- * @see {@link md5.update} - */ - Md5.prototype.update = function (message) { - if (this.finalized) { - return; - } - - var notString, type = typeof message; - if (type !== 'string') { - if (type === 'object') { - if (message === null) { - throw ERROR; - } else if (ARRAY_BUFFER && message.constructor === ArrayBuffer) { - message = new Uint8Array(message); - } else if (!Array.isArray(message)) { - if (!ARRAY_BUFFER || !ArrayBuffer.isView(message)) { - throw ERROR; - } - } - } else { - throw ERROR; - } - notString = true; - } - var code, index = 0, i, length = message.length, blocks = this.blocks; - var buffer8 = this.buffer8; - - while (index < length) { - if (this.hashed) { - this.hashed = false; - blocks[0] = blocks[16]; - blocks[16] = blocks[1] = blocks[2] = blocks[3] = - blocks[4] = blocks[5] = blocks[6] = blocks[7] = - blocks[8] = blocks[9] = blocks[10] = blocks[11] = - blocks[12] = blocks[13] = blocks[14] = blocks[15] = 0; - } - - if (notString) { - if (ARRAY_BUFFER) { - for (i = this.start; index < length && i < 64; ++index) { - buffer8[i++] = message[index]; - } - } else { - for (i = this.start; index < length && i < 64; ++index) { - blocks[i >> 2] |= message[index] << SHIFT[i++ & 3]; - } - } - } else { - if (ARRAY_BUFFER) { - for (i = this.start; index < length && i < 64; ++index) { - code = message.charCodeAt(index); - if (code < 0x80) { - buffer8[i++] = code; - } else if (code < 0x800) { - buffer8[i++] = 0xc0 | (code >> 6); - buffer8[i++] = 0x80 | (code & 0x3f); - } else if (code < 0xd800 || code >= 0xe000) { - buffer8[i++] = 0xe0 | (code >> 12); - buffer8[i++] = 0x80 | ((code >> 6) & 0x3f); - buffer8[i++] = 0x80 | (code & 0x3f); - } else { - code = 0x10000 + (((code & 0x3ff) << 10) | (message.charCodeAt(++index) & 0x3ff)); - buffer8[i++] = 0xf0 | (code >> 18); - buffer8[i++] = 0x80 | ((code >> 12) & 0x3f); - buffer8[i++] = 0x80 | ((code >> 6) & 0x3f); - buffer8[i++] = 0x80 | (code & 0x3f); - } - } - } else { - for (i = this.start; index < length && i < 64; ++index) { - code = message.charCodeAt(index); - if (code < 0x80) { - blocks[i >> 2] |= code << SHIFT[i++ & 3]; - } else if (code < 0x800) { - blocks[i >> 2] |= (0xc0 | (code >> 6)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3]; - } else if (code < 0xd800 || code >= 0xe000) { - blocks[i >> 2] |= (0xe0 | (code >> 12)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | ((code >> 6) & 0x3f)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3]; - } else { - code = 0x10000 + (((code & 0x3ff) << 10) | (message.charCodeAt(++index) & 0x3ff)); - blocks[i >> 2] |= (0xf0 | (code >> 18)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | ((code >> 12) & 0x3f)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | ((code >> 6) & 0x3f)) << SHIFT[i++ & 3]; - blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3]; - } - } - } - } - this.lastByteIndex = i; - this.bytes += i - this.start; - if (i >= 64) { - this.start = i - 64; - this.hash(); - this.hashed = true; - } else { - this.start = i; - } - } - if (this.bytes > 4294967295) { - this.hBytes += this.bytes / 4294967296 << 0; - this.bytes = this.bytes % 4294967296; - } - return this; - }; - - Md5.prototype.finalize = function () { - if (this.finalized) { - return; - } - this.finalized = true; - var blocks = this.blocks, i = this.lastByteIndex; - blocks[i >> 2] |= EXTRA[i & 3]; - if (i >= 56) { - if (!this.hashed) { - this.hash(); - } - blocks[0] = blocks[16]; - blocks[16] = blocks[1] = blocks[2] = blocks[3] = - blocks[4] = blocks[5] = 
blocks[6] = blocks[7] = - blocks[8] = blocks[9] = blocks[10] = blocks[11] = - blocks[12] = blocks[13] = blocks[14] = blocks[15] = 0; - } - blocks[14] = this.bytes << 3; - blocks[15] = this.hBytes << 3 | this.bytes >>> 29; - this.hash(); - }; - - Md5.prototype.hash = function () { - var a, b, c, d, bc, da, blocks = this.blocks; - - if (this.first) { - a = blocks[0] - 680876937; - a = (a << 7 | a >>> 25) - 271733879 << 0; - d = (-1732584194 ^ a & 2004318071) + blocks[1] - 117830708; - d = (d << 12 | d >>> 20) + a << 0; - c = (-271733879 ^ (d & (a ^ -271733879))) + blocks[2] - 1126478375; - c = (c << 17 | c >>> 15) + d << 0; - b = (a ^ (c & (d ^ a))) + blocks[3] - 1316259209; - b = (b << 22 | b >>> 10) + c << 0; - } else { - a = this.h0; - b = this.h1; - c = this.h2; - d = this.h3; - a += (d ^ (b & (c ^ d))) + blocks[0] - 680876936; - a = (a << 7 | a >>> 25) + b << 0; - d += (c ^ (a & (b ^ c))) + blocks[1] - 389564586; - d = (d << 12 | d >>> 20) + a << 0; - c += (b ^ (d & (a ^ b))) + blocks[2] + 606105819; - c = (c << 17 | c >>> 15) + d << 0; - b += (a ^ (c & (d ^ a))) + blocks[3] - 1044525330; - b = (b << 22 | b >>> 10) + c << 0; - } - - a += (d ^ (b & (c ^ d))) + blocks[4] - 176418897; - a = (a << 7 | a >>> 25) + b << 0; - d += (c ^ (a & (b ^ c))) + blocks[5] + 1200080426; - d = (d << 12 | d >>> 20) + a << 0; - c += (b ^ (d & (a ^ b))) + blocks[6] - 1473231341; - c = (c << 17 | c >>> 15) + d << 0; - b += (a ^ (c & (d ^ a))) + blocks[7] - 45705983; - b = (b << 22 | b >>> 10) + c << 0; - a += (d ^ (b & (c ^ d))) + blocks[8] + 1770035416; - a = (a << 7 | a >>> 25) + b << 0; - d += (c ^ (a & (b ^ c))) + blocks[9] - 1958414417; - d = (d << 12 | d >>> 20) + a << 0; - c += (b ^ (d & (a ^ b))) + blocks[10] - 42063; - c = (c << 17 | c >>> 15) + d << 0; - b += (a ^ (c & (d ^ a))) + blocks[11] - 1990404162; - b = (b << 22 | b >>> 10) + c << 0; - a += (d ^ (b & (c ^ d))) + blocks[12] + 1804603682; - a = (a << 7 | a >>> 25) + b << 0; - d += (c ^ (a & (b ^ c))) + blocks[13] - 40341101; - d = (d << 12 | d >>> 20) + a << 0; - c += (b ^ (d & (a ^ b))) + blocks[14] - 1502002290; - c = (c << 17 | c >>> 15) + d << 0; - b += (a ^ (c & (d ^ a))) + blocks[15] + 1236535329; - b = (b << 22 | b >>> 10) + c << 0; - a += (c ^ (d & (b ^ c))) + blocks[1] - 165796510; - a = (a << 5 | a >>> 27) + b << 0; - d += (b ^ (c & (a ^ b))) + blocks[6] - 1069501632; - d = (d << 9 | d >>> 23) + a << 0; - c += (a ^ (b & (d ^ a))) + blocks[11] + 643717713; - c = (c << 14 | c >>> 18) + d << 0; - b += (d ^ (a & (c ^ d))) + blocks[0] - 373897302; - b = (b << 20 | b >>> 12) + c << 0; - a += (c ^ (d & (b ^ c))) + blocks[5] - 701558691; - a = (a << 5 | a >>> 27) + b << 0; - d += (b ^ (c & (a ^ b))) + blocks[10] + 38016083; - d = (d << 9 | d >>> 23) + a << 0; - c += (a ^ (b & (d ^ a))) + blocks[15] - 660478335; - c = (c << 14 | c >>> 18) + d << 0; - b += (d ^ (a & (c ^ d))) + blocks[4] - 405537848; - b = (b << 20 | b >>> 12) + c << 0; - a += (c ^ (d & (b ^ c))) + blocks[9] + 568446438; - a = (a << 5 | a >>> 27) + b << 0; - d += (b ^ (c & (a ^ b))) + blocks[14] - 1019803690; - d = (d << 9 | d >>> 23) + a << 0; - c += (a ^ (b & (d ^ a))) + blocks[3] - 187363961; - c = (c << 14 | c >>> 18) + d << 0; - b += (d ^ (a & (c ^ d))) + blocks[8] + 1163531501; - b = (b << 20 | b >>> 12) + c << 0; - a += (c ^ (d & (b ^ c))) + blocks[13] - 1444681467; - a = (a << 5 | a >>> 27) + b << 0; - d += (b ^ (c & (a ^ b))) + blocks[2] - 51403784; - d = (d << 9 | d >>> 23) + a << 0; - c += (a ^ (b & (d ^ a))) + blocks[7] + 1735328473; - c = (c << 14 | c >>> 18) + d 
<< 0; - b += (d ^ (a & (c ^ d))) + blocks[12] - 1926607734; - b = (b << 20 | b >>> 12) + c << 0; - bc = b ^ c; - a += (bc ^ d) + blocks[5] - 378558; - a = (a << 4 | a >>> 28) + b << 0; - d += (bc ^ a) + blocks[8] - 2022574463; - d = (d << 11 | d >>> 21) + a << 0; - da = d ^ a; - c += (da ^ b) + blocks[11] + 1839030562; - c = (c << 16 | c >>> 16) + d << 0; - b += (da ^ c) + blocks[14] - 35309556; - b = (b << 23 | b >>> 9) + c << 0; - bc = b ^ c; - a += (bc ^ d) + blocks[1] - 1530992060; - a = (a << 4 | a >>> 28) + b << 0; - d += (bc ^ a) + blocks[4] + 1272893353; - d = (d << 11 | d >>> 21) + a << 0; - da = d ^ a; - c += (da ^ b) + blocks[7] - 155497632; - c = (c << 16 | c >>> 16) + d << 0; - b += (da ^ c) + blocks[10] - 1094730640; - b = (b << 23 | b >>> 9) + c << 0; - bc = b ^ c; - a += (bc ^ d) + blocks[13] + 681279174; - a = (a << 4 | a >>> 28) + b << 0; - d += (bc ^ a) + blocks[0] - 358537222; - d = (d << 11 | d >>> 21) + a << 0; - da = d ^ a; - c += (da ^ b) + blocks[3] - 722521979; - c = (c << 16 | c >>> 16) + d << 0; - b += (da ^ c) + blocks[6] + 76029189; - b = (b << 23 | b >>> 9) + c << 0; - bc = b ^ c; - a += (bc ^ d) + blocks[9] - 640364487; - a = (a << 4 | a >>> 28) + b << 0; - d += (bc ^ a) + blocks[12] - 421815835; - d = (d << 11 | d >>> 21) + a << 0; - da = d ^ a; - c += (da ^ b) + blocks[15] + 530742520; - c = (c << 16 | c >>> 16) + d << 0; - b += (da ^ c) + blocks[2] - 995338651; - b = (b << 23 | b >>> 9) + c << 0; - a += (c ^ (b | ~d)) + blocks[0] - 198630844; - a = (a << 6 | a >>> 26) + b << 0; - d += (b ^ (a | ~c)) + blocks[7] + 1126891415; - d = (d << 10 | d >>> 22) + a << 0; - c += (a ^ (d | ~b)) + blocks[14] - 1416354905; - c = (c << 15 | c >>> 17) + d << 0; - b += (d ^ (c | ~a)) + blocks[5] - 57434055; - b = (b << 21 | b >>> 11) + c << 0; - a += (c ^ (b | ~d)) + blocks[12] + 1700485571; - a = (a << 6 | a >>> 26) + b << 0; - d += (b ^ (a | ~c)) + blocks[3] - 1894986606; - d = (d << 10 | d >>> 22) + a << 0; - c += (a ^ (d | ~b)) + blocks[10] - 1051523; - c = (c << 15 | c >>> 17) + d << 0; - b += (d ^ (c | ~a)) + blocks[1] - 2054922799; - b = (b << 21 | b >>> 11) + c << 0; - a += (c ^ (b | ~d)) + blocks[8] + 1873313359; - a = (a << 6 | a >>> 26) + b << 0; - d += (b ^ (a | ~c)) + blocks[15] - 30611744; - d = (d << 10 | d >>> 22) + a << 0; - c += (a ^ (d | ~b)) + blocks[6] - 1560198380; - c = (c << 15 | c >>> 17) + d << 0; - b += (d ^ (c | ~a)) + blocks[13] + 1309151649; - b = (b << 21 | b >>> 11) + c << 0; - a += (c ^ (b | ~d)) + blocks[4] - 145523070; - a = (a << 6 | a >>> 26) + b << 0; - d += (b ^ (a | ~c)) + blocks[11] - 1120210379; - d = (d << 10 | d >>> 22) + a << 0; - c += (a ^ (d | ~b)) + blocks[2] + 718787259; - c = (c << 15 | c >>> 17) + d << 0; - b += (d ^ (c | ~a)) + blocks[9] - 343485551; - b = (b << 21 | b >>> 11) + c << 0; - - if (this.first) { - this.h0 = a + 1732584193 << 0; - this.h1 = b - 271733879 << 0; - this.h2 = c - 1732584194 << 0; - this.h3 = d + 271733878 << 0; - this.first = false; - } else { - this.h0 = this.h0 + a << 0; - this.h1 = this.h1 + b << 0; - this.h2 = this.h2 + c << 0; - this.h3 = this.h3 + d << 0; - } - }; - - /** - * @method hex - * @memberof Md5 - * @instance - * @description Output hash as hex string - * @returns {String} Hex string - * @see {@link md5.hex} - * @example - * hash.hex(); - */ - Md5.prototype.hex = function () { - this.finalize(); - - var h0 = this.h0, h1 = this.h1, h2 = this.h2, h3 = this.h3; - - return HEX_CHARS[(h0 >> 4) & 0x0F] + HEX_CHARS[h0 & 0x0F] + - HEX_CHARS[(h0 >> 12) & 0x0F] + HEX_CHARS[(h0 >> 8) & 0x0F] 
+ - HEX_CHARS[(h0 >> 20) & 0x0F] + HEX_CHARS[(h0 >> 16) & 0x0F] + - HEX_CHARS[(h0 >> 28) & 0x0F] + HEX_CHARS[(h0 >> 24) & 0x0F] + - HEX_CHARS[(h1 >> 4) & 0x0F] + HEX_CHARS[h1 & 0x0F] + - HEX_CHARS[(h1 >> 12) & 0x0F] + HEX_CHARS[(h1 >> 8) & 0x0F] + - HEX_CHARS[(h1 >> 20) & 0x0F] + HEX_CHARS[(h1 >> 16) & 0x0F] + - HEX_CHARS[(h1 >> 28) & 0x0F] + HEX_CHARS[(h1 >> 24) & 0x0F] + - HEX_CHARS[(h2 >> 4) & 0x0F] + HEX_CHARS[h2 & 0x0F] + - HEX_CHARS[(h2 >> 12) & 0x0F] + HEX_CHARS[(h2 >> 8) & 0x0F] + - HEX_CHARS[(h2 >> 20) & 0x0F] + HEX_CHARS[(h2 >> 16) & 0x0F] + - HEX_CHARS[(h2 >> 28) & 0x0F] + HEX_CHARS[(h2 >> 24) & 0x0F] + - HEX_CHARS[(h3 >> 4) & 0x0F] + HEX_CHARS[h3 & 0x0F] + - HEX_CHARS[(h3 >> 12) & 0x0F] + HEX_CHARS[(h3 >> 8) & 0x0F] + - HEX_CHARS[(h3 >> 20) & 0x0F] + HEX_CHARS[(h3 >> 16) & 0x0F] + - HEX_CHARS[(h3 >> 28) & 0x0F] + HEX_CHARS[(h3 >> 24) & 0x0F]; - }; - - /** - * @method toString - * @memberof Md5 - * @instance - * @description Output hash as hex string - * @returns {String} Hex string - * @see {@link md5.hex} - * @example - * hash.toString(); - */ - Md5.prototype.toString = Md5.prototype.hex; - - /** - * @method digest - * @memberof Md5 - * @instance - * @description Output hash as bytes array - * @returns {Array} Bytes array - * @see {@link md5.digest} - * @example - * hash.digest(); - */ - Md5.prototype.digest = function () { - this.finalize(); - - var h0 = this.h0, h1 = this.h1, h2 = this.h2, h3 = this.h3; - return [ - h0 & 0xFF, (h0 >> 8) & 0xFF, (h0 >> 16) & 0xFF, (h0 >> 24) & 0xFF, - h1 & 0xFF, (h1 >> 8) & 0xFF, (h1 >> 16) & 0xFF, (h1 >> 24) & 0xFF, - h2 & 0xFF, (h2 >> 8) & 0xFF, (h2 >> 16) & 0xFF, (h2 >> 24) & 0xFF, - h3 & 0xFF, (h3 >> 8) & 0xFF, (h3 >> 16) & 0xFF, (h3 >> 24) & 0xFF - ]; - }; - - /** - * @method array - * @memberof Md5 - * @instance - * @description Output hash as bytes array - * @returns {Array} Bytes array - * @see {@link md5.array} - * @example - * hash.array(); - */ - Md5.prototype.array = Md5.prototype.digest; - - /** - * @method arrayBuffer - * @memberof Md5 - * @instance - * @description Output hash as ArrayBuffer - * @returns {ArrayBuffer} ArrayBuffer - * @see {@link md5.arrayBuffer} - * @example - * hash.arrayBuffer(); - */ - Md5.prototype.arrayBuffer = function () { - this.finalize(); - - var buffer = new ArrayBuffer(16); - var blocks = new Uint32Array(buffer); - blocks[0] = this.h0; - blocks[1] = this.h1; - blocks[2] = this.h2; - blocks[3] = this.h3; - return buffer; - }; - - /** - * @method buffer - * @deprecated This maybe confuse with Buffer in node.js. Please use arrayBuffer instead. 
- * @memberof Md5 - * @instance - * @description Output hash as ArrayBuffer - * @returns {ArrayBuffer} ArrayBuffer - * @see {@link md5.buffer} - * @example - * hash.buffer(); - */ - Md5.prototype.buffer = Md5.prototype.arrayBuffer; - - /** - * @method base64 - * @memberof Md5 - * @instance - * @description Output hash as base64 string - * @returns {String} base64 string - * @see {@link md5.base64} - * @example - * hash.base64(); - */ - Md5.prototype.base64 = function () { - var v1, v2, v3, base64Str = '', bytes = this.array(); - for (var i = 0; i < 15;) { - v1 = bytes[i++]; - v2 = bytes[i++]; - v3 = bytes[i++]; - base64Str += BASE64_ENCODE_CHAR[v1 >>> 2] + - BASE64_ENCODE_CHAR[(v1 << 4 | v2 >>> 4) & 63] + - BASE64_ENCODE_CHAR[(v2 << 2 | v3 >>> 6) & 63] + - BASE64_ENCODE_CHAR[v3 & 63]; - } - v1 = bytes[i]; - base64Str += BASE64_ENCODE_CHAR[v1 >>> 2] + - BASE64_ENCODE_CHAR[(v1 << 4) & 63] + - '=='; - return base64Str; - }; - - var exports = createMethod(); - - if (COMMON_JS) { - module.exports = exports; - } else { - /** - * @method md5 - * @description Md5 hash function, export to global in browsers. - * @param {String|Array|Uint8Array|ArrayBuffer} message message to hash - * @returns {String} md5 hashes - * @example - * md5(''); // d41d8cd98f00b204e9800998ecf8427e - * md5('The quick brown fox jumps over the lazy dog'); // 9e107d9d372bb6826bd81d3542a419d6 - * md5('The quick brown fox jumps over the lazy dog.'); // e4d909c290d0fb1ca068ffaddf22cbd0 - * - * // It also supports UTF-8 encoding - * md5('中文'); // a7bac2239fcdcb3a067903d8077c4a07 - * - * // It also supports byte `Array`, `Uint8Array`, `ArrayBuffer` - * md5([]); // d41d8cd98f00b204e9800998ecf8427e - * md5(new Uint8Array([])); // d41d8cd98f00b204e9800998ecf8427e - */ - root.md5 = exports; - if (AMD) { - define(function () { - return exports; - }); - } - } -})(); \ No newline at end of file diff --git a/frameworks/JavaScript/just/lib/monitor.js b/frameworks/JavaScript/just/lib/monitor.js deleted file mode 100644 index 211f9438a2c..00000000000 --- a/frameworks/JavaScript/just/lib/monitor.js +++ /dev/null @@ -1,125 +0,0 @@ -const { fs, sys, net } = just - -function readStat (pid = sys.pid()) { - const buf = new ArrayBuffer(4096) - const path = `/proc/${pid}/stat` - const fd = fs.open(path) - net.seek(fd, 0, net.SEEK_SET) - let bytes = net.read(fd, buf) - const parts = [] - while (bytes > 0) { - parts.push(buf.readString(bytes)) - bytes = net.read(fd, buf) - } - const fields = parts.join('').split(' ') - const comm = fields[1] - const state = fields[2] - const [ - ppid, - pgrp, - session, - ttyNr, - tpgid, - flags, - minflt, - cminflt, - majflt, - cmajflt, - utime, - stime, - cutime, - cstime, - priority, - nice, - numThreads, - itrealvalue, - starttime, - vsize, - rssPages, - rsslim, - startcode, - endcode, - startstack, - kstkesp, - kstkeip, - signal, - blocked, - sigignore, - sigcatch, - wchan, - nswap, - cnswap, - exitSignal, - processor, - rtPriority, - policy, - delayacctBlkioTicks, - guestTime, - cguestTime, - startData, - endData, - startBrk, - argStart, - argEnd, - envStart, - envEnd, - exitCode - ] = fields.slice(3).map(v => Number(v)) - net.close(fd) - return { - pid, - comm, - state, - ppid, - pgrp, - session, - ttyNr, - tpgid, - flags, - minflt, - cminflt, - majflt, - cmajflt, - utime, - stime, - cutime, - cstime, - priority, - nice, - numThreads, - itrealvalue, - starttime, - vsize, - rssPages, - rsslim, - startcode, - endcode, - startstack, - kstkesp, - kstkeip, - signal, - blocked, - sigignore, - sigcatch, - wchan, - 
nswap, - cnswap, - exitSignal, - processor, - rtPriority, - policy, - delayacctBlkioTicks, - guestTime, - cguestTime, - startData, - endData, - startBrk, - argStart, - argEnd, - envStart, - envEnd, - exitCode - } -} - -module.exports = { readStat } diff --git a/frameworks/JavaScript/just/lib/pg.js b/frameworks/JavaScript/just/lib/pg.js deleted file mode 100644 index 8362f81bc85..00000000000 --- a/frameworks/JavaScript/just/lib/pg.js +++ /dev/null @@ -1,350 +0,0 @@ -const md5 = require('md5.js') - -function syncMessage () { - const len = 5 - const buf = new ArrayBuffer(len) - const dv = new DataView(buf) - dv.setUint8(0, 83) - dv.setUint32(1, 4) - return buf -} - -function startupMessage ({ user, database, parameters = [] }) { - let len = 8 + 4 + 1 + user.length + 1 + 8 + 1 + database.length + 2 - for (let i = 0; i < parameters.length; i++) { - const { name, value } = parameters[i] - len += (name.length + 1 + value.length + 1) - } - const buf = new ArrayBuffer(len) - const dv = new DataView(buf) - let off = 0 - dv.setInt32(0, 0) - off += 4 - // 0x00030000 = 3.0 - dv.setInt32(4, 196608) - off += 4 - - off += buf.writeString('user', off) - dv.setUint8(off++, 0) - off += buf.writeString(user, off) - dv.setUint8(off++, 0) - - off += buf.writeString('database', off) - dv.setUint8(off++, 0) - off += buf.writeString(database, off) - dv.setUint8(off++, 0) - - for (let i = 0; i < parameters.length; i++) { - const { name, value } = parameters[i] - off += buf.writeString(name, off) - dv.setUint8(off++, 0) - off += buf.writeString(value, off) - dv.setUint8(off++, 0) - } - dv.setUint8(off++, 0) - dv.setInt32(0, off) - return buf -} - -function md5AuthMessage ({ user, pass, salt }) { - const token = `${pass}${user}` - let hash = md5(token) - const plain = new ArrayBuffer(36) - plain.writeString(`md5${hash}`, 0) - const plain2 = new ArrayBuffer(36) - plain2.copyFrom(plain, 0, 32, 3) - plain2.copyFrom(salt, 32, 4) - hash = `md5${md5(plain2)}` - const len = hash.length + 5 - let off = 0 - const buf = new ArrayBuffer(len + 1) - const dv = new DataView(buf) - dv.setUint8(off++, 112) - dv.setUint32(off, len) - off += 4 - off += buf.writeString(hash, off) - dv.setUint8(off++, 0) - return buf -} - -function createParser (buf) { - let nextRow = 0 - let parseNext = 0 - let parameters = {} - const query = { start: 0, end: 0, rows: 0, running: false } - - if (freeList.length) return freeList.shift() - - function onDataRow (len, off) { - // D = DataRow - nextRow++ - return off + len - 4 - } - - function onCommandComplete (len, off) { - // C = CommandComplete - query.end = off - query.rows = nextRow - query.running = false - off += len - 4 - nextRow = 0 - parser.onMessage() - return off - } - - function onRowDescripton (len, off) { - // T = RowDescription - const fieldCount = dv.getInt16(off) - off += 2 - fields.length = 0 - for (let i = 0; i < fieldCount; i++) { - const name = readCString(buf, u8, off) - off += name.length + 1 - const tid = dv.getInt32(off) - off += 4 - const attrib = dv.getInt16(off) - off += 2 - const oid = dv.getInt32(off) - off += 4 - const size = dv.getInt16(off) - off += 2 - const mod = dv.getInt32(off) - off += 4 - const format = dv.getInt16(off) - off += 2 - fields.push({ name, tid, attrib, oid, size, mod, format }) - } - parser.onMessage() - return off - } - - function onAuthenticationOk (len, off) { - // R = AuthenticationOk - const method = dv.getInt32(off) - off += 4 - if (method === constants.AuthenticationMD5Password) { - parser.salt = buf.slice(off, off + 4) - off += 4 - 
parser.onMessage() - } - return off - } - - function onErrorResponse (len, off) { - // E = ErrorResponse - errors.length = 0 - let fieldType = u8[off++] - while (fieldType !== 0) { - const val = readCString(buf, u8, off) - errors.push({ type: fieldType, val }) - off += (val.length + 1) - fieldType = u8[off++] - } - parser.onMessage() - return off - } - - function onParameterStatus (len, off) { - // S = ParameterStatus - const key = readCString(buf, u8, off) - off += (key.length + 1) - const val = readCString(buf, u8, off) - off += val.length + 1 - parameters[key] = val - return off - } - - function onParameterDescription (len, off) { - // t = ParameterDescription - const nparams = dv.getInt16(off) - parser.params = [] - off += 2 - for (let i = 0; i < nparams; i++) { - parser.params.push(dv.getUint32(off)) - off += 4 - } - return off - } - - function onParseComplete (len, off) { - // 1 = ParseComplete - off += len - 4 - parser.onMessage() - return off - } - - function onBindComplete (len, off) { - // 2 = BindComplete - off += len - 4 - parser.onMessage() - query.rows = 0 - query.start = query.end = off - query.running = true - return off - } - - function onReadyForQuery (len, off) { - // Z = ReadyForQuery - parser.status = u8[off] - parser.onMessage() - off += len - 4 - return off - } - - function onBackendKeyData (len, off) { - // K = BackendKeyData - parser.pid = dv.getUint32(off) - off += 4 - parser.key = dv.getUint32(off) - off += 4 - parser.onMessage() - return off - } - - function parse (bytesRead) { - let type - let len - let off = parseNext - const end = buf.offset + bytesRead - while (off < end) { - const remaining = end - off - let want = 5 - if (remaining < want) { - if (byteLength - off < 1024) { - if (query.running) { - const queryLen = off - query.start + remaining - buf.copyFrom(buf, 0, queryLen, query.start) - buf.offset = queryLen - parseNext = off - query.start - query.start = 0 - return - } - buf.copyFrom(buf, 0, remaining, off) - buf.offset = remaining - parseNext = 0 - return - } - buf.offset = off + remaining - parseNext = off - return - } - type = parser.type = dv.getUint8(off) - len = parser.len = dv.getUint32(off + 1) - want = len + 1 - if (remaining < want) { - if (byteLength - off < 1024) { - if (query.running) { - const queryLen = off - query.start + remaining - buf.copyFrom(buf, 0, queryLen, query.start) - buf.offset = queryLen - parseNext = off - query.start - query.start = 0 - return - } - buf.copyFrom(buf, 0, remaining, off) - buf.offset = remaining - parseNext = 0 - return - } - buf.offset = off + remaining - parseNext = off - return - } - off += 5 - off = (V[type] || V[0])(len, off) - } - parseNext = buf.offset = 0 - } - - function getResult () { - return readCString(buf, u8, parseNext) - } - - function onDefault (len, off) { - off += len - 4 - parser.onMessage() - return off - } - - function free () { - parser.fields.length = 0 - parser.errors.length = 0 - parameters = parser.parameters = {} - nextRow = 0 - parseNext = 0 - query.start = query.end = query.rows = 0 - query.running = false - freeList.push(parser) - } - - const { messageTypes } = constants - const dv = new DataView(buf) - const u8 = new Uint8Array(buf) - const byteLength = buf.byteLength - const fields = [] - const errors = [] - const V = { - [messageTypes.AuthenticationOk]: onAuthenticationOk, - [messageTypes.ErrorResponse]: onErrorResponse, - [messageTypes.RowDescription]: onRowDescripton, - [messageTypes.CommandComplete]: onCommandComplete, - [messageTypes.ParseComplete]: onParseComplete, - 
[messageTypes.BindComplete]: onBindComplete, - [messageTypes.ReadyForQuery]: onReadyForQuery, - [messageTypes.BackendKeyData]: onBackendKeyData, - [messageTypes.ParameterStatus]: onParameterStatus, - [messageTypes.ParameterDescription]: onParameterDescription, - [messageTypes.DataRow]: onDataRow, - 0: onDefault - } - const parser = { - buf, - dv, - fields, - parameters, - type: 0, - len: 0, - errors, - getResult, - parse, - free, - query - } - return parser -} - -function readCString (buf, u8, off) { - const start = off - while (u8[off] !== 0) off++ - return buf.readString(off - start, start) -} - -function getPGError (errors) { - return errors.filter(v => v.type === 77)[0].val -} - -const constants = { - AuthenticationMD5Password: 5, - fieldTypes: { - INT4OID: 23, - VARCHAROID: 1043 - }, - messageTypes: { - AuthenticationOk: 82, - ErrorResponse: 69, - RowDescription: 84, - CommandComplete: 67, - ParseComplete: 49, - BindComplete: 50, - ReadyForQuery: 90, - BackendKeyData: 75, - ParameterStatus: 83, - ParameterDescription: 116, - DataRow: 68, - NoData: 110 - } -} - -const freeList = [] - -module.exports = { createParser, syncMessage, startupMessage, md5AuthMessage, getPGError, constants } diff --git a/frameworks/JavaScript/just/lib/stats.js b/frameworks/JavaScript/just/lib/stats.js deleted file mode 100644 index adba12ba67c..00000000000 --- a/frameworks/JavaScript/just/lib/stats.js +++ /dev/null @@ -1,28 +0,0 @@ -function start () { - function onTime () { - stats.time = (new Date()).toUTCString() - } - const stats = { conn: 0, qps: 0, rps: 0, time: 0 } - just.setInterval(() => { - if (just.buffer) { - u32 = new Uint32Array(just.buffer) - } - if (u32) { - Atomics.exchange(u32, 0, stats.rps) - } else { - const { conn, qps, rps, clients } = stats - const { user, system } = just.cpuUsage() - const { rss } = just.memoryUsage() - const rpspc = ((rps / (user + system)) || 0) - just.error(`conn ${conn} qps ${qps} rps ${rps} clients ${clients} mem ${rss} cpu (${user.toFixed(2)}/${system.toFixed(2)}) ${(user + system).toFixed(2)} rps/core ${rpspc.toFixed(2)}`) - } - stats.qps = stats.rps = 0 - }, 1000) - just.setInterval(onTime, 100) - onTime() - return stats -} - -let u32 - -module.exports = { start } diff --git a/frameworks/JavaScript/just/lib/stringify.js b/frameworks/JavaScript/just/lib/stringify.js deleted file mode 100644 index 5ad73f6b829..00000000000 --- a/frameworks/JavaScript/just/lib/stringify.js +++ /dev/null @@ -1,103 +0,0 @@ -// lifted from here: https://github.com/lucagez/slow-json-stringify - -var _prepare = function(e) { - var r = JSON.stringify(e, function(e, r) { - return r.isSJS ? r.type + "__sjs" : r - }); - return { - preparedString: r, - preparedSchema: JSON.parse(r) - } -}, -_find = function(path) { - for (var length = path.length, str = "obj", i = 0; i < length; i++) str = str.replace(/^/, "("), str += " || {})." + path[i]; - return just.vm.runScript("((obj) => " + str + ")") -}, -_makeArraySerializer = function(e) { - return e instanceof Function ? function(r) { - for (var n = "", t = r.length, a = 0; a < t - 1; a++) n += e(r[a]) + ","; - return "[" + (n += e(r[t - 1])) + "]" - } : function(e) { - return JSON.stringify(e) - } -}, -TYPES = ["number", "string", "boolean", "array", "null"], -attr = function(e, r) { - if (!TYPES.includes(e)) throw new Error('Expected one of: "number", "string", "boolean", "null". received "' + e + '" instead'); - var n = r || function(e) { - return e - }; - return { - isSJS: !0, - type: e, - serializer: "array" === e ? 
_makeArraySerializer(r) : n - } -}, -defaultRegex = new RegExp('\\n|\\r|\\t|\\"|\\\\', "gm"), -escape = function(e) { - return void 0 === e && (e = defaultRegex), - function(r) { - return r.replace(e, function(e) { - return "\\" + e - }) - } -}, -_makeQueue = function(e, r) { - var n = []; - return function e(t, a) { - if (void 0 === a && (a = []), !/__sjs/.test(t)) return Object.keys(t).map(function(r) { - return e(t[r], a.concat([r])) - }); - var i = Array.from(a), - u = _find(i), - s = u(r); - n.push({ - serializer: s.serializer, - find: u, - name: a[a.length - 1] - }) - }(e), n -}, -_makeChunks = function(e, r) { - return e.replace(/"\w+__sjs"/gm, function(e) { - return /string/.test(e) ? '"__par__"' : "__par__" - }).split("__par__").map(function(e, n, t) { - var a = '("' + (r[n] || {}).name + '":("?))$', - i = "(,?)" + a, - u = /^("}|})/.test(t[n + 1] || ""), - s = new RegExp(u ? i : a), - f = /^(\"\,|\,|\")/; - return { - flag: !1, - pure: e, - prevUndef: e.replace(f, ""), - isUndef: e.replace(s, ""), - bothUndef: e.replace(f, "").replace(s, "") - } - }) -}, -_select = function(e) { - return function(r, n) { - var t = e[n]; - return void 0 !== r ? t.flag ? t.prevUndef + r : t.pure + r : (e[n + 1].flag = !0, t.flag ? t.bothUndef : t.isUndef) - } -}, -sjs = function(e) { - var r = _prepare(e), - n = r.preparedString, - t = _makeQueue(r.preparedSchema, e), - a = _makeChunks(n, t), - i = _select(a), - u = t.length; - return function(e) { - for (var r = "", n = 0; n !== u;) { - var s = t[n], - f = s.serializer, - p = (0, s.find)(e); - r += i(f(p), n), n += 1 - } - var o = a[a.length - 1]; - return r + (o.flag ? o.prevUndef : o.pure) - } -}; -exports.sjs = sjs, exports.attr = attr, exports.escape = escape; \ No newline at end of file diff --git a/frameworks/JavaScript/just/lib/tcp.js b/frameworks/JavaScript/just/lib/tcp.js deleted file mode 100644 index dac1dfe910c..00000000000 --- a/frameworks/JavaScript/just/lib/tcp.js +++ /dev/null @@ -1,178 +0,0 @@ - -const { sys, net } = just -const { EPOLLIN, EPOLLERR, EPOLLHUP, EPOLLOUT } = just.loop -const { IPPROTO_TCP, O_NONBLOCK, TCP_NODELAY, SO_KEEPALIVE, SOMAXCONN, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, SO_REUSEPORT, SOCK_NONBLOCK, SO_ERROR } = net - -const { loop } = just.factory - -const readableMask = EPOLLIN | EPOLLERR | EPOLLHUP -const readableWritableMask = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLOUT - -function createServer (host = '127.0.0.1', port = 3000) { - const server = { host, port } - const sockets = {} - - function closeSocket (sock) { - const { fd } = sock - sock.onClose && sock.onClose(sock) - delete sockets[fd] - loop.remove(fd) - net.close(fd) - } - - function onConnect (fd, event) { - if (event & EPOLLERR || event & EPOLLHUP) { - return closeSocket({ fd }) - } - const clientfd = net.accept(fd) - const socket = sockets[clientfd] = { fd: clientfd } - net.setsockopt(clientfd, IPPROTO_TCP, TCP_NODELAY, 0) - net.setsockopt(clientfd, SOL_SOCKET, SO_KEEPALIVE, 0) - loop.add(clientfd, (fd, event) => { - if (event & EPOLLERR || event & EPOLLHUP) { - return closeSocket(socket) - } - const bytes = net.recv(fd, buffer, buffer.offset, buffer.byteLength - buffer.offset) - if (bytes > 0) { - socket.onData(bytes) - return - } - if (bytes < 0) { - const errno = sys.errno() - if (errno === net.EAGAIN) return - just.error(`recv error: ${sys.strerror(errno)} (${errno})`) - } - closeSocket(socket) - }) - let flags = sys.fcntl(clientfd, sys.F_GETFL, 0) - flags |= O_NONBLOCK - sys.fcntl(clientfd, sys.F_SETFL, flags) - loop.update(clientfd, 
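// Editor's usage sketch for the deleted lib/stringify.js above (a vendored copy of
// slow-json-stringify): a schema compiled once with sjs()/attr() returns a stringifier
// specialised for that fixed shape, which is how the benchmark serialises its /json and
// /db responses. Values below are illustrative.
const { sjs, attr } = require('lib/stringify.js')
const sJSON = sjs({ message: attr('string') })
const wJSON = sjs({ id: attr('number'), randomNumber: attr('number') })
sJSON({ message: 'Hello, World!' })   // '{"message":"Hello, World!"}'
wJSON({ id: 42, randomNumber: 7341 }) // '{"id":42,"randomNumber":7341}'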
readableMask) - socket.write = (buf, len = buf.byteLength, off = 0) => { - const written = net.send(clientfd, buf, len, off) - if (written > 0) { - return written - } - if (written < 0) { - const errno = sys.errno() - if (errno === net.EAGAIN) return written - just.error(`write error (${clientfd}): ${sys.strerror(errno)} (${errno})`) - } - if (written === 0) { - just.error(`zero write ${clientfd}`) - } - return written - } - socket.writeString = str => net.sendString(clientfd, str) - socket.close = () => closeSocket(socket) - const buffer = server.onConnect(socket) - buffer.offset = 0 - } - - function listen (maxconn = SOMAXCONN) { - const r = net.listen(sockfd, maxconn) - if (r === 0) loop.add(sockfd, onConnect) - return r - } - server.listen = listen - - const sockfd = net.socket(AF_INET, SOCK_STREAM | SOCK_NONBLOCK, 0) - net.setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, 1) - net.setsockopt(sockfd, SOL_SOCKET, SO_REUSEPORT, 1) - net.bind(sockfd, host, port) - - return server -} - -function createClient (address = '127.0.0.1', port = 3000) { - const sock = { address, port, connected: false } - let fd - - function closeSocket () { - sock.onClose && sock.onClose(sock) - loop.remove(fd) - net.close(fd) - } - - function handleRead (fd, event) { - const bytes = net.recv(fd, buffer, buffer.offset, buffer.byteLength - buffer.offset) - if (bytes > 0) { - sock.onData(bytes) - return - } - if (bytes < 0) { - const errno = sys.errno() - if (errno === net.EAGAIN) return - just.print(`recv error: ${sys.strerror(errno)} (${errno})`) - } - closeSocket(sock) - } - - function handleError (fd, event) { - const errno = net.getsockopt(fd, SOL_SOCKET, SO_ERROR) - if (!sock.connected) { - sock.onConnect(new Error(`${errno} : ${just.sys.strerror(errno)}`)) - } - } - - function handleWrite (fd, event) { - if (!sock.connected) { - net.setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, 0) - net.setsockopt(fd, SOL_SOCKET, SO_KEEPALIVE, 0) - let flags = sys.fcntl(fd, sys.F_GETFL, 0) - flags |= O_NONBLOCK - sys.fcntl(fd, sys.F_SETFL, flags) - loop.update(fd, readableMask) - buffer = sock.onConnect(null, sock) - buffer.offset = 0 - sock.connected = true - } - } - - function onSocketEvent (fd, event) { - if (event & EPOLLERR || event & EPOLLHUP) { - handleError(fd, event) - closeSocket() - return - } - if (event & EPOLLIN) { - handleRead(fd, event) - } - if (event & EPOLLOUT) { - handleWrite(fd, event) - } - } - - sock.write = (buf, len = buf.byteLength, off = 0) => { - const written = net.send(fd, buf, len, off) - if (written > 0) { - return written - } - if (written < 0) { - const errno = sys.errno() - if (errno === net.EAGAIN) return written - just.error(`write error (${fd}): ${sys.strerror(errno)} (${errno})`) - } - if (written === 0) { - just.error(`zero write ${fd}`) - } - return written - } - sock.writeString = str => net.sendString(fd, str) - - sock.close = () => closeSocket(sock) - - function connect () { - fd = net.socket(AF_INET, SOCK_STREAM | SOCK_NONBLOCK, 0) - loop.add(fd, onSocketEvent, readableWritableMask) - net.connect(fd, address, port) - sock.fd = fd - return sock - } - - let buffer - sock.connect = connect - return sock -} - -module.exports = { createServer, createClient } diff --git a/frameworks/JavaScript/just/spawn.js b/frameworks/JavaScript/just/spawn.js deleted file mode 100644 index bc48b636610..00000000000 --- a/frameworks/JavaScript/just/spawn.js +++ /dev/null @@ -1,38 +0,0 @@ -const { cwd, errno, strerror, spawn } = just.sys -const path = cwd() -const [...args] = just.args.slice(2) -const { 
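// Editor's usage sketch for the deleted lib/tcp.js above, following the pattern the old
// techempower.js relied on: onConnect must return the ArrayBuffer that recv() will fill,
// and onData fires with the number of bytes just read. The echo handler is hypothetical.
const { createServer } = require('lib/tcp.js')
const server = createServer('0.0.0.0', 8080)
server.onConnect = sock => {
  const buffer = new ArrayBuffer(4096)
  sock.onData = bytes => sock.write(buffer, bytes)    // echo the received bytes back
  sock.onClose = () => just.print('client disconnected')
  return buffer
}
just.print(`listen: ${server.listen()}`)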
socketpair, AF_UNIX, SOCK_STREAM } = just.net -function createPipe () { - const fds = [] - const r = socketpair(AF_UNIX, SOCK_STREAM, fds) - if (r !== 0) throw new Error(`socketpair ${r} errno ${errno()} : ${strerror(errno())}`) - return fds -} - -const cpus = parseInt(just.env().CPUS || just.sys.cpus, 10) -const pids = [] -for (let i = 0; i < cpus; i++) { - const stdin = createPipe() - const stdout = createPipe() - const stderr = createPipe() - const pid = spawn('just', path, args, stdin[1], stdout[1], stderr[1]) - pids.push(pid) -} - -const { readStat } = require('lib/monitor.js') -const last = { user: 0, system: 0 } -just.setInterval(() => { - const stat = { user: 0, system: 0, rss: 0 } - for (const pid of pids) { - const { utime, stime, rssPages } = readStat(pid) - const rss = Math.floor((rssPages * just.sys.pageSize) / (1024 * 1024)) - stat.rss += rss - stat.user += utime - stat.system += stime - } - const user = stat.user - last.user - const system = stat.system - last.system - last.user = stat.user - last.system = stat.system - just.print(`children ${pids.length} rss ${stat.rss} user ${user} system ${system} total ${user + system}`) -}, 1000) diff --git a/frameworks/JavaScript/just/techempower.js b/frameworks/JavaScript/just/techempower.js index a85030ad41a..bba959ba5af 100644 --- a/frameworks/JavaScript/just/techempower.js +++ b/frameworks/JavaScript/just/techempower.js @@ -1,431 +1,81 @@ -const { connect, constants } = require('lib/connection.js') -const { createServer } = require('lib/tcp.js') -const { createParser } = require('lib/http.js') -const { sjs, attr } = require('lib/stringify.js') +const stringify = require('@stringify') +const html = require('@html') +const cache = require('@cache') +const dns = require('@dns') +const postgres = require('@pg') +const http = require('@http') +const socket = require('@socket') -function compile (sock, query) { - return new Promise((resolve, reject) => { - const result = sock.compile(query, err => { - if (err) return reject(err) - resolve(result) - }) - }) -} +const util = require('util.js') +const config = require('tfb.config.js') -async function onPGAuth (sock) { - sock.getWorldById = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: 'select id, randomNumber from World where id = $1', - fields: [{ format: 1, oid: INT4OID }], - name: 's1', - portal: '', - maxRows: 0, - params: [1] - }) - sock.allFortunes = await compile(sock, { - formats: [], - sql: 'select * from Fortune', - fields: [{ format: 1, oid: INT4OID }, { format: 0, oid: VARCHAROID }], - name: 's2', - portal: '', - maxRows: 0, - htmlEscape: true, - params: [] - }) - sock.updateWorldById = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: 'update World set randomNumber = $2 where id = $1', - fields: [], - name: 's3', - portal: '', - maxRows: 0, - params: [1, 1] - }) - // TODO: we could actually build these on the fly for any number of updates - sock.updateWorldById20 = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: `update world set randomnumber = CASE id -when $1 then $2 -when $3 then $4 -when $5 then $6 -when $7 then $8 -when $9 then $10 -when $11 then $12 -when $13 then $14 -when $15 then $16 -when $17 then $18 -when $19 then $20 -when $21 then $22 -when $23 then $24 -when $25 then $26 -when $27 then $28 -when $29 then $30 -when $31 then $32 -when $33 then $34 -when $35 then $36 -when $37 then $38 -when $39 then $40 -else randomnumber -end where id in 
($1,$3,$5,$7,$9,$11,$13,$15,$17,$19,$21,$23,$25,$27,$29,$31,$33,$35,$37,$39) -`, - fields: [], - name: 's4', - portal: '', - maxRows: 0, - params: Array(40).fill(0) - }) - sock.updateWorldById15 = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: `update world set randomnumber = CASE id -when $1 then $2 -when $3 then $4 -when $5 then $6 -when $7 then $8 -when $9 then $10 -when $11 then $12 -when $13 then $14 -when $15 then $16 -when $17 then $18 -when $19 then $20 -when $21 then $22 -when $23 then $24 -when $25 then $26 -when $27 then $28 -when $29 then $30 -else randomnumber -end where id in ($1,$3,$5,$7,$9,$11,$13,$15,$17,$19,$21,$23,$25,$27,$29) -`, - fields: [], - name: 's5', - portal: '', - maxRows: 0, - params: Array(30).fill(0) - }) - sock.updateWorldById10 = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: `update world set randomnumber = CASE id -when $1 then $2 -when $3 then $4 -when $5 then $6 -when $7 then $8 -when $9 then $10 -when $11 then $12 -when $13 then $14 -when $15 then $16 -when $17 then $18 -when $19 then $20 -else randomnumber -end where id in ($1,$3,$5,$7,$9,$11,$13,$15,$17,$19) -`, - fields: [], - name: 's6', - portal: '', - maxRows: 0, - params: Array(20).fill(0) - }) - sock.updateWorldById5 = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: `update world set randomnumber = CASE id -when $1 then $2 -when $3 then $4 -when $5 then $6 -when $7 then $8 -when $9 then $10 -else randomnumber -end where id in ($1,$3,$5,$7,$9) -`, - fields: [], - name: 's7', - portal: '', - maxRows: 0, - params: Array(10).fill(0) - }) - sock.getCachedWorldById = await compile(sock, { - formats: [{ format: 1, oid: INT4OID }], - sql: 'select id, randomNumber from World where id = $1', - fields: [{ format: 1, oid: INT4OID }], - name: 's8', - portal: '', - maxRows: 0, - params: [1] - }) - clients.push(sock) - if (clients.length === poolSize) onPGReady() -} +const { getIPAddress } = dns +const { createSocket } = socket +const { createServer, responses } = http +const { SimpleCache } = cache +const { sprayer, sortByMessage, spawn, getUpdateQuery, Clock } = util +const { sjs, attr } = stringify +const { + db, fortunes, worlds, templates, + maxQuery, maxRows, message, json, + extra +} = config -function onPGConnect (err, sock) { - if (err) { - just.error(err.stack) - just.setTimeout(() => connect(tfb, onPGConnect), 1000) - return - } - sock.onClose = () => { - // todo: remove from pool and reconnect? - just.error('pg.close') - } - sock.start(err => { - if (err) return just.error(err.stack) - sock.authenticate(err => { - if (err) return just.error(err.stack) - onPGAuth(sock).catch(err => just.error(err.stack)) - }) - }) -} +async function main () { + const spray = sprayer(maxQuery) + const getRandom = () => Math.ceil(Math.random() * maxRows) + const getCount = (qs = { q: 1 }) => { + return Math.min(parseInt((qs.q) || 1, 10), maxQuery) || 1 + } + const sJSON = sjs({ message: attr('string') }) + const wJSON = sjs({ id: attr('number'), randomnumber: attr('number') }) + const clock = new Clock() -const HEADER = 'Fortunes' -const FOOTER = '
idmessage
' -const S1 = '' -const S2 = '' -const S3 = '' -function getHTML (rows) { - let html = HEADER - for (const row of rows) { - html += (S1 + row[0] + S2 + row[1] + S3) - } - return html + FOOTER -} + const sock = createSocket() + const ip = await getIPAddress(db.hostname) + await sock.connect(ip, db.port) + const pg = await postgres.createSocket(sock, db) -function insertionSort (arr) { - const n = arr.length - for (let i = 1; i < n; i++) { - const c = arr[i] - let j = i - 1 - while ((j > -1) && (c[1] < arr[j][1])) { - arr[j + 1] = arr[j] - j-- - } - arr[j + 1] = c - } - return arr -} + sock.noDelay = false -const cache = {} + const getWorldById = await pg.compile(worlds) + const getFortunes = await pg.compile(fortunes) + const worldCache = new SimpleCache(id => getWorldById(id)) + const template = html.load(templates.fortunes, templates.settings) + const getRandomWorld = () => getWorldById(getRandom()) + const getCachedWorld = () => worldCache.get(getRandom()) -function onHTTPConnect (sock) { - const client = clients[sock.fd % clients.length] - const rbuf = new ArrayBuffer(4096) - const parser = createParser(rbuf) - const { getWorldById, updateWorldById, allFortunes, updateWorldById20, updateWorldById15, updateWorldById10, updateWorldById5, getCachedWorldById } = client - const message = { message: 'Hello, World!' } - const text = 'Hello, World!' - const extra = [0, 'Additional fortune added at request time.'] - const updateQueries = { - 5: updateWorldById5, - 10: updateWorldById10, - 15: updateWorldById15, - 20: updateWorldById20 - } - const results = [] - let queries = 0 - let updates = 0 - function onUpdateMulti () { - const json = JSON.stringify(results) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - } - function onUpdateSingle () { - updates++ - if (results.length === updates) { - const json = JSON.stringify(results) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - } - } - function onUpdates () { - const [id, randomNumber] = getWorldById.getRows()[0] - results.push({ id, randomNumber }) - if (results.length === queries) { - const query = updateQueries[queries] - if (query) { - let i = 0 - for (const row of results) { - row.randomNumber = Math.ceil(Math.random() * 10000) - query.params[i++] = row.id - query.params[i++] = row.randomNumber - } - query.call(onUpdateMulti) - return - } - updates = 0 - for (const row of results) { - row.randomNumber = Math.ceil(Math.random() * 10000) - updateWorldById.params[0] = row.id - updateWorldById.params[1] = row.randomNumber - updateWorldById.append(onUpdateSingle) - } - updateWorldById.send() - } - } - function handleUpdates (qs) { - const [, val] = qs.split('=') - queries = Math.min(parseInt(val || 1, 10), 500) || 1 - results.length = 0 - for (let i = 1; i < queries; i++) { - getWorldById.params[0] = Math.ceil(Math.random() * 10000) - getWorldById.append(onUpdates, (i % 20 === 0)) - } - getWorldById.params[0] = Math.ceil(Math.random() * 10000) - getWorldById.append(onUpdates) - getWorldById.send() - } - function onMulti () { - const [id, randomNumber] = getWorldById.getRows()[0] - results.push({ id, randomNumber }) - if (results.length === queries) { - const json = JSON.stringify(results) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - queries = 0 - } - } - function handleMulti (qs) { - const [, val] = qs.split('=') - queries = Math.min(parseInt(val || 1, 10), 500) || 1 - results.length = 0 - for (let i = 1; i < queries; i++) { - getWorldById.params[0] = Math.ceil(Math.random() * 10000) - 
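// Editor's sketch of the raw-response pattern used by the handlers above: a header
// prefix ending in "Content-Length: " is precomputed (the original rebuilds these
// prefixes on a 100 ms timer so the Date header stays fresh), and every response is
// prefix + length + blank line + body. The Date value below is illustrative.
const END = '\r\n\r\n'
const rJSON = 'HTTP/1.1 200 OK\r\nServer: j\r\nDate: Thu, 01 Jan 1970 00:00:00 GMT\r\nContent-Type: application/json\r\nContent-Length: '
function jsonResponse (obj) {
  const json = JSON.stringify(obj)
  return `${rJSON}${json.length}${END}${json}`
}
// jsonResponse({ message: 'Hello, World!' }) yields a complete HTTP/1.1 response string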
getWorldById.append(onMulti, (i % 20 === 0)) - } - getWorldById.params[0] = Math.ceil(Math.random() * 10000) - getWorldById.append(onMulti) - getWorldById.send() - } - function onCached () { - const row = getCachedWorldById.getRows()[0] - const [id, randomNumber] = row - const world = { id, randomNumber } - cache[id] = world - results.push(world) - if (results.length === queries) { - const json = JSON.stringify(results) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - queries = 0 - results.length = 0 - } - } - function handleCached (qs) { - const [, val] = qs.split('=') - queries = Math.min(parseInt(val || 1, 10), 500) || 1 - for (let i = 1; i < queries; i++) { - const id = Math.ceil(Math.random() * 10000) - const row = cache[id] - if (row) { - results.push(row) - } else { - getCachedWorldById.params[0] = id - getCachedWorldById.append(onCached, (i % 20 === 0)) - } - } - const id = Math.ceil(Math.random() * 10000) - const row = cache[id] - if (row) { - results.push(row) - } else { - getCachedWorldById.params[0] = id - getCachedWorldById.append(onCached) - } - if (results.length === queries) { - const json = JSON.stringify(results) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - queries = 0 - results.length = 0 - return - } - getCachedWorldById.send() - } - function onFortunes () { - const html = getHTML(insertionSort([extra, ...allFortunes.getRows()])) - sock.writeString(`${rHTML}${utf8Length(html)}${END}${html}`) - } - function onSingle () { - const [id, randomNumber] = getWorldById.getRows()[0] - const json = sDB({ id, randomNumber }) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - } - const queryPath = '/query' - const updatePath = '/update' - const cachePath = '/cached-world' - const pathSep = '?' - const END = '\r\n\r\n' - const handlers = { - '/json': () => { - const json = sJSON(message) - sock.writeString(`${rJSON}${json.length}${END}${json}`) - }, - '/fortunes': () => allFortunes.call(onFortunes), - '/db': () => { - getWorldById.params[0] = Math.ceil(Math.random() * 10000) - getWorldById.call(onSingle) - }, - '/plaintext': () => sock.writeString(`${rTEXT}${text.length}${END}${text}`), - default: url => { - const [path, qs] = url.split(pathSep) - if (path === queryPath) { - handleMulti(qs) - return - } - if (path === updatePath) { - handleUpdates(qs) - return - } - if (path === cachePath) { - handleCached(qs) - return - } - sock.writeString(r404) - } - } - parser.onRequests = count => { - if (count > 1) { - sock.writeString(`${rTEXT}${text.length}${END}${text}`.repeat(count)) - return - } - const url = parser.url(0) - const handler = (handlers[url] || handlers.default) - handler(url) - } - sock.onData = bytes => parser.parse(bytes) - sock.onClose = () => { - parser.free() - } - return parser.buffer -} - -function onPGReady () { - microtasks = false - just.print(`listen: ${server.listen()}`) -} + const server = createServer() + .get('/plaintext', res => res.text(message)) + .get('/json', res => res.utf8(sJSON(json), responses.json)) + .get('/db', async res => { + res.utf8(wJSON(await getRandomWorld()), responses.json) + }) + .get('/fortunes', async res => { + res.html(template.call(sortByMessage([extra, ...await getFortunes()]))) + }) + .get('/cached-world', async (res, req) => { + res.json(await Promise.all(spray(getCount(req.query), getCachedWorld))) + }) + .get('/query', async (res, req) => { + res.json(await Promise.all(spray(getCount(req.query), getRandomWorld))) + }) + .get('/update', async (res, req) => { + const count = getCount(req.query) 
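// Editor's note on the getCount() helper defined above: it clamps the requested query
// count to at most maxQuery (500 in tfb.config.js), and missing or non-numeric values
// fall back to 1, as the TFB query/update tests require. Illustrative cases:
const clamp = (qs = { q: 1 }) => Math.min(parseInt((qs.q) || 1, 10), 500) || 1
clamp({ q: '20' })   // 20
clamp({ q: 'abc' })  // 1   (parseInt gives NaN, so the trailing || 1 applies)
clamp({ q: '9999' }) // 500 (clamped to maxQuery)
clamp()              // 1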
+ const worlds = await Promise.all(spray(count, getRandomWorld)) + const updateWorlds = await getUpdateQuery(count, pg) + await updateWorlds(...worlds.map(w => { + w.randomnumber = getRandom() + return [w.id, w.randomnumber] + }).flat()) + res.json(worlds) + }) + .listen('0.0.0.0', 8080) -const { utf8Length } = just.sys -const poolSize = parseInt(just.env().PGPOOL || just.sys.cpus, 10) -const server = createServer('0.0.0.0', 8080) -server.onConnect = onHTTPConnect -const { INT4OID, VARCHAROID } = constants.fieldTypes -const clients = [] -const tfb = { - hostname: 'tfb-database', - port: 5432, - user: 'benchmarkdbuser', - pass: 'benchmarkdbpass', - database: 'hello_world' + clock.set(() => { + worldCache.tick() + server.update() + }) } -let i = poolSize -const sJSON = sjs({ message: attr('string') }) -const sDB = sjs({ id: attr('number'), randomNumber: attr('number') }) -while (i--) connect(tfb, onPGConnect) -const { loop } = just.factory -let microtasks = true -let time = (new Date()).toUTCString() -let rHTML = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/html; charset=UTF-8\r\nContent-Length: ` -let rTEXT = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/plain\r\nContent-Length: ` -let rJSON = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: application/json\r\nContent-Length: ` -let r404 = `HTTP/1.1 404 Not Found\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/plain\r\nContent-Length: 0\r\n\r\n` -just.setInterval(() => { - time = (new Date()).toUTCString() - rHTML = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/html; charset=UTF-8\r\nContent-Length: ` - rTEXT = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/plain\r\nContent-Length: ` - rJSON = `HTTP/1.1 200 OK\r\nServer: j\r\nDate: ${time}\r\nContent-Type: application/json\r\nContent-Length: ` - r404 = `HTTP/1.1 404 Not Found\r\nServer: j\r\nDate: ${time}\r\nContent-Type: text/plain\r\nContent-Length: 0\r\n\r\n` -}, 100) -while (1) { - if (loop.poll(0) === 0) loop.poll(-1) - if (microtasks) just.sys.runMicroTasks() -} +spawn(main).catch(err => just.error(err.stack)) diff --git a/frameworks/JavaScript/just/tfb.config.js b/frameworks/JavaScript/just/tfb.config.js new file mode 100644 index 00000000000..d7669ca7b1a --- /dev/null +++ b/frameworks/JavaScript/just/tfb.config.js @@ -0,0 +1,56 @@ +const postgres = require('@pg') + +const { constants } = postgres +const { BinaryInt, VarChar } = constants + +const db = { + hostname: 'tfb-database', + user: 'benchmarkdbuser', + pass: 'benchmarkdbpass', + database: 'hello_world', + version: constants.PG_VERSION, + port: 5432 +} + +const fortunes = { + portal: '', + formats: [], + name: 'fortunes', + maxRows: 0, + params: [], + sql: 'select * from Fortune', + fields: [ + { format: BinaryInt, name: 'id' }, + { format: VarChar, name: 'message', htmlEscape: true } + ] +} + +const worlds = { + portal: '', + formats: [BinaryInt], + name: 'worlds', + maxRows: 0, + params: [0], + sql: 'select id, randomNumber from World where id = $1', + fields: [ + { format: BinaryInt, name: 'id' }, + { format: BinaryInt, name: 'randomnumber' } + ] +} + +const templates = { + fortunes: 'fortunes.html', + settings: { rawStrings: false, compile: true } +} + +const maxQuery = 500 +const maxRows = 10000 +const message = 'Hello, World!' +const json = { message } +const extra = { id: 0, message: 'Additional fortune added at request time.' 
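// Editor's sketch (values illustrative) of how the statement definitions above are
// consumed: the new techempower.js passes them to pg.compile(), which returns an async
// callable, so the `worlds` definition becomes a one-parameter primary-key lookup.
const { worlds } = require('tfb.config.js')
async function lookupWorld (pg, id) {
  const getWorldById = await pg.compile(worlds)
  return getWorldById(id) // resolves to e.g. { id: 42, randomnumber: 7341 }
}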
} + +module.exports = { + db, fortunes, worlds, templates, + maxQuery, maxRows, message, json, + extra +} diff --git a/frameworks/JavaScript/just/util.js b/frameworks/JavaScript/just/util.js new file mode 100644 index 00000000000..dccbc031fe1 --- /dev/null +++ b/frameworks/JavaScript/just/util.js @@ -0,0 +1,139 @@ +const process = require('process') +const postgres = require('@pg') + +const { constants } = postgres +const { BinaryInt } = constants + +/** + * Generate a Bulk Update SQL statement definition For a given table, identity + * column and column to be updated, it will generate a single SQL + * statement to update all fields in one statement + * + * @param {string} table - The name of the table + * @param {string} field - The name of the field we want to update + * @param {string} id - The name of the id field + * @param {string} updates - The number of rows to update in the statement + * @param {string} type - The name of the table + */ +function generateBulkUpdate (table, field, id, updates = 5, formats = [BinaryInt]) { + function getIds (count) { + const updates = [] + for (let i = 1; i < (count * 2); i += 2) { + updates.push(`$${i}`) + } + return updates.join(',') + } + function getClauses (count) { + const clauses = [] + for (let i = 1; i < (count * 2); i += 2) { + clauses.push(`when $${i} then $${i + 1}`) + } + return clauses.join('\n') + } + const sql = [] + sql.push(`update ${table} set ${field} = CASE ${id}`) + sql.push(getClauses(updates)) + sql.push(`else ${field}`) + sql.push(`end where ${id} in (${getIds(updates)})`) + return { + formats, + fields: [], + name: `bulk.${updates}`, + portal: '', + params: Array(updates * 2).fill(0), + sql: sql.join('\n'), + sync: true + } +} + +/** + * Utility function to generate an array of N values populated with provided + * map function. There seems to be no simpler/quicker way to do this in JS. 
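// Editor's illustration of the statement generateBulkUpdate() above produces for a
// 3-row batch (whitespace as emitted): parameters alternate id, value, and the id
// placeholders are reused in the IN list, so the compiled statement is invoked with the
// flattened [id1, value1, id2, value2, id3, value3], as techempower.js does for /update.
//
//   update world set randomnumber = CASE id
//   when $1 then $2
//   when $3 then $4
//   when $5 then $6
//   else randomnumber
//   end where id in ($1,$3,$5)
//
const { generateBulkUpdate } = require('util.js')
just.print(generateBulkUpdate('world', 'randomnumber', 'id', 3).sql)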
+ * @param {string} n - Size of the array to create + * @param {string} field - The map function which will create each array value + */ +function sprayer (max = 100) { + const ar = [0] + for (let i = 0; i < max; i++) { + ar[i + 1] = (new Array(i + 1)).fill(1) + } + max += 1 + return (n, fn) => ar[n % max].map(fn) +} + +function sortByMessage (arr) { + const n = arr.length + for (let i = 1; i < n; i++) { + const c = arr[i] + let j = i - 1 + while ((j > -1) && (c.message < arr[j].message)) { + arr[j + 1] = arr[j] + j-- + } + arr[j + 1] = c + } + return arr +} + +function spawn (main) { + if (just.env()['WORKER']) return main() + const { watch, launch } = process + const processes = [] + const cpus = parseInt(just.env().CPUS || just.sys.cpus, 10) + for (let i = 0; i < cpus; i++) { + just.sys.setenv('WORKER', i) + //const proc = launch(just.args[0], ['--trace-gc', ...just.args.slice(1)]) + const proc = launch(just.args[0], just.args.slice(1)) + processes.push(proc) + proc.stats = { user: 0, system: 0 } + } + return Promise.all(processes.map(p => watch(p))) +} + +const updates = new Map() + +function getUpdateQuery (count, pg, formats = [BinaryInt]) { + const query = updates.get(count) + if (query) return query + const promise = pg.compile(generateBulkUpdate('world', 'randomnumber', 'id', count, formats)) + updates.set(count, promise) + return promise +} + +class Clock { + constructor () { + this.slots = new Map() + } + + unset (callback, repeat = 1000) { + const current = this.slots.get(repeat) + if (!current) return + current.callbacks = current.callbacks.filter(cb => cb !== callback) + if (!current.callbacks.length) { + just.clearTimeout(current.timer) + this.slots.delete(repeat) + } + } + + set (callback, repeat = 1000) { + let current = this.slots.get(repeat) + if (current) { + current.callbacks.push(callback) + return + } + current = { + callbacks: [callback], + timer: just.setInterval(() => current.callbacks.forEach(cb => cb()), repeat) + } + this.slots.set(repeat, current) + } +} + +module.exports = { + sprayer, + spawn, + sortByMessage, + generateBulkUpdate, + getUpdateQuery, + Clock +} diff --git a/frameworks/JavaScript/nodejs/app.js b/frameworks/JavaScript/nodejs/app.js index eaee2b12590..386642c479c 100755 --- a/frameworks/JavaScript/nodejs/app.js +++ b/frameworks/JavaScript/nodejs/app.js @@ -13,16 +13,16 @@ if (process.env.TFB_TEST_NAME === 'nodejs-mongodb') { process.env.NODE_HANDLER = 'sequelize-postgres'; } -if (cluster.isMaster) { +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + // Fork workers. 
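// Editor's sketch of the process-per-core model implemented by spawn() above: when the
// WORKER environment variable is unset, the current script relaunches itself once per
// CPU with WORKER set, and each child then runs main(); this mirrors how the new
// techempower.js boots. The main() body here is hypothetical.
async function main () {
  just.print(`worker ${just.env().WORKER} starting`)
  // ... create the HTTP server and database connections here ...
}
spawn(main).catch(err => just.error(err.stack))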
for (let i = 0; i < numCPUs; i++) { cluster.fork(); } cluster.on('exit', (worker, code, signal) => { - console.log([ - 'A process exit was triggered, most likely due to a failed database action', - 'NodeJS test server shutting down now'].join('\n')); + console.log(`worker ${worker.process.pid} died`); process.exit(1); }); } else { diff --git a/frameworks/JavaScript/nodejs/nodejs.dockerfile b/frameworks/JavaScript/nodejs/nodejs.dockerfile index 436c3bcea02..18f94864176 100644 --- a/frameworks/JavaScript/nodejs/nodejs.dockerfile +++ b/frameworks/JavaScript/nodejs/nodejs.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim ARG TFB_TEST_NAME diff --git a/frameworks/JavaScript/restify/restify.dockerfile b/frameworks/JavaScript/restify/restify.dockerfile index 29b425df551..63f391d4c83 100644 --- a/frameworks/JavaScript/restify/restify.dockerfile +++ b/frameworks/JavaScript/restify/restify.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.2-slim +FROM node:16.14.2-slim WORKDIR /nextjs ADD ./ ./ diff --git a/frameworks/JavaScript/restify/server.js b/frameworks/JavaScript/restify/server.js index a87007b18a8..addbd29467f 100644 --- a/frameworks/JavaScript/restify/server.js +++ b/frameworks/JavaScript/restify/server.js @@ -1,5 +1,5 @@ const cluster = require('cluster'); -const cpus = require('os').cpus(); +const numCPUs = require('os').cpus().length; const server = require('restify').createServer(); server.get('/plaintext', (req, res) => @@ -8,10 +8,18 @@ server.get('/plaintext', (req, res) => server.get('/json', (req, res) => res.json({ message: 'Hello, World!' })); - -if (cluster.isMaster) { - cpus.forEach(() => cluster.fork()); +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + + // Fork workers. + for (let i = 0; i < numCPUs; i++) { + cluster.fork(); + } + + cluster.on('exit', (worker, code, signal) => { + console.log(`worker ${worker.process.pid} died`); + }); } else { server.listen(8080, () => - console.log('%s listening at %s', server.name, server.url)); + console.log(`${server.name} listening at ${server.url}`)); } diff --git a/frameworks/JavaScript/ringojs/app/views.js b/frameworks/JavaScript/ringojs/app/views.js index 76a4780316e..7773f92fa8b 100644 --- a/frameworks/JavaScript/ringojs/app/views.js +++ b/frameworks/JavaScript/ringojs/app/views.js @@ -44,7 +44,7 @@ app.get('/dbquery/:queries?', function(request, queries) { return response.json(worlds); }); -app.get('/fortune', function() { +app.get('/fortunes', function() { const fortunes = models.store.query('select Fortune.* from Fortune'); fortunes.push({ id: 0, diff --git a/frameworks/JavaScript/ringojs/benchmark_config.json b/frameworks/JavaScript/ringojs/benchmark_config.json index ee252b1ceb0..74dfb0f50fe 100644 --- a/frameworks/JavaScript/ringojs/benchmark_config.json +++ b/frameworks/JavaScript/ringojs/benchmark_config.json @@ -27,7 +27,7 @@ "json_url": "/json", "db_url": "/db", "query_url": "/dbquery/", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "plaintext_url": "/plaintext", "update_url": "/updates/", "port": 8080, diff --git a/frameworks/Kotlin/hexagon/README.md b/frameworks/Kotlin/hexagon/README.md index b83921aecdc..28397711a58 100644 --- a/frameworks/Kotlin/hexagon/README.md +++ b/frameworks/Kotlin/hexagon/README.md @@ -2,15 +2,15 @@ # Hexagon Benchmarking Test This is the Hexagon portion of a [benchmarking test suite](../../../README.md) comparing a variety -of web development platforms. The test utilizes Hexagon routes, serialization and database access. 
+of web development platforms. The test utilizes Hexagon routes and serialization. ## Tests -You can run tests against any running server passing the `verify.endpoint` project property. I.e.: -`./gradlew verify -Pverify.endpoint=http://host:1234` +You can verify the benchmarks with the following command (from the project root): +`./tfb --mode verify --test hexagon hexagon-jetty hexagon-tomcat hexagon-netty hexagon-nettyepoll` -* [Hexagon Web](src/main/kotlin/com/hexagonkt/Benchmark.kt) -* [Hexagon Storage](src/main/kotlin/com/hexagonkt/BenchmarkStorage.kt) +To run the full benchmarks locally, on the project root (not this directory) execute: +`./tfb --mode benchmark --test hexagon hexagon-jetty hexagon-tomcat hexagon-netty hexagon-nettyepoll` ## Infrastructure Software Versions @@ -18,24 +18,38 @@ You can run tests against any running server passing the `verify.endpoint` proje ## Test URLs -In URLs replace `${DB_ENGINE}` with: `postgresql` +### Jetty -and `${TEMPLATE_ENGINE}` with: `pebble` +* JSON Encoding Test: http://localhost:9090/json +* Plain Text Test: http://localhost:9090/plaintext +* Data-Store/Database Mapping Test: http://localhost:9090/db?queries=5 +* Fortunes: http://localhost:9090/fortunes +* Database updates: http://localhost:9090/update +* Database queries: http://localhost:9090/query -### Jetty +### Netty + +* JSON Encoding Test: http://localhost:9090/json +* Plain Text Test: http://localhost:9090/plaintext +* Data-Store/Database Mapping Test: http://localhost:9090/db?queries=5 +* Fortunes: http://localhost:9090/fortunes +* Database updates: http://localhost:9090/update +* Database queries: http://localhost:9090/query + +### Netty Epoll * JSON Encoding Test: http://localhost:9090/json * Plain Text Test: http://localhost:9090/plaintext -* Data-Store/Database Mapping Test: http://localhost:9090/${DB_ENGINE}/db?queries=5 -* Fortunes: http://localhost:9090/${DB_ENGINE}/${TEMPLATE_ENGINE}/fortunes -* Database updates: http://localhost:9090/${DB_ENGINE}/update -* Database queries: http://localhost:9090/${DB_ENGINE}/query +* Data-Store/Database Mapping Test: http://localhost:9090/db?queries=5 +* Fortunes: http://localhost:9090/fortunes +* Database updates: http://localhost:9090/update +* Database queries: http://localhost:9090/query ### Tomcat * JSON Encoding Test: http://localhost:8080/json * Plain Text Test: http://localhost:8080/plaintext -* Data-Store/Database Mapping Test: http://localhost:8080/${DB_ENGINE}/db?queries=5 -* Fortunes: http://localhost:8080/${DB_ENGINE}/${TEMPLATE_ENGINE}/fortunes -* Database updates: http://localhost:8080/${DB_ENGINE}/update -* Database queries: http://localhost:8080/${DB_ENGINE}/query +* Data-Store/Database Mapping Test: http://localhost:8080/db?queries=5 +* Fortunes: http://localhost:8080/fortunes +* Database updates: http://localhost:8080/update +* Database queries: http://localhost:8080/query diff --git a/frameworks/Kotlin/hexagon/benchmark_config.json b/frameworks/Kotlin/hexagon/benchmark_config.json index d7186d7d9e8..fd4254a3355 100644 --- a/frameworks/Kotlin/hexagon/benchmark_config.json +++ b/frameworks/Kotlin/hexagon/benchmark_config.json @@ -4,11 +4,11 @@ { "default": { "json_url": "/json", - "db_url": "/postgresql/db", - "query_url": "/postgresql/query?queries=", - "fortune_url": "/postgresql/pebble/fortunes", - "update_url": "/postgresql/update?queries=", - "cached_query_url": "/postgresql/cached?count=", + "db_url": "/db", + "query_url": "/query?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + 
"cached_query_url": "/cached-queries?count=", "plaintext_url": "/plaintext", "port": 9090, "approach": "Realistic", @@ -27,11 +27,11 @@ }, "netty": { "json_url": "/json", - "db_url": "/postgresql/db", - "query_url": "/postgresql/query?queries=", - "fortune_url": "/postgresql/pebble/fortunes", - "update_url": "/postgresql/update?queries=", - "cached_query_url": "/postgresql/cached?count=", + "db_url": "/db", + "query_url": "/query?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + "cached_query_url": "/cached-queries?count=", "plaintext_url": "/plaintext", "port": 9090, "approach": "Realistic", @@ -48,13 +48,36 @@ "notes": "http://hexagonkt.com", "versus": "netty" }, + "nettyepoll": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/query?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + "cached_query_url": "/cached-queries?count=", + "plaintext_url": "/plaintext", + "port": 9090, + "approach": "Realistic", + "classification": "Micro", + "database": "postgres", + "framework": "Hexagon", + "language": "Kotlin", + "orm": "Raw", + "platform": "Netty", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Hexagon Netty Epoll PostgreSQL", + "notes": "http://hexagonkt.com", + "versus": "netty" + }, "tomcat": { "json_url": "/json", - "db_url": "/postgresql/db", - "query_url": "/postgresql/query?queries=", - "fortune_url": "/postgresql/pebble/fortunes", - "update_url": "/postgresql/update?queries=", - "cached_query_url": "/postgresql/cached?count=", + "db_url": "/db", + "query_url": "/query?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + "cached_query_url": "/cached-queries?count=", "plaintext_url": "/plaintext", "port": 8080, "approach": "Realistic", diff --git a/frameworks/Kotlin/hexagon/build.gradle b/frameworks/Kotlin/hexagon/build.gradle index b5de5912a03..8ee07ba3ca1 100644 --- a/frameworks/Kotlin/hexagon/build.gradle +++ b/frameworks/Kotlin/hexagon/build.gradle @@ -1,14 +1,15 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.6.10" + id "org.jetbrains.kotlin.jvm" version "1.7.10" } ext { - hexagonVersion = "2.0.5" + hexagonVersion = "2.0.17" hikariVersion = "5.0.1" - jettyVersion = "11.0.8" - postgresqlVersion = "42.3.3" + jettyVersion = "11.0.11" + postgresqlVersion = "42.4.0" cache2kVersion = "2.6.1.Final" + nettyVersion = "4.1.79.Final" gradleScripts = "https://raw.githubusercontent.com/hexagonkt/hexagon/$hexagonVersion/gradle" } @@ -31,12 +32,13 @@ war { installDist.dependsOn("war") dependencies { - implementation("com.hexagonkt:http_server_netty:$hexagonVersion") + implementation("com.hexagonkt:http_server_netty_epoll:$hexagonVersion") implementation("com.hexagonkt:http_server_jetty:$hexagonVersion") implementation("com.hexagonkt:templates_pebble:$hexagonVersion") implementation("com.hexagonkt:logging_slf4j_jul:$hexagonVersion") implementation("com.hexagonkt:serialization_jackson_json:$hexagonVersion") + implementation("io.netty:netty-transport-native-epoll:$nettyVersion:linux-x86_64") implementation("org.cache2k:cache2k-core:$cache2kVersion") implementation("com.zaxxer:HikariCP:$hikariVersion") implementation("org.postgresql:postgresql:$postgresqlVersion") @@ -44,3 +46,13 @@ dependencies { // providedCompile excludes the dependency only in the WAR, not in the distribution providedCompile("org.eclipse.jetty:jetty-webapp:$jettyVersion") { exclude module: "slf4j-api" } } + +task("minimizeTemplate") { + doLast { + File template = 
file("$buildDir/resources/main/fortunes.pebble.html") + List lines = template.readLines().collect { it.trim() } + template.write(lines.join("")) + } +} + +assemble.dependsOn("minimizeTemplate") diff --git a/frameworks/Kotlin/hexagon/config.toml b/frameworks/Kotlin/hexagon/config.toml index 91b846defad..ad228b9203e 100644 --- a/frameworks/Kotlin/hexagon/config.toml +++ b/frameworks/Kotlin/hexagon/config.toml @@ -4,11 +4,11 @@ name = "hexagon" [main] urls.plaintext = "/plaintext" urls.json = "/json" -urls.db = "/postgresql/db" -urls.query = "/postgresql/query?queries=" -urls.update = "/postgresql/update?queries=" -urls.fortune = "/postgresql/pebble/fortunes" -urls.cached_query = "/postgresql/cached?count=" +urls.db = "/db" +urls.query = "/query?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +urls.cached_query = "/cached-queries?count=" approach = "Realistic" classification = "Micro" database = "postgres" @@ -22,11 +22,29 @@ versus = "servlet" [netty] urls.plaintext = "/plaintext" urls.json = "/json" -urls.db = "/postgresql/db" -urls.query = "/postgresql/query?queries=" -urls.update = "/postgresql/update?queries=" -urls.fortune = "/postgresql/pebble/fortunes" -urls.cached_query = "/postgresql/cached?count=" +urls.db = "/db" +urls.query = "/query?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +urls.cached_query = "/cached-queries?count=" +approach = "Realistic" +classification = "Micro" +database = "postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Netty" +webserver = "None" +versus = "netty" + +[nettyepoll] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/query?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +urls.cached_query = "/cached-queries?count=" approach = "Realistic" classification = "Micro" database = "postgres" @@ -40,11 +58,11 @@ versus = "netty" [tomcat] urls.plaintext = "/plaintext" urls.json = "/json" -urls.db = "/postgresql/db" -urls.query = "/postgresql/query?queries=" -urls.update = "/postgresql/update?queries=" -urls.fortune = "/postgresql/pebble/fortunes" -urls.cached_query = "/postgresql/cached?count=" +urls.db = "/db" +urls.query = "/query?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +urls.cached_query = "/cached-queries?count=" approach = "Realistic" classification = "Micro" database = "postgres" diff --git a/frameworks/Kotlin/hexagon/hexagon-netty.dockerfile b/frameworks/Kotlin/hexagon/hexagon-netty.dockerfile index fad88b2d257..579a9402136 100644 --- a/frameworks/Kotlin/hexagon/hexagon-netty.dockerfile +++ b/frameworks/Kotlin/hexagon/hexagon-netty.dockerfile @@ -1,7 +1,7 @@ # # BUILD # -FROM gradle:7.4-jdk11 AS gradle_build +FROM gradle:7.5.0-jdk17-alpine AS gradle_build USER root WORKDIR /hexagon @@ -12,11 +12,12 @@ RUN gradle --quiet # # RUNTIME # -FROM adoptopenjdk:11-jre-hotspot-bionic +FROM eclipse-temurin:17-jre-alpine ENV DBSTORE postgresql ENV POSTGRESQL_DB_HOST tfb-database ENV WEBENGINE netty ENV PROJECT hexagon +ENV DISABLE_CHECKS true COPY --from=gradle_build /hexagon/build/install/$PROJECT /opt/$PROJECT diff --git a/frameworks/Kotlin/hexagon/hexagon-nettyepoll.dockerfile b/frameworks/Kotlin/hexagon/hexagon-nettyepoll.dockerfile new file mode 100644 index 00000000000..e9caab6bc32 --- /dev/null +++ b/frameworks/Kotlin/hexagon/hexagon-nettyepoll.dockerfile @@ -0,0 +1,26 @@ +# +# BUILD +# +FROM gradle:7.5.0-jdk17-alpine AS gradle_build +USER root +WORKDIR /hexagon + +COPY src src +COPY build.gradle 
build.gradle +RUN gradle --quiet + +# +# RUNTIME +# +FROM eclipse-temurin:17-jre-alpine +ENV DBSTORE postgresql +ENV POSTGRESQL_DB_HOST tfb-database +ENV WEBENGINE netty_epoll +ENV PROJECT hexagon +ENV DISABLE_CHECKS true + +COPY --from=gradle_build /hexagon/build/install/$PROJECT /opt/$PROJECT + +EXPOSE 9090 + +ENTRYPOINT /opt/$PROJECT/bin/$PROJECT diff --git a/frameworks/Kotlin/hexagon/hexagon-tomcat.dockerfile b/frameworks/Kotlin/hexagon/hexagon-tomcat.dockerfile index e8ad5a4e2ec..5134fedad4c 100644 --- a/frameworks/Kotlin/hexagon/hexagon-tomcat.dockerfile +++ b/frameworks/Kotlin/hexagon/hexagon-tomcat.dockerfile @@ -1,7 +1,7 @@ # # BUILD # -FROM gradle:7.4-jdk11 AS gradle_build +FROM gradle:7.5.0-jdk17-alpine AS gradle_build USER root WORKDIR /hexagon @@ -12,9 +12,10 @@ RUN gradle --quiet # # RUNTIME # -FROM tomcat:10.0.14-jre17-temurin +FROM tomcat:10.1.0-jre17-temurin ENV DBSTORE postgresql ENV POSTGRESQL_DB_HOST tfb-database +ENV DISABLE_CHECKS true COPY --from=gradle_build /hexagon/build/libs/ROOT.war /usr/local/tomcat/webapps/ROOT.war EXPOSE 8080 diff --git a/frameworks/Kotlin/hexagon/hexagon.dockerfile b/frameworks/Kotlin/hexagon/hexagon.dockerfile index 1c50885a34a..465f2d82146 100644 --- a/frameworks/Kotlin/hexagon/hexagon.dockerfile +++ b/frameworks/Kotlin/hexagon/hexagon.dockerfile @@ -1,7 +1,7 @@ # # BUILD # -FROM gradle:7.4-jdk11 AS gradle_build +FROM gradle:7.5.0-jdk17-alpine AS gradle_build USER root WORKDIR /hexagon @@ -12,11 +12,12 @@ RUN gradle --quiet # # RUNTIME # -FROM adoptopenjdk:11-jre-hotspot-bionic +FROM eclipse-temurin:17-jre-alpine ENV DBSTORE postgresql ENV POSTGRESQL_DB_HOST tfb-database ENV WEBENGINE jetty ENV PROJECT hexagon +ENV DISABLE_CHECKS true COPY --from=gradle_build /hexagon/build/install/$PROJECT /opt/$PROJECT diff --git a/frameworks/Kotlin/hexagon/src/main/kotlin/Benchmark.kt b/frameworks/Kotlin/hexagon/src/main/kotlin/Benchmark.kt index 70ab0ba2cf2..310bbb81c0a 100644 --- a/frameworks/Kotlin/hexagon/src/main/kotlin/Benchmark.kt +++ b/frameworks/Kotlin/hexagon/src/main/kotlin/Benchmark.kt @@ -5,22 +5,13 @@ import com.hexagonkt.http.server.HttpServerPort import com.hexagonkt.http.server.HttpServerSettings import com.hexagonkt.http.server.jetty.JettyServletAdapter import com.hexagonkt.http.server.netty.NettyServerAdapter +import com.hexagonkt.http.server.netty.epoll.NettyEpollServerAdapter import com.hexagonkt.store.BenchmarkSqlStore -import com.hexagonkt.store.BenchmarkStore -import com.hexagonkt.templates.TemplatePort import com.hexagonkt.templates.pebble.PebbleAdapter import java.net.InetAddress internal val settings = Settings() -internal val stores: Map by lazy { - mapOf("postgresql" to BenchmarkSqlStore("postgresql")) -} - -internal val templateEngines: Map by lazy { - mapOf("pebble" to PebbleAdapter) -} - private val engines: Map by lazy { mapOf( "jetty" to JettyServletAdapter( @@ -29,12 +20,13 @@ private val engines: Map by lazy { sendXPoweredBy = settings.sendXPoweredBy, ), "netty" to NettyServerAdapter(), + "netty_epoll" to NettyEpollServerAdapter(), ) } private val server: HttpServer by lazy { val engine = engines[settings.webEngine] ?: error("Unsupported server engine") - val controller = Controller(settings, stores, templateEngines) + val controller = Controller(settings, BenchmarkSqlStore("postgresql"), PebbleAdapter) val serverSettings = HttpServerSettings( bindAddress = InetAddress.getByName(settings.bindAddress), bindPort = settings.bindPort, diff --git a/frameworks/Kotlin/hexagon/src/main/kotlin/Controller.kt 
b/frameworks/Kotlin/hexagon/src/main/kotlin/Controller.kt index 1454f9f37cc..2c82ad706b8 100644 --- a/frameworks/Kotlin/hexagon/src/main/kotlin/Controller.kt +++ b/frameworks/Kotlin/hexagon/src/main/kotlin/Controller.kt @@ -1,11 +1,11 @@ package com.hexagonkt -import com.hexagonkt.core.require import com.hexagonkt.core.media.ApplicationMedia.JSON import com.hexagonkt.core.media.TextMedia.HTML import com.hexagonkt.core.media.TextMedia.PLAIN -import com.hexagonkt.core.multiMapOf import com.hexagonkt.http.model.ContentType +import com.hexagonkt.http.model.Header +import com.hexagonkt.http.model.HttpFields import com.hexagonkt.http.server.handlers.HttpServerContext import com.hexagonkt.http.server.handlers.PathHandler import com.hexagonkt.http.server.handlers.path @@ -23,8 +23,8 @@ import kotlin.text.Charsets.UTF_8 class Controller( settings: Settings, - stores: Map, - templateEngines: Map, + store: BenchmarkStore, + templateEngine: TemplatePort, ) { private val queriesParam: String = settings.queriesParam private val cachedQueriesParam: String = settings.cachedQueriesParam @@ -34,47 +34,36 @@ class Controller( private val json: ContentType = ContentType(JSON) private val html: ContentType = ContentType(HTML, charset = UTF_8) - private val templates: Map = mapOf( - "pebble" to URL("classpath:fortunes.pebble.html") + private val templateUrl: URL = URL("classpath:fortunes.pebble.html") + + private val headers = HttpFields( + Header("server", "Hexagon"), ) internal val path: PathHandler by lazy { path { on("*") { - val headers = multiMapOf( - "server" to "Hexagon", - "date" to now().toHttpFormat(), - ) - - send(headers = headers) + send(headers = headers + Header("date", now().toHttpFormat())) } get("/plaintext") { ok(settings.textMessage, contentType = plain) } get("/json") { ok(Message(settings.textMessage).serialize(Json.raw), contentType = json) } - - stores.forEach { (storeEngine, store) -> - path("/$storeEngine") { - templateEngines.forEach { (templateEngineId, templateEngine) -> - get("/${templateEngineId}/fortunes") { listFortunes(store, templateEngineId, templateEngine) } - } - - get("/db") { dbQuery(store) } - get("/query") { getWorlds(store) } - get("/cached") { getCachedWorlds(store) } - get("/update") { updateWorlds(store) } - } - } + get("/fortunes") { listFortunes(store, templateUrl, templateEngine) } + get("/db") { dbQuery(store) } + get("/query") { getWorlds(store) } + get("/cached-queries") { getCachedWorlds(store) } + get("/update") { updateWorlds(store) } } } private fun HttpServerContext.listFortunes( - store: BenchmarkStore, templateKind: String, templateAdapter: TemplatePort + store: BenchmarkStore, templateUrl: URL, templateAdapter: TemplatePort ): HttpServerContext { val fortunes = store.findAllFortunes() + Fortune(0, "Additional fortune added at request time.") val sortedFortunes = fortunes.sortedBy { it.message } val context = mapOf("fortunes" to sortedFortunes) - val body = templateAdapter.render(templates.require(templateKind), context) + val body = templateAdapter.render(templateUrl, context) return ok(body, contentType = html) } diff --git a/frameworks/Kotlin/hexagon/src/main/kotlin/WebListenerServer.kt b/frameworks/Kotlin/hexagon/src/main/kotlin/WebListenerServer.kt index eec6f2571d3..d269138faec 100644 --- a/frameworks/Kotlin/hexagon/src/main/kotlin/WebListenerServer.kt +++ b/frameworks/Kotlin/hexagon/src/main/kotlin/WebListenerServer.kt @@ -1,20 +1,24 @@ package com.hexagonkt -import com.hexagonkt.core.multiMapOf +import com.hexagonkt.http.model.Header +import 
com.hexagonkt.http.model.HttpFields import com.hexagonkt.http.server.handlers.HttpHandler import com.hexagonkt.http.server.handlers.OnHandler import com.hexagonkt.http.server.servlet.ServletServer +import com.hexagonkt.store.BenchmarkSqlStore +import com.hexagonkt.templates.pebble.PebbleAdapter import jakarta.servlet.annotation.WebListener @WebListener class WebListenerServer(settings: Settings = Settings()) : ServletServer(createHandlers(settings)) { private companion object { + val headers = HttpFields(Header("server", "Tomcat")) fun createHandlers(settings: Settings): List { - val controller = Controller(settings, stores, templateEngines) + val controller = Controller(settings, BenchmarkSqlStore("postgresql"), PebbleAdapter) val controllerPath = controller.path val serverHeaderHandler = OnHandler("*") { - send(headers = multiMapOf("server" to "Tomcat")) + send(headers = headers) } return listOf(serverHeaderHandler, controllerPath) diff --git a/frameworks/Kotlin/hexagon/src/main/kotlin/store/BenchmarkSqlStore.kt b/frameworks/Kotlin/hexagon/src/main/kotlin/store/BenchmarkSqlStore.kt index 3a724dd900e..f2d68d7cb6c 100644 --- a/frameworks/Kotlin/hexagon/src/main/kotlin/store/BenchmarkSqlStore.kt +++ b/frameworks/Kotlin/hexagon/src/main/kotlin/store/BenchmarkSqlStore.kt @@ -24,8 +24,15 @@ internal class BenchmarkSqlStore(engine: String, private val settings: Settings val dbHost = Jvm.systemSettingOrNull("${engine.uppercase()}_DB_HOST") ?: "localhost" val environment = Jvm.systemSettingOrNull(String::class, "BENCHMARK_ENV")?.lowercase() val poolSize = 8 + if (environment == "citrine") Jvm.cpuCount else Jvm.cpuCount * 2 + val postgresqlSettings = listOf( + "ssl=false", + "assumeMinServerVersion=12.10", + "databaseMetadataCacheFieldsMiB=8", + "prepareThreshold=1", + "reWriteBatchedInserts=true", + ).joinToString("&") val config = HikariConfig().apply { - jdbcUrl = "jdbc:postgresql://$dbHost/${settings.databaseName}" + jdbcUrl = "jdbc:postgresql://$dbHost/${settings.databaseName}?$postgresqlSettings" maximumPoolSize = Jvm.systemSettingOrNull(Int::class, "maximumPoolSize") ?: poolSize driverClassName = settings.databaseDriver username = settings.databaseUsername @@ -35,12 +42,12 @@ internal class BenchmarkSqlStore(engine: String, private val settings: Settings } override fun findAllFortunes(): List { - val fortunes = mutableListOf() + var fortunes = listOf() dataSource.connection.use { con: Connection -> val rs = con.prepareStatement(SELECT_ALL_FORTUNES).executeQuery() while (rs.next()) - fortunes += Fortune(rs.getInt(1), rs.getString(2)) + fortunes = fortunes + Fortune(rs.getInt(1), rs.getString(2)) } return fortunes diff --git a/frameworks/Kotlin/http4k/apache/build.gradle b/frameworks/Kotlin/http4k/apache/build.gradle index 0d2c0d137d0..d3b9b85f260 100644 --- a/frameworks/Kotlin/http4k/apache/build.gradle +++ b/frameworks/Kotlin/http4k/apache/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-apache:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-apache:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/apache/src/main/kotlin/Http4kApacheServer.kt b/frameworks/Kotlin/http4k/apache/src/main/kotlin/Http4kApacheServer.kt index 95e37b0b337..1cc2a2251f7 100644 --- a/frameworks/Kotlin/http4k/apache/src/main/kotlin/Http4kApacheServer.kt +++ b/frameworks/Kotlin/http4k/apache/src/main/kotlin/Http4kApacheServer.kt @@ -15,8 +15,8 @@ fun main() { } private class TfbApacheServer(val port: Int) 
: ServerConfig { - override fun toServer(httpHandler: HttpHandler): Http4kServer = object : Http4kServer { - val handler = Http4kRequestHandler(httpHandler) + override fun toServer(http: HttpHandler): Http4kServer = object : Http4kServer { + val handler = Http4kRequestHandler(http) val server = ServerBootstrap.bootstrap() .setListenerPort(port) diff --git a/frameworks/Kotlin/http4k/apache4/build.gradle b/frameworks/Kotlin/http4k/apache4/build.gradle index 6ded60f9bfc..0270cee3cfc 100644 --- a/frameworks/Kotlin/http4k/apache4/build.gradle +++ b/frameworks/Kotlin/http4k/apache4/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-apache4:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-apache4:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/build.gradle b/frameworks/Kotlin/http4k/build.gradle index 8847dee90a5..6c8493e41d2 100644 --- a/frameworks/Kotlin/http4k/build.gradle +++ b/frameworks/Kotlin/http4k/build.gradle @@ -1,49 +1,45 @@ buildscript { - ext.kotlin_version = "1.3.72" - ext.http4k_version = "3.260.0" + ext.kotlin_version = "1.6.21" + ext.http4k_version = "4.25.16.2" repositories { mavenCentral() - jcenter() + maven { + url "https://plugins.gradle.org/m2/" + } } dependencies { classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + classpath 'com.github.jengelman.gradle.plugins:shadow:6.1.0' } } allprojects { - repositories { mavenCentral() - jcenter() } apply plugin: "kotlin" - compileKotlin.kotlinOptions.jvmTarget = "1.8" + compileKotlin.kotlinOptions.jvmTarget = "11" - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 version = project.hasProperty('releaseVersion') ? 
project.releaseVersion : 'LOCAL' group = 'org.http4k' - compileTestKotlin { - kotlinOptions { - languageVersion = "1.3" - } - } + compileTestKotlin.kotlinOptions.languageVersion = "1.6" } dependencies { - compile project(":core") - compile project(":apache") - compile project(":jetty") - compile project(":ktorcio") - compile project(":ktornetty") - compile project(":netty") - compile project(":ratpack") - compile project(":undertow") + api(project(":core")) + api(project(":apache")) + api(project(":jetty")) + api(project(":ktorcio")) + api(project(":ktornetty")) + api(project(":netty")) + api(project(":ratpack")) + api(project(":undertow")) } diff --git a/frameworks/Kotlin/http4k/core/build.gradle b/frameworks/Kotlin/http4k/core/build.gradle index 6ac21218e49..86debf37244 100644 --- a/frameworks/Kotlin/http4k/core/build.gradle +++ b/frameworks/Kotlin/http4k/core/build.gradle @@ -1,12 +1,12 @@ dependencies { - compile "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version" - compile "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version" - compile "org.http4k:http4k-core:$http4k_version" - compile "org.http4k:http4k-format-jackson:$http4k_version" - compile "org.http4k:http4k-template-pebble:$http4k_version" - compile "org.apache.commons:commons-lang3:3.11" - compile "com.zaxxer:HikariCP:3.4.5" - compile "org.postgresql:postgresql:42.2.16" - compile "org.cache2k:cache2k-base-bom:1.2.4.Final" + api "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version" + api "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version" + api "org.http4k:http4k-core:$http4k_version" + api "org.http4k:http4k-format-jackson:$http4k_version" + api "org.http4k:http4k-template-pebble:$http4k_version" + api "org.apache.commons:commons-lang3:3.11" + api "com.zaxxer:HikariCP:3.4.5" + api "org.postgresql:postgresql:42.2.16" + api "org.cache2k:cache2k-base-bom:1.2.4.Final" } diff --git a/frameworks/Kotlin/http4k/http4k-apache.dockerfile b/frameworks/Kotlin/http4k/http4k-apache.dockerfile index 77c443f6d39..909c0c4bac0 100644 --- a/frameworks/Kotlin/http4k/http4k-apache.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-apache.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-apache4.dockerfile b/frameworks/Kotlin/http4k/http4k-apache4.dockerfile index 9469f73d0e1..4c49d591735 100644 --- a/frameworks/Kotlin/http4k/http4k-apache4.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-apache4.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-jetty.dockerfile b/frameworks/Kotlin/http4k/http4k-jetty.dockerfile index 52b72ff9182..f8923ae917d 100644 --- a/frameworks/Kotlin/http4k/http4k-jetty.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-jetty.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-ktorcio.dockerfile b/frameworks/Kotlin/http4k/http4k-ktorcio.dockerfile index 2096344178d..df71fd4eaa3 100644 --- a/frameworks/Kotlin/http4k/http4k-ktorcio.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-ktorcio.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-ktornetty.dockerfile b/frameworks/Kotlin/http4k/http4k-ktornetty.dockerfile index 
e61fb34c7e5..5d24b2f6c99 100644 --- a/frameworks/Kotlin/http4k/http4k-ktornetty.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-ktornetty.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-netty.dockerfile b/frameworks/Kotlin/http4k/http4k-netty.dockerfile index 4cd3b24562e..1030f7a6b19 100644 --- a/frameworks/Kotlin/http4k/http4k-netty.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-netty.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-ratpack.dockerfile b/frameworks/Kotlin/http4k/http4k-ratpack.dockerfile index 1ef99704e4c..d1c3a940c53 100644 --- a/frameworks/Kotlin/http4k/http4k-ratpack.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-ratpack.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k-undertow.dockerfile b/frameworks/Kotlin/http4k/http4k-undertow.dockerfile index 7cb2ae135d4..f643ddad376 100644 --- a/frameworks/Kotlin/http4k/http4k-undertow.dockerfile +++ b/frameworks/Kotlin/http4k/http4k-undertow.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/http4k.dockerfile b/frameworks/Kotlin/http4k/http4k.dockerfile index 6ed42075e11..be9ac89b873 100644 --- a/frameworks/Kotlin/http4k/http4k.dockerfile +++ b/frameworks/Kotlin/http4k/http4k.dockerfile @@ -1,4 +1,4 @@ -FROM gradle:6.6.0-jdk11 +FROM gradle:7.4.2-jdk11 USER root WORKDIR /http4k COPY build.gradle build.gradle diff --git a/frameworks/Kotlin/http4k/jetty/build.gradle b/frameworks/Kotlin/http4k/jetty/build.gradle index 597269f1d50..78a9701726e 100644 --- a/frameworks/Kotlin/http4k/jetty/build.gradle +++ b/frameworks/Kotlin/http4k/jetty/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-jetty:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-jetty:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/ktorcio/build.gradle b/frameworks/Kotlin/http4k/ktorcio/build.gradle index e4f6adaa3a0..f1c12b39dcd 100644 --- a/frameworks/Kotlin/http4k/ktorcio/build.gradle +++ b/frameworks/Kotlin/http4k/ktorcio/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-ktorcio:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-ktorcio:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/ktornetty/build.gradle b/frameworks/Kotlin/http4k/ktornetty/build.gradle index 59bb23155f0..a4771b2053c 100644 --- a/frameworks/Kotlin/http4k/ktornetty/build.gradle +++ b/frameworks/Kotlin/http4k/ktornetty/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-ktornetty:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-ktornetty:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/netty/build.gradle b/frameworks/Kotlin/http4k/netty/build.gradle index d37104b9a2d..d728aebc3a1 100644 --- a/frameworks/Kotlin/http4k/netty/build.gradle +++ b/frameworks/Kotlin/http4k/netty/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile 
"org.http4k:http4k-server-netty:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-netty:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/ratpack/build.gradle b/frameworks/Kotlin/http4k/ratpack/build.gradle index 9e9ddf5bbf0..9d09a4af1f4 100644 --- a/frameworks/Kotlin/http4k/ratpack/build.gradle +++ b/frameworks/Kotlin/http4k/ratpack/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-ratpack:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-ratpack:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/sunhttp/build.gradle b/frameworks/Kotlin/http4k/sunhttp/build.gradle index 9cd80d0660b..2395c18e19b 100644 --- a/frameworks/Kotlin/http4k/sunhttp/build.gradle +++ b/frameworks/Kotlin/http4k/sunhttp/build.gradle @@ -1,5 +1,5 @@ dependencies { - compile project(":core") + api project(":core") } apply plugin: 'application' diff --git a/frameworks/Kotlin/http4k/undertow/build.gradle b/frameworks/Kotlin/http4k/undertow/build.gradle index 012f09fc3b0..a42a601445e 100644 --- a/frameworks/Kotlin/http4k/undertow/build.gradle +++ b/frameworks/Kotlin/http4k/undertow/build.gradle @@ -1,6 +1,6 @@ dependencies { - compile project(":core") - compile "org.http4k:http4k-server-undertow:$http4k_version" + api project(":core") + api "org.http4k:http4k-server-undertow:$http4k_version" } apply plugin: 'application' diff --git a/frameworks/Kotlin/kooby/pom.xml b/frameworks/Kotlin/kooby/pom.xml index 51160f7f72a..0dfc7bc9f84 100644 --- a/frameworks/Kotlin/kooby/pom.xml +++ b/frameworks/Kotlin/kooby/pom.xml @@ -13,7 +13,7 @@ 2.9.5 - 42.3.3 + 42.4.1 UTF-8 11 11 diff --git a/frameworks/Kotlin/ktor/benchmark_config.json b/frameworks/Kotlin/ktor/benchmark_config.json index a699ada20a0..63eba04f817 100644 --- a/frameworks/Kotlin/ktor/benchmark_config.json +++ b/frameworks/Kotlin/ktor/benchmark_config.json @@ -116,6 +116,29 @@ "display_name": "Ktor-reactivepg", "notes": "", "versus": "netty" + }, + "pgclient": { + "plaintext_url": "/plaintext", + "json_url": "/json", + "db_url": "/db", + "query_url": "/query?queries=", + "update_url": "/updates?queries=", + "fortune_url": "/fortunes", + + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "ktor", + "language": "Kotlin", + "orm": "Raw", + "platform": "Netty", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "ktor-pgclient", + "notes": "http://ktor.io/", + "versus": "netty" } } ] diff --git a/frameworks/Kotlin/ktor/ktor-pgclient.dockerfile b/frameworks/Kotlin/ktor/ktor-pgclient.dockerfile new file mode 100644 index 00000000000..c5494ceaab3 --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient.dockerfile @@ -0,0 +1,15 @@ +FROM openjdk:11.0.3-jdk-stretch as build +WORKDIR /app +COPY ktor-pgclient/gradle gradle +COPY ktor-pgclient/build.gradle.kts build.gradle.kts +COPY ktor-pgclient/gradlew gradlew +COPY ktor-pgclient/src src +RUN /app/gradlew --no-daemon shadowJar + +FROM openjdk:11.0.3-jdk-slim +WORKDIR /app +COPY --from=build /app/build/libs/ktor-pgclient.jar ktor-pgclient.jar + +EXPOSE 8080 + +CMD ["java", "-server", "-Xms1G", "-Xmx1G", "-XX:-UseBiasedLocking", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:+AlwaysPreTouch", "-jar", "ktor-pgclient.jar"] diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/README.md b/frameworks/Kotlin/ktor/ktor-pgclient/README.md new file mode 100755 index 00000000000..a1e9827a659 --- 
/dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/README.md @@ -0,0 +1,38 @@ +# Ktor with Vert.x async pgclient + +This sets up testing using [Ktor](https://ktor.io/), with the async PostgreSQL client of the Eclipse Vert.x project. +The client features batching, pipelining and supports coroutines. + +## Test URLs + +### Plain Text Test + + http://localhost:8080/plaintext + +### JSON Encoding Test + + http://localhost:8080/json + +### Single Query Test + + http://localhost:8080/db + +### Multiple Queries Test + + http://localhost:8080/query?queries= + +### Database updates Test + + http://localhost:8080/updates?queries= + +### Fortunes Test + + http://localhost:8080/fortunes + +## build + + ./gradlew build + +## run + + java -jar build/libs/bench.jar diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/build.gradle.kts b/frameworks/Kotlin/ktor/ktor-pgclient/build.gradle.kts new file mode 100644 index 00000000000..f57c372b49f --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/build.gradle.kts @@ -0,0 +1,38 @@ +plugins { + application + kotlin("jvm") version "1.6.10" + id("org.jetbrains.kotlin.plugin.serialization") version "1.6.21" + id("com.github.johnrengelman.shadow") version "7.1.2" +} + +group = "org.jetbrains.ktor" +version = "1.0-SNAPSHOT" + +repositories { + mavenCentral() +} + +application { + mainClass.set("MainKt") +} + +dependencies { + implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8") + implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.2") + implementation("io.ktor:ktor-server-netty:2.0.1") + implementation("io.ktor:ktor-server-html-builder-jvm:2.0.1") + implementation("io.ktor:ktor-server-default-headers-jvm:2.0.1") + implementation("io.vertx:vertx-pg-client:4.2.3") + implementation("io.vertx:vertx-lang-kotlin:4.2.3") + implementation("io.vertx:vertx-lang-kotlin-coroutines:4.2.3") +} + +tasks.withType().configureEach { + kotlinOptions.jvmTarget = "11" +} + +tasks.shadowJar { + archiveBaseName.set("ktor-pgclient") + archiveClassifier.set("") + archiveVersion.set("") +} diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.jar b/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000000..7454180f2ae Binary files /dev/null and b/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.jar differ diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.properties b/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000000..aa991fceae6 --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/gradlew b/frameworks/Kotlin/ktor/ktor-pgclient/gradlew new file mode 100755 index 00000000000..1b6c787337f --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). 
+cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/gradlew.bat b/frameworks/Kotlin/ktor/ktor-pgclient/gradlew.bat new file mode 100644 index 00000000000..107acd32c4e --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+ +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/frameworks/Kotlin/ktor/ktor-pgclient/src/main/kotlin/main.kt b/frameworks/Kotlin/ktor/ktor-pgclient/src/main/kotlin/main.kt new file mode 100644 index 00000000000..2af9d576ce8 --- /dev/null +++ b/frameworks/Kotlin/ktor/ktor-pgclient/src/main/kotlin/main.kt @@ -0,0 +1,175 @@ +import io.ktor.http.* +import io.ktor.server.application.* +import io.ktor.server.engine.* +import io.ktor.server.html.* +import io.ktor.server.netty.* +import io.ktor.server.plugins.defaultheaders.* +import io.ktor.server.response.* +import io.ktor.server.routing.* +import io.vertx.kotlin.coroutines.await +import io.vertx.pgclient.PgConnectOptions +import io.vertx.pgclient.PgPool +import io.vertx.sqlclient.PoolOptions +import io.vertx.sqlclient.Tuple +import kotlinx.html.* +import kotlinx.serialization.Serializable +import kotlinx.serialization.builtins.ListSerializer +import kotlinx.serialization.json.Json +import java.util.concurrent.ThreadLocalRandom + +@Serializable +data class Message(val message: String) + +@Serializable +data class World(val id: Int, val randomNumber: Int) + +data class Fortune(val id: Int, val message: String) + +val rand: ThreadLocalRandom + get() = ThreadLocalRandom.current() + +interface Repository { + suspend fun getWorld(): World + suspend fun getFortunes(): List + suspend fun updateWorlds(worlds: List) +} + +class PgclientRepository : Repository { + private val connectOptions = + PgConnectOptions() + .setPort(5432) + .setHost("tfb-database") + .setDatabase("hello_world") + .setUser("benchmarkdbuser") + .setPassword("benchmarkdbpass") + .apply { + cachePreparedStatements = true + } + + private val poolOptions = PoolOptions() + private val client = ThreadLocal.withInitial { PgPool.client(connectOptions, poolOptions) } + private fun client() = client.get() + + override suspend fun getFortunes(): List { + val results = client().preparedQuery("select id, message from fortune").execute().await() + return results.map { Fortune(it.getInteger(0), it.getString(1)) } + } + + override suspend fun getWorld(): World { + val worldId = rand.nextInt(1, 10001) + val result = + client() + .preparedQuery("select id, randomNumber from world where id = $1") + .execute(Tuple.of(worldId)) + .await() + val row = result.first() + return World(row.getInteger(0), row.getInteger(1)!!) 
+ } + + override suspend fun updateWorlds(worlds: List) { + val batch = worlds.map { Tuple.of(it.id, it.randomNumber) } + client() + .preparedQuery("update world set randomNumber = $1 where id = $2") + .executeBatch(batch) + .await() + } +} + +fun String.toBoxedInt(range: IntRange): Int = + try { + this.toInt().coerceIn(range) + } catch (e: NumberFormatException) { + 1 + } + +class MainTemplate : Template { + val content = Placeholder() + override fun HTML.apply() { + head { + title { +"Fortunes" } + } + body { + insert(content) + } + } +} + +class FortuneTemplate( + private val fortunes: List, + private val main: MainTemplate = MainTemplate() +) : Template { + override fun HTML.apply() { + insert(main) { + content { + table { + tr { + th { +"id" } + th { +"message" } + } + fortunes.forEach { fortune -> + tr { + td { +fortune.id.toString() } + td { +fortune.message } + } + } + } + } + } + } +} + +fun main() { + val db = PgclientRepository() + + val messageSerializer = Message.serializer() + val worldSerializer = World.serializer() + val worldListSerializer = ListSerializer(World.serializer()) + + val server = embeddedServer(Netty, 8080, configure = { + shareWorkGroup = true + }) { + install(DefaultHeaders) + routing { + get("/plaintext") { + call.respondText("Hello, World!") + } + + get("/json") { + call.respondText( + Json.encodeToString(messageSerializer, Message("Hello, World!")), + ContentType.Application.Json + ) + } + + get("/db") { + call.respondText(Json.encodeToString(worldSerializer, db.getWorld()), ContentType.Application.Json) + } + + get("/query") { + val queries = call.parameters["queries"]?.toBoxedInt(1..500) ?: 1 + val worlds = (1..queries).map { db.getWorld() } + call.respondText(Json.encodeToString(worldListSerializer, worlds), ContentType.Application.Json) + } + + get("/fortunes") { + val newFortune = Fortune(0, "Additional fortune added at request time.") + val fortunes = db.getFortunes().toMutableList() + fortunes.add(newFortune) + fortunes.sortBy { it.message } + call.respondHtmlTemplate(FortuneTemplate(fortunes)) { } + } + + get("/updates") { + val queries = call.parameters["queries"]?.toBoxedInt(1..500) ?: 1 + val worlds = (1..queries).map { db.getWorld() } + val newWorlds = worlds.map { it.copy(randomNumber = rand.nextInt(1, 10001)) } + + db.updateWorlds(newWorlds) + + call.respondText(Json.encodeToString(worldListSerializer, newWorlds), ContentType.Application.Json) + } + } + } + + server.start(wait = true) +} diff --git a/frameworks/Kotlin/ktor/ktor/pom.xml b/frameworks/Kotlin/ktor/ktor/pom.xml index a093d1fdf0b..ab847c5b2c0 100644 --- a/frameworks/Kotlin/ktor/ktor/pom.xml +++ b/frameworks/Kotlin/ktor/ktor/pom.xml @@ -12,15 +12,15 @@ org.jetbrains.ktor tech-empower-framework-benchmark - 1.5.21 - 1.6.1 - 1.2.2 + 1.6.21 + 2.0.1 + 1.3.2 0.7.3 UTF-8 5.0.0 1.2.4 - 8.0.25 - 42.2.23 + 8.0.28 + 42.4.1 @@ -49,25 +49,14 @@ kotlinx-html-jvm ${kotlinx.html.version}
- - - io.ktor - ktor-server-netty - ${ktor.version} - - - io.ktor - ktor-server-jetty - ${ktor.version} - io.ktor - ktor-server-cio + ktor-server-default-headers-jvm ${ktor.version} io.ktor - ktor-html-builder + ktor-server-html-builder-jvm ${ktor.version} @@ -93,6 +82,21 @@ logback-classic ${logback.version} + + io.ktor + ktor-server-netty-jvm + 2.0.0 + + + io.ktor + ktor-server-jetty-jvm + 2.0.0 + + + io.ktor + ktor-server-cio-jvm + 2.0.0 + diff --git a/frameworks/Kotlin/ktor/ktor/src/main/kotlin/org/jetbrains/ktor/benchmarks/Hello.kt b/frameworks/Kotlin/ktor/ktor/src/main/kotlin/org/jetbrains/ktor/benchmarks/Hello.kt index f5838a081db..d4047754d60 100644 --- a/frameworks/Kotlin/ktor/ktor/src/main/kotlin/org/jetbrains/ktor/benchmarks/Hello.kt +++ b/frameworks/Kotlin/ktor/ktor/src/main/kotlin/org/jetbrains/ktor/benchmarks/Hello.kt @@ -1,19 +1,22 @@ package org.jetbrains.ktor.benchmarks -import com.zaxxer.hikari.* -import io.ktor.application.* -import io.ktor.features.* -import io.ktor.html.* +import com.zaxxer.hikari.HikariConfig +import com.zaxxer.hikari.HikariDataSource import io.ktor.http.* import io.ktor.http.content.* -import io.ktor.response.* -import io.ktor.routing.* -import kotlinx.coroutines.* +import io.ktor.server.application.* +import io.ktor.server.html.* +import io.ktor.server.plugins.defaultheaders.* +import io.ktor.server.response.* +import io.ktor.server.routing.* +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.withContext import kotlinx.html.* -import kotlinx.serialization.* -import kotlinx.serialization.builtins.* -import kotlinx.serialization.json.* -import java.util.concurrent.* +import kotlinx.serialization.Serializable +import kotlinx.serialization.builtins.ListSerializer +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json +import java.util.concurrent.ThreadLocalRandom @Serializable data class Message(val message: String) @@ -74,7 +77,8 @@ fun Application.main() { when (queries) { null -> Json.encodeToString(worldSerializer, result.single()) else -> Json.encodeToString(worldListSerializer, result) - }, ContentType.Application.Json, HttpStatusCode.OK + }, + ContentType.Application.Json, HttpStatusCode.OK ) } @@ -106,7 +110,6 @@ fun Application.main() { td { +fortune.id.toString() } td { +fortune.message } } - } } } @@ -130,7 +133,6 @@ fun Application.main() { } } } - } result.forEach { it.randomNumber = random.nextInt(dbRows) + 1 } @@ -144,7 +146,6 @@ fun Application.main() { updateStatement.executeUpdate() } } - } } @@ -152,7 +153,8 @@ fun Application.main() { when (queries) { null -> Json.encodeToString(worldSerializer, result.single()) else -> Json.encodeToString(worldListSerializer, result) - }, ContentType.Application.Json, HttpStatusCode.OK + }, + ContentType.Application.Json, HttpStatusCode.OK ) } } @@ -189,4 +191,3 @@ fun ApplicationCall.queries() = try { } catch (nfe: NumberFormatException) { 1 } - diff --git a/frameworks/Kotlin/pellet/README.md b/frameworks/Kotlin/pellet/README.md new file mode 100755 index 00000000000..da2635f2f39 --- /dev/null +++ b/frameworks/Kotlin/pellet/README.md @@ -0,0 +1,35 @@ +# Pellet + +[Pellet](https://www.pellet.dev) is an opinionated, Kotlin-first web framework that helps you write fast, concise, and correct backend services 🚀. + +This is a simple set of benchmarks as part of the TechEmpower Web Framework Benchmarks suite. + +The suite currently includes the plaintext, JSON serialization, single query, multiple query, database updates, and fortunes tests. 
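For orientation, here is a condensed sketch of how those routes are wired up, trimmed down from the Benchmark.kt file added later in this patch (only the plaintext handler is shown; the JSON, database, and fortunes handlers follow the same `HTTPRouteResponse.Builder()` pattern):

```kotlin
import dev.pellet.server.PelletBuilder.httpRouter
import dev.pellet.server.PelletBuilder.pelletServer
import dev.pellet.server.PelletConnector
import dev.pellet.server.codec.mime.MediaType
import dev.pellet.server.responder.http.PelletHTTPRouteContext
import dev.pellet.server.routing.http.HTTPRouteResponse
import kotlinx.coroutines.runBlocking

// Each route handler is a suspend function that returns a built HTTPRouteResponse.
private suspend fun handlePlain(context: PelletHTTPRouteContext): HTTPRouteResponse =
    HTTPRouteResponse.Builder()
        .statusCode(200)
        .entity("Hello, World!", MediaType("text", "plain"))
        .header("Server", "pellet")
        .build()

fun main() = runBlocking {
    // Routes are registered on a router, which is then attached to a single HTTP connector.
    val sharedRouter = httpRouter {
        get("/plaintext", ::handlePlain)
    }
    val pellet = pelletServer {
        logRequests = false
        httpConnector {
            endpoint = PelletConnector.Endpoint(hostname = "0.0.0.0", port = 8080)
            router = sharedRouter
        }
    }
    pellet.start().join()
}
```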
+ +All routes are contained within the [Benchmark.kt](sample/src/main/kotlin/benchmark/Benchmark.kt) file. + +## Test URLs + +### Plaintext + +http://localhost:8080/plaintext + +### JSON Serialization + +http://localhost:8080/json + +### Single Query + +http://localhost:8080/db + +### Multiple Queries + +http://localhost:8080/queries + +### Database Updates + +http://localhost:8080/updates + +### Fortunes + +http://localhost:8080/fortunes diff --git a/frameworks/Kotlin/pellet/benchmark_config.json b/frameworks/Kotlin/pellet/benchmark_config.json new file mode 100755 index 00000000000..fff2e4f1056 --- /dev/null +++ b/frameworks/Kotlin/pellet/benchmark_config.json @@ -0,0 +1,30 @@ +{ + "framework": "pellet", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/query?queries=", + "update_url": "/updates?queries=", + "fortune_url": "/fortunes", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "Pellet", + "language": "Kotlin", + "flavor": "None", + "orm": "micro", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Pellet", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Kotlin/pellet/pellet.dockerfile b/frameworks/Kotlin/pellet/pellet.dockerfile new file mode 100644 index 00000000000..cb96c1079c5 --- /dev/null +++ b/frameworks/Kotlin/pellet/pellet.dockerfile @@ -0,0 +1,13 @@ +FROM gradle:jdk18 as gradle +WORKDIR /sample +COPY sample/build.gradle.kts build.gradle.kts +COPY sample/src src +RUN gradle clean shadowJar --no-daemon + +FROM openjdk:18-jdk-buster +WORKDIR /sample +COPY --from=gradle /sample/build/libs/sample-1.0.0-all.jar app.jar + +EXPOSE 8080 + +CMD ["java", "-server", "-jar", "app.jar"] diff --git a/frameworks/Kotlin/pellet/sample/build.gradle.kts b/frameworks/Kotlin/pellet/sample/build.gradle.kts new file mode 100644 index 00000000000..a41736ca3a7 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/build.gradle.kts @@ -0,0 +1,57 @@ +plugins { + application + id("com.github.johnrengelman.shadow") version "7.1.0" + kotlin("jvm") version "1.7.10" + kotlin("plugin.serialization") version "1.7.10" + id("nu.studer.rocker") version "3.0.4" +} + +group = "benchmark" +version = "1.0.0" + +repositories { + mavenCentral() +} + +rocker { + version.set("1.3.0") + configurations { + create("main") { + optimize.set(true) + templateDir.set(file("src/main/resources")) + outputDir.set(file("src/generated/rocker")) + } + } +} + +dependencies { + implementation(platform("dev.pellet:pellet-bom:0.0.15")) + implementation("dev.pellet:pellet-server") + implementation("dev.pellet:pellet-logging") + implementation("org.slf4j:slf4j-api:1.7.36") + implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.4.0-RC") + implementation(platform(kotlin("bom"))) + implementation(kotlin("stdlib-jdk8")) + implementation(platform("org.jetbrains.kotlinx:kotlinx-coroutines-bom:1.6.4")) + implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core") + implementation("org.jetbrains.kotlinx:kotlinx-coroutines-jdk8") + implementation("io.vertx:vertx-pg-client:4.3.2") + implementation("com.ongres.scram:client:2.1") + implementation("io.vertx:vertx-lang-kotlin:4.3.2") + implementation("io.vertx:vertx-lang-kotlin-coroutines:4.3.2") +} + +java { + toolchain { + sourceCompatibility = JavaVersion.VERSION_18 + targetCompatibility = JavaVersion.VERSION_18 + } +} + +tasks.withType { + kotlinOptions.jvmTarget = 
"18" +} + +application { + mainClass.set("benchmark.BenchmarkKt") +} diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/Benchmark.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/Benchmark.kt new file mode 100644 index 00000000000..b756c01f476 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/Benchmark.kt @@ -0,0 +1,152 @@ +package benchmark + +import benchmark.data.Fortune +import benchmark.data.TFBRepository +import com.fizzed.rocker.runtime.StringBuilderOutput +import dev.pellet.logging.pelletLogger +import dev.pellet.server.PelletBuilder.httpRouter +import dev.pellet.server.PelletBuilder.pelletServer +import dev.pellet.server.PelletConnector +import dev.pellet.server.codec.mime.MediaType +import dev.pellet.server.responder.http.PelletHTTPRouteContext +import dev.pellet.server.routing.http.HTTPRouteResponse +import kotlinx.coroutines.runBlocking +import kotlinx.serialization.json.Json +import java.time.Instant +import java.time.ZoneId +import java.time.format.DateTimeFormatter +import java.util.Locale +import java.util.concurrent.ThreadLocalRandom + +object Benchmark + +val logger = pelletLogger() +val jsonEncoder = Json { + prettyPrint = false +} + +fun main() = runBlocking { + val sharedRouter = httpRouter { + get("/plaintext", ::handlePlain) + get("/json", ::handleJson) + get("/db", ::handleDb) + get("/query", ::handleQuery) + get("/updates", ::handleUpdates) + get("/fortunes", ::handleFortunes) + } + val pellet = pelletServer { + logRequests = false + httpConnector { + endpoint = PelletConnector.Endpoint( + hostname = "0.0.0.0", + port = 8080 + ) + router = sharedRouter + } + } + pellet.start().join() +} + +val dateFormatter = DateTimeFormatter + .ofPattern("EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH) + .withZone(ZoneId.of("GMT")) + +private suspend fun handlePlain( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + return HTTPRouteResponse.Builder() + .statusCode(200) + .entity("Hello, World!", MediaType("text", "plain")) + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} + +@kotlinx.serialization.Serializable +data class ResponseBody( + val message: String +) + +private suspend fun handleJson( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + val responseBody = ResponseBody(message = "Hello, World!") + return HTTPRouteResponse.Builder() + .statusCode(200) + .jsonEntity(jsonEncoder, responseBody) + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} + +private val repository = TFBRepository() + +private suspend fun handleDb( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + val result = repository.fetchWorld() + return HTTPRouteResponse.Builder() + .statusCode(200) + .jsonEntity(jsonEncoder, result) + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} + +private suspend fun handleQuery( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + val rawQueries = context.firstQueryParameter("queries").getOrNull() + val queries = (rawQueries?.toIntOrNull() ?: 1).coerceIn(1, 500) + val worlds = (1 .. 
queries) + .map { + repository.fetchWorld() + } + return HTTPRouteResponse.Builder() + .statusCode(200) + .jsonEntity(jsonEncoder, worlds) + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} + +private suspend fun handleUpdates( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + val rawQueries = context.firstQueryParameter("queries").getOrNull() + val queries = (rawQueries?.toIntOrNull() ?: 1).coerceIn(1, 500) + val worlds = (1 .. queries) + .map { + repository.fetchWorld() + } + val newWorlds = worlds.map { + it.copy( + randomNumber = ThreadLocalRandom.current().nextInt(1, 10001) + ) + } + repository.updateWorlds(newWorlds) + return HTTPRouteResponse.Builder() + .statusCode(200) + .jsonEntity(jsonEncoder, newWorlds) + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} + +private suspend fun handleFortunes( + context: PelletHTTPRouteContext +): HTTPRouteResponse { + val newFortune = Fortune(0, "Additional fortune added at request time.") + val fortunes = repository.fetchFortunes().toMutableList() + fortunes.add(newFortune) + fortunes.sortBy { it.message } + val template = views.fortunes.template(fortunes) + .render(StringBuilderOutput.FACTORY) + .toString() + return HTTPRouteResponse.Builder() + .statusCode(200) + .entity(template, "text/html; charset=utf-8") + .header("Server", "pellet") + .header("Date", dateFormatter.format(Instant.now())) + .build() +} \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/Fortune.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/Fortune.kt new file mode 100644 index 00000000000..7dd31b0d5ab --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/Fortune.kt @@ -0,0 +1,3 @@ +package benchmark.data + +data class Fortune(val id: Int, val message: String) \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/FortuneDAO.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/FortuneDAO.kt new file mode 100644 index 00000000000..889c42ad809 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/FortuneDAO.kt @@ -0,0 +1,6 @@ +package benchmark.data + +interface FortuneDAO { + + suspend fun fetchFortunes(): List +} \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/TFBRepository.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/TFBRepository.kt new file mode 100644 index 00000000000..c73285c8df5 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/TFBRepository.kt @@ -0,0 +1,59 @@ +package benchmark.data + +import io.vertx.kotlin.coroutines.await +import io.vertx.pgclient.PgConnectOptions +import io.vertx.pgclient.PgPool +import io.vertx.sqlclient.PoolOptions +import io.vertx.sqlclient.Tuple +import java.util.concurrent.ThreadLocalRandom + +class TFBRepository: WorldDAO, FortuneDAO { + + private val connectOptions = PgConnectOptions() + .setPort(5432) + .setHost("tfb-database") + .setDatabase("hello_world") + .setUser("benchmarkdbuser") + .setPassword("benchmarkdbpass") + .apply { + cachePreparedStatements = true + } + + private val poolOptions = PoolOptions() + private val client = PgPool.client(connectOptions, poolOptions) + + override suspend fun fetchWorld(): WorldDTO { + val worldId = ThreadLocalRandom.current().nextInt(1, 10001) + val result = client + .preparedQuery("select 
id, randomNumber from world where id = $1") + .execute(Tuple.of(worldId)) + .await() + val row = result.first() + return WorldDTO( + row.getInteger(0), + row.getInteger(1) + ) + } + + override suspend fun updateWorlds(worlds: List) { + val batch = worlds.map { + Tuple.of(it.id, it.randomNumber) + } + client + .preparedQuery("update world set randomNumber = $1 where id = $2") + .executeBatch(batch) + .await() + } + + override suspend fun fetchFortunes(): List { + val results = client.preparedQuery("select id, message from fortune") + .execute() + .await() + return results.map { + Fortune( + it.getInteger(0), + it.getString(1) + ) + } + } +} \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDAO.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDAO.kt new file mode 100644 index 00000000000..20324a49fb2 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDAO.kt @@ -0,0 +1,7 @@ +package benchmark.data + +interface WorldDAO { + + suspend fun fetchWorld(): WorldDTO + suspend fun updateWorlds(worlds: List) +} \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDTO.kt b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDTO.kt new file mode 100644 index 00000000000..dc2cbb83657 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/kotlin/benchmark/data/WorldDTO.kt @@ -0,0 +1,6 @@ +package benchmark.data + +import kotlinx.serialization.Serializable + +@Serializable +data class WorldDTO(val id: Int, val randomNumber: Int) \ No newline at end of file diff --git a/frameworks/Kotlin/pellet/sample/src/main/resources/views/fortunes.rocker.html b/frameworks/Kotlin/pellet/sample/src/main/resources/views/fortunes.rocker.html new file mode 100644 index 00000000000..daacddbde35 --- /dev/null +++ b/frameworks/Kotlin/pellet/sample/src/main/resources/views/fortunes.rocker.html @@ -0,0 +1,15 @@ +@import java.util.List +@import benchmark.data.Fortune +@args (List fortunes) + + + Fortunes + + + + @for (it: fortunes) { + + } +
+<tr><th>id</th><th>message</th></tr>
+<tr><td>@it.getId()</td><td>@it.getMessage()</td></tr>
+ + diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/README.md b/frameworks/Kotlin/vertx-web-kotlin-coroutines/README.md new file mode 100644 index 00000000000..b144d619530 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/README.md @@ -0,0 +1,5 @@ +# Vert.x Web With Kotlin Coroutines Benchmarking Test + +This is the Vert.x Web With Kotlin Coroutines portion of a [benchmarking test suite](../) comparing a variety of web development platforms. + +This project is adapted from the [Vert.x Web portion](/frameworks/Java/vertx-web), with consistent dependency versions, code converted into Kotlin, and all future compositions adapted into coroutine calls, mainly to see how much overhead Kotlin coroutines introduce. See that project for more details. diff --git a/frameworks/Go/beego/benchmark_config.json b/frameworks/Kotlin/vertx-web-kotlin-coroutines/benchmark_config.json similarity index 50% rename from frameworks/Go/beego/benchmark_config.json rename to frameworks/Kotlin/vertx-web-kotlin-coroutines/benchmark_config.json index 48374fe9f4d..2b8ebd26c83 100644 --- a/frameworks/Go/beego/benchmark_config.json +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/benchmark_config.json @@ -1,44 +1,45 @@ { - "framework": "beego", + "framework": "vertx-web-kotlin-coroutines", "tests": [{ "default": { "json_url": "/json", "plaintext_url": "/plaintext", "port": 8080, "approach": "Realistic", - "classification": "Fullstack", + "classification": "Micro", "database": "None", - "framework": "None", - "language": "Go", + "framework": "vertx-web", + "language": "Kotlin", "flavor": "None", - "orm": "raw", - "platform": "None", + "orm": "Raw", + "platform": "Vert.x", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "Beego", + "display_name": "vertx-web-kotlin-coroutines", "notes": "", - "versus": "go" + "versus": "vertx-web" }, - "orm-mysql": { + "postgres": { "db_url": "/db", "query_url": "/queries?queries=", + "fortune_url": "/fortunes", "update_url": "/update?queries=", "port": 8080, "approach": "Realistic", - "classification": "Fullstack", - "database": "MySQL", - "framework": "beego", - "language": "Go", + "classification": "Micro", + "database": "Postgres", + "framework": "vertx-web", + "language": "Kotlin", "flavor": "None", - "orm": "Micro", - "platform": "None", + "orm": "Raw", + "platform": "Vert.x", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "Beego", + "display_name": "vertx-web-kotlin-coroutines-postgres", "notes": "", - "versus": "go" + "versus": "vertx-web-postgres" } }] } diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/build.gradle.kts b/frameworks/Kotlin/vertx-web-kotlin-coroutines/build.gradle.kts new file mode 100644 index 00000000000..85030f4d59d --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/build.gradle.kts @@ -0,0 +1,72 @@ +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar +import org.jetbrains.kotlin.gradle.tasks.KotlinCompile + +plugins { + kotlin("jvm") version "1.6.10" + application + id("nu.studer.rocker") version "3.0.4" + id("com.github.johnrengelman.shadow") version "7.1.2" +} + +group = "io.vertx" +version = "4.1.5" + +repositories { + mavenCentral() +} + +dependencies { + implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.0") + implementation(platform("io.vertx:vertx-stack-depchain:$version")) + implementation("io.vertx:vertx-core") + implementation("com.fasterxml.jackson.module:jackson-module-blackbird:2.12.4") + 
implementation("io.vertx:vertx-web") + implementation("io.vertx:vertx-pg-client") + implementation("io.vertx:vertx-web-templ-rocker") + implementation("io.netty", "netty-transport-native-epoll", classifier = "linux-x86_64") + implementation("io.vertx:vertx-lang-kotlin") + implementation("io.vertx:vertx-lang-kotlin-coroutines") +} + +rocker { + configurations { + create("main") { + templateDir.set(file("src/main/resources")) + optimize.set(true) + javaVersion.set("1.8") + } + } +} + +tasks.withType { + kotlinOptions.jvmTarget = "11" +} + + +// content below copied from the project generated by the app generator + +val mainVerticleName = "io.vertx.benchmark.App" +val launcherClassName = "io.vertx.core.Launcher" +application { + mainClass.set(launcherClassName) +} + +tasks.withType { + archiveClassifier.set("fat") + manifest { + attributes(mapOf("Main-Verticle" to mainVerticleName)) + } + mergeServiceFiles() +} + +val watchForChange = "src/**/*" +val doOnChange = "${projectDir}/gradlew classes" +tasks.withType { + args = listOf( + "run", + mainVerticleName, + "--redeploy=$watchForChange", + "--launcher-class=$launcherClassName", + "--on-redeploy=$doOnChange" + ) +} diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/config.toml b/frameworks/Kotlin/vertx-web-kotlin-coroutines/config.toml new file mode 100644 index 00000000000..ff3cdc7336d --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/config.toml @@ -0,0 +1,30 @@ +[framework] +name = "vertx-web-kotlin-coroutines" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +approach = "Realistic" +classification = "Micro" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Vert.x" +webserver = "None" +versus = "vertx-web" + +[postgres] +urls.db = "/db" +urls.query = "/queries?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Vert.x" +webserver = "None" +versus = "vertx-web-postgres" diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle.properties b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle.properties new file mode 100644 index 00000000000..7fc6f1ff272 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle.properties @@ -0,0 +1 @@ +kotlin.code.style=official diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.jar b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000000..7454180f2ae Binary files /dev/null and b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.jar differ diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.properties b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000000..669386b870a --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew new file mode 100644 index 00000000000..1b6c787337f --- /dev/null +++ 
b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). 
+cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew.bat b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew.bat new file mode 100644 index 00000000000..107acd32c4e --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+ +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/settings.gradle.kts b/frameworks/Kotlin/vertx-web-kotlin-coroutines/settings.gradle.kts new file mode 100644 index 00000000000..67832ba6faa --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/settings.gradle.kts @@ -0,0 +1 @@ +rootProject.name = "vertx-web-kotlin-coroutines-benchmark" diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/config.json b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/config.json new file mode 100644 index 00000000000..9067794a751 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/config.json @@ -0,0 +1,9 @@ +{ + "connection_string": "mongodb://tfb-database:27017", + "db_name": "hello_world", + "host": "tfb-database", + "username": "benchmarkdbuser", + "password": "benchmarkdbpass", + "database": "hello_world", + "maxPoolSize": 64 +} \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/vertx.json b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/vertx.json new file mode 100644 index 00000000000..1348ad31a43 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/conf/vertx.json @@ -0,0 +1,3 @@ +{ + "preferNativeTransport": true +} \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/App.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/App.kt new file mode 100644 index 00000000000..99552462b0a --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/App.kt @@ -0,0 +1,287 @@ +package io.vertx.benchmark + +import com.fasterxml.jackson.module.blackbird.BlackbirdModule +import io.vertx.benchmark.model.Fortune +import io.vertx.benchmark.model.Message +import io.vertx.benchmark.model.World +import io.vertx.core.Vertx +import io.vertx.core.http.HttpHeaders +import io.vertx.core.json.Json +import io.vertx.core.json.JsonObject +import io.vertx.core.json.jackson.DatabindCodec +import io.vertx.ext.web.Route +import io.vertx.ext.web.Router +import io.vertx.ext.web.RoutingContext +import io.vertx.ext.web.templ.rocker.RockerTemplateEngine +import io.vertx.kotlin.coroutines.CoroutineVerticle +import io.vertx.kotlin.coroutines.await +import io.vertx.kotlin.pgclient.pgConnectOptionsOf +import io.vertx.kotlin.sqlclient.poolOptionsOf +import io.vertx.pgclient.PgPool +import io.vertx.sqlclient.Tuple +import kotlinx.coroutines.async +import kotlinx.coroutines.awaitAll +import kotlinx.coroutines.launch +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter +import kotlin.system.exitProcess + +class App : CoroutineVerticle() { + companion object { + init { + DatabindCodec.mapper().registerModule(BlackbirdModule()) + 
DatabindCodec.prettyMapper().registerModule(BlackbirdModule()) + } + + private const val SERVER = "vertx-web" + + // for PgClientBenchmark only + private const val UPDATE_WORLD = "UPDATE world SET randomnumber=$1 WHERE id=$2" + private const val SELECT_WORLD = "SELECT id, randomnumber from WORLD where id=$1" + private const val SELECT_FORTUNE = "SELECT id, message from FORTUNE" + } + + inline fun Route.coroutineHandler(crossinline requestHandler: suspend (RoutingContext) -> Unit): Route = + handler { ctx -> launch { requestHandler(ctx) } } + + inline fun RoutingContext.checkedRun(block: () -> Unit): Unit = + try { + block() + } catch (t: Throwable) { + fail(t) + } + + inline fun Route.checkedCoroutineHandler(crossinline requestHandler: suspend (RoutingContext) -> Unit): Route = + coroutineHandler { ctx -> ctx.checkedRun { requestHandler(ctx) } } + + /** + * PgClient implementation + */ + private inner class PgClientBenchmark(vertx: Vertx, config: JsonObject) { + private val client: PgPool + + // In order to use a template we first need to create an engine + private val engine: RockerTemplateEngine + + init { + val options = with(config) { + pgConnectOptionsOf( + cachePreparedStatements = true, + host = getString("host"), + port = getInteger("port", 5432), + user = getString("username"), + password = getString("password"), + database = config.getString("database"), + pipeliningLimit = 100000 // Large pipelining means less flushing and we use a single connection anyway; + ) + } + + client = PgPool.pool(vertx, options, poolOptionsOf(maxSize = 4)) + engine = RockerTemplateEngine.create() + } + + suspend fun dbHandler(ctx: RoutingContext) { + val result = try { + client + .preparedQuery(SELECT_WORLD) + .execute(Tuple.of(randomWorld())) + .await() + } catch (t: Throwable) { + // adapted from the Java code and kept, though I don't see the purpose of this + t.printStackTrace() + throw t + } + + val resultSet = result.iterator() + if (!resultSet.hasNext()) { + ctx.response() + .setStatusCode(404) + .end() + .await() + return + } + val row = resultSet.next() + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "application/json") + .end(Json.encodeToBuffer(World(row.getInteger(0), row.getInteger(1)))) + .await() + } + + suspend fun queriesHandler(ctx: RoutingContext) { + val queries: Int = getQueries(ctx.request()) + val worlds = arrayOfNulls(queries) + val failed = booleanArrayOf(false) + val cnt = intArrayOf(0) + List(queries) { + async { + val result = `try` { client.preparedQuery(SELECT_WORLD).execute(Tuple.of(randomWorld())).await() } + + if (!failed[0]) { + if (result is Try.Failure) { + failed[0] = true + ctx.fail(result.throwable) + return@async + } + + // we need a final reference + val row = (result as Try.Success).value.iterator().next() + worlds[cnt[0]++] = World(row.getInteger(0), row.getInteger(1)) + + // stop condition + if (cnt[0] == queries) { + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "application/json") + .end(Json.encodeToBuffer(worlds)) + .await() + } + } + } + } + .awaitAll() + } + + suspend fun fortunesHandler(ctx: RoutingContext) { + val result = client.preparedQuery(SELECT_FORTUNE).execute().await() + + val resultSet = result.iterator() + if (!resultSet.hasNext()) { + ctx.fail(404) + return + } + val fortunes = ArrayList() + while (resultSet.hasNext()) { + val row = resultSet.next() + 
fortunes.add(Fortune(row.getInteger(0), row.getString(1))) + } + fortunes.add(Fortune(0, "Additional fortune added at request time.")) + fortunes.sort() + ctx.put("fortunes", fortunes) + + // and now delegate to the engine to render it. + val result2 = engine.render(ctx.data(), "templates/Fortunes.rocker.html").await() + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "text/html; charset=UTF-8") + .end(result2) + .await() + } + + suspend fun updateHandler(ctx: RoutingContext) { + val queries = getQueries(ctx.request()) + val worlds = arrayOfNulls(queries) + val failed = booleanArrayOf(false) + val queryCount = intArrayOf(0) + List(worlds.size) { + val id = randomWorld() + async { + val r2 = `try` { client.preparedQuery(SELECT_WORLD).execute(Tuple.of(id)).await() } + + if (!failed[0]) { + if (r2 is Try.Failure) { + failed[0] = true + ctx.fail(r2.throwable) + return@async + } + val row = (r2 as Try.Success).value.iterator().next() + worlds[queryCount[0]++] = World(row.getInteger(0), randomWorld()) + if (queryCount[0] == worlds.size) { + worlds.sort() + val batch = ArrayList() + for (world in worlds) { + world!! + batch.add(Tuple.of(world.randomNumber, world.id)) + } + ctx.checkedRun { + client.preparedQuery(UPDATE_WORLD) + .executeBatch(batch) + .await() + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "application/json") + .end(Json.encodeToBuffer(worlds)) + .await() + } + } + } + } + } + .awaitAll() + } + } + + private var date: String? = null + override suspend fun start() { + val app = Router.router(vertx) + // initialize the date header + date = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now()) + // refresh the value as a periodic task + vertx.setPeriodic(1000) { date = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now()) } + val pgClientBenchmark = PgClientBenchmark(vertx, config) + + /* + * This test exercises the framework fundamentals including keep-alive support, request routing, request header + * parsing, object instantiation, JSON serialization, response header generation, and request count throughput. + */ + app.get("/json").checkedCoroutineHandler { ctx -> + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "application/json") + .end(Json.encodeToBuffer(Message("Hello, World!"))) + .await() + } + + /* + * This test exercises the framework's object-relational mapper (ORM), random number generator, database driver, + * and database connection pool. + */ + app.get("/db").checkedCoroutineHandler { ctx -> pgClientBenchmark.dbHandler(ctx) } + + /* + * This test is a variation of Test #2 and also uses the World table. Multiple rows are fetched to more dramatically + * punish the database driver and connection pool. At the highest queries-per-request tested (20), this test + * demonstrates all frameworks' convergence toward zero requests-per-second as database activity increases. + */ + app.get("/queries").checkedCoroutineHandler { ctx -> pgClientBenchmark.queriesHandler(ctx) } + + /* + * This test exercises the ORM, database connectivity, dynamic-size collections, sorting, server-side templates, + * XSS countermeasures, and character encoding. 
+ */ + app.get("/fortunes").checkedCoroutineHandler { ctx -> pgClientBenchmark.fortunesHandler(ctx) } + + /* + * This test is a variation of Test #3 that exercises the ORM's persistence of objects and the database driver's + * performance at running UPDATE statements or similar. The spirit of this test is to exercise a variable number of + * read-then-write style database operations. + */ + app.route("/update").checkedCoroutineHandler { ctx -> pgClientBenchmark.updateHandler(ctx) } + + /* + * This test is an exercise of the request-routing fundamentals only, designed to demonstrate the capacity of + * high-performance platforms in particular. Requests will be sent using HTTP pipelining. The response payload is + * still small, meaning good performance is still necessary in order to saturate the gigabit Ethernet of the test + * environment. + */ + app.get("/plaintext").checkedCoroutineHandler { ctx -> + ctx.response() + .putHeader(HttpHeaders.SERVER, SERVER) + .putHeader(HttpHeaders.DATE, date) + .putHeader(HttpHeaders.CONTENT_TYPE, "text/plain") + .end("Hello, World!") + .await() + } + try { + vertx.createHttpServer().requestHandler(app).listen(8080).await() + } catch (t: Throwable) { + t.printStackTrace() + exitProcess(1) + } + } +} \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Helper.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Helper.kt new file mode 100644 index 00000000000..1987c568567 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Helper.kt @@ -0,0 +1,34 @@ +package io.vertx.benchmark + +import io.vertx.core.http.HttpServerRequest +import java.util.* +import kotlin.math.max +import kotlin.math.min + +private val RANDOM = SplittableRandom() + +/** + * Returns the value of the "queries" getRequest parameter, which is an integer + * bound between 1 and 500 with a default value of 1. + * + * @param request the current HTTP request + * @return the value of the "queries" parameter + */ +fun getQueries(request: HttpServerRequest): Int { + val param = request.getParam("queries") ?: return 1 + return try { + val parsedValue = param.toInt() + min(500, max(1, parsedValue)) + } catch (e: NumberFormatException) { + 1 + } +} + +/** + * Returns a random integer that is a suitable value for both the `id` + * and `randomNumber` properties of a world object. 
+ * + * @return a random world number + */ +fun randomWorld(): Int = + 1 + RANDOM.nextInt(10000) diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Try.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Try.kt new file mode 100644 index 00000000000..6ad86614457 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/Try.kt @@ -0,0 +1,13 @@ +package io.vertx.benchmark + +sealed class Try { + class Success(val value: T) : Try() + class Failure(val throwable: Throwable) : Try() +} + +inline fun `try`(block: () -> T): Try = + try { + Try.Success(block()) + } catch (t: Throwable) { + Try.Failure(t) + } \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Fortune.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Fortune.kt new file mode 100644 index 00000000000..68aa3920c40 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Fortune.kt @@ -0,0 +1,13 @@ +package io.vertx.benchmark.model + +import io.vertx.core.json.JsonObject + +/** + * The model for the "fortune" database table. + */ +class Fortune(val id: Int, val message: String) : Comparable { + constructor(doc: JsonObject) : this(doc.getInteger("id"), doc.getString("message")) + + override fun compareTo(other: Fortune): Int = + message compareTo other.message +} \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Message.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Message.kt new file mode 100644 index 00000000000..a6292411374 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/Message.kt @@ -0,0 +1,3 @@ +package io.vertx.benchmark.model + +class Message(val message: String) \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/World.kt b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/World.kt new file mode 100644 index 00000000000..7c235fceaa6 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/kotlin/io/vertx/benchmark/model/World.kt @@ -0,0 +1,13 @@ +package io.vertx.benchmark.model + +import io.vertx.core.json.JsonObject + +/** + * The model for the "world" database table. + */ +class World(val id: Int, val randomNumber: Int) : Comparable { + constructor(doc: JsonObject) : this(doc.getInteger("id"), doc.getInteger("randomNumber")) + + override fun compareTo(other: World): Int = + id compareTo other.id +} \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/Fortunes.rocker.html b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/Fortunes.rocker.html new file mode 100644 index 00000000000..c9930838a1c --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/Fortunes.rocker.html @@ -0,0 +1,21 @@ +@import java.util.* +@import io.vertx.benchmark.model.* +@args(List fortunes) + + +Fortunes + + + + + + + @for ((ForIterator i, Fortune fortune) : fortunes) { + + + + + } +
<tr><th>id</th><th>message</th></tr>
<tr><td>@fortune.getId()</td><td>@fortune.getMessage()</td></tr>
+ + diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/fortunes.hbs b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/fortunes.hbs new file mode 100644 index 00000000000..b3f7dc01d8b --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/src/main/resources/templates/fortunes.hbs @@ -0,0 +1,15 @@ + + +Fortunes + + + + + + {{#each fortunes}} + + + + {{/each}}
<tr><th>id</th><th>message</th></tr>
<tr><td>{{id}}</td><td>{{message}}</td></tr>
+ + \ No newline at end of file diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines-postgres.dockerfile b/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines-postgres.dockerfile new file mode 100644 index 00000000000..8758c0326a3 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines-postgres.dockerfile @@ -0,0 +1,36 @@ +FROM gradle:7.3.3-jdk11 as gradle +WORKDIR /vertx-web-kotlin-coroutines +COPY gradle gradle +COPY src src +COPY build.gradle.kts build.gradle.kts +COPY gradle.properties gradle.properties +COPY gradlew gradlew +COPY settings.gradle.kts settings.gradle.kts +RUN gradle shadowJar + +EXPOSE 8080 + +CMD java \ + -server \ + -XX:+UseNUMA \ + -XX:+UseParallelGC \ + -XX:+AggressiveOpts \ + -Dvertx.disableMetrics=true \ + -Dvertx.disableH2c=true \ + -Dvertx.disableWebsockets=true \ + -Dvertx.flashPolicyHandler=false \ + -Dvertx.threadChecks=false \ + -Dvertx.disableContextTimings=true \ + -Dvertx.disableTCCL=true \ + -Dvertx.disableHttpHeadersValidation=true \ + -Dvertx.eventLoopPoolSize=$((`grep --count ^processor /proc/cpuinfo`)) \ + -Dio.netty.buffer.checkBounds=false \ + -Dio.netty.buffer.checkAccessible=false \ + -jar \ + build/libs/vertx-web-kotlin-coroutines-benchmark-4.1.5-fat.jar \ + --instances \ + `grep --count ^processor /proc/cpuinfo` \ + --conf \ + src/main/conf/config.json \ + --options \ + src/main/conf/vertx.json diff --git a/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines.dockerfile b/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines.dockerfile new file mode 100644 index 00000000000..076d2be8e61 --- /dev/null +++ b/frameworks/Kotlin/vertx-web-kotlin-coroutines/vertx-web-kotlin-coroutines.dockerfile @@ -0,0 +1,34 @@ +FROM gradle:7.3.3-jdk11 as gradle +WORKDIR /vertx-web-kotlin-coroutines +COPY src src +COPY build.gradle.kts build.gradle.kts +COPY gradle.properties gradle.properties +COPY settings.gradle.kts settings.gradle.kts +RUN gradle shadowJar + +EXPOSE 8080 + +CMD java \ + -server \ + -XX:+UseNUMA \ + -XX:+UseParallelGC \ + -XX:+AggressiveOpts \ + -Dvertx.disableMetrics=true \ + -Dvertx.disableH2c=true \ + -Dvertx.disableWebsockets=true \ + -Dvertx.flashPolicyHandler=false \ + -Dvertx.threadChecks=false \ + -Dvertx.disableContextTimings=true \ + -Dvertx.disableTCCL=true \ + -Dvertx.disableHttpHeadersValidation=true \ + -Dvertx.eventLoopPoolSize=$((`grep --count ^processor /proc/cpuinfo`)) \ + -Dio.netty.buffer.checkBounds=false \ + -Dio.netty.buffer.checkAccessible=false \ + -jar \ + build/libs/vertx-web-kotlin-coroutines-benchmark-4.1.5-fat.jar \ + --instances \ + `grep --count ^processor /proc/cpuinfo` \ + --conf \ + src/main/conf/config.json \ + --options \ + src/main/conf/vertx.json diff --git a/frameworks/Mumps/m-web-server/_techempbenchmark.m b/frameworks/Mumps/m-web-server/_techempbenchmark.m deleted file mode 100644 index c08315c6d6b..00000000000 --- a/frameworks/Mumps/m-web-server/_techempbenchmark.m +++ /dev/null @@ -1,22 +0,0 @@ -%techempbenchmark ; Techempower plaintext and json tests;2020-08-31 09:30PM - ; - -te - S ^%webhttp(0,"NOGZIP")=1 - D job^%webreq(8080,"",,,1) - ;D start^%webreq(8080,"",,,,,1) - Q - -plaintext(RESULT,ARGS) ; [Public] GET /plaintext - S RESULT("mime")="text/plain" - S RESULT="Hello, World!" - Q - -json(RESULT,ARGS) ; [Public] GET /json - S RESULT("mime")="application/json" - ;S RESULT("headers","Temp")="Test" - N MSG - S MSG("message")="Hello, World!" 
- D ENCODE^%webjson("MSG","RESULT") - K MSG - Q diff --git a/frameworks/Mumps/m-web-server/benchmark_config.json b/frameworks/Mumps/m-web-server/benchmark_config.json deleted file mode 100644 index d0ec28be4df..00000000000 --- a/frameworks/Mumps/m-web-server/benchmark_config.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "framework": "m-web-server", - "tests": [{ - "default": { - "json_url": "/json", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "None", - "framework": "m-web-server", - "language": "mumps", - "orm": "None", - "platform": "None", - "webserver": "m-web-server", - "os": "Linux", - "database_os": "None", - "display_name": "m-web-server", - "notes": "None", - "versus": "", - "tags": [] - } - }] -} - diff --git a/frameworks/Mumps/m-web-server/config.toml b/frameworks/Mumps/m-web-server/config.toml deleted file mode 100644 index 32eb1346468..00000000000 --- a/frameworks/Mumps/m-web-server/config.toml +++ /dev/null @@ -1,15 +0,0 @@ -[framework] -name = "m-web-server" - -[main] -urls.plaintext = "/plaintext" -urls.json = "/json" -approach = "Realistic" -classification = "Fullstack" -database = "None" -database_os = "None" -os = "Linux" -orm = "None" -platform = "None" -webserver = "m-web-server" -versus = "" diff --git a/frameworks/Mumps/m-web-server/m-web-server.dockerfile b/frameworks/Mumps/m-web-server/m-web-server.dockerfile deleted file mode 100644 index 5ca276ce176..00000000000 --- a/frameworks/Mumps/m-web-server/m-web-server.dockerfile +++ /dev/null @@ -1,43 +0,0 @@ -FROM yottadb/yottadb-base:latest - -ARG DEBIAN_FRONTEND=noninteractive -RUN apt update && \ - apt upgrade -y && \ - apt install -y \ - libcurl4-openssl-dev \ - git - -# Install cURL plugin -RUN git clone https://github.com/shabiel/fis-gtm-plugins.git -ENV LD_LIBRARY_PATH /opt/yottadb/current -RUN cd fis-gtm-plugins/libcurl && \ - . /opt/yottadb/current/ydb_env_set && \ - export gtm_dist=$ydb_dist && \ - make install - -# Install M-Unit -RUN git clone https://github.com/ChristopherEdwards/M-Unit.git munit - -RUN cd munit && \ - mkdir r && \ - cd Routines && \ - for file in _*.m; do mv $file /data/munit/r/; done - -RUN git clone https://github.com/sumeetchhetri/M-Web-Server -RUN mkdir -p /mwebserver/r && cp -rf M-Web-Server/src/* /mwebserver/r - -# Install M-Web-Server -COPY _techempbenchmark.m /mwebserver/r/ - -ENV GTMXC_libcurl "/opt/yottadb/current/plugin/libcurl_ydb_wrapper.xc" -RUN . /opt/yottadb/current/ydb_env_set && \ - export ydb_routines="/mwebserver/r /data/munit/r $ydb_routines" && \ - mumps -r ^%webtest - -COPY run.sh ./ -RUN chmod +x run.sh - -EXPOSE 8080 - -CMD ./run.sh - diff --git a/frameworks/Mumps/m-web-server/run.sh b/frameworks/Mumps/m-web-server/run.sh deleted file mode 100644 index 9c54b16eb0f..00000000000 --- a/frameworks/Mumps/m-web-server/run.sh +++ /dev/null @@ -1,9 +0,0 @@ -. 
/opt/yottadb/current/ydb_env_set -export ydb_routines="/mwebserver/r /data/munit/r $ydb_routines" -mupip rundown -f /tmp/yottadb/r1.30_x86_64/ydb-relinkctl-98fedd5406dbe07da4076a0607b49e01 -rm -f /tmp/yottadb/r1.30_x86_64/ydb-relinkctl-98fedd5406dbe07da4076a0607b49e01 -mumps -r ^%techempbenchmark -while : -do - sleep 1 -done diff --git a/frameworks/Nim/prologue/prologue.dockerfile b/frameworks/Nim/prologue/prologue.dockerfile index 52701756c5a..c497fbf2113 100644 --- a/frameworks/Nim/prologue/prologue.dockerfile +++ b/frameworks/Nim/prologue/prologue.dockerfile @@ -1,4 +1,4 @@ -FROM nimlang/nim:1.4.0 +FROM nimlang/nim:1.6.4 ADD ./ /prologue diff --git a/frameworks/OCaml/morph/benchmark_config.json b/frameworks/OCaml/morph/benchmark_config.json index 161be8fc123..5b2fe87f60a 100755 --- a/frameworks/OCaml/morph/benchmark_config.json +++ b/frameworks/OCaml/morph/benchmark_config.json @@ -22,7 +22,8 @@ "database_os": "Linux", "display_name": "Morph", "notes": "", - "versus": "None" + "versus": "None", + "tags": ["broken"] }, "flambda": { "json_url": "/json", @@ -44,7 +45,8 @@ "database_os": "Linux", "display_name": "Morph-flambda", "notes": "", - "versus": "None" + "versus": "None", + "tags": ["broken"] }, "single": { "json_url": "/json", @@ -66,7 +68,8 @@ "database_os": "Linux", "display_name": "Morph-single-process", "notes": "This is using a single process since that is more realistic", - "versus": "None" + "versus": "None", + "tags": ["broken"] } } ] diff --git a/frameworks/OCaml/tiny_httpd/README.md b/frameworks/OCaml/tiny_httpd/README.md deleted file mode 100755 index 412e7c53429..00000000000 --- a/frameworks/OCaml/tiny_httpd/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# tiny_httpd Benchmarking Test - -### Test Type Implementation Source Code - -* [JSON](src/src/bin/tiny.ml#L7-12) -* [PLAINTEXT](src/src/bin/tiny.ml#L13-20) - -## Important Libraries -The tests were run with: -* [tiny_httpd](https://github.com/c-cube/tiny_httpd) -* [atdgen](https://github.com/ahrefs/atd) - -## Test URLs -### JSON - -http://localhost:8080/json - -### PLAINTEXT - -http://localhost:8080/plaintext diff --git a/frameworks/OCaml/tiny_httpd/src/dune-project b/frameworks/OCaml/tiny_httpd/src/dune-project deleted file mode 100644 index 8c73b93a7e9..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/dune-project +++ /dev/null @@ -1,26 +0,0 @@ -(lang dune 2.7) -(name webmachine-tfb) - -(generate_opam_files true) - -(source (github TechEmpower/FrameworkBenchmarks)) -(license MIT) -(authors "Robin Björklin") - -(package - (name tiny) - (synopsis "Rudimentary implementation of the Tech Empower Benchmark suite") - (depends - (dune (>= 2.7.1)) - (tiny_httpd (>= 0.6)))) - -(package - (name lib) - (synopsis "WIP library") - (depends - (dune (>= 2.7.1)) - (biniou (>= 1.2.1)) - (yojson (>= 1.7.0)) - (atd (>= 2.2.1)) - (atdgen (>= 2.2.1)) - (atdgen-runtime (>= 2.2.1)))) diff --git a/frameworks/OCaml/tiny_httpd/src/lib.opam b/frameworks/OCaml/tiny_httpd/src/lib.opam deleted file mode 100644 index dabb857fca0..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/lib.opam +++ /dev/null @@ -1,31 +0,0 @@ -# This file is generated by dune, edit dune-project instead -opam-version: "2.0" -synopsis: "WIP library" -authors: ["Robin Björklin"] -license: "MIT" -homepage: "https://github.com/TechEmpower/FrameworkBenchmarks" -bug-reports: "https://github.com/TechEmpower/FrameworkBenchmarks/issues" -depends: [ - "dune" {>= "2.7" & >= "2.7.1"} - "biniou" {>= "1.2.1"} - "yojson" {>= "1.7.0"} - "atd" {>= "2.2.1"} - 
"atdgen" {>= "2.2.1"} - "atdgen-runtime" {>= "2.2.1"} - "odoc" {with-doc} -] -build: [ - ["dune" "subst"] {dev} - [ - "dune" - "build" - "-p" - name - "-j" - jobs - "@install" - "@runtest" {with-test} - "@doc" {with-doc} - ] -] -dev-repo: "git+https://github.com/TechEmpower/FrameworkBenchmarks.git" diff --git a/frameworks/OCaml/tiny_httpd/src/src/bin/dune b/frameworks/OCaml/tiny_httpd/src/src/bin/dune deleted file mode 100644 index c6c445f2541..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/src/bin/dune +++ /dev/null @@ -1,5 +0,0 @@ -(executable - (libraries tiny_httpd lib) - (public_name tiny) - (package tiny) - (name tiny)) diff --git a/frameworks/OCaml/tiny_httpd/src/src/bin/tiny.ml b/frameworks/OCaml/tiny_httpd/src/src/bin/tiny.ml deleted file mode 100644 index 26f03aea144..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/src/bin/tiny.ml +++ /dev/null @@ -1,24 +0,0 @@ -module S = Tiny_httpd - -let () = - let server = S.create ~addr:"0.0.0.0" ~max_connections:256 () in - let headers = [ ("Server", "tiny_httpd") ] in - (* say hello *) - S.add_route_handler ~meth:`GET server - S.Route.(exact "plaintext" @/ string @/ return) - (fun _ _req -> - let headers = S.Headers.set "Content-Type" "text/plain" headers in - let headers = S.Headers.set "Date" (Lib.Time.now ()) headers in - S.Response.make_string ~headers (Ok "Hello, World!")); - S.add_route_handler ~meth:`GET server - S.Route.(exact "json" @/ string @/ return) - (fun _ _req -> - let headers = S.Headers.set "Content-Type" "application/json" headers in - let headers = S.Headers.set "Date" (Lib.Time.now ()) headers in - let json = Lib.Message_t.{ message = "Hello, World!" } in - S.Response.make_string ~headers - (Ok (Lib.Message_j.string_of_message json))); - Printf.printf "listening on http://%s:%d\n%!" 
(S.addr server) (S.port server); - match S.run server with - | Ok () -> () - | Error e -> raise e diff --git a/frameworks/OCaml/tiny_httpd/src/src/lib/dune b/frameworks/OCaml/tiny_httpd/src/src/lib/dune deleted file mode 100644 index aeb618bc6b5..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/src/lib/dune +++ /dev/null @@ -1,16 +0,0 @@ -(library - (name lib) - (public_name lib) - (libraries yojson atdgen-runtime)) - -(rule - (targets message_t.ml - message_t.mli) - (deps message.atd) - (action (run atdgen -t %{deps}))) - -(rule - (targets message_j.ml - message_j.mli) - (deps message.atd) - (action (run atdgen -j %{deps}))) diff --git a/frameworks/OCaml/tiny_httpd/src/src/lib/message.atd b/frameworks/OCaml/tiny_httpd/src/src/lib/message.atd deleted file mode 100644 index b0b5fc86816..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/src/lib/message.atd +++ /dev/null @@ -1,3 +0,0 @@ -type message = { - message : string; -} diff --git a/frameworks/OCaml/tiny_httpd/src/src/lib/time.ml b/frameworks/OCaml/tiny_httpd/src/src/lib/time.ml deleted file mode 100644 index 76f14c08761..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/src/lib/time.ml +++ /dev/null @@ -1,43 +0,0 @@ -let weekday Unix.{ tm_wday; _ } = - match tm_wday with - | 0 -> "Sun" - | 1 -> "Mon" - | 2 -> "Tue" - | 3 -> "Wed" - | 4 -> "Thu" - | 5 -> "Fri" - | 6 -> "Sat" - | _ -> failwith "weekday" - -let month Unix.{ tm_mon; _ } = - match tm_mon with - | 0 -> "Jan" - | 1 -> "Feb" - | 2 -> "Mar" - | 3 -> "Apr" - | 4 -> "May" - | 5 -> "Jun" - | 6 -> "Jul" - | 7 -> "Aug" - | 8 -> "Sep" - | 9 -> "Oct" - | 10 -> "Nov" - | 11 -> "Dec" - | _ -> failwith "month" - -let gmt tm = - Printf.sprintf "%s, %02u %s %04u %02u:%02u:%02u GMT" (weekday tm) tm.tm_mday - (month tm) (tm.tm_year + 1900) tm.tm_hour tm.tm_min tm.tm_sec - -let last_time = ref (Unix.gettimeofday ()) - -let time_string = ref (gmt (Unix.gmtime (Unix.gettimeofday ()))) - -let now () = - let _now = Unix.gettimeofday () in - if _now -. !last_time >= 1. 
then ( - let new_time_string = gmt (Unix.gmtime _now) in - time_string := new_time_string; - last_time := _now; - new_time_string ) - else !time_string diff --git a/frameworks/OCaml/tiny_httpd/src/tiny.opam b/frameworks/OCaml/tiny_httpd/src/tiny.opam deleted file mode 100644 index 8c89b38b7a9..00000000000 --- a/frameworks/OCaml/tiny_httpd/src/tiny.opam +++ /dev/null @@ -1,27 +0,0 @@ -# This file is generated by dune, edit dune-project instead -opam-version: "2.0" -synopsis: "Rudimentary implementation of the Tech Empower Benchmark suite" -authors: ["Robin Björklin"] -license: "MIT" -homepage: "https://github.com/TechEmpower/FrameworkBenchmarks" -bug-reports: "https://github.com/TechEmpower/FrameworkBenchmarks/issues" -depends: [ - "dune" {>= "2.7" & >= "2.7.1"} - "tiny_httpd" {>= "0.6"} - "odoc" {with-doc} -] -build: [ - ["dune" "subst"] {dev} - [ - "dune" - "build" - "-p" - name - "-j" - jobs - "@install" - "@runtest" {with-test} - "@doc" {with-doc} - ] -] -dev-repo: "git+https://github.com/TechEmpower/FrameworkBenchmarks.git" diff --git a/frameworks/OCaml/tiny_httpd/tiny_httpd.dockerfile b/frameworks/OCaml/tiny_httpd/tiny_httpd.dockerfile deleted file mode 100644 index 89e00555f3e..00000000000 --- a/frameworks/OCaml/tiny_httpd/tiny_httpd.dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM ocurrent/opam:debian-10-ocaml-4.11-flambda - -ENV DIR tiny_httpd -# https://blog.packagecloud.io/eng/2017/02/21/set-environment-variable-save-thousands-of-system-calls/ -ENV TZ :/etc/localtime - -# https://caml.inria.fr/pub/docs/manual-ocaml/libref/Gc.html -# https://linux.die.net/man/1/ocamlrun -# https://blog.janestreet.com/memory-allocator-showdown/ -ENV OCAMLRUNPARAM a=2,o=240 - -WORKDIR /${DIR} - -RUN opam install tiny_httpd atdgen - -COPY ./src /${DIR} - -RUN sudo chown -R opam: . 
&& eval $(opam env) && dune build --profile release - -EXPOSE 8080 - -ENTRYPOINT _build/default/src/bin/tiny.exe diff --git a/frameworks/PHP/amp/amp.dockerfile b/frameworks/PHP/amp/amp.dockerfile index 01e964c85c2..4af0ea55dc9 100644 --- a/frameworks/PHP/amp/amp.dockerfile +++ b/frameworks/PHP/amp/amp.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive @@ -14,7 +14,7 @@ RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar #RUN apt-get install -y libuv1-dev > /dev/null RUN apt-get install -y libevent-dev > /dev/null #RUN pecl install uv-0.2.4 > /dev/null && echo "extension=uv.so" > /etc/php/8.1/cli/conf.d/uv.ini -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini ADD ./ /amp WORKDIR /amp diff --git a/frameworks/PHP/comet/comet-mysql.dockerfile b/frameworks/PHP/comet/comet-mysql.dockerfile index c5ab86ce256..8b7ff0379f3 100644 --- a/frameworks/PHP/comet/comet-mysql.dockerfile +++ b/frameworks/PHP/comet/comet-mysql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/comet/comet.dockerfile b/frameworks/PHP/comet/comet.dockerfile index 371f662eb55..5f0a16ebdd0 100644 --- a/frameworks/PHP/comet/comet.dockerfile +++ b/frameworks/PHP/comet/comet.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/hamlet/benchmark_config.json b/frameworks/PHP/hamlet/benchmark_config.json index ea22ec609ce..b8fda2f4f31 100644 --- a/frameworks/PHP/hamlet/benchmark_config.json +++ b/frameworks/PHP/hamlet/benchmark_config.json @@ -23,7 +23,8 @@ "database_os": "Linux", "display_name": "hamlet", "notes": "", - "versus": "php" + "versus": "php", + "tags": ["broken"] }, "swoole": { "plaintext_url": "/plaintext", @@ -47,7 +48,8 @@ "database_os": "Linux", "display_name": "hamlet-swoole", "notes": "", - "versus": "swoole" + "versus": "swoole", + "tags": ["broken"] }, "workerman": { "plaintext_url": "/plaintext", @@ -71,7 +73,8 @@ "database_os": "Linux", "display_name": "hamlet-workerman", "notes": "", - "versus": "workerman" + "versus": "workerman", + "tags": ["broken"] } }] } diff --git a/frameworks/PHP/imi/.env b/frameworks/PHP/imi/.env new file mode 100644 index 00000000000..37ece06e93d --- /dev/null +++ b/frameworks/PHP/imi/.env @@ -0,0 +1 @@ +WITH_REDIS=1 \ No newline at end of file diff --git a/frameworks/PHP/imi/.env-with-redis b/frameworks/PHP/imi/.env-with-redis deleted file mode 100644 index 69c4ca82b2b..00000000000 --- a/frameworks/PHP/imi/.env-with-redis +++ /dev/null @@ -1,2 +0,0 @@ -@app.pools.redis.pool.config.minResources=16 
-WITH_REDIS=1 \ No newline at end of file diff --git a/frameworks/PHP/imi/ApiServer/Controller/IndexController.php b/frameworks/PHP/imi/ApiServer/Controller/IndexController.php index a9d38736f09..74761aaf48f 100644 --- a/frameworks/PHP/imi/ApiServer/Controller/IndexController.php +++ b/frameworks/PHP/imi/ApiServer/Controller/IndexController.php @@ -1,17 +1,20 @@ 'Hello, World!']; } /** * @Action - * @View(renderType="html") - * - * @return void */ - public function plaintext() + public function plaintext(): IHttpResponse { - return RequestContext::get('response')->withHeader('Content-Type', 'text/plain; charset=utf-8')->write('Hello, World!'); + $response = $this->response; + $response->setHeader('Content-Type', 'text/plain; charset=utf-8') + ->getBody() + ->write('Hello, World!'); + return $response; } /** * @Action - * - * @return void */ - public function dbModel() + public function dbModel(): ?World { return World::find(\mt_rand(1, 10000)); } /** * @Action - * - * @return void - */ - public function dbQueryBuilder() - { - return Db::query()->from('World')->field('id', 'randomNumber')->where('id', '=', \mt_rand(1, 10000))->limit(1)->select()->get(); - } - - /** - * @Action - * - * @return void */ - public function dbRaw() + public function dbRaw(): array { $db = Db::getInstance(); $stmt = $db->prepare('SELECT id, randomNumber FROM World WHERE id = ? LIMIT 1'); @@ -74,10 +62,8 @@ public function dbRaw() /** * @Action - * - * @return void */ - public function queryModel($queries) + public function queryModel($queries): array { $queries = (int)$queries; if($queries > 1) @@ -101,31 +87,7 @@ public function queryModel($queries) * * @return void */ - public function queryQueryBuilder($queries) - { - $queries = (int)$queries; - if($queries > 1) - { - $queryCount = \min($queries, 500); - } - else - { - $queryCount = 1; - } - $list = []; - while ($queryCount--) - { - $list[] = Db::query()->from('World')->field('id', 'randomNumber')->where('id', '=', \mt_rand(1, 10000))->limit(1)->select()->get(); - } - return $list; - } - - /** - * @Action - * - * @return void - */ - public function queryRaw($queries) + public function queryRaw($queries): array { $queries = (int)$queries; if($queries > 1) @@ -150,14 +112,10 @@ public function queryRaw($queries) /** * @Action * @View(renderType="html") - * - * @return void */ - public function fortunes() + public function fortunes(): array { - RequestContext::use(function(&$context){ - $context['response'] = $context['response']->withHeader('Content-Type', 'text/html; charset=UTF-8'); - }); + $this->response->setHeader('Content-Type', 'text/html; charset=UTF-8'); $list = Fortune::select(); $rows = []; foreach($list as $item) @@ -177,7 +135,7 @@ public function fortunes() * * @return void */ - public function fortunesRaw() + public function fortunesRaw(): IHttpResponse { $rows = []; foreach(Db::getInstance()->query('SELECT id, message FROM Fortune')->fetchAll() as $item) @@ -194,16 +152,14 @@ public function fortunesRaw() $html .= "{$id}{$message}"; } - return RequestContext::get('response')->withHeader('Content-Type', 'text/html; charset=UTF-8') - ->withBody(new MemoryStream("Fortunes{$html}
<tr><th>id</th><th>message</th></tr>
")); + return $this->response->setHeader('Content-Type', 'text/html; charset=UTF-8') + ->setBody(new MemoryStream("Fortunes{$html}
<tr><th>id</th><th>message</th></tr>
")); } /** * @Action - * - * @return void */ - public function updateModel($queries) + public function updateModel($queries): array { $queries = (int)$queries; if($queries > 1) @@ -226,40 +182,8 @@ public function updateModel($queries) /** * @Action - * - * @return void */ - public function updateQueryBuilder($queries) - { - $queries = (int)$queries; - if($queries > 1) - { - $queryCount = \min($queries, 500); - } - else - { - $queryCount = 1; - } - $list = []; - while ($queryCount--) - { - $id = \mt_rand(1, 10000); - $row = Db::query()->from('World')->field('id', 'randomNumber')->where('id', '=', $id)->limit(1)->select()->get(); - $row['randomNumber'] = $randomNumber = \mt_rand(1, 10000); - Db::query()->from('World')->where('id', '=', $id)->limit(1)->update([ - 'randomNumber' => $randomNumber, - ]); - $list[] = $row; - } - return $list; - } - - /** - * @Action - * - * @return void - */ - public function updateRaw($queries) + public function updateRaw($queries): array { $queries = (int)$queries; if($queries > 1) @@ -283,16 +207,15 @@ public function updateRaw($queries) $stmtUpdate->execute([$randomNumber, $id]); $list[] = $row; } + return $list; } /** * @Action * @Route("cached-worlds") - * - * @return void */ - public function cachedWorlds($count) + public function cachedWorlds($count): array { $count = (int)$count; if($count > 1) @@ -303,12 +226,20 @@ public function cachedWorlds($count) { $queryCount = 1; } - $ids = []; - while ($queryCount--) + + $list = App::get('worlds'); + $result = []; + $keys = \array_rand($list, $queryCount); + foreach ((array) $keys as $key) { - $ids[] = 'world:' . \mt_rand(1, 10000); + if (!isset($list[$key])) + { + break; + } + $result[] = $list[$key]; } - return RedisManager::getInstance()->mget($ids); + + return $result; } } diff --git a/frameworks/PHP/imi/ApiServer/Controller/PgController.php b/frameworks/PHP/imi/ApiServer/Controller/PgController.php new file mode 100644 index 00000000000..1ae426535f6 --- /dev/null +++ b/frameworks/PHP/imi/ApiServer/Controller/PgController.php @@ -0,0 +1,201 @@ +prepare('SELECT id, randomnumber FROM World WHERE id = ? LIMIT 1'); + $stmt->execute([\mt_rand(1, 10000)]); + return $stmt->fetch(); + } + + /** + * @Action + */ + public function pgQueryModel($queries): array + { + $queries = (int)$queries; + if($queries > 1) + { + $queryCount = \min($queries, 500); + } + else + { + $queryCount = 1; + } + $list = []; + while ($queryCount--) + { + $list[] = World::find(\mt_rand(1, 10000)); + } + return $list; + } + + /** + * @Action + * + * @return void + */ + public function pgQueryRaw($queries): array + { + $queries = (int)$queries; + if($queries > 1) + { + $queryCount = \min($queries, 500); + } + else + { + $queryCount = 1; + } + $list = []; + $db = Db::getInstance(self::POOL_NAME); + $stmt = $db->prepare('SELECT id, randomnumber FROM World WHERE id = ? 
LIMIT 1'); + while ($queryCount--) + { + $stmt->execute([\mt_rand(1, 10000)]); + $list[] = $stmt->fetch(); + } + return $list; + } + + /** + * @Action + * @View(renderType="html") + * @HtmlView("fortunes") + */ + public function pgFortunes(): array + { + $this->response->setHeader('Content-Type', 'text/html; charset=UTF-8'); + $list = Fortune::select(); + $rows = []; + foreach($list as $item) + { + $rows[$item->id] = $item->message; + } + $rows[0] = 'Additional fortune added at request time.'; + \asort($rows); + return [ + 'rows' => $rows, + ]; + } + + /** + * @Action + * @View(renderType="html") + * + * @return void + */ + public function pgFortunesRaw(): IHttpResponse + { + $rows = []; + foreach(Db::getInstance(self::POOL_NAME)->query('SELECT id, message FROM Fortune')->fetchAll() as $item) + { + $rows[$item['id']] = $item['message']; + } + $rows[0] = 'Additional fortune added at request time.'; + \asort($rows); + + $html = ''; + foreach ($rows as $id => $message) + { + $message = \htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); + $html .= "{$id}{$message}"; + } + + return $this->response->setHeader('Content-Type', 'text/html; charset=UTF-8') + ->setBody(new MemoryStream("Fortunes{$html}
<tr><th>id</th><th>message</th></tr>
")); + } + + /** + * @Action + */ + public function pgUpdateModel($queries): array + { + $queries = (int)$queries; + if($queries > 1) + { + $queryCount = \min($queries, 500); + } + else + { + $queryCount = 1; + } + $list = []; + while ($queryCount--) + { + $list[] = $row = World::find(\mt_rand(1, 10000)); + $row->randomNumber = \mt_rand(1, 10000); + $row->update(); + } + return $list; + } + + /** + * @Action + */ + public function pgUpdateRaw($queries): array + { + $queries = (int)$queries; + if($queries > 1) + { + $queryCount = \min($queries, 500); + } + else + { + $queryCount = 1; + } + $db = Db::getInstance(self::POOL_NAME); + $stmtSelect = $db->prepare('SELECT id, randomnumber FROM World WHERE id = ? LIMIT 1'); + $stmtUpdate = $db->prepare('UPDATE World SET randomNumber = CASE id' . \str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $queryCount) . 'END WHERE id IN (' . \str_repeat('?::INTEGER,', $queryCount - 1) . '?::INTEGER)'); + $list = []; + $keys = $values = []; + while ($queryCount--) + { + $values[] = $keys[] = $id = \mt_rand(1, 10000); + $stmtSelect->execute([$id]); + $row = $stmtSelect->fetch(); + + $values[] = $row['randomNumber'] = \mt_rand(1, 10000); + $list[] = $row; + } + $stmtUpdate->execute([ + ...$values, + ...$keys + ]); + + return $list; + } +} \ No newline at end of file diff --git a/frameworks/PHP/imi/ApiServer/Main.php b/frameworks/PHP/imi/ApiServer/Main.php deleted file mode 100644 index 1cba5dc93b0..00000000000 --- a/frameworks/PHP/imi/ApiServer/Main.php +++ /dev/null @@ -1,13 +0,0 @@ - [ ], - // bean扫描目录 - 'beanScan' => [ - 'ImiApp\ApiServer\Controller', - 'ImiApp\Model', - ], 'beans' => [ 'HttpDispatcher' => [ - 'middlewares' => [ - \Imi\Server\Http\Middleware\RouteMiddleware::class, - ], + 'middleware' => false, ], 'HtmlView' => [ 'templatePath' => dirname(__DIR__) . '/template/', @@ -23,7 +16,4 @@ ], ] ], - 'controller' => [ - 'singleton' => true, - ], ]; \ No newline at end of file diff --git a/frameworks/PHP/imi/Listener/AppInit.php b/frameworks/PHP/imi/Listener/AppInit.php index 4414f3b6242..b3feed66c94 100644 --- a/frameworks/PHP/imi/Listener/AppInit.php +++ b/frameworks/PHP/imi/Listener/AppInit.php @@ -1,39 +1,29 @@ from('world')->page($page, 1000)->select()->getArray()) - { - $redisList = []; - foreach($list as $row) - { - $redisList['world:' . 
$row['id']] = $row; - } - $redis->mset($redisList); - ++$page; - } - } + App::set('worlds', Db::query()->from('world')->select()->getArray()); } - -} \ No newline at end of file +} diff --git a/frameworks/PHP/imi/Listener/WorkermanRequest.php b/frameworks/PHP/imi/Listener/WorkermanRequest.php new file mode 100644 index 00000000000..556ee5afc68 --- /dev/null +++ b/frameworks/PHP/imi/Listener/WorkermanRequest.php @@ -0,0 +1,24 @@ +getData()['response']->setHeader('Date', App::get('test_date')); + } +} diff --git a/frameworks/PHP/imi/Listener/WorkermanWorkerStart.php b/frameworks/PHP/imi/Listener/WorkermanWorkerStart.php new file mode 100644 index 00000000000..6ef1c980163 --- /dev/null +++ b/frameworks/PHP/imi/Listener/WorkermanWorkerStart.php @@ -0,0 +1,28 @@ + 30, + 'socket_connect_timeout' => 5, + 'socket_read_timeout' => 30, + 'socket_write_timeout' => 30, + ]); + } } } \ No newline at end of file diff --git a/frameworks/PHP/imi/Model/Base/FortuneBase.php b/frameworks/PHP/imi/Model/Base/FortuneBase.php index 02ac553d7cf..51a58d3e092 100644 --- a/frameworks/PHP/imi/Model/Base/FortuneBase.php +++ b/frameworks/PHP/imi/Model/Base/FortuneBase.php @@ -1,73 +1,77 @@ id; } /** * 赋值 id - * @param int $id id + * @param int|null $id id * @return static - */ + */ public function setId($id) { - $this->id = $id; + $this->id = null === $id ? null : (int)$id; return $this; } /** * message * @Column(name="message", type="varchar", length=2048, accuracy=0, nullable=false, default="", isPrimaryKey=false, primaryKeyIndex=-1, isAutoIncrement=false) - * @var string + * @var string|null */ - protected $message; + protected ?string $message = null; /** * 获取 message * - * @return string - */ - public function getMessage() + * @return string|null + */ + public function getMessage(): ?string { return $this->message; } /** * 赋值 message - * @param string $message message + * @param string|null $message message * @return static - */ + */ public function setMessage($message) { - $this->message = $message; + $this->message = null === $message ? null : (string)$message; return $this; } diff --git a/frameworks/PHP/imi/Model/Base/WorldBase.php b/frameworks/PHP/imi/Model/Base/WorldBase.php index d00e194e2d5..7617e4823a6 100644 --- a/frameworks/PHP/imi/Model/Base/WorldBase.php +++ b/frameworks/PHP/imi/Model/Base/WorldBase.php @@ -1,73 +1,77 @@ id; } /** * 赋值 id - * @param int $id id + * @param int|null $id id * @return static - */ + */ public function setId($id) { - $this->id = $id; + $this->id = null === $id ? null : (int)$id; return $this; } /** * randomNumber * @Column(name="randomNumber", type="int", length=11, accuracy=0, nullable=false, default="0", isPrimaryKey=false, primaryKeyIndex=-1, isAutoIncrement=false) - * @var int + * @var int|null */ - protected $randomNumber; + protected ?int $randomNumber = null; /** * 获取 randomNumber * - * @return int - */ - public function getRandomNumber() + * @return int|null + */ + public function getRandomNumber(): ?int { return $this->randomNumber; } /** * 赋值 randomNumber - * @param int $randomNumber randomNumber + * @param int|null $randomNumber randomNumber * @return static - */ + */ public function setRandomNumber($randomNumber) { - $this->randomNumber = $randomNumber; + $this->randomNumber = null === $randomNumber ? 
null : (int)$randomNumber; return $this; } diff --git a/frameworks/PHP/imi/Model/Fortune.php b/frameworks/PHP/imi/Model/Fortune.php index eafb7dab036..59f6d1d0721 100644 --- a/frameworks/PHP/imi/Model/Fortune.php +++ b/frameworks/PHP/imi/Model/Fortune.php @@ -1,11 +1,13 @@ id; + } + + /** + * 赋值 id + * @param int|null $id id + * @return static + */ + public function setId(?int $id) + { + $this->id = $id; + return $this; + } + + /** + * message + * @Column(name="message", type="varchar", length=0, accuracy=2048, nullable=false, default="", isPrimaryKey=false, primaryKeyIndex=-1, isAutoIncrement=false, ndims=0) + * @var string|null + */ + protected ?string $message = null; + + /** + * 获取 message + * + * @return string|null + */ + public function getMessage(): ?string + { + return $this->message; + } + + /** + * 赋值 message + * @param string|null $message message + * @return static + */ + public function setMessage(?string $message) + { + $this->message = $message; + return $this; + } + +} diff --git a/frameworks/PHP/imi/Model/PgSql/Base/WorldBase.php b/frameworks/PHP/imi/Model/PgSql/Base/WorldBase.php new file mode 100644 index 00000000000..a269943f81a --- /dev/null +++ b/frameworks/PHP/imi/Model/PgSql/Base/WorldBase.php @@ -0,0 +1,76 @@ +id; + } + + /** + * 赋值 id + * @param int|null $id id + * @return static + */ + public function setId(?int $id) + { + $this->id = $id; + return $this; + } + + /** + * randomnumber + * @Column(name="randomnumber", type="int4", length=-1, accuracy=0, nullable=false, default="0", isPrimaryKey=false, primaryKeyIndex=-1, isAutoIncrement=false, ndims=0) + * @var int|null + */ + protected ?int $randomnumber = null; + + /** + * 获取 randomnumber + * + * @return int|null + */ + public function getRandomnumber(): ?int + { + return $this->randomnumber; + } + + /** + * 赋值 randomnumber + * @param int|null $randomnumber randomnumber + * @return static + */ + public function setRandomnumber(?int $randomnumber) + { + $this->randomnumber = $randomnumber; + return $this; + } + +} diff --git a/frameworks/PHP/imi/Model/PgSql/Fortune.php b/frameworks/PHP/imi/Model/PgSql/Fortune.php new file mode 100644 index 00000000000..9f649c74c3b --- /dev/null +++ b/frameworks/PHP/imi/Model/PgSql/Fortune.php @@ -0,0 +1,16 @@ + - - imi - -

- -[![Latest Version](https://img.shields.io/packagist/v/yurunsoft/imi.svg)](https://packagist.org/packages/yurunsoft/imi) -[![Travis](https://img.shields.io/travis/Yurunsoft/IMI.svg)](https://travis-ci.org/Yurunsoft/IMI) -[![Php Version](https://img.shields.io/badge/php-%3E=7.1-brightgreen.svg)](https://secure.php.net/) -[![Swoole Version](https://img.shields.io/badge/swoole-%3E=4.3.0-brightgreen.svg)](https://github.com/swoole/swoole-src) -[![imi Doc](https://img.shields.io/badge/docs-passing-green.svg)](https://doc.imiphp.com) -[![imi License](https://img.shields.io/badge/license-MulanPSL%201.0-brightgreen.svg)](https://github.com/Yurunsoft/imi/blob/master/LICENSE) - -## 介绍 - -imi 是基于 PHP Swoole 的高性能协程应用开发框架,它支持 HttpApi、WebSocket、TCP、UDP 服务的开发。 - -在 Swoole 的加持下,相比 php-fpm 请求响应能力,I/O密集型场景处理能力,有着本质上的提升。 - -imi 框架拥有丰富的功能组件,可以广泛应用于互联网、移动通信、企业软件、云计算、网络游戏、物联网(IOT)、车联网、智能家居等领域。可以使企业 IT 研发团队的效率大大提升,更加专注于开发创新产品。 - -imi 框架交流群:17916227 [![点击加群](https://pub.idqqimg.com/wpa/images/group.png "点击加群")](https://jq.qq.com/?_wv=1027&k=5wXf4Zq) - -### 核心组件 - -* HttpApi、WebSocket、TCP、UDP 服务器 -* MySQL 连接池 (主从+负载均衡) -* Redis 连接池 (主从+负载均衡) -* 超好用的 ORM (Db、Redis、Tree) -* 毫秒级热更新 -* AOP -* Bean 容器 -* 缓存 (Cache) -* 配置读写 (Config) -* 枚举 (Enum) -* 事件 (Event) -* 门面 (Facade) -* 验证器 (Validate) -* 锁 (Lock) -* 日志 (Log) -* 异步任务 (Task) - -### 扩展组件 - -* [RPC](https://github.com/imiphp/imi-rpc) -* [Hprose](https://github.com/imiphp/imi-hprose) -* [权限控制](https://github.com/imiphp/imi-access-control) -* [Smarty 模版引擎](https://github.com/imiphp/imi-smarty) -* [限流](https://github.com/imiphp/imi-rate-limit) -* [跨进程变量共享](https://github.com/imiphp/imi-shared-memory) -* [Swoole Tracker](https://github.com/imiphp/imi-swoole-tracker) - -## 开始使用 - -创建 Http Server 项目:`composer create-project imiphp/project-http` - -创建 WebSocket Server 项目:`composer create-project imiphp/project-websocket` - -创建 TCP Server 项目:`composer create-project imiphp/project-tcp` - -创建 UDP Server 项目:`composer create-project imiphp/project-udp` - -[完全开发手册](https://doc.imiphp.com) - -## 运行环境 - -- Linux 系统 (Swoole 不支持在 Windows 上运行) -- [PHP](https://php.net/) >= 7.1 -- [Composer](https://getcomposer.org/) -- [Swoole](https://www.swoole.com/) >= 4.3.0 -- Redis、PDO 扩展 - -## 版权信息 - -imi 遵循 木兰宽松许可证(Mulan PSL v1) 开源协议发布,并提供免费使用。 - -## 鸣谢 - -感谢以下开源项目 (按字母顺序排列) 为 imi 提供强力支持! - -- [doctrine/annotations](https://github.com/doctrine/annotations) (PHP 注解处理类库) -- [PHP](https://php.net/) (没有 PHP 就没有 imi) -- [Swoole](https://www.swoole.com/) (没有 Swoole 就没有 imi) - -## 贡献者 - - - -你想出现在贡献者列表中吗? - -你可以做的事(包括但不限于以下): - -* 纠正拼写、错别字 -* 完善注释 -* bug修复 -* 功能开发 -* 文档编写() -* 教程、博客分享 - -> 最新代码以 `dev` 分支为准,提交 `PR` 也请合并至 `dev` 分支! - -提交 `Pull Request` 到本仓库,你就有机会成为 imi 的作者之一! 
- -## 关于测试脚本 - -### 环境要求 - -Redis、MySQL - -### 首次运行测试 - -* 创建 `db_imi_test` 数据库,将 `tests/db/db.sql` 导入到数据库 - -* 配置系统环境变量,如果默认值跟你的一样就无需配置了 - -名称 | 描述 | 默认值 --|-|- -MYSQL_SERVER_HOST | MySQL 主机名 | 127.0.0.1 | -MYSQL_SERVER_PORT | MySQL 端口 | 3306 | -MYSQL_SERVER_USERNAME | MySQL 用户名 | root | -MYSQL_SERVER_PASSWORD | MySQL 密码 | root | -REDIS_SERVER_HOST | Redis 主机名 | 127.0.0.1 | -REDIS_SERVER_PORT | Redis 端口 | 6379 | -REDIS_SERVER_PASSWORD | Redis 密码 | | -REDIS_CACHE_DB | Redis 缓存用的 `db`,该 `db` 会被清空数据,请慎重设置 | 1 | - -配置命令:`export NAME=VALUE` - -* 首次运行测试脚本:`composer install-test` - -* 首次之后再运行测试的命令:`composer test` - -## 捐赠 - - - -开源不求盈利,多少都是心意,生活不易,随缘随缘…… diff --git a/frameworks/PHP/imi/benchmark_config.json b/frameworks/PHP/imi/benchmark_config.json index 1ad991cc6fa..d765cae1f14 100644 --- a/frameworks/PHP/imi/benchmark_config.json +++ b/frameworks/PHP/imi/benchmark_config.json @@ -3,6 +3,7 @@ "tests": [ { "default": { + "dockerfile": "imi-swoole.dockerfile", "json_url": "/json", "plaintext_url": "/plaintext", "db_url": "/dbModel", @@ -22,35 +23,63 @@ "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "imi", + "display_name": "imi-swoole", "notes": "", "versus": "Swoole" }, - "query-builder": { - "db_url": "/dbQueryBuilder", - "query_url": "/queryQueryBuilder?queries=", - "update_url": "/updateQueryBuilder?queries=", + "swoole-mysql-raw": { + "dockerfile": "imi-swoole.dockerfile", + "db_url": "/dbRaw", + "query_url": "/queryRaw?queries=", + "fortune_url": "/fortunesRaw", + "update_url": "/updateRaw?queries=", "port": 8080, "approach": "Realistic", - "classification": "Fullstack", + "classification": "Micro", "database": "MySQL", "framework": "imi", "language": "PHP", "flavor": "None", - "orm": "Micro", + "orm": "Raw", "platform": "Swoole", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "imi-query-builder", + "display_name": "imi-swoole-mysql-raw", "notes": "", "versus": "Swoole" }, - "raw": { + "workerman": { + "dockerfile": "imi-workerman.dockerfile", + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/dbModel", + "query_url": "/queryModel?queries=", + "fortune_url": "/fortunes", + "update_url": "/updateModel?queries=", + "cached_query_url": "/cached-worlds?count=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "MySQL", + "framework": "imi", + "language": "PHP", + "flavor": "None", + "orm": "Full", + "platform": "Workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "imi-workerman", + "notes": "", + "versus": "Workerman" + }, + "workerman-mysql-raw": { + "dockerfile": "imi-workerman.dockerfile", "db_url": "/dbRaw", "query_url": "/queryRaw?queries=", - "update_url": "/updateRaw?queries=", "fortune_url": "/fortunesRaw", + "update_url": "/updateRaw?queries=", "port": 8080, "approach": "Realistic", "classification": "Micro", @@ -59,13 +88,101 @@ "language": "PHP", "flavor": "None", "orm": "Raw", + "platform": "Workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "imi-workerman-mysql-raw", + "notes": "", + "versus": "Workerman" + }, + "swoole-pgsql": { + "dockerfile": "imi-swoole-pgsql.dockerfile", + "db_url": "/pgDbModel", + "query_url": "/pgQueryModel?queries=", + "fortune_url": "/pgFortunes", + "update_url": "/pgUpdateModel?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "imi", + "language": "PHP", + "flavor": 
"None", + "orm": "Full", + "platform": "Swoole", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "imi-swoole-pgsql", + "notes": "", + "versus": "Swoole" + }, + "swoole-pgsql-raw": { + "dockerfile": "imi-swoole-pgsql.dockerfile", + "db_url": "/pgDbRaw", + "query_url": "/pgQueryRaw?queries=", + "fortune_url": "/pgFortunesRaw", + "update_url": "/pgUpdateRaw?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "imi", + "language": "PHP", + "flavor": "None", + "orm": "Raw", "platform": "Swoole", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "imi-raw", + "display_name": "imi-swoole-pgsql-raw", "notes": "", "versus": "Swoole" + }, + "workerman-pgsql": { + "dockerfile": "imi-workerman.dockerfile", + "db_url": "/pgDbModel", + "query_url": "/pgQueryModel?queries=", + "fortune_url": "/pgFortunes", + "update_url": "/pgUpdateModel?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "imi", + "language": "PHP", + "flavor": "None", + "orm": "Full", + "platform": "Workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "imi-workerman-pgsql", + "notes": "", + "versus": "Workerman" + }, + "workerman-pgsql-raw": { + "dockerfile": "imi-workerman.dockerfile", + "db_url": "/pgDbRaw", + "query_url": "/pgQueryRaw?queries=", + "fortune_url": "/pgFortunesRaw", + "update_url": "/pgUpdateRaw?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "imi", + "language": "PHP", + "flavor": "None", + "orm": "Raw", + "platform": "Workerman", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "imi-workerman-pgsql-raw", + "notes": "", + "versus": "Workerman" } } ] diff --git a/frameworks/PHP/imi/composer.json b/frameworks/PHP/imi/composer.json index c43c1c01db1..b0ca7fb7aaa 100644 --- a/frameworks/PHP/imi/composer.json +++ b/frameworks/PHP/imi/composer.json @@ -1,10 +1,11 @@ { - "require": { - "yurunsoft/imi": "~1.0" - }, + "require": { + "imiphp/imi": "~2.0", + "imiphp/imi-pgsql": "~2.0" + }, "autoload": { "psr-4" : { - "ImiApp\\" : "./" + "ImiApp\\" : "./" } - } + } } \ No newline at end of file diff --git a/frameworks/PHP/imi/config.toml b/frameworks/PHP/imi/config.toml index 7323cc02a88..b8b2ab82241 100644 --- a/frameworks/PHP/imi/config.toml +++ b/frameworks/PHP/imi/config.toml @@ -2,6 +2,7 @@ name = "imi" [main] +dockerfile = "imi-swoole.dockerfile" urls.plaintext = "/plaintext" urls.json = "/json" urls.db = "/dbModel" @@ -19,7 +20,8 @@ platform = "Swoole" webserver = "None" versus = "Swoole" -[raw] +[swoole-mysql-raw] +dockerfile = "imi-swoole.dockerfile" urls.db = "/dbRaw" urls.query = "/queryRaw?queries=" urls.update = "/updateRaw?queries=" @@ -34,16 +36,101 @@ platform = "Swoole" webserver = "None" versus = "Swoole" -[query-builder] -urls.db = "/dbQueryBuilder" -urls.query = "/queryQueryBuilder?queries=" -urls.update = "/updateQueryBuilder?queries=" +[workerman] +dockerfile = "imi-workerman.dockerfile" +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/dbModel" +urls.query = "/queryModel?queries=" +urls.update = "/updateModel?queries=" +urls.fortune = "/fortunes" +urls.cached_query = "/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" database = "MySQL" database_os = "Linux" os = "Linux" -orm = "Micro" +orm = "Full" +platform = 
"Workerman" +webserver = "None" +versus = "Workerman" + +[workerman-mysql-raw] +dockerfile = "imi-workerman.dockerfile" +urls.db = "/dbRaw" +urls.query = "/queryRaw?queries=" +urls.update = "/updateRaw?queries=" +urls.fortune = "/fortunesRaw" +approach = "Realistic" +classification = "Micro" +database = "MySQL" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Workerman" +webserver = "None" +versus = "Workerman" + +[swoole-pgsql] +dockerfile = "imi-swoole.dockerfile" +urls.db = "/pgDbModel" +urls.query = "/pgQueryModel?queries=" +urls.update = "/pgUpdateModel?queries=" +urls.fortune = "/pgFortunes" +approach = "Realistic" +classification = "Fullstack" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Full" platform = "Swoole" webserver = "None" versus = "Swoole" + +[swoole-pgsql-raw] +dockerfile = "imi-swoole.dockerfile" +urls.db = "/dbRaw" +urls.query = "/pgQueryRaw?queries=" +urls.update = "/pgUpdateRaw?queries=" +urls.fortune = "/pgFortunesRaw" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Swoole" +webserver = "None" +versus = "Swoole" + +[workerman-pgsql] +dockerfile = "imi-workerman.dockerfile" +urls.db = "/pgDbModel" +urls.query = "/pgQueryModel?queries=" +urls.update = "/pgUpdateModel?queries=" +urls.fortune = "/pgFortunes" +approach = "Realistic" +classification = "Fullstack" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Full" +platform = "Workerman" +webserver = "None" +versus = "Workerman" + +[workerman-pgsql-raw] +dockerfile = "imi-workerman.dockerfile" +urls.db = "/dbRaw" +urls.query = "/pgQueryRaw?queries=" +urls.update = "/pgUpdateRaw?queries=" +urls.fortune = "/pgFortunesRaw" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "Workerman" +webserver = "None" +versus = "Workerman" diff --git a/frameworks/PHP/imi/config/beans.php b/frameworks/PHP/imi/config/beans.php index 99f35edf856..ecc1bae0324 100644 --- a/frameworks/PHP/imi/config/beans.php +++ b/frameworks/PHP/imi/config/beans.php @@ -3,6 +3,4 @@ 'hotUpdate' => [ 'status' => false, // 关闭热更新去除注释,不设置即为开启,建议生产环境关闭 ], - 'Logger' => [ - ], ]; \ No newline at end of file diff --git a/frameworks/PHP/imi/config/config.php b/frameworks/PHP/imi/config/config.php index ccbe65934d0..f7c75e32a99 100644 --- a/frameworks/PHP/imi/config/config.php +++ b/frameworks/PHP/imi/config/config.php @@ -1,11 +1,13 @@ 'tfb-database', - 'username' => 'benchmarkdbuser', - 'password' => 'benchmarkdbpass', - 'database' => 'hello_world', - 'dbClass' => \Imi\Db\Drivers\Swoole\Driver::class, -]; + +use Imi\App; + +$mode = App::isInited() ? App::getApp()->getType() : ''; +$isMysql = ('mysql' === strtolower(getenv('TFB_TEST_DATABASE') ?: 'mysql')); +$host = 'tfb-database'; +$username = 'benchmarkdbuser'; +$password = 'benchmarkdbpass'; + return [ // 项目根命名空间 'namespace' => 'ImiApp', @@ -15,18 +17,13 @@ 'beans' => __DIR__ . '/beans.php', ], - // 扫描目录 - 'beanScan' => [ - 'ImiApp\Listener', - ], - // 组件命名空间 'components' => [], // 主服务器配置 - 'mainServer' => [ + 'mainServer' => 'swoole' === $mode ? [ 'namespace' => 'ImiApp\ApiServer', - 'type' => Imi\Server\Type::HTTP, + 'type' => Imi\Swoole\Server\Type::HTTP, 'host' => '0.0.0.0', 'port' => 8080, 'mode' => SWOOLE_BASE, @@ -39,61 +36,91 @@ 'http_parse_files' => false, 'http_compression' => false, ], - ], + ] : [], + + // Workerman 服务器配置 + 'workermanServer' => 'workerman' === $mode ? 
[ + // 服务器名,http 也可以改成 abc 等等,完全自定义 + 'http' => [ + // 指定服务器命名空间 + 'namespace' => 'ImiApp\ApiServer', + // 服务器类型 + 'type' => Imi\Workerman\Server\Type::HTTP, // HTTP、WEBSOCKET、TCP、UDP + 'host' => '0.0.0.0', + 'port' => 8080, + // socket的上下文选项,参考:http://doc3.workerman.net/315128 + 'context' => [], + 'configs' => [ + // 支持设置 Workerman 参数 + 'count' => (int) shell_exec('nproc') * 4, + ], + ], + ] : [], 'db' => [ - 'defaultPool' => 'db', // 默认连接池 - ], - 'redis' => [ - 'defaultPool' => 'redis', // 默认连接池 - 'quickFromRequestContext' => true, // 从当前上下文中获取公用连接 + 'defaultPool' => $isMysql ? 'mysql' : 'pgsql', // 默认连接池 + 'connections' => [ + 'mysql' => [ + 'host' => $host, + 'username' => $username, + 'password' => $password, + 'database' => 'hello_world', + 'dbClass' => \Imi\Db\Mysql\Drivers\Mysqli\Driver::class, + 'checkStateWhenGetResource' => false, + ], + 'pgsql' => [ + 'host' => $host, + 'username' => $username, + 'password' => $password, + 'database' => 'hello_world', + 'dbClass' => \Imi\Pgsql\Db\Drivers\PdoPgsql\Driver::class, + 'checkStateWhenGetResource' => false, + ], + ], ], - 'pools' => [ + + 'pools' => 'swoole' === $mode ? [ // 连接池名称 - 'db' => [ - // 异步池子,worker进程使用 - 'async' => [ - 'pool' => [ - 'class' => \Imi\Db\Pool\CoroutineDbPool::class, - 'config' => [ - // 池子中最多资源数 - 'maxResources' => 512, - // 池子中最少资源数 - 'minResources' => 16, - 'gcInterval' => null, - 'checkStateWhenGetResource' => false, - 'requestResourceCheckInterval' => 30, - ], + 'mysql' => [ + 'pool' => [ + 'class' => \Imi\Swoole\Db\Pool\CoroutineDbPool::class, + 'config' => [ + // 池子中最多资源数 + 'maxResources' => intval(1024 / swoole_cpu_num()), + // 池子中最少资源数 + 'minResources' => $isMysql ? 16 : 0, + 'gcInterval' => 0, + 'checkStateWhenGetResource' => false, ], - // resource也可以定义多个连接 - 'resource' => $dbResourceConfig, + ], + // resource也可以定义多个连接 + 'resource' => [ + 'host' => $host, + 'username' => $username, + 'password' => $password, + 'database' => 'hello_world', + 'dbClass' => \Imi\Swoole\Db\Driver\Swoole\Driver::class, ], ], - 'redis' => [ - 'pool' => [ - // 协程池类名 - 'asyncClass' => \Imi\Redis\CoroutineRedisPool::class, - 'config' => [ + 'pgsql' => [ + 'pool' => [ + 'class' => \Imi\Swoole\Db\Pool\CoroutineDbPool::class, + 'config' => [ // 池子中最多资源数 - 'maxResources' => 512, + 'maxResources' => intval(1024 / swoole_cpu_num()), // 池子中最少资源数 - 'minResources' => 0, - 'gcInterval' => null, + 'minResources' => $isMysql ? 
0 : 16, 'checkStateWhenGetResource' => false, - 'requestResourceCheckInterval' => 30, ], ], - // 数组资源配置 - 'resource' => [ - 'host' => '127.0.0.1', - 'port' => 6379, - // 是否自动序列化变量 - 'serialize' => true, - // 密码 - 'password' => null, - // 第几个库 - 'db' => 0, + // resource也可以定义多个连接 + 'resource' => [ + 'host' => $host, + 'username' => $username, + 'password' => $password, + 'database' => 'hello_world', + 'dbClass' => \Imi\Pgsql\Db\Drivers\Swoole\Driver::class, ], ], - ], + ] : [], ]; diff --git a/frameworks/PHP/imi/imi-raw.dockerfile b/frameworks/PHP/imi/imi-raw.dockerfile deleted file mode 100644 index 2bd4cba6606..00000000000 --- a/frameworks/PHP/imi/imi-raw.dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -FROM php:8.0-cli - -RUN pecl install swoole > /dev/null && \ - docker-php-ext-enable swoole - -RUN docker-php-ext-install bcmath pdo_mysql opcache > /dev/null - -RUN apt -yqq update > /dev/null && \ - apt -yqq install git unzip > /dev/null - -RUN echo "opcache.enable_cli=On" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit=Off" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini - -COPY . /imi -COPY php.ini /usr/local/etc/php/ - -WORKDIR /imi - -RUN chmod -R ug+rwx /imi/.runtime - -RUN curl -sSL https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer -RUN composer install --no-dev --classmap-authoritative --quiet > /dev/null -RUN composer dumpautoload -o - -EXPOSE 8080 - -CMD php vendor/bin/imi server/start diff --git a/frameworks/PHP/imi/imi-swoole-pgsql.dockerfile b/frameworks/PHP/imi/imi-swoole-pgsql.dockerfile new file mode 100644 index 00000000000..2118e2c0e23 --- /dev/null +++ b/frameworks/PHP/imi/imi-swoole-pgsql.dockerfile @@ -0,0 +1,37 @@ +FROM php:8.0-cli + +ENV SWOOLE_VERSION 4.8.3 +ENV SWOOLE_POSTGRES 4.8.0 +ARG TFB_TEST_DATABASE +ENV TFB_TEST_DATABASE=${TFB_TEST_DATABASE} + +RUN docker-php-ext-install -j$(nproc) opcache > /dev/null + +RUN apt -yqq update > /dev/null && \ + apt -yqq install git unzip libpq-dev > /dev/null + +RUN cd /tmp && curl -sSL "https://github.com/swoole/swoole-src/archive/v${SWOOLE_VERSION}.tar.gz" | tar xzf - \ + && cd swoole-src-${SWOOLE_VERSION} \ + && phpize && ./configure > /dev/null && make -j > /dev/null && make install > /dev/null \ + && docker-php-ext-enable swoole + +RUN cd /tmp && curl -sSL "https://github.com/swoole/ext-postgresql/archive/v${SWOOLE_POSTGRES}.tar.gz" | tar xzf - \ + && cd ext-postgresql-${SWOOLE_POSTGRES} \ + && phpize && ./configure > /dev/null && make -j > /dev/null && make install > /dev/null \ + && docker-php-ext-enable swoole_postgresql + +COPY . 
/imi +COPY php.ini /usr/local/etc/php/ + +RUN chmod -R ug+rwx /imi/.runtime + +WORKDIR /imi + +RUN curl -sSL https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer +RUN composer install --no-dev --classmap-authoritative --quiet > /dev/null +RUN composer require imiphp/imi-swoole:~2.0 -W +RUN composer dumpautoload -o + +EXPOSE 8080 + +CMD ./run-swoole.sh diff --git a/frameworks/PHP/imi/imi-query-builder.dockerfile b/frameworks/PHP/imi/imi-swoole.dockerfile similarity index 50% rename from frameworks/PHP/imi/imi-query-builder.dockerfile rename to frameworks/PHP/imi/imi-swoole.dockerfile index 2bd4cba6606..46130742c84 100644 --- a/frameworks/PHP/imi/imi-query-builder.dockerfile +++ b/frameworks/PHP/imi/imi-swoole.dockerfile @@ -1,28 +1,31 @@ -FROM php:8.0-cli +FROM php:8.1-cli -RUN pecl install swoole > /dev/null && \ - docker-php-ext-enable swoole +ENV SWOOLE_VERSION 4.8.3 +ARG TFB_TEST_DATABASE +ENV TFB_TEST_DATABASE=${TFB_TEST_DATABASE} -RUN docker-php-ext-install bcmath pdo_mysql opcache > /dev/null +RUN docker-php-ext-install -j$(nproc) opcache > /dev/null RUN apt -yqq update > /dev/null && \ apt -yqq install git unzip > /dev/null -RUN echo "opcache.enable_cli=On" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit=Off" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +RUN pecl update-channels + +RUN pecl install swoole-${SWOOLE_VERSION} > /dev/null && \ + docker-php-ext-enable swoole COPY . /imi COPY php.ini /usr/local/etc/php/ -WORKDIR /imi - RUN chmod -R ug+rwx /imi/.runtime +WORKDIR /imi + RUN curl -sSL https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer RUN composer install --no-dev --classmap-authoritative --quiet > /dev/null +RUN composer require imiphp/imi-swoole:~2.0 -W RUN composer dumpautoload -o EXPOSE 8080 -CMD php vendor/bin/imi server/start +CMD ./run-swoole.sh diff --git a/frameworks/PHP/imi/imi-workerman.dockerfile b/frameworks/PHP/imi/imi-workerman.dockerfile new file mode 100644 index 00000000000..46174f82122 --- /dev/null +++ b/frameworks/PHP/imi/imi-workerman.dockerfile @@ -0,0 +1,30 @@ +FROM php:8.1-cli + +ARG TFB_TEST_DATABASE +ENV TFB_TEST_DATABASE=${TFB_TEST_DATABASE} + +RUN apt -yqq update > /dev/null && \ + apt -yqq install git unzip libevent-dev libssl-dev libpq-dev > /dev/null + +RUN docker-php-ext-install -j$(nproc) opcache mysqli pcntl sockets pdo_pgsql > /dev/null + +RUN pecl update-channels + +RUN pecl install event > /dev/null && \ + echo "extension=event.so" > /usr/local/etc/php/conf.d/event.ini + +COPY . 
/imi +COPY php.ini /usr/local/etc/php/ + +RUN chmod -R ug+rwx /imi/.runtime + +WORKDIR /imi + +RUN curl -sSL https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer +RUN composer install --no-dev --classmap-authoritative --quiet > /dev/null +RUN composer require imiphp/imi-workerman:~2.0 -W +RUN composer dumpautoload -o + +EXPOSE 8080 + +CMD ./run-workerman.sh diff --git a/frameworks/PHP/imi/imi.dockerfile b/frameworks/PHP/imi/imi.dockerfile deleted file mode 100644 index e9c96abbf86..00000000000 --- a/frameworks/PHP/imi/imi.dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -FROM php:8.0-cli - -RUN pecl install swoole > /dev/null && \ - docker-php-ext-enable swoole - -RUN docker-php-ext-install bcmath pdo_mysql opcache > /dev/null - -RUN pecl install redis > /dev/null && \ - docker-php-ext-enable redis - -RUN apt -yqq update > /dev/null && \ - apt -yqq install git unzip > /dev/null - -RUN apt -yqq install redis-server > /dev/null - -RUN echo "opcache.enable_cli=On" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit=Off" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini - -COPY . /imi -COPY php.ini /usr/local/etc/php/ - -WORKDIR /imi -COPY .env-with-redis .env - -RUN chmod -R ug+rwx /imi/.runtime - -RUN curl -sSL https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer -RUN composer install --no-dev --classmap-authoritative --quiet > /dev/null -RUN composer dumpautoload -o - -EXPOSE 8080 - -CMD ./run-with-redis.sh diff --git a/frameworks/PHP/imi/php.ini b/frameworks/PHP/imi/php.ini index 3cf51cccca9..1e894fe2889 100644 --- a/frameworks/PHP/imi/php.ini +++ b/frameworks/PHP/imi/php.ini @@ -1,2 +1,5 @@ +opcache.enable=1 opcache.enable_cli=1 opcache.validate_timestamps=0 +opcache.enable_file_override=1 +opcache.huge_code_pages=1 diff --git a/frameworks/PHP/imi/run-swoole.sh b/frameworks/PHP/imi/run-swoole.sh new file mode 100755 index 00000000000..2e104ceefb2 --- /dev/null +++ b/frameworks/PHP/imi/run-swoole.sh @@ -0,0 +1,2 @@ +#!/bin/bash +php vendor/bin/imi-swoole swoole/start diff --git a/frameworks/PHP/imi/run-with-redis.sh b/frameworks/PHP/imi/run-with-redis.sh deleted file mode 100755 index df89ed1ce46..00000000000 --- a/frameworks/PHP/imi/run-with-redis.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -service redis-server start -php vendor/bin/imi server/start diff --git a/frameworks/PHP/imi/run-workerman.sh b/frameworks/PHP/imi/run-workerman.sh new file mode 100755 index 00000000000..05c6c52fe8b --- /dev/null +++ b/frameworks/PHP/imi/run-workerman.sh @@ -0,0 +1,2 @@ +#!/bin/bash +php vendor/bin/imi-workerman workerman/start diff --git a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile index aae5a85fff5..7fe1f85d92d 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/cliphp.ini /etc/php/8.1/cli/php.ini diff --git 
a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile index d5380e1f564..dde619cb90b 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/cliphp.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/mark/mark.dockerfile b/frameworks/PHP/mark/mark.dockerfile index d88fb7573e4..07da2f90177 100644 --- a/frameworks/PHP/mark/mark.dockerfile +++ b/frameworks/PHP/mark/mark.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/mixphp/mixphp-workerman-mysql.dockerfile b/frameworks/PHP/mixphp/mixphp-workerman-mysql.dockerfile index 6ec439243f6..d2a27876c01 100644 --- a/frameworks/PHP/mixphp/mixphp-workerman-mysql.dockerfile +++ b/frameworks/PHP/mixphp/mixphp-workerman-mysql.dockerfile @@ -8,7 +8,7 @@ RUN apt-get update -yqq && apt-get install -yqq git unzip wget curl build-essent RUN apt-get install -y php8.1-dev libevent-dev > /dev/null RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php-jit.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/mixphp/mixphp-workerman-pgsql.dockerfile b/frameworks/PHP/mixphp/mixphp-workerman-pgsql.dockerfile index b01c5f2db05..e1e2bc87936 100644 --- a/frameworks/PHP/mixphp/mixphp-workerman-pgsql.dockerfile +++ b/frameworks/PHP/mixphp/mixphp-workerman-pgsql.dockerfile @@ -8,7 +8,7 @@ RUN apt-get update -yqq && apt-get install -yqq git unzip wget curl build-essent RUN apt-get install -y php8.1-dev libevent-dev > /dev/null RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php-jit.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/php-ngx/app-pg.php b/frameworks/PHP/php-ngx/app-pg.php new file mode 100644 index 00000000000..c5258a147a7 --- /dev/null +++ b/frameworks/PHP/php-ngx/app-pg.php @@ -0,0 +1,70 @@ +execute([mt_rand(1, 10000)]); + echo json_encode(DbRaw::$random->fetch(), JSON_NUMERIC_CHECK); +} + +function query() +{ + ngx_header_set('Content-Type', 'application/json'); + + $query_count = 1; + $params = (int) ngx::query_args()['q']; + if ($params > 1) { + $query_count = min($params, 500); + } + while ($query_count--) { + DbRaw::$random->execute([mt_rand(1, 10000)]); + $arr[] = 
DbRaw::$random->fetch(); + } + + echo json_encode($arr, JSON_NUMERIC_CHECK); +} + +function update() +{ + ngx_header_set('Content-Type', 'application/json'); + + $query_count = 1; + $params = (int) ngx::query_args()['q']; + if ($params > 1) { + $query_count = min($params, 500); + } + while ($query_count--) { + + DbRaw::$random->execute([mt_rand(1, 10000)]); + $row = DbRaw::$random->fetch(); + $row['randomNumber'] = mt_rand(1, 10000); + + $worlds[] = $row; + } + + DbRaw::update($worlds); + + echo json_encode($worlds, JSON_NUMERIC_CHECK); +} + +function fortune() +{ + ngx_header_set('Content-Type', 'text/html;charset=UTF-8'); + + DbRaw::$fortune->execute(); + + $arr = DbRaw::$fortune->fetchAll(PDO::FETCH_KEY_PAIR); + $arr[0] = 'Additional fortune added at request time.'; + asort($arr); + + $html = ''; + foreach ($arr as $id => $message) { + $message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); + $html .= "<tr><td>$id</td><td>$message</td></tr>"; + } + + echo "<!DOCTYPE html><html><head><title>Fortunes</title></head><body><table><tr><th>id</th><th>message</th></tr>$html</table></body></html>
"; +} diff --git a/frameworks/PHP/php-ngx/dbraw.php b/frameworks/PHP/php-ngx/dbraw.php new file mode 100644 index 00000000000..2a7c8d355c1 --- /dev/null +++ b/frameworks/PHP/php-ngx/dbraw.php @@ -0,0 +1,88 @@ + PDO::FETCH_ASSOC, + PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION, + PDO::ATTR_EMULATE_PREPARES => false + ] + ); + + self::$fortune = $pdo->prepare('SELECT id,message FROM Fortune'); + self::$random = $pdo->prepare('SELECT id,randomNumber FROM World WHERE id = ?'); + self::$instance = $pdo; + } + + /** + * Postgres bulk update + * + * @param array $worlds + * @return void + */ + public static function update(array $worlds) + { + $rows = count($worlds); + + if (!isset(self::$update[$rows])) { + $sql = 'UPDATE world SET randomNumber = CASE id' + . str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $rows) + . 'END WHERE id IN (' + . str_repeat('?::INTEGER,', $rows - 1) . '?::INTEGER)'; + + self::$update[$rows] = self::$instance->prepare($sql); + } + + $val = []; + $keys = []; + foreach ($worlds as $world) { + $val[] = $keys[] = $world['id']; + $val[] = $world['randomNumber']; + } + + self::$update[$rows]->execute([...$val, ...$keys]); + } + + /** + * Alternative bulk update in Postgres + * + * @param array $worlds + * @return void + */ + public static function update2(array $worlds) + { + $rows = count($worlds); + + if (!isset(self::$update[$rows])) { + $sql = 'UPDATE world SET randomNumber = temp.randomNumber FROM (VALUES ' + . implode(', ', array_fill(0, $rows, '(?::INTEGER, ?::INTEGER)')) . + ' ORDER BY 1) AS temp(id, randomNumber) WHERE temp.id = world.id'; + + self::$update[$rows] = self::$instance->prepare($sql); + } + + $val = []; + foreach ($worlds as $world) { + $val[] = $world['id']; + $val[] = $world['randomNumber']; + //$update->bindParam(++$i, $world['id'], PDO::PARAM_INT); + } + + self::$update[$rows]->execute($val); + } +} diff --git a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile index 348aeffa969..7e1c4ab6ed4 100644 --- a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.21.4 +ENV NGINX_VERSION 1.21.6 RUN git clone -b v0.0.26 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile index a8a3f9e8f10..07735149280 100644 --- a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.21.4 +ENV NGINX_VERSION 1.21.6 RUN git clone -b v0.0.26 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile index 7dd38615d28..33179c85b3e 100644 --- a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.21.4 +ENV NGINX_VERSION 1.21.6 RUN git clone -b v0.0.26 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null @@ -26,7 +26,7 @@ RUN wget -q http://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz && \ --add-module=/ngx_php7 > /dev/null && \ make > /dev/null && make install > /dev/null -RUN sed -i "s|mysql:|pgsql:|g" 
/app.php +RUN sed -i "s|app.php|app-pg.php|g" /deploy/nginx.conf RUN export WORKERS=$(( 4 * $(nproc) )) && \ sed -i "s|worker_processes auto|worker_processes $WORKERS|g" /deploy/nginx.conf diff --git a/frameworks/PHP/php-ngx/php-ngx.dockerfile b/frameworks/PHP/php-ngx/php-ngx.dockerfile index 972a2205716..355a5e9ff0c 100644 --- a/frameworks/PHP/php-ngx/php-ngx.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ php8.1-cli php8.1-dev libphp8.1-embed php8.1-mysql nginx > /dev/null ADD ./ ./ -ENV NGINX_VERSION 1.21.4 +ENV NGINX_VERSION 1.21.6 RUN git clone -b v0.0.26 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php/benchmark_config.json b/frameworks/PHP/php/benchmark_config.json index 01c63c095b4..d3c8754e91c 100644 --- a/frameworks/PHP/php/benchmark_config.json +++ b/frameworks/PHP/php/benchmark_config.json @@ -12,7 +12,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Raw", @@ -35,7 +35,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Raw", @@ -56,7 +56,7 @@ "approach": "Realistic", "classification": "Platform", "database": "Postgres", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Raw", @@ -79,7 +79,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Raw", @@ -102,9 +102,9 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8.1", "orm": "Raw", "platform": "Unit Nginx", "webserver": "None", @@ -125,7 +125,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Raw", @@ -146,7 +146,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Full", @@ -165,7 +165,7 @@ "approach": "Realistic", "classification": "Platform", "database": "MySQL", - "framework": "None", + "framework": "PHP", "language": "PHP", "flavor": "PHP8.1", "orm": "Micro", diff --git a/frameworks/PHP/php/deploy/nginx-unit.json b/frameworks/PHP/php/deploy/nginx-unit.json index e9984ab93ea..b5bc980d07b 100644 --- a/frameworks/PHP/php/deploy/nginx-unit.json +++ b/frameworks/PHP/php/deploy/nginx-unit.json @@ -7,7 +7,7 @@ "applications": { "benchmark": { - "type": "php 7", + "type": "php", "processes": 84, "user": "www-data", "group": "www-data", diff --git a/frameworks/PHP/php/php-unit.dockerfile b/frameworks/PHP/php/php-unit.dockerfile index 480ff2b4abc..2461192ab6d 100644 --- a/frameworks/PHP/php/php-unit.dockerfile +++ b/frameworks/PHP/php/php-unit.dockerfile @@ -1,29 +1,13 @@ -FROM ubuntu:20.04 +FROM nginx/unit:1.27.0-php8.1 -ARG DEBIAN_FRONTEND=noninteractive - -RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null -#RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php -RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq curl php-mysql > /dev/null - -RUN curl 
https://nginx.org/keys/nginx_signing.key | apt-key add - \ - && add-apt-repository "deb https://packages.nginx.org/unit/ubuntu/ focal unit" -s \ - && apt-get -y update \ - && apt-get -y install unit unit-php - -ADD ./ /php +ADD . /php WORKDIR /php -# forward log to docker log collector -#RUN ln -sf /dev/stdout /var/log/unit.log - -# RUN if [ $(nproc) = 2 ]; then sed -i "s|\"processes\": 128,|\"processes\": 64,|g" /php/deploy/nginx-unit.json ; fi; +RUN docker-php-ext-install pdo_mysql opcache > /dev/null +RUN if [ $(nproc) = 2 ]; then sed -i "s|\"processes\": 84,|\"processes\": 64,|g" /php/deploy/nginx-unit.json ; fi; -RUN unitd && \ - curl -X PUT --data-binary @/php/deploy/nginx-unit.json --unix-socket \ - /var/run/control.unit.sock http://localhost/config +COPY deploy/nginx-unit.json /docker-entrypoint.d/nginx-unit.json EXPOSE 8080 -CMD unitd --no-daemon +CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] diff --git a/frameworks/PHP/reactphp/composer.json b/frameworks/PHP/reactphp/composer.json index fa459501ad9..fdea490c2d2 100644 --- a/frameworks/PHP/reactphp/composer.json +++ b/frameworks/PHP/reactphp/composer.json @@ -3,7 +3,7 @@ "php": ">=5.3.0", "psr/http-message": "^1.0", "react/event-loop": "^1.2", - "react/http": "^1.4", - "react/socket": "^1.8" + "react/http": "^1.6", + "react/socket": "^1.11" } } diff --git a/frameworks/PHP/reactphp/reactphp.dockerfile b/frameworks/PHP/reactphp/reactphp.dockerfile index d85b00e4534..7e31f407da5 100644 --- a/frameworks/PHP/reactphp/reactphp.dockerfile +++ b/frameworks/PHP/reactphp/reactphp.dockerfile @@ -14,7 +14,7 @@ RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar #RUN apt-get install -y libuv1-dev > /dev/null RUN apt-get install -y libevent-dev > /dev/null #RUN pecl install uv-0.2.4 > /dev/null && echo "extension=uv.so" > /etc/php/8.1/cli/conf.d/uv.ini -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini ADD ./ /reactphp WORKDIR /reactphp diff --git a/frameworks/PHP/spiral/.env b/frameworks/PHP/spiral/.env index ca57b15e3fe..44cd7106607 100644 --- a/frameworks/PHP/spiral/.env +++ b/frameworks/PHP/spiral/.env @@ -1,5 +1,6 @@ # Debug mode disabled view cache and enabled higher verbosity. -DEBUG = false +DEBUG=false +DB_DSN=mysql:host=tfb-database:3306;charset=utf8;dbname=hello_world;user=benchmarkdbuser;password=benchmarkdbpass # Set to application specific value, used to encrypt/decrypt cookies and etc. 
-ENCRYPTER_KEY = def00000f6f989c4ba99b5eec3dcd4f5b0fb7e5fbbf95d3cacb6b7ed049e22b4a931db7ad59085225b36d051fb06530c8a41b83d10761439326656536293473c2472d911 \ No newline at end of file +ENCRYPTER_KEY=def00000f6f989c4ba99b5eec3dcd4f5b0fb7e5fbbf95d3cacb6b7ed049e22b4a931db7ad59085225b36d051fb06530c8a41b83d10761439326656536293473c2472d911 diff --git a/frameworks/PHP/spiral/.rr.yaml b/frameworks/PHP/spiral/.rr.yaml index f983cd6120c..325329d3756 100644 --- a/frameworks/PHP/spiral/.rr.yaml +++ b/frameworks/PHP/spiral/.rr.yaml @@ -1,4 +1,15 @@ +version: '2.7' + +rpc: + listen: tcp://127.0.0.1:6001 + +server: + command: "php app.php" + relay: pipes + http: - address: :8080 - workers: - command: "php app.php" \ No newline at end of file + address: 0.0.0.0:8080 + +logs: + mode: production + level: error diff --git a/frameworks/PHP/spiral/README.md b/frameworks/PHP/spiral/README.md index df6f67599e0..4e3f1bf0704 100644 --- a/frameworks/PHP/spiral/README.md +++ b/frameworks/PHP/spiral/README.md @@ -8,9 +8,9 @@ Benchmark code is located in `app/src/Controllers/BenchmarkController.php`. The tests were run with: * [Spiral Framework Version 2](https://github.com/spiral/framework/) * [Spiral/Stempler](https://github.com/spiral/stempler) as template engine -* [Cycle ORM 1.1.*](https://github.com/cycle/orm) -* [RoadRunner 1.4.*](https://roadrunner.dev/) -* [PHP Version 7.4.*](http://www.php.net/) in CLI mode with OPCache +* [Cycle ORM 2.*](https://github.com/cycle/orm) +* [RoadRunner 2.*](https://roadrunner.dev/) +* [PHP Version 8.0.*](http://www.php.net/) in CLI mode with OPCache ## Test URLs Test | URL @@ -20,4 +20,4 @@ Data-Store/Database | http://localhost:8080/db Variable Query | http://localhost:8080/db/:queries Templating and ORM | http://localhost:8080/fortunes Update ORM | http://localhost:8080/updates/:queries -Plain Text | http://localhost:8080/plaintext \ No newline at end of file +Plain Text | http://localhost:8080/plaintext diff --git a/frameworks/PHP/spiral/app.php b/frameworks/PHP/spiral/app.php index 751bec60053..69043e58e91 100644 --- a/frameworks/PHP/spiral/app.php +++ b/frameworks/PHP/spiral/app.php @@ -17,6 +17,7 @@ //Initiating shared container, bindings, directories and etc $app = \App\App::init(['root' => __DIR__]); -if ($app != null) { - $app->serve(); -} \ No newline at end of file +if ($app !== null) { + $code = (int)$app->serve(); + exit($code); +} diff --git a/frameworks/PHP/spiral/app/config/database.php b/frameworks/PHP/spiral/app/config/database.php index bed66fe517b..571fb162eb1 100644 --- a/frameworks/PHP/spiral/app/config/database.php +++ b/frameworks/PHP/spiral/app/config/database.php @@ -1,4 +1,5 @@ 'default', + 'default' => 'default', 'databases' => [ 'default' => ['driver' => 'mysql'], ], - 'drivers' => [ - 'mysql' => [ - 'driver' => Driver\MySQL\MySQLDriver::class, - 'connection' => 'mysql:host=tfb-database:3306;charset=utf8;dbname=hello_world', - 'username' => 'benchmarkdbuser', - 'password' => 'benchmarkdbpass', - ], - ] -]; \ No newline at end of file + 'drivers' => [ + 'mysql' => new Config\MySQLDriverConfig( + connection: new Config\MySQL\DsnConnectionConfig(env('DB_DSN')), + queryCache: true + ), + ], +]; diff --git a/frameworks/PHP/spiral/app/src/App.php b/frameworks/PHP/spiral/app/src/App.php index 98846d2cfc8..c2f3d8c5726 100644 --- a/frameworks/PHP/spiral/app/src/App.php +++ b/frameworks/PHP/spiral/app/src/App.php @@ -15,7 +15,10 @@ use Spiral\DotEnv\Bootloader as DotEnv; use Spiral\Framework\Kernel; use Spiral\Nyholm\Bootloader 
as Nyholm; +use Spiral\Cycle\Bootloader as CycleBridge; +use Spiral\RoadRunnerBridge\Bootloader as RoadRunnerBridge; use Spiral\Stempler\Bootloader as Stempler; +use Spiral\Scaffolder\Bootloader as Scaffolder; class App extends Kernel { @@ -37,7 +40,8 @@ class App extends Kernel Bootloader\Security\FiltersBootloader::class, Bootloader\Security\GuardBootloader::class, - Bootloader\Http\HttpBootloader::class, + RoadRunnerBridge\HttpBootloader::class, + DebugBootloader::class, // HTTP extensions @@ -46,18 +50,23 @@ class App extends Kernel Bootloader\Http\ErrorHandlerBootloader::class, // Databases - Bootloader\Database\DatabaseBootloader::class, - Bootloader\Database\MigrationsBootloader::class, + CycleBridge\DatabaseBootloader::class, + CycleBridge\MigrationsBootloader::class, // ORM - Bootloader\Cycle\CycleBootloader::class, - Bootloader\Cycle\AnnotatedBootloader::class, + CycleBridge\SchemaBootloader::class, + CycleBridge\CycleOrmBootloader::class, + CycleBridge\AnnotatedBootloader::class, + CycleBridge\CommandBootloader::class, // Template engine Stempler\StemplerBootloader::class, + Scaffolder\ScaffolderBootloader::class, + // Framework commands - Bootloader\CommandBootloader::class + Bootloader\CommandBootloader::class, + RoadRunnerBridge\CommandBootloader::class, ]; /* @@ -66,4 +75,4 @@ class App extends Kernel protected const APP = [ RoutesBootloader::class, ]; -} \ No newline at end of file +} diff --git a/frameworks/PHP/spiral/app/src/Model/Fortune.php b/frameworks/PHP/spiral/app/src/Model/Fortune.php index 18762a7d1d1..0dd23c045cb 100644 --- a/frameworks/PHP/spiral/app/src/Model/Fortune.php +++ b/frameworks/PHP/spiral/app/src/Model/Fortune.php @@ -9,28 +9,24 @@ namespace App\Model; +use App\Model\Repository; use Cycle\Annotated\Annotation\Column; use Cycle\Annotated\Annotation\Entity; -/** - * @Entity( - * table="Fortune", - * repository="Repository/FortuneRepository" - * ) - */ +#[Entity(table: 'Fortune', repository: Repository\FortuneRepository::class)] class Fortune implements \JsonSerializable { - /** @Column(type="primary") */ + #[Column(type: 'primary')] public $id; - /** @Column(type="text") */ + #[Column(type: 'text')] public $message; /** - * @return array|mixed + * @return array */ - public function jsonSerialize() + public function jsonSerialize(): mixed { return ['id' => $this->id, 'message' => $this->message]; } -} \ No newline at end of file +} diff --git a/frameworks/PHP/spiral/app/src/Model/World.php b/frameworks/PHP/spiral/app/src/Model/World.php index 27ad9bb537d..ccf0d90c5ee 100644 --- a/frameworks/PHP/spiral/app/src/Model/World.php +++ b/frameworks/PHP/spiral/app/src/Model/World.php @@ -9,28 +9,24 @@ namespace App\Model; +use App\Model\Repository; use Cycle\Annotated\Annotation\Column; use Cycle\Annotated\Annotation\Entity; -/** - * @Entity( - * table="World", - * repository="Repository/WorldRepository" - * ) - */ +#[Entity(table: 'World', repository: Repository\WorldRepository::class)] class World implements \JsonSerializable { - /** @Column(type="primary") */ + #[Column(type: 'primary')] public $id; - /** @Column(type="int", name="randomNumber") */ + #[Column(type: 'int', name: 'randomNumber')] public $randomNumber; /** - * @return array|mixed + * @return array */ - public function jsonSerialize() + public function jsonSerialize(): mixed { return ['id' => $this->id, 'randomNumber' => $this->randomNumber]; } -} \ No newline at end of file +} diff --git a/frameworks/PHP/spiral/composer.json b/frameworks/PHP/spiral/composer.json index c4a36e47461..d7841690ba0 
100644 --- a/frameworks/PHP/spiral/composer.json +++ b/frameworks/PHP/spiral/composer.json @@ -9,26 +9,11 @@ } ], "require": { - "php": ">=7.1", - "spiral/framework": "^2.0", - "spiral/debug": "^1.3", - "spiral/snapshots": "^1.0", - "spiral/console": "^1.2", - "spiral/http": "^1.1", - "spiral/router": "^1.1", - "spiral/roadrunner": "^1.4", - "spiral/security": "^2.1", - "spiral/validation": "^1.2", - "spiral/filters": "^1.2", - "spiral/database": "^2.3", - "spiral/migrations": "^2.0", - "cycle/orm": "^1.0.10", - "cycle/proxy-factory": "^1.0", - "cycle/annotated": "^2.0", - "cycle/migrations": "^1.0", - "spiral/dotenv-bridge": "^1.0", + "php": ">=8.0", + "spiral/framework": "^2.9", "spiral/nyholm-bridge": "^1.0", - "spiral/stempler-bridge": "^1.0" + "spiral/cycle-bridge": "^1.0", + "spiral/roadrunner-bridge": "^1.0" }, "scripts": { "post-create-project-cmd": [ @@ -42,5 +27,10 @@ "psr-4": { "App\\": "app/src/" } + }, + "config": { + "allow-plugins": { + "spiral/composer-publish-plugin": true + } } } diff --git a/frameworks/PHP/spiral/spiral.dockerfile b/frameworks/PHP/spiral/spiral.dockerfile index c04b30ee978..1a5d80c0bb0 100644 --- a/frameworks/PHP/spiral/spiral.dockerfile +++ b/frameworks/PHP/spiral/spiral.dockerfile @@ -1,7 +1,11 @@ -FROM php:7.4 +FROM php:8.1.2 RUN docker-php-ext-install pdo_mysql > /dev/null +# Workaround solution for installing ext-sockets for PHP 8.0 +# See https://github.com/docker-library/php/issues/1245 +RUN CFLAGS="$CFLAGS -D_GNU_SOURCE" docker-php-ext-install sockets > /dev/null + ADD ./ /spiral WORKDIR /spiral @@ -11,12 +15,12 @@ RUN chmod +x /usr/local/etc/php/install-composer.sh && /usr/local/etc/php/instal # install dependencies RUN apt-get update -yqq > /dev/null && apt-get install -yqq git unzip > /dev/null -RUN php composer.phar install --optimize-autoloader --classmap-authoritative --no-dev --quiet +RUN php composer.phar install --optimize-autoloader --classmap-authoritative --no-dev # pre-configure -RUN ./vendor/bin/spiral get > /dev/null 2>&1 +RUN ./vendor/bin/rr get-binary > /dev/null 2>&1 RUN php app.php configure > /dev/null 2>&1 EXPOSE 8080 -CMD php app.php up > /dev/null 2>&1 && ./spiral serve -o "http.workers.pool.numWorkers = 64" +CMD php app.php up > /dev/null 2>&1 && ./rr serve -o "http.pool.num_workers = 64" diff --git a/frameworks/PHP/symfony/benchmark_config.json b/frameworks/PHP/symfony/benchmark_config.json index bcb6a450e4e..edebc55f6d7 100644 --- a/frameworks/PHP/symfony/benchmark_config.json +++ b/frameworks/PHP/symfony/benchmark_config.json @@ -65,7 +65,8 @@ "database_os": "Linux", "display_name": "symfony-swoole", "notes": "", - "versus": "swoole" + "versus": "swoole", + "tags": ["broken"] } }] } diff --git a/frameworks/PHP/symfony/composer.json b/frameworks/PHP/symfony/composer.json index e771da8dcae..b4b2fe2c09b 100644 --- a/frameworks/PHP/symfony/composer.json +++ b/frameworks/PHP/symfony/composer.json @@ -20,6 +20,11 @@ "preferred-install": { "*": "dist" }, + "allow-plugins": { + "symfony/flex": true, + "symfony/runtime": true, + "composer/package-versions-deprecated": true + }, "sort-packages": true }, "autoload": { diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile index 2b5daf06383..437cc2fdc26 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile +++ b/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer 
/usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/php-async.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile index 9fa263f2315..47d2262ea91 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile +++ b/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/php-async.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile index 84081c2df42..f095ab239f8 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile +++ b/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/php-async.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile index 9a5f19ab32a..5242c94ffbc 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile +++ b/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY deploy/conf/php-async.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/webman/composer.json b/frameworks/PHP/webman/composer.json index fca70845573..0b583513d9b 100644 --- a/frameworks/PHP/webman/composer.json +++ b/frameworks/PHP/webman/composer.json @@ -33,6 +33,10 @@ "ext-event": "For better performance. 
" }, "autoload": { + "psr-4": { + "": "./", + "App\\": "./app" + }, "files": [ "./support/helpers.php" ] diff --git a/frameworks/PHP/webman/config/bootstrap.php b/frameworks/PHP/webman/config/bootstrap.php index 226ffb8ac9b..a2d57a3020c 100644 --- a/frameworks/PHP/webman/config/bootstrap.php +++ b/frameworks/PHP/webman/config/bootstrap.php @@ -13,8 +13,6 @@ */ return [ - support\bootstrap\Container::class, - support\bootstrap\Log::class, support\bootstrap\db\Raw::class, support\bootstrap\Date::class, ]; diff --git a/frameworks/PHP/webman/config/server.php b/frameworks/PHP/webman/config/server.php index 6b5cc249820..39f256488ef 100644 --- a/frameworks/PHP/webman/config/server.php +++ b/frameworks/PHP/webman/config/server.php @@ -20,8 +20,9 @@ 'count' => cpu_count() * 4, 'user' => '', 'group' => '', - 'pid_file' => runtime_path() . '/webman.pid', - 'max_request' => 10000000000, - 'stdout_file' => runtime_path() . '/logs/stdout.log', + 'pid_file' => runtime_path() . '/webman.pid', + 'status_file' => runtime_path() . '/webman.status', + 'stdout_file' => runtime_path() . '/logs/stdout.log', + 'log_file' => runtime_path() . '/logs/workerman.log', 'max_package_size' => 10*1024*1024 -]; \ No newline at end of file +]; diff --git a/frameworks/PHP/webman/start.php b/frameworks/PHP/webman/start.php index 4e5d365650d..489e4470a52 100644 --- a/frameworks/PHP/webman/start.php +++ b/frameworks/PHP/webman/start.php @@ -1,123 +1,4 @@ +#!/usr/bin/env php $property = $config[$property]; - } -} - -$worker->onWorkerStart = function ($worker) { - Config::reload(config_path(), ['route', 'container']); - foreach (config('bootstrap', []) as $class_name) { - /** @var \Webman\Bootstrap $class_name */ - $class_name::start($worker); - } - $app = new App($worker, Container::instance(), Log::channel('default'), app_path(), public_path()); - Route::load(config_path() . '/route.php'); - Middleware::load(config('middleware', [])); - Middleware::load(['__static__' => config('static.middleware', [])]); - Http::requestClass(Request::class); - - $worker->onMessage = [$app, 'onMessage']; -}; - - -foreach (config('process', []) as $process_name => $config) { - $worker = new Worker($config['listen'] ?? null, $config['context'] ?? []); - $property_map = [ - 'count', - 'user', - 'group', - 'reloadable', - 'reusePort', - 'transport', - 'protocol', - ]; - $worker->name = $process_name; - foreach ($property_map as $property) { - if (isset($config[$property])) { - $worker->$property = $config[$property]; - } - } - - $worker->onWorkerStart = function ($worker) use ($config) { - Config::reload(config_path(), ['route']); - - $bootstrap = $config['bootstrap'] ?? config('bootstrap', []); - if (!in_array(support\bootstrap\Log::class, $bootstrap)) { - $bootstrap[] = support\bootstrap\Log::class; - } - foreach ($bootstrap as $class_name) { - /** @var \Webman\Bootstrap $class_name */ - $class_name::start($worker); - } - - foreach ($config['services'] ?? [] as $server) { - if (!class_exists($server['handler'])) { - echo "process error: class {$config['handler']} not exists\r\n"; - continue; - } - $listen = new Worker($server['listen'] ?? null, $server['context'] ?? []); - if (isset($server['listen'])) { - echo "listen: {$server['listen']}\n"; - } - $class = Container::make($server['handler'], $server['constructor'] ?? 
[]); - worker_bind($listen, $class); - $listen->listen(); - } - - if (isset($config['handler'])) { - if (!class_exists($config['handler'])) { - echo "process error: class {$config['handler']} not exists\r\n"; - return; - } - - $class = Container::make($config['handler'], $config['constructor'] ?? []); - worker_bind($worker, $class); - } - - }; -} - - -Worker::runAll(); \ No newline at end of file +support\App::run(); diff --git a/frameworks/PHP/webman/support/bootstrap.php b/frameworks/PHP/webman/support/bootstrap.php new file mode 100644 index 00000000000..652e0530673 --- /dev/null +++ b/frameworks/PHP/webman/support/bootstrap.php @@ -0,0 +1,132 @@ + + * @copyright walkor + * @link http://www.workerman.net/ + * @license http://www.opensource.org/licenses/mit-license.php MIT License + */ + +use Dotenv\Dotenv; +use support\Log; +use Webman\Bootstrap; +use Webman\Config; +use Webman\Route; +use Webman\Middleware; +use Webman\Util; + +$worker = $worker ?? null; + +if ($timezone = config('app.default_timezone')) { + date_default_timezone_set($timezone); +} + +set_error_handler(function ($level, $message, $file = '', $line = 0) { + if (error_reporting() & $level) { + throw new ErrorException($message, 0, $level, $file, $line); + } +}); + +if ($worker) { + register_shutdown_function(function ($start_time) { + if (time() - $start_time <= 1) { + sleep(1); + } + }, time()); +} + +if (class_exists('Dotenv\Dotenv') && file_exists(base_path() . '/.env')) { + if (method_exists('Dotenv\Dotenv', 'createUnsafeImmutable')) { + Dotenv::createUnsafeImmutable(base_path())->load(); + } else { + Dotenv::createMutable(base_path())->load(); + } +} + +Support\App::loadAllConfig(['route']); + +foreach (config('autoload.files', []) as $file) { + include_once $file; +} +foreach (config('plugin', []) as $firm => $projects) { + foreach ($projects as $name => $project) { + if (!is_array($project)) { + continue; + } + foreach ($project['autoload']['files'] ?? [] as $file) { + include_once $file; + } + } + foreach ($projects['autoload']['files'] ?? [] as $file) { + include_once $file; + } +} + +Middleware::load(config('middleware', []), ''); +foreach (config('plugin', []) as $firm => $projects) { + foreach ($projects as $name => $project) { + if (!is_array($project) || $name === 'static') { + continue; + } + Middleware::load($project['middleware'] ?? [], ''); + } + Middleware::load($projects['middleware'] ?? [], $firm); + if ($static_middlewares = config("plugin.$firm.static.middleware")) { + Middleware::load(['__static__' => $static_middlewares], $firm); + } +} +Middleware::load(['__static__' => config('static.middleware', [])], ''); + +foreach (config('bootstrap', []) as $class_name) { + if (!class_exists($class_name)) { + $log = "Warning: Class $class_name setting in config/bootstrap.php not found\r\n"; + echo $log; + Log::error($log); + continue; + } + /** @var Bootstrap $class_name */ + $class_name::start($worker); +} + +foreach (config('plugin', []) as $firm => $projects) { + foreach ($projects as $name => $project) { + if (!is_array($project)) { + continue; + } + foreach ($project['bootstrap'] ?? [] as $class_name) { + if (!class_exists($class_name)) { + $log = "Warning: Class $class_name setting in config/plugin/$firm/$name/bootstrap.php not found\r\n"; + echo $log; + Log::error($log); + continue; + } + /** @var Bootstrap $class_name */ + $class_name::start($worker); + } + } + foreach ($projects['bootstrap'] ?? 
[] as $class_name) { + if (!class_exists($class_name)) { + $log = "Warning: Class $class_name setting in plugin/$firm/config/bootstrap.php not found\r\n"; + echo $log; + Log::error($log); + continue; + } + /** @var Bootstrap $class_name */ + $class_name::start($worker); + } +} + +$directory = base_path() . '/plugin'; +$paths = [config_path()]; +foreach (Util::scanDir($directory) as $path) { + if (is_dir($path = "$path/config")) { + $paths[] = $path; + } +} +Route::load($paths); + diff --git a/frameworks/PHP/webman/support/helpers.php b/frameworks/PHP/webman/support/helpers.php index f9e043ce46c..564bcd18cdd 100644 --- a/frameworks/PHP/webman/support/helpers.php +++ b/frameworks/PHP/webman/support/helpers.php @@ -14,20 +14,36 @@ use support\Request; use support\Response; +use support\Translation; +use support\Container; use support\view\Raw; -use support\bootstrap\Translation; +use support\view\Blade; +use support\view\ThinkPHP; +use support\view\Twig; +use Workerman\Worker; use Webman\App; use Webman\Config; -use Webman\Exception\ClassNotFoundException; +use Webman\Route; -define('BASE_PATH', realpath(__DIR__ . '/../')); +// Phar support. +if (\is_phar()) { + \define('BASE_PATH', dirname(__DIR__)); +} else { + \define('BASE_PATH', realpath(__DIR__ . '/../')); +} +\define('WEBMAN_VERSION', '1.4'); /** - * @return string + * @param $return_phar + * @return false|string */ -function base_path() +function base_path(bool $return_phar = true) { - return BASE_PATH; + static $real_path = ''; + if (!$real_path) { + $real_path = \is_phar() ? \dirname(Phar::running(false)) : BASE_PATH; + } + return $return_phar ? BASE_PATH : $real_path; } /** @@ -43,7 +59,11 @@ function app_path() */ function public_path() { - return BASE_PATH . DIRECTORY_SEPARATOR . 'public'; + static $path = ''; + if (!$path) { + $path = \config('app.public_path', BASE_PATH . DIRECTORY_SEPARATOR . 'public'); + } + return $path; } /** @@ -55,11 +75,18 @@ function config_path() } /** + * Phar support. + * Compatible with the 'realpath' function in the phar file. + * * @return string */ function runtime_path() { - return BASE_PATH . DIRECTORY_SEPARATOR . 'runtime'; + static $path = ''; + if (!$path) { + $path = \config('app.runtime_path', BASE_PATH . DIRECTORY_SEPARATOR . 
'runtime'); + } + return $path; } /** @@ -68,7 +95,7 @@ function runtime_path() * @param string $body * @return Response */ -function response($body = '', $status = 200, $headers = array()) +function response($body = '', $status = 200, $headers = []) { return new Response($status, $headers, $body); } @@ -80,7 +107,7 @@ function response($body = '', $status = 200, $headers = array()) */ function json($data, $options = JSON_UNESCAPED_UNICODE) { - return new Response(200, ['Content-Type' => 'application/json'], json_encode($data, $options)); + return new Response(200, ['Content-Type' => 'application/json'], \json_encode($data, $options)); } /** @@ -102,19 +129,19 @@ function xml($xml) */ function jsonp($data, $callback_name = 'callback') { - if (!is_scalar($data) && null !== $data) { - $data = json_encode($data); + if (!\is_scalar($data) && null !== $data) { + $data = \json_encode($data); } return new Response(200, [], "$callback_name($data)"); } /** - * @param $location + * @param string $location * @param int $status * @param array $headers * @return Response */ -function redirect($location, $status = 302, $headers = []) +function redirect(string $location, int $status = 302, array $headers = []) { $response = new Response($status, ['Location' => $location]); if (!empty($headers)) { @@ -127,17 +154,61 @@ function redirect($location, $status = 302, $headers = []) * @param $template * @param array $vars * @param null $app - * @return string + * @return Response */ -function view($template, $vars = [], $app = null) +function view(string $template, array $vars = [], string $app = null) { - static $handler; - if (null === $handler) { - $handler = config('view.handler'); - } + $request = \request(); + $plugin = $request->plugin ?? ''; + $handler = \config($plugin ? 
"plugin.$plugin.view.handler" : 'view.handler'); return new Response(200, [], $handler::render($template, $vars, $app)); } +/** + * @param string $template + * @param array $vars + * @param string|null $app + * @return Response + * @throws Throwable + */ +function raw_view(string $template, array $vars = [], string $app = null) +{ + return new Response(200, [], Raw::render($template, $vars, $app)); +} + +/** + * @param string $template + * @param array $vars + * @param string|null $app + * @return Response + */ +function blade_view(string $template, array $vars = [], string $app = null) +{ + return new Response(200, [], Blade::render($template, $vars, $app)); +} + +/** + * @param string $template + * @param array $vars + * @param string|null $app + * @return Response + */ +function think_view(string $template, array $vars = [], string $app = null) +{ + return new Response(200, [], ThinkPHP::render($template, $vars, $app)); +} + +/** + * @param string $template + * @param array $vars + * @param string|null $app + * @return Response + */ +function twig_view(string $template, array $vars = [], string $app = null) +{ + return new Response(200, [], Twig::render($template, $vars, $app)); +} + /** * @return Request */ @@ -147,54 +218,69 @@ function request() } /** - * @param $key - * @param null $default - * @return mixed + * @param string|null $key + * @param $default + * @return array|mixed|null */ -function config($key = null, $default = null) +function config(string $key = null, $default = null) { return Config::get($key, $default); } -if (!function_exists('env')) { - /** - * @param $key - * @param null $default - * @return array|bool|false|mixed|string - */ - function env($key, $default = null) - { - $value = getenv($key); - - if ($value === false) { - return $default; - } +/** + * @param string $name + * @param ...$parameters + * @return string + */ +function route(string $name, ...$parameters) +{ + $route = Route::getByName($name); + if (!$route) { + return ''; + } - switch (strtolower($value)) { - case 'true': - case '(true)': - return true; - case 'false': - case '(false)': - return false; - case 'empty': - case '(empty)': - return ''; - case 'null': - case '(null)': - return null; - } + if (!$parameters) { + return $route->url(); + } - if (($valueLength = strlen($value)) > 1 && $value[0] === '"' && $value[$valueLength - 1] === '"') { - return substr($value, 1, -1); - } + if (\is_array(\current($parameters))) { + $parameters = \current($parameters); + } + return $route->url($parameters); +} + +/** + * @param mixed $key + * @param mixed $default + * @return mixed + */ +function session($key = null, $default = null) +{ + $session = \request()->session(); + if (null === $key) { + return $session; + } + if (\is_array($key)) { + $session->put($key); + return null; + } + if (\strpos($key, '.')) { + $key_array = \explode('.', $key); + $value = $session->all(); + foreach ($key_array as $index) { + if (!isset($value[$index])) { + return $default; + } + $value = $value[$index]; + } return $value; } + return $session->get($key, $default); } /** - * @param null|string $id + * @param string $id * @param array $parameters * @param string|null $domain * @param string|null $locale @@ -202,14 +288,15 @@ function env($key, $default = null) */ function trans(string $id, array $parameters = [], string $domain = null, string $locale = null) { - return Translation::trans($id, $parameters, $domain, $locale); + $res = Translation::trans($id, $parameters, $domain, $locale); + return $res === '' ? 
$id : $res; } /** * @param null|string $locale * @return string */ -function locale(string $locale) +function locale(string $locale = null) { if (!$locale) { return Translation::getLocale(); @@ -217,11 +304,65 @@ function locale(string $locale) Translation::setLocale($locale); } +/** + * 404 not found + * + * @return Response + */ +function not_found() +{ + return new Response(404, [], \file_get_contents(public_path() . '/404.html')); +} + +/** + * Copy dir. + * + * @param string $source + * @param string $dest + * @param bool $overwrite + * @return void + */ +function copy_dir(string $source, string $dest, bool $overwrite = false) +{ + if (\is_dir($source)) { + if (!is_dir($dest)) { + \mkdir($dest); + } + $files = \scandir($source); + foreach ($files as $file) { + if ($file !== "." && $file !== "..") { + \copy_dir("$source/$file", "$dest/$file"); + } + } + } else if (\file_exists($source) && ($overwrite || !\file_exists($dest))) { + \copy($source, $dest); + } +} + +/** + * Remove dir. + * + * @param string $dir + * @return bool + */ +function remove_dir(string $dir) +{ + if (\is_link($dir) || \is_file($dir)) { + return \unlink($dir); + } + $files = \array_diff(\scandir($dir), array('.', '..')); + foreach ($files as $file) { + (\is_dir("$dir/$file") && !\is_link($dir)) ? \remove_dir("$dir/$file") : \unlink("$dir/$file"); + } + return \rmdir($dir); +} + /** * @param $worker * @param $class */ -function worker_bind($worker, $class) { +function worker_bind($worker, $class) +{ $callback_map = [ 'onConnect', 'onMessage', @@ -233,24 +374,109 @@ function worker_bind($worker, $class) { 'onWebSocketConnect' ]; foreach ($callback_map as $name) { - if (method_exists($class, $name)) { + if (\method_exists($class, $name)) { $worker->$name = [$class, $name]; } } - if (method_exists($class, 'onWorkerStart')) { - call_user_func([$class, 'onWorkerStart'], $worker); + if (\method_exists($class, 'onWorkerStart')) { + [$class, 'onWorkerStart']($worker); } } /** - * @return int + * @param $process_name + * @param $config + * @return void + */ +function worker_start($process_name, $config) +{ + $worker = new Worker($config['listen'] ?? null, $config['context'] ?? []); + $property_map = [ + 'count', + 'user', + 'group', + 'reloadable', + 'reusePort', + 'transport', + 'protocol', + ]; + $worker->name = $process_name; + foreach ($property_map as $property) { + if (isset($config[$property])) { + $worker->$property = $config[$property]; + } + } + + $worker->onWorkerStart = function ($worker) use ($config) { + require_once \base_path() . '/support/bootstrap.php'; + + foreach ($config['services'] ?? [] as $server) { + if (!\class_exists($server['handler'])) { + echo "process error: class {$server['handler']} not exists\r\n"; + continue; + } + $listen = new Worker($server['listen'] ?? null, $server['context'] ?? []); + if (isset($server['listen'])) { + echo "listen: {$server['listen']}\n"; + } + $instance = Container::make($server['handler'], $server['constructor'] ?? []); + \worker_bind($listen, $instance); + $listen->listen(); + } + + if (isset($config['handler'])) { + if (!\class_exists($config['handler'])) { + echo "process error: class {$config['handler']} not exists\r\n"; + return; + } + + $instance = Container::make($config['handler'], $config['constructor'] ?? []); + \worker_bind($worker, $instance); + } + + }; +} + +/** + * Phar support. + * Compatible with the 'realpath' function in the phar file. 
+ * + * @param string $file_path + * @return string */ -function cpu_count() { - if (strtolower(PHP_OS) === 'darwin') { - $count = shell_exec('sysctl -n machdep.cpu.core_count'); +function get_realpath(string $file_path): string +{ + if (\strpos($file_path, 'phar://') === 0) { + return $file_path; } else { - $count = shell_exec('nproc'); + return \realpath($file_path); } - $count = (int)$count > 0 ? (int)$count : 4; - return $count; -} \ No newline at end of file +} + +/** + * @return bool + */ +function is_phar() +{ + return \class_exists(\Phar::class, false) && Phar::running(); +} + +/** + * @return int + */ +function cpu_count() +{ + // Windows does not support the number of processes setting. + if (\DIRECTORY_SEPARATOR === '\\') { + return 1; + } + $count = 4; + if (\is_callable('shell_exec')) { + if (\strtolower(PHP_OS) === 'darwin') { + $count = (int)\shell_exec('sysctl -n machdep.cpu.core_count'); + } else { + $count = (int)\shell_exec('nproc'); + } + } + return $count > 0 ? $count : 4; +} diff --git a/frameworks/PHP/webman/webman.dockerfile b/frameworks/PHP/webman/webman.dockerfile index eef0c5ec2c6..01a722f94b6 100644 --- a/frameworks/PHP/webman/webman.dockerfile +++ b/frameworks/PHP/webman/webman.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive @@ -9,8 +9,8 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer -RUN apt-get update -yqq && apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN apt-get update -yqq && apt-get install -y php-pear php8.1-dev libevent-dev git +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/workerman/app-pg.php b/frameworks/PHP/workerman/app-pg.php new file mode 100644 index 00000000000..471c71b2186 --- /dev/null +++ b/frameworks/PHP/workerman/app-pg.php @@ -0,0 +1,118 @@ +path()) { + '/plaintext' => text(), + '/json' => json(), + '/db' => db(), + '/fortunes' => fortune(), + '/query' => query($request), + '/update' => updateraw($request), + // '/info' => info(), + default => new Response(404, [], 'Error 404'), + }; +} + +function text() +{ + return new Response(200, [ + 'Content-Type' => 'text/plain', + 'Date' => Header::$date + ], 'Hello, World!'); +} + +function json() +{ + return new Response(200, [ + 'Content-Type' => 'application/json', + 'Date' => Header::$date + ], json_encode(['message' => 'Hello, World!'])); +} + +function db() +{ + DbRaw::$random->execute([mt_rand(1, 10000)]); + + return new Response(200, [ + 'Content-Type' => 'application/json', + 'Date' => Header::$date + ], json_encode(DbRaw::$random->fetch())); +} + +function query($request) +{ + $query_count = 1; + $q = (int) $request->get('q'); + if ($q > 1) { + $query_count = min($q, 500); + } + + while ($query_count--) { + DbRaw::$random->execute([mt_rand(1, 10000)]); + $arr[] = DbRaw::$random->fetch(); + } + + return new Response(200, [ + 'Content-Type' => 'application/json', + 'Date' => Header::$date + ], json_encode($arr)); +} + +function updateraw($request) +{ + $query_count = 1; + $q = (int) $request->get('q'); + if ($q > 1) { + $query_count = min($q, 500); + } + + while ($query_count--) { + + DbRaw::$random->execute([mt_rand(1, 10000)]); + $row = DbRaw::$random->fetch(); + $row['randomNumber'] = mt_rand(1, 10000); + + 
$worlds[] = $row; + } + + DbRaw::update($worlds); + + return new Response(200, [ + 'Content-Type' => 'application/json', + 'Date' => Header::$date + ], json_encode($worlds)); +} + +function fortune() +{ + DbRaw::$fortune->execute(); + + $arr = DbRaw::$fortune->fetchAll(PDO::FETCH_KEY_PAIR); + $arr[0] = 'Additional fortune added at request time.'; + asort($arr); + + $html = ''; + foreach ($arr as $id => $message) { + $message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); + $html .= "<tr><td>$id</td><td>$message</td></tr>"; + } + + return new Response(200, [ + 'Date' => Header::$date + ], "<!DOCTYPE html><html><head><title>Fortunes</title></head><body><table><tr><th>id</th><th>message</th></tr>$html</table></body></html>
" + ); +} + +/* function info() +{ + ob_start(); + phpinfo(); + return new Response(200, ['Content-Type' => 'text/plain'], ob_get_clean()); +} + */ diff --git a/frameworks/PHP/workerman/dbraw.php b/frameworks/PHP/workerman/dbraw.php new file mode 100644 index 00000000000..2a7c8d355c1 --- /dev/null +++ b/frameworks/PHP/workerman/dbraw.php @@ -0,0 +1,88 @@ + PDO::FETCH_ASSOC, + PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION, + PDO::ATTR_EMULATE_PREPARES => false + ] + ); + + self::$fortune = $pdo->prepare('SELECT id,message FROM Fortune'); + self::$random = $pdo->prepare('SELECT id,randomNumber FROM World WHERE id = ?'); + self::$instance = $pdo; + } + + /** + * Postgres bulk update + * + * @param array $worlds + * @return void + */ + public static function update(array $worlds) + { + $rows = count($worlds); + + if (!isset(self::$update[$rows])) { + $sql = 'UPDATE world SET randomNumber = CASE id' + . str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $rows) + . 'END WHERE id IN (' + . str_repeat('?::INTEGER,', $rows - 1) . '?::INTEGER)'; + + self::$update[$rows] = self::$instance->prepare($sql); + } + + $val = []; + $keys = []; + foreach ($worlds as $world) { + $val[] = $keys[] = $world['id']; + $val[] = $world['randomNumber']; + } + + self::$update[$rows]->execute([...$val, ...$keys]); + } + + /** + * Alternative bulk update in Postgres + * + * @param array $worlds + * @return void + */ + public static function update2(array $worlds) + { + $rows = count($worlds); + + if (!isset(self::$update[$rows])) { + $sql = 'UPDATE world SET randomNumber = temp.randomNumber FROM (VALUES ' + . implode(', ', array_fill(0, $rows, '(?::INTEGER, ?::INTEGER)')) . + ' ORDER BY 1) AS temp(id, randomNumber) WHERE temp.id = world.id'; + + self::$update[$rows] = self::$instance->prepare($sql); + } + + $val = []; + foreach ($worlds as $world) { + $val[] = $world['id']; + $val[] = $world['randomNumber']; + //$update->bindParam(++$i, $world['id'], PDO::PARAM_INT); + } + + self::$update[$rows]->execute($val); + } +} diff --git a/frameworks/PHP/workerman/workerman-async.dockerfile b/frameworks/PHP/workerman/workerman-async.dockerfile index 5a37fb3e356..ab78774c2f9 100644 --- a/frameworks/PHP/workerman/workerman-async.dockerfile +++ b/frameworks/PHP/workerman/workerman-async.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive diff --git a/frameworks/PHP/workerman/workerman-pgsql.dockerfile b/frameworks/PHP/workerman/workerman-pgsql.dockerfile index 1c743174beb..86717567acf 100644 --- a/frameworks/PHP/workerman/workerman-pgsql.dockerfile +++ b/frameworks/PHP/workerman/workerman-pgsql.dockerfile @@ -1,5 +1,4 @@ -FROM ubuntu:21.10 - +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null @@ -10,14 +9,15 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php.ini /etc/php/8.1/cli/php.ini ADD ./ /workerman WORKDIR /workerman -RUN sed -i "s|'mysql:host|'pgsql:host|g" app.php +RUN sed -i "s|'/app.php|'/app-pg.php|g" server.php +RUN sed -i "s|init()|DbRaw::init()|g" server.php RUN composer install --optimize-autoloader --classmap-authoritative --no-dev 
--quiet diff --git a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile b/frameworks/PHP/workerman/workerman-php8-jit.dockerfile index cd86738fe85..ea641405bf5 100644 --- a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile +++ b/frameworks/PHP/workerman/workerman-php8-jit.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive @@ -10,14 +10,15 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php-jit.ini /etc/php/8.1/cli/php.ini ADD ./ /workerman WORKDIR /workerman -RUN sed -i "s|'mysql:host|'pgsql:host|g" app.php +RUN sed -i "s|'/app.php|'/app-pg.php|g" server.php +RUN sed -i "s|init()|DbRaw::init()|g" server.php RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet diff --git a/frameworks/PHP/workerman/workerman.dockerfile b/frameworks/PHP/workerman/workerman.dockerfile index 2f99ef78cc2..e883ee21f4c 100644 --- a/frameworks/PHP/workerman/workerman.dockerfile +++ b/frameworks/PHP/workerman/workerman.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:21.10 +FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer RUN apt-get install -y php-pear php8.1-dev libevent-dev git > /dev/null -RUN pecl install event-3.0.6 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini COPY php-jit.ini /etc/php/8.1/cli/php.ini diff --git a/frameworks/PHP/yii2/app/index.php b/frameworks/PHP/yii2/app/index.php index 80851d36ea5..d8154161700 100755 --- a/frameworks/PHP/yii2/app/index.php +++ b/frameworks/PHP/yii2/app/index.php @@ -51,4 +51,12 @@ ], ]; -(new yii\web\Application($config))->run(); +(new yii\web\Application($config))->run(); + +function handleWorkerman() +{ + global $config; + ob_start(); + (new yii\web\Application($config))->run(); + return ob_get_clean(); +} diff --git a/frameworks/PHP/yii2/benchmark_config.json b/frameworks/PHP/yii2/benchmark_config.json index 2424e4bf7c4..6d6a3f4d3af 100644 --- a/frameworks/PHP/yii2/benchmark_config.json +++ b/frameworks/PHP/yii2/benchmark_config.json @@ -43,6 +43,29 @@ "display_name": "yii2-raw", "notes": "", "versus": "php" + }, + "workerman": { + "json_url": "/site/json", + "db_url": "/site/db", + "query_url": "/site/queries?queries=", + "fortune_url": "/site/fortunes", + "update_url": "/site/updates?queries=", + "plaintext_url": "/site/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "MySQL", + "framework": "yii2", + "language": "PHP", + "flavor": "PHP8.1", + "orm": "Full", + "platform": "workerman", + "webserver": "none", + "os": "Linux", + "database_os": "Linux", + "display_name": "yii2-workerman", + "notes": "", + "versus": "workerman" } }] } diff --git a/frameworks/PHP/yii2/composer.json b/frameworks/PHP/yii2/composer.json index e77691110a0..538098961ab 100755 --- a/frameworks/PHP/yii2/composer.json +++ b/frameworks/PHP/yii2/composer.json @@ -1,6 +1,12 @@ { "require": { "yidas/yii2-composer-bower-skip": "~2.0.13", - "yiisoft/yii2": 
"~2.0.43" - } + "yiisoft/yii2": "~2.0.43", + "joanhey/adapterman": "dev-master" + }, + "config": { + "allow-plugins": { + "yiisoft/*": true + } + } } diff --git a/frameworks/PHP/yii2/deploy/conf/cli-php.ini b/frameworks/PHP/yii2/deploy/conf/cli-php.ini new file mode 100644 index 00000000000..9b458242057 --- /dev/null +++ b/frameworks/PHP/yii2/deploy/conf/cli-php.ini @@ -0,0 +1,16 @@ +#zend_extension=opcache.so +opcache.enable=1 +opcache.enable_cli=1 +opcache.validate_timestamps=0 +opcache.save_comments=0 +opcache.enable_file_override=1 +opcache.huge_code_pages=1 + +mysqlnd.collect_statistics = Off + +memory_limit = 512M + +opcache.jit_buffer_size=128M +opcache.jit=tracing + +disable_functions=header,header_remove,headers_sent,http_response_code,setcookie,session_create_id,session_id,session_name,session_save_path,session_status,session_start,session_write_close,set_time_limit \ No newline at end of file diff --git a/frameworks/PHP/yii2/server.php b/frameworks/PHP/yii2/server.php new file mode 100644 index 00000000000..283046d5a75 --- /dev/null +++ b/frameworks/PHP/yii2/server.php @@ -0,0 +1,44 @@ +count = (int) shell_exec('nproc') * 4; +$http_worker->name = 'AdapterMan'; +$http_worker->onWorkerStart = function () { + WorkerTimer::init(); + //init(); +}; + +$http_worker->onMessage = static function ($connection, $request) { + + $_SERVER['SCRIPT_FILENAME'] = '/app/index.php'; + $_SERVER['SCRIPT_NAME'] = '/index.php'; + Http::header(WorkerTimer::$date); + $connection->send( + handleWorkerman() + ); +}; + +class WorkerTimer +{ + public static $date; + + public static function init() + { + self::$date = 'Date: '.gmdate('D, d M Y H:i:s').' GMT'; + Timer::add(1, function() { + WorkerTimer::$date = 'Date: '.gmdate('D, d M Y H:i:s').' GMT'; + }); + } +} + +Worker::runAll(); \ No newline at end of file diff --git a/frameworks/PHP/yii2/yii2-workerman.dockerfile b/frameworks/PHP/yii2/yii2-workerman.dockerfile new file mode 100644 index 00000000000..f0e841ea5dd --- /dev/null +++ b/frameworks/PHP/yii2/yii2-workerman.dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:20.04 + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null +RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php +RUN apt-get update -yqq > /dev/null && \ + apt-get install -yqq git php8.1-cli php8.1-mysql php8.1-mbstring php8.1-xml > /dev/null + +COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer + +RUN apt-get install -y php-pear php8.1-dev libevent-dev > /dev/null +RUN pecl install event-3.0.8 > /dev/null && echo "extension=event.so" > /etc/php/8.1/cli/conf.d/event.ini + +COPY deploy/conf/cli-php.ini /etc/php/8.1/cli/php.ini + +ADD ./ /yii2 +WORKDIR /yii2 + +RUN composer install --optimize-autoloader --classmap-authoritative --no-dev + +RUN sed -i 's|(new yii\\web\\Application|//(new yii\\web\\Application|' app/index.php +RUN sed -i 's|(headers_sent($file, $line))|(headers_sent())|g' vendor/yiisoft/yii2/web/Response.php + +RUN chmod -R 777 /yii2 + +CMD php server.php start diff --git a/frameworks/Pascal/mormot/README.md b/frameworks/Pascal/mormot/README.md new file mode 100644 index 00000000000..37186e3cff6 --- /dev/null +++ b/frameworks/Pascal/mormot/README.md @@ -0,0 +1,72 @@ +# mORMot Benchmarking Test + +This is a framework implementation using the [mORMot2](https://github.com/synopse/mORMot2) FreePascal/Delphi framework. 
+It builds using [FreePascal](https://www.freepascal.org/) compiler and developed using [Lazarus IDE](https://www.lazarus-ide.org/) + +### Test Type Implementation Source Code + +* [ORM and RAW implementation for all tests](src/raw.pas) + +## Important Libraries +The tests were run with: +* [mORMot2 latest](https://github.com/synopse/mORMot2) +* [FreePascal 3.2.2](https://www.freepascal.org/) + +## Contributor tips +For debugging purpose run Postges using Docker +```shell +sudo docker run --name postgres -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 postgres:12 +``` +add `tfb-database` into hosts +```shell +echo '127.0.0.1 tfb-database' | sudo tee -a /etc/hosts +``` + +Database can be initialized using scripts from [TFBDatabases repo](https://github.com/TechEmpower/TFBDatabases) + +```shell +git clone https://github.com/TechEmpower/TFBDatabases.git +cd TFBDatabases +psql postgres://postgres:postgres@tfb-database:5432 < create-postgres-database.sql +psql postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world < create-postgres.sql +``` + +## Test URLs + +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext + +`mORMot` HTTP server does not support [HTTP pipelining](https://developer.mozilla.org/en-US/docs/Web/HTTP/Connection_management_in_HTTP_1.x#http_pipelining), +so numbers is not impressive here. + +### DB + +http://localhost:8080/db + +### QUERY + +http://localhost:8080/query?queries= + +### CACHED QUERY + +http://localhost:8080/cached_query?queries= + +### UPDATE + +http://localhost:8080/update?queries= + +Batch update SQL statement in case of Postgres is generated by ORM as such: +```sql +update table set field = v.field from (SELECT unnest(?), unnest(?)) as v(id, field) where t.id = v.id +``` +Nested select is a reason of warning for `tfb --verify` + +### FORTUNES + +http://localhost:8080/fortunes + diff --git a/frameworks/Pascal/mormot/benchmark_config.json b/frameworks/Pascal/mormot/benchmark_config.json new file mode 100644 index 00000000000..449dc3cf3ae --- /dev/null +++ b/frameworks/Pascal/mormot/benchmark_config.json @@ -0,0 +1,53 @@ +{ + "framework": "mormot", + "tests": [ + { + "default": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/updates?queries=", + "plaintext_url": "/plaintext", + "cached_query_url": "/cached-queries?count=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "mormot", + "language": "Pascal", + "flavor": "None", + "orm": "Full", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "mormot", + "notes": "", + "versus": "None" + }, + "postgres-raw": { + "dockerfile": "mormot.dockerfile", + "db_url": "/rawdb", + "query_url": "/rawqueries?queries=", + "fortune_url": "/rawfortunes", + "update_url": "/rawupdates?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "mormot", + "language": "Pascal", + "flavor": "None", + "orm": "Raw", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "mormot", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Pascal/mormot/mormot.dockerfile b/frameworks/Pascal/mormot/mormot.dockerfile new file mode 100644 index 00000000000..24fd1de7cf6 --- /dev/null +++ b/frameworks/Pascal/mormot/mormot.dockerfile @@ 
-0,0 +1,20 @@ +FROM freepascal/fpc:3.2.2-focal-full as builder + +RUN apt-get update -yqq +RUN apt-get install -yqq p7zip-full zlib1g-dev + +WORKDIR /build +COPY src/ src/ +COPY setup_and_build.sh . + +RUN /bin/bash -c ./setup_and_build.sh + +FROM ubuntu:22.04 +COPY --from=builder /build/bin/fpc-x86_64-linux/raw /usr/local/bin/raw + +RUN apt-get update && apt-get install -yqq postgresql-client + +EXPOSE 8080 +CMD ["raw"] + + diff --git a/frameworks/Pascal/mormot/setup_and_build.sh b/frameworks/Pascal/mormot/setup_and_build.sh new file mode 100755 index 00000000000..ca011d34ba5 --- /dev/null +++ b/frameworks/Pascal/mormot/setup_and_build.sh @@ -0,0 +1,88 @@ +#!/bin/bash + +# Update mORMot and static folder content from the latest [pre]release of mORMot2 +# Required tools: jq wget 7zip. On Ubuntu can be installed by +# sudo apt install wget jq p7zip-full + +# On error +err_report() { + >&2 echo "Error in $0 on line $1" + script_aborted +} +trap 'err_report $LINENO' ERR + +script_successful(){ + echo "++Build successfully++" + exit 0 +} + +script_aborted() { + echo "******Build aborted******" + exit 1 +} + +set -o pipefail + +rm -rf ./libs + +# echo "Getting the latest pre-release URL..." +# USED_TAG=$(wget -qO- https://api.github.com/repos/synopse/mORMot2/releases/latest | jq -r '.tag_name') +USED_TAG="2.0.3780" + +echo "Used release tag $USED_TAG" +URL="https://github.com/synopse/mORMot2/releases/download/$USED_TAG/mormot2static.7z" +echo "Download statics from $URL ..." +wget -q -O./mormot2static.7z "$URL" + +mkdir -p ./libs/mORMot/static +echo "Unpacking to ./libs/mORMot/static ..." +7za x ./mormot2static.7z -o./libs/mORMot/static +rm -rf ./mormot2static.7z + +# uncomment for fixed commit URL +URL=https://github.com//synopse/mORMot2/tarball/e567622e45caaf7056ee8ba6d1827314d945ccb2 +#URL="https://api.github.com/repos/synopse/mORMot2/tarball/$USED_TAG" +echo "Download and unpacking mORMot sources from $URL ..." +wget -qO- "$URL" | tar -xz -C ./libs/mORMot --strip-components=1 + + +# uncomment line below to echo commands to console +set -x + +# get a mORMot folder name based on this script location +TARGET="${TARGET:-linux}" +ARCH="${ARCH:-x86_64}" +ARCH_TG="$ARCH-$TARGET" + +MSRC="./libs/mORMot/src" +BIN="./bin" +STATIC="./libs/mORMot/static" + +mkdir -p "$BIN/fpc-$ARCH_TG/.dcu" +rm -f "$BIN"/fpc-"$ARCH_TG"/.dcu/* + +dest_fn=raw +if [[ $TARGET == win* ]]; then + dest_fn="$dest_fn.exe" +fi + +# suppress warnings +# Warning: (5059) Function result variable does not seem to be initialized +# Warning: (5036) Local variable XXX does not seem to be initialized +# Warning: (5089) Local variable XXX of a managed type does not seem to be initialized +# Warning: (5090) Variable XXX of a managed type does not seem to be initialized +SUPRESS_WARN=-vm11047,6058,5092,5091,5060,5058,5057,5028,5024,5023,4081,4079,4055,3187,3124,3123,5059,5036,5089,5090 + +echo "Start compiling..." 
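+# Note on the fpc invocation below: -T/-P select the target OS and CPU,
+# -Fu/-Fi add the mORMot unit and include search paths, -Fl points at the
+# downloaded static libraries, -FU/-FE set the unit and binary output
+# directories, and the -d switches enable mORMot build defines such as the
+# FPC_X64MM memory manager.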
+fpc -MDelphi -Sci -Ci -O3 -g -gl -gw2 -Xg -k'-rpath=$ORIGIN' -k-L$BIN \ + -T$TARGET -P$ARCH \ + -veiq -v-n-h- $SUPRESS_WARN \ + -Fi"$BIN/fpc-$ARCH_TG/.dcu" -Fi"$MSRC" \ + -Fl"$STATIC/$ARCH-$TARGET" \ + -Fu"$MSRC/core" -Fu"$MSRC/db" -Fu"$MSRC/rest" -Fu"$MSRC/crypt" \ + -Fu"$MSRC/app" -Fu"$MSRC/net" -Fu"$MSRC/lib" -Fu"$MSRC/orm" -Fu"$MSRC/soa" \ + -FU"$BIN/fpc-$ARCH_TG/.dcu" -FE"$BIN/fpc-$ARCH_TG" -o"$BIN/fpc-$ARCH_TG/$dest_fn" \ + -dFPC_X64MM -dFPCMM_SERVER -dNOSYNDBZEOS -dNOSYNDBIBX -dFPCMM_REPORTMEMORYLEAKS \ + -B -Se1 "./src/raw.pas" | grep "[Warning|Error|Fatal]:" + +script_successful \ No newline at end of file diff --git a/frameworks/Pascal/mormot/src/raw.lpi b/frameworks/Pascal/mormot/src/raw.lpi new file mode 100644 index 00000000000..81f13a0c1b1 --- /dev/null +++ b/frameworks/Pascal/mormot/src/raw.lpi @@ -0,0 +1,152 @@ + + + + + + + + + + + + + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <BuildModes> + <Item Name="Debug" Default="True"/> + <Item Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <Target> + <Filename Value="exe/raw"/> + </Target> + <SearchPaths> + <IncludeFiles Value="../libs/mORMot/src;$(ProjOutDir)"/> + <Libraries Value="../libs/mORMot/static/$(TargetCPU)-$(TargetOS)"/> + <OtherUnitFiles Value="../libs/mORMot/src/core;../libs/mORMot/src/db;../libs/mORMot/src/rest;../libs/mORMot/src/app;../libs/mORMot/src/net;../libs/mORMot/src/lib;../libs/mORMot/src/orm;../libs/mORMot/src/crypt;../libs/mORMot/src/soa"/> + <UnitOutputDirectory Value="lib/$(TargetCPU)-$(TargetOS)"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + <DebugInfoType Value="dsDwarf3"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dFPC_X64MM +-dFPCMM_SERVER +-dFPCMM_REPORTMEMORYLEAKS +-dNOSYNDBZEOS +-dNOSYNDBIBX"/> + <OtherDefines Count="5"> + <Define0 Value="FPC_X64MM"/> + <Define1 Value="FPCMM_SERVER"/> + <Define2 Value="FPCMM_REPORTMEMORYLEAKS"/> + <Define3 Value="NOSYNDBZEOS"/> + <Define4 Value="NOSYNDBIBX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item> + </BuildModes> + <PublishOptions> + <Version Value="2"/> + <UseFileFilters Value="True"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + </RunParams> + <Units> + <Unit> + <Filename Value="raw.pas"/> + <IsPartOfProject Value="True"/> + </Unit> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <Target> + <Filename Value="exe/raw"/> + </Target> + <SearchPaths> + <IncludeFiles Value="../libs/mORMot/src;$(ProjOutDir)"/> + <Libraries Value="../libs/mORMot/static/$(TargetCPU)-$(TargetOS)"/> + <OtherUnitFiles Value="../libs/mORMot/src/core;../libs/mORMot/src/db;../libs/mORMot/src/rest;../libs/mORMot/src/app;../libs/mORMot/src/net;../libs/mORMot/src/lib;../libs/mORMot/src/orm;../libs/mORMot/src/crypt;../libs/mORMot/src/soa"/> + <UnitOutputDirectory Value="lib/$(TargetCPU)-$(TargetOS)"/> + </SearchPaths> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf3"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dFPC_X64MM +-dFPCMM_SERVER +-dFPCMM_REPORTMEMORYLEAKS +-dFPCMM_DEBUG +-dNOSYNDBZEOS +-dNOSYNDBIBX"/> + <OtherDefines Count="7"> + <Define0 Value="FPC_X64MM"/> + <Define1 Value="FPCMM_SERVER"/> + <Define2 Value="FPCMM_REPORTMEMORYLEAKS"/> + <Define3 Value="FPCMM_DEBUG"/> + <Define4 Value="WITH_LOGS"/> + <Define5 Value="NOSYNDBZEOS"/> + <Define6 Value="NOSYNDBIBX"/> + 
</OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions> + <Item> + <Name Value="EAbort"/> + </Item> + <Item> + <Name Value="ECodetoolError"/> + </Item> + <Item> + <Name Value="EFOpenError"/> + </Item> + <Item> + <Name Value="EInterfaceFactoryException"/> + </Item> + <Item> + <Name Value="EAssertionFailed"/> + </Item> + <Item> + <Name Value="ERttiException"/> + </Item> + <Item> + <Name Value="EFastReader"/> + </Item> + <Item> + <Name Value="RunError(216)"/> + </Item> + <Item> + <Name Value="ENetSock"/> + </Item> + <Item> + <Name Value="EThreadError"/> + </Item> + <Item> + <Name Value="ESynCrypto"/> + </Item> + <Item> + <Name Value="ERelayProtocol"/> + </Item> + <Item> + <Name Value="EServiceException"/> + </Item> + <Item> + <Name Value="EFCreateError"/> + </Item> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/frameworks/Pascal/mormot/src/raw.pas b/frameworks/Pascal/mormot/src/raw.pas new file mode 100644 index 00000000000..1e9fe253e5a --- /dev/null +++ b/frameworks/Pascal/mormot/src/raw.pas @@ -0,0 +1,603 @@ +program raw; + +{ +TechEmpower framework benchmarks implementation +See https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Framework-Tests-Overview +} + +{$I mormot.defines.inc} + +{.$define USE_SQLITE3} +// may be defined to use a SQLite3 database instead of external PostgresSQL DB +// - note: /rawupdates and /rawqueries are PostgresSQL specific and will fail + +{.$define WITH_LOGS} +// logging is fine for debugging, less for benchmarking ;) + +uses + {$I mormot.uses.inc} // include mormot.core.fpcx64mm + sysutils, + classes, + BaseUnix, + mormot.core.base, + mormot.core.os, + mormot.core.rtti, + mormot.core.log, + mormot.core.unicode, + mormot.core.text, + mormot.core.buffers, + mormot.core.json, + mormot.core.data, + mormot.core.variants, + mormot.core.perf, + mormot.core.mustache, + mormot.orm.core, + mormot.orm.sql, + mormot.db.core, + mormot.db.raw.sqlite3, + mormot.db.raw.sqlite3.static, + {$ifdef USE_SQLITE3} + mormot.db.sql.sqlite3, + {$endif USE_SQLITE3} + mormot.rest.sqlite3, + mormot.net.http, + mormot.net.server, + mormot.net.async, + mormot.db.sql, + mormot.db.sql.postgres; + +type + TMessageRec = packed record + message: RawUtf8; + end; + TWorldRec = packed record + id: integer; + randomNumber: integer; + end; + TWorlds = array of TWorldRec; + TFortune = packed record + id: integer; + message: RawUtf8; + end; + TFortunes = array of TFortune; + + TOrmWorld = class(TOrm) + protected + fRandomNumber: integer; + published + property RandomNumber: integer + read fRandomNumber write fRandomNumber; + end; + TOrmCachedWorld = class(TOrmWorld); + TOrmWorldClass = class of TOrmWorld; + TOrmFortune = class(TOrm) + protected + fMessage: RawUtf8; + published + property Message: RawUtf8 + read fMessage write fMessage; + end; + TOrmFortunes = array of TOrmFortune; + + { TRawAsyncServer } + + TRawAsyncServer = class + private + fHttpServer: THttpAsyncServer; + fDbPool: TSqlDBConnectionProperties; + fModel: TOrmModel; + fStore: TRestServerDB; + fTemplate: TSynMustache; + protected + // main HTTP routing method + function DoOnRequest(ctxt: THttpServerRequestAbstract): cardinal; + // return ?queries= parameter value. 
If missed or < 1 return 1, if > 500 return 500 + function getQueriesParamValue(ctxt: THttpServerRequestAbstract; + const search: RawUtf8 = 'QUERIES='): integer; + procedure getRandomWorlds(cnt: PtrInt; out res: TWorlds); + {$ifdef USE_SQLITE3} + procedure GenerateDB; + {$endif USE_SQLITE3} + public + constructor Create(threadCount: integer); + destructor Destroy; override; + // those are the implementation methods + function json(ctxt: THttpServerRequestAbstract): cardinal; + function db(ctxt: THttpServerRequestAbstract): cardinal; + // /queries and /cached-queries endpoints are implemented in doqueries + function doqueries(ctxt: THttpServerRequestAbstract; orm: TOrmWorldClass; + const search: RawUtf8): cardinal; + function fortunes(ctxt: THttpServerRequestAbstract): cardinal; + function updates(ctxt: THttpServerRequestAbstract): cardinal; + function plaintext(ctxt: THttpServerRequestAbstract): cardinal; + function rawdb(ctxt: THttpServerRequestAbstract): cardinal; + function rawqueries(ctxt: THttpServerRequestAbstract): cardinal; + function rawfortunes(ctxt: THttpServerRequestAbstract): cardinal; + function rawupdates(ctxt: THttpServerRequestAbstract): cardinal; + end; + +const + TEXT_CONTENT_TYPE_NO_ENCODING: RawUtf8 = 'text/plain'; + HELLO_WORLD: RawUtf8 = 'Hello, World!'; + WORLD_COUNT = 10000; + + WORLD_READ_SQL = 'select id, randomNumber from World where id=?'; + WORLD_UPDATE_SQLN ='update World as t set randomNumber = v.r from ' + + '(SELECT unnest(?::NUMERIC[]), unnest(?::NUMERIC[])) as v(id, r)' + + ' where t.id = v.id'; + FORTUNES_SQL = 'select id, message from Fortune'; + + FORTUNES_MESSAGE = 'Additional fortune added at request time.'; + FORTUNES_TPL = '<!DOCTYPE html>' + + '<html>' + + '<head><title>Fortunes</title></head>' + + '<body>' + + '<table>' + + '<tr><th>id</th><th>message</th></tr>' + + '{{#.}}' + + '<tr><td>{{id}}</td><td>{{message}}</td></tr>' + + '{{/.}}' + + '</table>' + + '</body>' + + '</html>
' + + '' + + ''; + + +{ TRawAsyncServer } + +constructor TRawAsyncServer.Create(threadCount: integer); +begin + inherited Create; + {$ifdef USE_SQLITE3} + fDbPool := TSqlDBSQLite3ConnectionProperties.Create( + SQLITE_MEMORY_DATABASE_NAME, '', '', ''); + fDbPool.StatementCacheReplicates := threadcount; // shared SQlite3 connection + {$else} + fDbPool := TSqlDBPostgresConnectionProperties.Create( + 'tfb-database:5432', 'hello_world', 'benchmarkdbuser', 'benchmarkdbpass'); + {$endif USE_SQLITE3} + fModel := TOrmModel.Create([TOrmWorld, TOrmFortune, TOrmCachedWorld]); + OrmMapExternal(fModel, [TOrmWorld, TOrmFortune], fDbPool); + // CachedWorld table doesn't exists in DB, but should as read in requirements. + // Use world table as in other implementations. + OrmMapExternal(fModel, TOrmCachedWorld, fDbPool, 'world'); + fStore := TRestServerDB.Create(fModel, SQLITE_MEMORY_DATABASE_NAME); + fStore.NoAjaxJson := true; + {$ifdef USE_SQLITE3} + GenerateDB; + {$else} + fStore.Server.CreateMissingTables; // create SQlite3 virtual tables + {$endif USE_SQLITE3} + if fStore.Server.Cache.SetCache(TOrmCachedWorld) then + fStore.Server.Cache.FillFromQuery(TOrmCachedWorld, '', []); + fTemplate := TSynMustache.Parse(FORTUNES_TPL); + fHttpServer := THttpAsyncServer.Create( + '8080', nil, nil, '', threadCount, + 5 * 60 * 1000, // 5 minutes keep alive connections + [hsoNoXPoweredHeader, // not needed for a benchmark + hsoHeadersInterning, // reduce memory contention for /plaintext and /json + hsoNoStats, // disable low-level statistic counters + {$ifdef WITH_LOGS} + hsoLogVerbose, + {$endif WITH_LOGS} + hsoIncludeDateHeader // required by TPW General Test Requirements #5 + ]); + fHttpServer.HttpQueueLength := 100000; // needed e.g. from wrk/ab benchmarks + fHttpServer.OnRequest := DoOnRequest; + fHttpServer.WaitStarted; // raise exception e.g. 
on binding issue +end; + +destructor TRawAsyncServer.Destroy; +begin + fHttpServer.Free; + fStore.Free; + fModel.Free; + fDBPool.free; + inherited Destroy; +end; + +function TRawAsyncServer.DoOnRequest(ctxt: THttpServerRequestAbstract): cardinal; +const + ROUTES: array[0..11] of RawUtf8 = ( + // basic tests + '/PLAINTEXT', '/JSON', + // ORM tests + '/DB', '/QUERIES', '/FORTUNES', '/UPDATES', '/CACHED-QUERIES', + // raw tests + '/RAWDB' , '/RAWQUERIES', '/RAWFORTUNES', '/RAWUPDATES', ''); +var + route: PtrInt; +begin + {$ifdef WITH_LOGS} + TSynLog.Add.Log(sllServiceCall, 'DoOnRequest % %', [ctxt.Method, ctxt.Url], self); + {$endif WITH_LOGS} + result := HTTP_NOTFOUND; + route := IdemPPChar(pointer(ctxt.Url), @ROUTES); + if (route >= 0) and + (ctxt.Url[length(ROUTES[route]) + 1] in [#0, '?', '/']) then + case route of + // basic tests + 0: result := plaintext(ctxt); + 1: result := json(ctxt); + // ORM tests + 2: result := db(ctxt); + 3: result := doqueries(ctxt, TOrmWorld, 'QUERIES='); + 4: result := fortunes(ctxt); + 5: result := updates(ctxt); + 6: result := doqueries(ctxt, TOrmCachedWorld, 'COUNT='); + // raw tests + 7: result := rawdb(ctxt); + 8: result := rawqueries(ctxt); + 9: result := rawfortunes(ctxt); + 10: result := rawupdates(ctxt); + end; +end; + +function RandomWorld: integer; inline; +begin + result := Random32(WORLD_COUNT) + 1; +end; + +{$ifdef USE_SQLITE3} + +const + _FORTUNES: array[1..12] of RawUtf8 = ( + 'fortune: No such file or directory', + 'A computer scientist is someone who fixes things that aren''t broken.', + 'After enough decimal places, nobody gives a damn.', + 'A bad random number generator: 1, 1, 1, 1, 1, 4.33e+67, 1, 1, 1', + 'A computer program does what you tell it to do, not what you want it to do.', + 'Emacs is a nice operating system, but I prefer UNIX. — Tom Christaensen', + 'Any program that runs right is obsolete.', + 'A list is only as strong as its weakest link. 
— Donald Knuth', + 'Feature: A bug with seniority.', + 'Computers make very fast, very accurate mistakes.', + '', + 'フレームワークのベンチマーク'); + +procedure TRawAsyncServer.GenerateDB; +var + i: PtrInt; + b: TRestBatch; + w: TOrmWorld; + f: TOrmFortune; +begin + fStore.Server.CreateMissingTables; + w := TOrmWorld.Create; + f := TOrmFortune.Create; + b := TRestBatch.Create(fStore.Orm, nil); + try + for i := 1 to WORLD_COUNT do + begin + w.IDValue := i; + w.RandomNumber := RandomWorld; + b.Add(w, true, true); + end; + for i := low(_FORTUNES) to high(_FORTUNES) do + begin + f.IDValue := i; + f.Message := _FORTUNES[i]; + b.Add(f, true, true); + end; + if fStore.Orm.BatchSend(b) <> HTTP_SUCCESS then + raise EOrmBatchException.Create('GenerateDB failed'); + finally + b.Free; + f.Free; + w.Free; + end; +end; + +{$endif USE_SQLITE3} + +function TRawAsyncServer.json(ctxt: THttpServerRequestAbstract): cardinal; +var + msgRec: TMessageRec; +begin + msgRec.message := HELLO_WORLD; + ctxt.OutContentType := JSON_CONTENT_TYPE; + ctxt.OutContent := SaveJson(msgRec, TypeInfo(TMessageRec)); + result := HTTP_SUCCESS; +end; + +function TRawAsyncServer.plaintext(ctxt: THttpServerRequestAbstract): cardinal; +begin + ctxt.OutContentType := TEXT_CONTENT_TYPE_NO_ENCODING; + ctxt.OutContent := HELLO_WORLD; + result := HTTP_SUCCESS; +end; + +function TRawAsyncServer.rawdb(ctxt: THttpServerRequestAbstract): cardinal; +var + conn: TSqlDBConnection; + stmt: ISQLDBStatement; +begin + result := HTTP_SERVERERROR; + conn := fDbPool.ThreadSafeConnection; + stmt := conn.NewStatementPrepared(WORLD_READ_SQL, true, true); + stmt.Bind(1, RandomWorld); + stmt.ExecutePrepared; + if stmt.Step then + begin + ctxt.OutContent := FormatUtf8('{"id":%,"randomNumber":%}', + [stmt.ColumnInt(0), stmt.ColumnInt(1)]); + ctxt.OutContentType := JSON_CONTENT_TYPE; + result := HTTP_SUCCESS; + stmt.ReleaseRows; + end; + stmt := nil; +end; + +function TRawAsyncServer.db(ctxt: THttpServerRequestAbstract): cardinal; +var + w: TOrmWorld; +begin + w := TOrmWorld.Create(fStore.Orm, RandomWorld); + try + ctxt.OutContent := FormatUtf8('{"id":%,"randomNumber":%}', + [w.IDValue, w.randomNumber]); + ctxt.OutContentType := JSON_CONTENT_TYPE; + result := HTTP_SUCCESS; + finally + w.Free; + end; +end; + +function TRawAsyncServer.getQueriesParamValue(ctxt: THttpServerRequestAbstract; + const search: RawUtf8): integer; +var + p: PUtf8Char; +begin + result := 0; + p := PosChar(pointer(ctxt.Url), '?'); + if p <> nil then + UrlDecodeInteger(p + 1, search, result); + if result = 0 then + result := 1 + else if result > 500 then + result := 500; +end; + +procedure TRawAsyncServer.getRandomWorlds(cnt: PtrInt; out res: TWorlds); +var + conn: TSqlDBConnection; + stmt: ISQLDBStatement; + i: PtrInt; +begin + SetLength(res{%H-}, cnt); + conn := fDbPool.ThreadSafeConnection; + stmt := conn.NewStatementPrepared(WORLD_READ_SQL, true, true); + for i := 0 to cnt - 1 do + begin + stmt.Bind(1, RandomWorld); + stmt.ExecutePrepared; + if not stmt.Step then + exit; + res[i].id := stmt.ColumnInt(0); + res[i].randomNumber := stmt.ColumnInt(1); + end; +end; + +function TRawAsyncServer.rawqueries(ctxt: THttpServerRequestAbstract): cardinal; +var + cnt: PtrInt; + res: TWorlds; +begin + cnt := getQueriesParamValue(ctxt); + getRandomWorlds(cnt, res); + if res = nil then + exit(HTTP_SERVERERROR); + ctxt.OutContentType := JSON_CONTENT_TYPE; + ctxt.OutContent := SaveJson(res, TypeInfo(TWorlds)); + result := HTTP_SUCCESS; +end; + +function TRawAsyncServer.doqueries(ctxt: THttpServerRequestAbstract; + orm: 
TOrmWorldClass; const search: RawUtf8): cardinal; +var + cnt, i: PtrInt; + res: TWorlds; + w: TOrmWorld; +begin + result := HTTP_SERVERERROR; + cnt := getQueriesParamValue(ctxt, search); + SetLength(res, cnt); + w := orm.Create; // TOrmWorld or TOrmCachedWorld + try + for i := 0 to cnt - 1 do + begin + if not fStore.Orm.Retrieve(RandomWorld, w) then + exit; + res[i].id := w.IDValue; + res[i].randomNumber := w.RandomNumber; + end; + finally + w.Free; + end; + ctxt.OutContentType := JSON_CONTENT_TYPE; + ctxt.OutContent := SaveJson(res, TypeInfo(TWorlds)); + result := HTTP_SUCCESS; +end; + +function OrmFortuneCompareByMessage(const A, B): integer; +begin + result := StrComp(pointer(TOrmFortune(A).Message), pointer(TOrmFortune(B).Message)); +end; + +function TRawAsyncServer.fortunes(ctxt: THttpServerRequestAbstract): cardinal; +var + list: TOrmFortunes; + new: TOrmFortune; + arr: TDynArray; +begin + result := HTTP_SERVERERROR; + arr.Init(TypeInfo(TOrmFortunes), list); + if fStore.Orm.RetrieveListObjArray(list, TOrmFortune, '', []) then + try + new := TOrmFortune.Create; + new.Message := FORTUNES_MESSAGE; + arr.Add(new); + arr.Sort(OrmFortuneCompareByMessage); + ctxt.OutContent := fTemplate.RenderDataArray(arr); + ctxt.OutContentType := HTML_CONTENT_TYPE; + result := HTTP_SUCCESS; + finally + arr.Clear; + end; +end; + +function FortuneCompareByMessage(const A, B): integer; +begin + result := StrComp(pointer(TFortune(A).message), pointer(TFortune(B).message)); +end; + +function TRawAsyncServer.rawfortunes(ctxt: THttpServerRequestAbstract): cardinal; +var + conn: TSqlDBConnection; + stmt: ISQLDBStatement; + list: TFortunes; + f: TFortune; + arr: TDynArray; + n: integer; +begin + result := HTTP_SERVERERROR; + conn := fDbPool.ThreadSafeConnection; + stmt := conn.NewStatementPrepared(FORTUNES_SQL, true, true); + stmt.ExecutePrepared; + arr.Init(TypeInfo(TFortunes), list, @n); + while stmt.Step do + begin + f.id := stmt.ColumnInt(0); + f.message := stmt.ColumnUtf8(1); + arr.Add(f); + end; + f.id := 0; + f.message := FORTUNES_MESSAGE; + arr.Add(f); + arr.Sort(FortuneCompareByMessage); + ctxt.OutContent := fTemplate.RenderDataArray(arr); + ctxt.OutContentType := HTML_CONTENT_TYPE; + result := HTTP_SUCCESS; +end; + +function TRawAsyncServer.updates(ctxt: THttpServerRequestAbstract): cardinal; +var + cnt, i: PtrInt; + res: TWorlds; + w: TOrmWorld; + b: TRestBatch; +begin + result := HTTP_SERVERERROR; + cnt := getQueriesParamValue(ctxt); + SetLength(res, cnt); + b := TRestBatch.Create(fStore.ORM, TOrmWorld, {transrows=}0, + [boExtendedJson, boNoModelEncoding, boPutNoCacheFlush]); + w := TOrmWorld.Create; + try + for i := 0 to cnt - 1 do + begin + if not fStore.Orm.Retrieve(RandomWorld, w) then + exit; + w.RandomNumber := RandomWorld; + b.Update(w); + res[i].id := w.IDValue; + res[i].randomNumber := w.RandomNumber; + end; + result := fStore.Orm.BatchSend(b); + finally + w.Free; + b.Free; + end; + if result <> HTTP_SUCCESS then + exit; + ctxt.OutContentType := JSON_CONTENT_TYPE; + ctxt.OutContent := SaveJson(res, TypeInfo(TWorlds)); +end; + +function TRawAsyncServer.rawupdates(ctxt: THttpServerRequestAbstract): cardinal; +var + cnt, i: PtrInt; + words: TWorlds; + ids, nums: TInt64DynArray; + conn: TSqlDBConnection; + stmt: ISQLDBStatement; +begin + cnt := getQueriesParamValue(ctxt); + getRandomWorlds(cnt, words); + if length(words) <> cnt then + exit(HTTP_SERVERERROR); + setLength(ids, cnt); + setLength(nums, cnt); + // generate new randoms, fill parameters arrays for update + for i := 0 to cnt - 1 do + 
begin + words[i].randomNumber := RandomWorld; + ids[i] := words[i].id; + nums[i] := words[i].randomNumber; + end; + conn := fDbPool.ThreadSafeConnection; + //conn.StartTransaction; + stmt := conn.NewStatementPrepared(WORLD_UPDATE_SQLN, false); + stmt.BindArray(1, nums); + stmt.BindArray(2, ids); + stmt.ExecutePrepared; + //conn.Commit; // autocommit + ctxt.OutContentType := JSON_CONTENT_TYPE; + ctxt.OutContent := SaveJson(words, TypeInfo(TWorlds)); + result := HTTP_SUCCESS; +end; + + + +var + rawServer: TRawAsyncServer; + threads: integer; + +begin + {$ifdef WITH_LOGS} + TSynLog.Family.Level := LOG_VERBOSE; // disable logs for benchmarking + TSynLog.Family.HighResolutionTimestamp := true; + TSynLog.Family.AutoFlushTimeOut := 1; + {$else} + {$ifdef USE_SQLITE3} + TSynLog.Family.Level := LOG_STACKTRACE; // minimal debug logs on fatal errors + {$endif USE_SQLITE3} + {$endif WITH_LOGS} + TSynLog.Family.PerThreadLog := ptIdentifiedInOneFile; + + Rtti.RegisterFromText([ + TypeInfo(TMessageRec), 'message:RawUtf8', + TypeInfo(TWorldRec), 'id,randomNumber:integer', + TypeInfo(TFortune), 'id:integer message:RawUtf8']); + + if (ParamCount <> 1) or + not TryStrToInt(ParamStr(1), threads) then + threads := SystemInfo.dwNumberOfProcessors * 4; + if threads < 16 then + threads := 16 + else if threads > 64 then + threads := 64; // prevents too many PostgreSQL per connection forks + + rawServer := TRawAsyncServer.Create(threads); + try + {$I-} + writeln; + writeln(rawServer.fHttpServer.ClassName, ' running on localhost:', + rawServer.fHttpServer.SockPort, '; num thread=', threads, ' db=', + rawServer.fDbPool.DbmsEngineName, #10); + {$ifdef USE_SQLITE3} + writeln('Press [Enter] to terminate'#10); + readln; + {$else} + writeln('Press Ctrl+C or use SIGTERM to terminate'#10); + FpPause; + {$endif USE_SQLITE3} + //TSynLog.Family.Level := LOG_VERBOSE; // enable shutdown logs for debug + writeln(ObjectToJsonDebug(rawServer.fHttpServer, [woDontStoreVoid, woHumanReadable])); + {$ifdef FPC_X64MM} + WriteHeapStatus(' ', 16, 8, {compileflags=}true); + {$endif FPC_X64MM} + finally + rawServer.Free; + end; + +end. \ No newline at end of file diff --git a/frameworks/Prolog/tuProlog/pom.xml b/frameworks/Prolog/tuProlog/pom.xml index 81562a203b8..a7f42542517 100644 --- a/frameworks/Prolog/tuProlog/pom.xml +++ b/frameworks/Prolog/tuProlog/pom.xml @@ -13,7 +13,7 @@ 16 0.18.2 4.1.2 - 2.12.3 + 2.13.2.1 diff --git a/frameworks/Python/aiohttp/requirements.txt b/frameworks/Python/aiohttp/requirements.txt index 39716504313..aea8aff7754 100644 --- a/frameworks/Python/aiohttp/requirements.txt +++ b/frameworks/Python/aiohttp/requirements.txt @@ -1,9 +1,9 @@ -aiohttp==3.7.4 -asyncpg==0.23.0 +aiohttp==3.8.1 +asyncpg==0.25.0 cchardet==2.1.7 gunicorn==20.1 -jinja2==3.0.1 -psycopg2==2.8.6 -SQLAlchemy==1.4.18 -ujson==5.1.0 -uvloop==0.15.2 +jinja2==3.0.3 +psycopg2==2.9.2 +SQLAlchemy==1.4.29 +ujson==5.4.0 +uvloop==0.16 diff --git a/frameworks/Python/aioworkers/README.md b/frameworks/Python/aioworkers/README.md new file mode 100644 index 00000000000..68d1785ee91 --- /dev/null +++ b/frameworks/Python/aioworkers/README.md @@ -0,0 +1,34 @@ +# [Aioworkers](https://github.com/aioworkers) Benchmarking Test + +This is the aioworkers portion of a [benchmarking tests suite](../../) +comparing a variety of web development platforms. + +The information below is specific to aioworkers. For further guidance, +review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). 
+Also note that there is additional information provided in +the [Python README](../). + +## Description + +[aioworkers](https://github.com/aioworkers) is a configurable workers +based on asyncio for Python 3. + + +## Implementation + +Aioworkers is implemented using: + +* The uvloop event loop. +* The httptools HTTP parsing library. + + +## Test sources + +All of the test implementations are located within +[config.yaml](config.yaml), [app.py](app.py), +[config-pg.yaml](config-pg.yaml) and [pg.py](pg.py). + + +## Resources + +* [Repo](https://github.com/aioworkers) diff --git a/frameworks/Python/aioworkers/aioworkers-pypy.dockerfile b/frameworks/Python/aioworkers/aioworkers-pypy.dockerfile new file mode 100644 index 00000000000..660d0872856 --- /dev/null +++ b/frameworks/Python/aioworkers/aioworkers-pypy.dockerfile @@ -0,0 +1,14 @@ +FROM pypy:3.8-bullseye + +ADD ./requirements.txt /aioworkers/ + +WORKDIR /aioworkers + +RUN pip3 install -U pip && \ + pip3 install -r /aioworkers/requirements.txt + +ADD ./ /aioworkers + +EXPOSE 8080 + +CMD aioworkers aioworkers.net.web --multiprocessing -c config.yaml diff --git a/frameworks/Python/aioworkers/aioworkers.dockerfile b/frameworks/Python/aioworkers/aioworkers.dockerfile new file mode 100644 index 00000000000..81c2ce3ca50 --- /dev/null +++ b/frameworks/Python/aioworkers/aioworkers.dockerfile @@ -0,0 +1,18 @@ +FROM python:3.9-alpine +RUN apk add --no-cache coreutils make gcc +RUN apk add --no-cache python3-dev musl-dev libffi-dev +RUN pip3 install uvloop + +ADD ./requirements.txt /aioworkers/ +ADD ./requirements-pg.txt /aioworkers/ + +WORKDIR /aioworkers + +RUN pip3 install -U pip && \ + pip3 install -r /aioworkers/requirements-pg.txt + +ADD ./ /aioworkers + +EXPOSE 8080 + +CMD aioworkers aioworkers.net.web --multiprocessing -c config.yaml -c config-pg.yaml diff --git a/frameworks/Python/aioworkers/app.py b/frameworks/Python/aioworkers/app.py new file mode 100755 index 00000000000..37970f04ed3 --- /dev/null +++ b/frameworks/Python/aioworkers/app.py @@ -0,0 +1,6 @@ +async def json_serialization(): + return {"message": "Hello, world!"} + + +async def plaintext_serialization(): + return "Hello, world!" 
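The two handlers above are the complete /json and /plaintext implementation; config.yaml (added further down in this patch) maps them onto routes. As a rough local smoke test, assuming the dependencies from requirements.txt are installed and that the server listens on port 8080 as declared in benchmark_config.json, something like this should exercise both endpoints:

```shell
pip3 install -r requirements.txt
aioworkers aioworkers.net.web --multiprocessing -c config.yaml &
curl http://localhost:8080/json
curl http://localhost:8080/plaintext
```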
diff --git a/frameworks/Python/aioworkers/benchmark_config.json b/frameworks/Python/aioworkers/benchmark_config.json new file mode 100644 index 00000000000..1f6ff02dc76 --- /dev/null +++ b/frameworks/Python/aioworkers/benchmark_config.json @@ -0,0 +1,45 @@ +{ + "framework": "aioworkers", + "tests": [{ + "default": { + "json_url": "/json", + "fortune_url": "/fortunes", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "framework": "aioworkers", + "language": "Python", + "flavor": "Python3", + "platform": "None", + "webserver": "aioworkers.net.web", + "os": "Linux", + "orm": "Raw", + "database_os": "Linux", + "database": "Postgres", + "display_name": "aioworkers", + "notes": "" + }, + "pypy": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "framework": "aioworkers", + "language": "Python", + "flavor": "PyPy3", + "platform": "None", + "webserver": "aioworkers.net.web", + "os": "Linux", + "orm": "Raw", + "database_os": "Linux", + "database": "Postgres", + "display_name": "aioworkers", + "notes": "" + } + }] +} diff --git a/frameworks/Python/aioworkers/config-pg.yaml b/frameworks/Python/aioworkers/config-pg.yaml new file mode 100644 index 00000000000..bba1fd119d3 --- /dev/null +++ b/frameworks/Python/aioworkers/config-pg.yaml @@ -0,0 +1,36 @@ +env: + pg.connection: + user: PGUSER + password: PGPASS + +web: + resources: + /db: + get: pg.single_database_query + /queries: + get: pg.multiple_database_queries + /fortunes: + get: pg.fortunes + /updates: + get: pg.database_updates + +pg: + cls: pg.PG + connection: + dsn: postgresql://tfb-database:5432/hello_world + username: benchmarkdbuser + password: benchmarkdbpass + +templates: + cls: pg.Templates + fortune: | + + + Fortunes + + + + {% for fortune in fortunes %} + {% endfor %}
idmessage
{{ fortune[0] }}{{ fortune[1]|e }}
+ + diff --git a/frameworks/Python/aioworkers/config.yaml b/frameworks/Python/aioworkers/config.yaml new file mode 100644 index 00000000000..611dbf13782 --- /dev/null +++ b/frameworks/Python/aioworkers/config.yaml @@ -0,0 +1,34 @@ +http: + groups: [web] + +processes: + web: + cpus: 1 + groups: [web] + +web: + groups: [web] + resources: + /plaintext: + get: app.plaintext_serialization + /json: + get: app.json_serialization + +logging: + version: 1 + disable_existing_loggers: false + root: + level: ERROR + handlers: [console] + formatters: + console: + format: >- + [%(asctime)s.%(msecs)03d] + [%(processName)s %(process)s] + [%(levelname)1.1s]: %(message)s + datefmt: '%Y.%m.%d %H:%M:%S' + handlers: + console: + level: DEBUG + class: logging.StreamHandler + formatter: console diff --git a/frameworks/Python/aioworkers/pg.py b/frameworks/Python/aioworkers/pg.py new file mode 100644 index 00000000000..e2ec0cbea21 --- /dev/null +++ b/frameworks/Python/aioworkers/pg.py @@ -0,0 +1,114 @@ +import logging +from operator import itemgetter +from random import randint + +import asyncpg.exceptions +import jinja2 +from aioworkers_pg.base import Connector + +from aioworkers.core.base import AbstractEntity +from aioworkers.core.config import ValueExtractor +from aioworkers.net.uri import URI + +READ_ROW_SQL = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1' +WRITE_ROW_SQL = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' +ADDITIONAL_ROW = [0, "Additional fortune added at request time."] +sort_fortunes_key = itemgetter(1) +logger = logging.getLogger(__name__) + + +class PG(Connector): + def set_config(self, config: ValueExtractor) -> None: + cfg = config.connection + dsn: URI = cfg.get_uri("dsn").with_auth( + username=cfg.get("username"), + password=cfg.get("password"), + ) + super().set_config(config.new_child(dsn=dsn)) + + +class Templates(AbstractEntity): + fortune: jinja2.Template + + def set_config(self, config): + super().set_config(config) + self.fortune = jinja2.Template(config.fortune) + + +def get_num_queries(request): + query_count = request.url.query.get_int("queries") + if query_count is None: + return 1 + elif query_count < 1: + return 1 + elif query_count > 500: + return 500 + return query_count + + +async def single_database_query(context): + row_id = randint(1, 10000) + + async with context.pg.pool.acquire() as connection: + number = await connection.fetchval(READ_ROW_SQL, row_id) + + return {"id": row_id, "randomNumber": number} + + +async def multiple_database_queries(context, request): + num_queries = get_num_queries(request) + row_ids = [randint(1, 10000) for _ in range(num_queries)] + worlds = [] + + async with context.pg.pool.acquire() as connection: + statement = await connection.prepare(READ_ROW_SQL) + for row_id in row_ids: + number = await statement.fetchval(row_id) + worlds.append({"id": row_id, "randomNumber": number}) + + return worlds + + +async def fortunes(context, request): + async with context.pg.pool.acquire() as connection: + fortunes = await connection.fetch("SELECT * FROM Fortune") + + fortunes.append(ADDITIONAL_ROW) + fortunes.sort(key=sort_fortunes_key) + content = context.templates.fortune.render(fortunes=fortunes) + + return request.response( + content.encode(), + headers=[ + ("Content-Type", "text/html; charset=utf-8"), + ], + ) + + +async def database_updates(context, request): + num_queries = get_num_queries(request) + uniq = {randint(1, 10000) for _ in range(num_queries)} + while len(uniq) < num_queries: + uniq.add(randint(1, 10000)) + updates = [ + 
(row_id, randint(1, 10000)) for row_id in uniq + ] + worlds = [ + {"id": row_id, "randomNumber": number} for row_id, number in updates + ] + + async with context.pg.pool.acquire() as connection: + statement = await connection.prepare(READ_ROW_SQL) + for row_id, number in updates: + await statement.fetchval(row_id) + for _ in range(99): + try: + await connection.executemany(WRITE_ROW_SQL, updates) + except asyncpg.exceptions.DeadlockDetectedError as e: + logger.debug('Deadlock %s', e) + else: + break + else: + worlds.clear() + + return worlds diff --git a/frameworks/Python/aioworkers/requirements-pg.txt b/frameworks/Python/aioworkers/requirements-pg.txt new file mode 100644 index 00000000000..716b02571a1 --- /dev/null +++ b/frameworks/Python/aioworkers/requirements-pg.txt @@ -0,0 +1,5 @@ +-r requirements.txt +uvloop==0.16.0 +asyncpg==0.25.0 +aioworkers-pg==0.2.0 +Jinja2==3.0.3 diff --git a/frameworks/Python/aioworkers/requirements.txt b/frameworks/Python/aioworkers/requirements.txt new file mode 100644 index 00000000000..51363aa6c87 --- /dev/null +++ b/frameworks/Python/aioworkers/requirements.txt @@ -0,0 +1,3 @@ +aioworkers==0.21a1 +httptools==0.3.0 +PyYAML==6.0 diff --git a/frameworks/Python/apidaora/requirements.txt b/frameworks/Python/apidaora/requirements.txt index 192e944ff61..90e4dfac0b7 100644 --- a/frameworks/Python/apidaora/requirements.txt +++ b/frameworks/Python/apidaora/requirements.txt @@ -1,6 +1,6 @@ asyncpg==0.21.0 gunicorn==20.0.4 -jinja2==2.11.3 +jinja2==3.0.3 uvloop==0.14.0 uvicorn==0.11.7 apidaora==0.26.0 diff --git a/frameworks/Python/blacksheep/README.md b/frameworks/Python/blacksheep/README.md deleted file mode 100644 index b0aa942adce..00000000000 --- a/frameworks/Python/blacksheep/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# BlackSheep Benchmark Test - -This is the BlackSheep portion of a [benchmarking tests suite](../../) -comparing a variety of web development platforms. - -The information below is specific to BlackSheep. For further guidance, -review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). -Also note that there is additional information provided in -the [Python README](../). - -## Description - -[BlackSheep](https://github.com/RobertoPrevato/BlackSheep) is a fast HTTP Server/Client microframework for Python [asyncio](https://docs.python.org/3/library/asyncio.html), using [Cython](https://cython.org), -[`uvloop`](https://magic.io/blog/uvloop-blazing-fast-python-networking/), and -[`httptools`](https://github.com/MagicStack/httptools). - -


- - -## Implementation - -BlackSheep is implemented using: - -* [asyncio](https://docs.python.org/3/library/asyncio.html). -* [Cython](https://cython.org) -* [`uvloop`](https://magic.io/blog/uvloop-blazing-fast-python-networking/). -* [`httptools`](https://github.com/MagicStack/httptools). -* Python built-in multiprocessing module. - -## Test Paths & Sources - -All of the test implementations are located within a single file ([app.py](app.py)). - -## Resources - -* [Repo](https://github.com/RobertoPrevato/BlackSheep) diff --git a/frameworks/Python/blacksheep/app.py b/frameworks/Python/blacksheep/app.py deleted file mode 100644 index 79f09e6f45b..00000000000 --- a/frameworks/Python/blacksheep/app.py +++ /dev/null @@ -1,145 +0,0 @@ -import os -import ujson -import asyncpg -from random import randint -from multiprocessing import cpu_count -from blacksheep.server import Application -from blacksheep import Response, Header, Content -from jinja2 import Template -json_dumps = ujson.dumps - - -async def configure_db(app): - global db_pool - db_pool = await asyncpg.create_pool( - user=os.getenv('PGUSER', 'benchmarkdbuser'), - password=os.getenv('PGPASS', 'benchmarkdbpass'), - database='hello_world', - host='tfb-database', - port=5432 - ) - - -def load_fortunes_template(): - path = os.path.join('templates', 'fortune.html') - with open(path, 'r') as template_file: - template_text = template_file.read() - return Template(template_text) - - -db_pool = None -fortune_template = load_fortunes_template() - -app = Application() -app.on_start += configure_db - - -def get_num_queries(request): - try: - value = request.query.get('queries') - if value is None: - return 1 - - query_count = int(value[0]) - except (KeyError, IndexError, ValueError): - return 1 - - if query_count < 1: - return 1 - if query_count > 500: - return 500 - return query_count - - -@app.route('/json') -async def json_test(request): - """Test type 1: JSON Serialization""" - - return Response(200, content=Content(b'application/json; charset=utf-8', - json_dumps({'message': 'Hello, world!'}).encode('utf-8'))) - - -@app.route('/db') -async def single_db_query_test(request): - """Test type 2: Single Database Query""" - - row_id = randint(1, 10000) - connection = await db_pool.acquire() - try: - number = await connection.fetchval('SELECT "randomnumber", "id" FROM "world" WHERE id = $1', row_id) - world = {'id': row_id, 'randomNumber': number} - finally: - await db_pool.release(connection) - - return Response(200, content=Content(b'application/json; charset=utf-8', - json_dumps(world).encode('utf-8'))) - - -@app.route('/queries') -async def multiple_db_queries_test(request): - """Test type 3: Multiple Database Queries""" - - num_queries = get_num_queries(request) - - row_ids = [randint(1, 10000) for _ in range(num_queries)] - worlds = [] - - connection = await db_pool.acquire() - try: - statement = await connection.prepare('SELECT "randomnumber", "id" FROM "world" WHERE id = $1') - for row_id in row_ids: - number = await statement.fetchval(row_id) - worlds.append({'id': row_id, 'randomNumber': number}) - finally: - await db_pool.release(connection) - - return Response(200, content=Content(b'application/json; charset=utf-8', - json_dumps(worlds).encode('utf-8'))) - - -@app.route('/fortunes') -async def fortunes_test(request): - """Test type 4: Fortunes""" - - connection = await db_pool.acquire() - - try: - fortunes = await connection.fetch('SELECT * FROM Fortune') - finally: - await db_pool.release(connection) - - 
fortunes.append([0, 'Additional fortune added at request time.']) - fortunes.sort(key=lambda x: x[1]) - - return Response(200, [ - Header(b'Cache-Control', b'no-cache') - ], content=Content(b'text/html; charset=utf-8', fortune_template.render(fortunes=fortunes).encode('utf8'))) - - -@app.route('/updates') -async def db_updates_test(request): - """Test type 5: Database Updates""" - - num_queries = get_num_queries(request) - - updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] - worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] - - connection = await db_pool.acquire() - try: - statement = await connection.prepare('SELECT "randomnumber", "id" FROM "world" WHERE id = $1') - for row_id, _ in updates: - await statement.fetchval(row_id) - await connection.executemany('UPDATE "world" SET "randomnumber"=$1 WHERE id=$2', updates) - finally: - await db_pool.release(connection) - - return Response(200, content=Content(b'application/json', - json_dumps(worlds).encode('utf-8'))) - - -@app.route('/plaintext') -async def plaintext_test(request): - """Test type 6: Plaintext""" - - return Response(200, content=Content(b'text/plain', b'Hello, World!')) diff --git a/frameworks/Python/blacksheep/benchmark_config.json b/frameworks/Python/blacksheep/benchmark_config.json deleted file mode 100644 index 6cecf7a6080..00000000000 --- a/frameworks/Python/blacksheep/benchmark_config.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "framework": "blacksheep", - "tests": [{ - "default": { - "json_url": "/json", - "fortune_url": "/fortunes", - "plaintext_url": "/plaintext", - "db_url": "/db", - "query_url": "/queries?queries=", - "update_url": "/updates?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Platform", - "framework": "blacksheep", - "language": "Python", - "flavor": "Python3", - "platform": "None", - "webserver": "None", - "os": "Linux", - "orm": "Raw", - "database_os": "Linux", - "database": "Postgres", - "display_name": "blacksheep", - "notes": "" - } - }] -} diff --git a/frameworks/Python/blacksheep/blacksheep.dockerfile b/frameworks/Python/blacksheep/blacksheep.dockerfile deleted file mode 100644 index 49aeaa92a00..00000000000 --- a/frameworks/Python/blacksheep/blacksheep.dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -FROM python:3.8 - -WORKDIR /blacksheep - -RUN pip3 install cython==0.29.13 - -ADD requirements.txt /blacksheep/ - -RUN pip3 install -r /blacksheep/requirements.txt - -ADD templates/fortune.html /blacksheep/templates/ - -ADD blacksheep_conf.py app.py /blacksheep/ - -EXPOSE 8080 - -CMD gunicorn app:app -k uvicorn.workers.UvicornWorker -c blacksheep_conf.py diff --git a/frameworks/Python/blacksheep/blacksheep_conf.py b/frameworks/Python/blacksheep/blacksheep_conf.py deleted file mode 100644 index 4f4e08a729e..00000000000 --- a/frameworks/Python/blacksheep/blacksheep_conf.py +++ /dev/null @@ -1,14 +0,0 @@ -import multiprocessing -import os - -_is_travis = os.environ.get('TRAVIS') == 'true' - -workers = multiprocessing.cpu_count() -if _is_travis: - workers = 2 - -bind = "0.0.0.0:8080" -keepalive = 120 -errorlog = '-' -pidfile = '/tmp/blacksheep.pid' -loglevel = 'error' diff --git a/frameworks/Python/blacksheep/requirements.txt b/frameworks/Python/blacksheep/requirements.txt deleted file mode 100644 index 0bb7341eac1..00000000000 --- a/frameworks/Python/blacksheep/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -asyncpg==0.21.0 -gunicorn==20.0.4 -Jinja2==2.11.3 -blacksheep==0.2.2 -ujson==2.0.3 -uvloop==0.14.0 -uvicorn==0.11.7 diff 
--git a/frameworks/Python/bottle/app.py b/frameworks/Python/bottle/app.py index 23628dbcf28..140b865b7d6 100644 --- a/frameworks/Python/bottle/app.py +++ b/frameworks/Python/bottle/app.py @@ -124,7 +124,7 @@ def get_random_world_raw(): return json.dumps(worlds) -@app.route("/fortune") +@app.route("/fortunes") def fortune_orm(db): response.headers['Date'] = formatdate(timeval=None, localtime=False, usegmt=True) fortunes=db.query(Fortune).all() diff --git a/frameworks/Python/bottle/benchmark_config.json b/frameworks/Python/bottle/benchmark_config.json index 7ce6ba00633..39d6e39864e 100644 --- a/frameworks/Python/bottle/benchmark_config.json +++ b/frameworks/Python/bottle/benchmark_config.json @@ -5,7 +5,7 @@ "json_url": "/json", "db_url": "/db", "query_url": "/queries?queries=", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "update_url": "/updates?queries=", "plaintext_url": "/plaintext", "port": 8080, @@ -28,7 +28,7 @@ "json_url": "/json", "db_url": "/db", "query_url": "/queries?queries=", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "update_url": "/updates?queries=", "plaintext_url": "/plaintext", "port": 8080, @@ -72,7 +72,7 @@ "json_url": "/json", "db_url": "/db", "query_url": "/queries?queries=", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "update_url": "/updates?queries=", "plaintext_url": "/plaintext", "port": 8080, diff --git a/frameworks/Python/django/requirements.txt b/frameworks/Python/django/requirements.txt index 08779aeee17..0c8cdd97725 100644 --- a/frameworks/Python/django/requirements.txt +++ b/frameworks/Python/django/requirements.txt @@ -5,4 +5,4 @@ meinheld==1.0.2 mysqlclient==1.4.6 psycopg2==2.8.6 pytz==2020.4 -ujson==4.0.1 \ No newline at end of file +ujson==5.4.0 \ No newline at end of file diff --git a/frameworks/Python/falcon/requirements.txt b/frameworks/Python/falcon/requirements.txt index a6616554ecb..e0adb93da14 100644 --- a/frameworks/Python/falcon/requirements.txt +++ b/frameworks/Python/falcon/requirements.txt @@ -6,4 +6,4 @@ meinheld==1.0.2 orjson==3.6.5 pony==0.7.14 psycopg2-binary==2.9.3; implementation_name=='cpython' -waitress==2.0.0 +waitress==2.1.2 diff --git a/frameworks/Python/fastapi/app.py b/frameworks/Python/fastapi/app.py index 1c2ecd001c9..410eeb5440d 100755 --- a/frameworks/Python/fastapi/app.py +++ b/frameworks/Python/fastapi/app.py @@ -3,8 +3,8 @@ import os import jinja2 from fastapi import FastAPI -from starlette.responses import HTMLResponse, UJSONResponse, PlainTextResponse -from random import randint +from starlette.responses import HTMLResponse, JSONResponse, PlainTextResponse +from random import randint, sample from operator import itemgetter from urllib.parse import parse_qs @@ -14,18 +14,6 @@ ADDITIONAL_ROW = [0, 'Additional fortune added at request time.'] - -async def setup_database(): - global connection_pool - connection_pool = await asyncpg.create_pool( - user=os.getenv('PGUSER', 'benchmarkdbuser'), - password=os.getenv('PGPASS', 'benchmarkdbpass'), - database='hello_world', - host='tfb-database', - port=5432 - ) - - def load_fortunes_template(): path = os.path.join('templates', 'fortune.html') with open(path, 'r') as template_file: @@ -49,16 +37,25 @@ def get_num_queries(queries): connection_pool = None sort_fortunes_key = itemgetter(1) template = load_fortunes_template() -loop = asyncio.get_event_loop() -loop.run_until_complete(setup_database()) - app = FastAPI() +@app.on_event("startup") +async def setup_database(): + global connection_pool + connection_pool = await asyncpg.create_pool( 
+ user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database='hello_world', + host='tfb-database', + port=5432 + ) + + @app.get('/json') async def json_serialization(): - return UJSONResponse({'message': 'Hello, world!'}) + return JSONResponse({'message': 'Hello, world!'}) @app.get('/db') @@ -68,14 +65,13 @@ async def single_database_query(): async with connection_pool.acquire() as connection: number = await connection.fetchval(READ_ROW_SQL, row_id) - return UJSONResponse({'id': row_id, 'randomNumber': number}) + return JSONResponse({'id': row_id, 'randomNumber': number}) @app.get('/queries') async def multiple_database_queries(queries = None): - num_queries = get_num_queries(queries) - row_ids = [randint(1, 10000) for _ in range(num_queries)] + row_ids = sample(range(1, 10000), num_queries) worlds = [] async with connection_pool.acquire() as connection: @@ -84,7 +80,7 @@ async def multiple_database_queries(queries = None): number = await statement.fetchval(row_id) worlds.append({'id': row_id, 'randomNumber': number}) - return UJSONResponse(worlds) + return JSONResponse(worlds) @app.get('/fortunes') @@ -101,7 +97,7 @@ async def fortunes(): @app.get('/updates') async def database_updates(queries = None): num_queries = get_num_queries(queries) - updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] + updates = [(row_id, randint(1, 10000)) for row_id in sample(range(1, 10000), num_queries)] worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] async with connection_pool.acquire() as connection: @@ -110,7 +106,7 @@ async def database_updates(queries = None): await statement.fetchval(row_id) await connection.executemany(WRITE_ROW_SQL, updates) - return UJSONResponse(worlds) + return JSONResponse(worlds) @app.get('/plaintext') diff --git a/frameworks/Python/fastapi/app_orjson.py b/frameworks/Python/fastapi/app_orjson.py index 5e91beeda17..2fdb8d93c35 100755 --- a/frameworks/Python/fastapi/app_orjson.py +++ b/frameworks/Python/fastapi/app_orjson.py @@ -4,12 +4,10 @@ import jinja2 from fastapi import FastAPI from fastapi.responses import HTMLResponse, ORJSONResponse, PlainTextResponse -from random import randint +from random import randint, sample from operator import itemgetter from functools import partial -_randint = partial(randint, 1, 10000) - READ_ROW_SQL = 'SELECT "id", "randomnumber" FROM "world" WHERE id = $1' WRITE_ROW_SQL = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' ADDITIONAL_ROW = [0, "Additional fortune added at request time."] @@ -64,7 +62,7 @@ async def json_serialization(): @app.get("/db") async def single_database_query(): async with connection_pool.acquire() as connection: - record = await connection.fetchrow(READ_ROW_SQL, _randint()) + record = await connection.fetchrow(READ_ROW_SQL, randint(1, 10000)) return ORJSONResponse({"id": record['id'], "randomNumber": record['randomnumber']}) @@ -72,12 +70,14 @@ async def single_database_query(): @app.get("/queries") async def multiple_database_queries(queries=None): num_queries = get_num_queries(queries) - worlds = tuple(map(lambda _: {"id": _randint(), "randomNumber": None}, range(num_queries))) + row_ids = sample(range(1, 10000), num_queries) + worlds = [] async with connection_pool.acquire() as connection: statement = await connection.prepare(READ_ROW_SQL) - for world in worlds: - world["randomNumber"] = await statement.fetchval(world["id"]) + for row_id in row_ids: + number = await statement.fetchval(row_id) + 
worlds.append({'id': row_id, 'randomNumber': number}) return ORJSONResponse(worlds) @@ -96,7 +96,7 @@ async def fortunes(): @app.get("/updates") async def database_updates(queries=None): num_queries = get_num_queries(queries) - updates = [(_randint(), _randint()) for _ in range(num_queries)] + updates = [(row_id, randint(1, 10000)) for row_id in sample(range(1, 10000), num_queries)] worlds = [{"id": row_id, "randomNumber": number} for row_id, number in updates] async with connection_pool.acquire() as connection: diff --git a/frameworks/Python/fastapi/requirements-orjson.txt b/frameworks/Python/fastapi/requirements-orjson.txt index 9ddf454636d..5bc0989cd0a 100644 --- a/frameworks/Python/fastapi/requirements-orjson.txt +++ b/frameworks/Python/fastapi/requirements-orjson.txt @@ -1,6 +1,7 @@ asyncpg==0.21.0 gunicorn==20.0.4 Jinja2==2.11.3 +markupsafe==2.0.1 fastapi==0.65.2 orjson==2.6.5 uvicorn==0.11.3 diff --git a/frameworks/Python/fastapi/requirements.txt b/frameworks/Python/fastapi/requirements.txt index e730d9b2f2b..dc928416e92 100644 --- a/frameworks/Python/fastapi/requirements.txt +++ b/frameworks/Python/fastapi/requirements.txt @@ -1,7 +1,8 @@ asyncpg==0.21.0 gunicorn==20.0.4 Jinja2==2.11.3 -ujson==2.0.3 +markupsafe==2.0.1 +ujson==5.4.0 uvloop==0.14.0 uvicorn==0.11.3 fastapi==0.65.2 diff --git a/frameworks/Python/flask/requirements-pypy.txt b/frameworks/Python/flask/requirements-pypy.txt index 3392cb7b468..132102fdc92 100644 --- a/frameworks/Python/flask/requirements-pypy.txt +++ b/frameworks/Python/flask/requirements-pypy.txt @@ -11,4 +11,4 @@ MarkupSafe==2.0.1 meinheld==1.0.2 uWSGI==2.0.19.1 Werkzeug==2.0.1 -ujson==5.1.0 \ No newline at end of file +ujson==5.4.0 \ No newline at end of file diff --git a/frameworks/Python/flask/requirements.txt b/frameworks/Python/flask/requirements.txt index 253036cdbd5..89d3bb2e297 100644 --- a/frameworks/Python/flask/requirements.txt +++ b/frameworks/Python/flask/requirements.txt @@ -12,5 +12,5 @@ MarkupSafe==2.0.1 meinheld==1.0.2 uWSGI==2.0.19.1 Werkzeug==2.0.1 -ujson==5.1.0 +ujson==5.4.0 orjson==3.6.0 diff --git a/frameworks/Python/granian/README.md b/frameworks/Python/granian/README.md new file mode 100644 index 00000000000..8c7a150dbed --- /dev/null +++ b/frameworks/Python/granian/README.md @@ -0,0 +1,29 @@ +# Granian Benchmark Test + +This is the Granian portion of a [benchmarking tests suite](../../) comparing a variety of web development platforms. + +The information below is specific to Granian. For further guidance, review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). + +Also note that there is additional information provided in the [Python README](../). + +## Description + +[Granian](https://github.com/emmett-framework/granian) is an asyncio server for Python applications. 
+ +## Test Paths & Source + +Granian includes two different implementations: + +- ASGI implementation in the [app\_asgi.py](app_asgi.py) +- RSGI implementation in the [app\_rsgi.py](app_rsgi.py) + +Both implementations includes the following tests: + +* JSON Serialization: "/json" +* Plaintext: "/plaintext" + +*Replace # with an actual number.* + +## Resources + +* [Github repository](https://github.com/emmett-framework/granian) diff --git a/frameworks/Python/granian/app_asgi.py b/frameworks/Python/granian/app_asgi.py new file mode 100644 index 00000000000..7799b5f9064 --- /dev/null +++ b/frameworks/Python/granian/app_asgi.py @@ -0,0 +1,193 @@ +import asyncio +import os + +from operator import itemgetter +from pathlib import Path +from random import randint +from urllib.parse import parse_qs + +import asyncpg +import jinja2 +import orjson + + +async def pg_setup(): + global pool + pool = await asyncpg.create_pool( + user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database='hello_world', + host='tfb-database', + port=5432 + ) + + +SQL_SELECT = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1' +SQL_UPDATE = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' +ROW_ADD = [0, 'Additional fortune added at request time.'] + +JSON_RESPONSE = { + 'type': 'http.response.start', + 'status': 200, + 'headers': [ + [b'content-type', b'application/json'], + ] +} +HTML_RESPONSE = { + 'type': 'http.response.start', + 'status': 200, + 'headers': [ + [b'content-type', b'text/html; charset=utf-8'], + ] +} +PLAINTEXT_RESPONSE = { + 'type': 'http.response.start', + 'status': 200, + 'headers': [ + [b'content-type', b'text/plain; charset=utf-8'], + ] +} + +pool = None +key = itemgetter(1) +json_dumps = orjson.dumps + +with Path('templates/fortune.html').open('r') as f: + template = jinja2.Template(f.read()) + +asyncio.get_event_loop().run_until_complete(pg_setup()) + + +def get_num_queries(scope): + try: + query_string = scope['query_string'] + query_count = int(parse_qs(query_string)[b'queries'][0]) + except (KeyError, IndexError, ValueError): + return 1 + + if query_count < 1: + return 1 + if query_count > 500: + return 500 + return query_count + + +async def route_json(scope, receive, send): + await send(JSON_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': json_dumps({'message': 'Hello, world!'}), + 'more_body': False + }) + + +async def route_db(scope, receive, send): + row_id = randint(1, 10000) + connection = await pool.acquire() + try: + number = await connection.fetchval(SQL_SELECT, row_id) + world = {'id': row_id, 'randomNumber': number} + finally: + await pool.release(connection) + + await send(JSON_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': json_dumps(world), + 'more_body': False + }) + + +async def route_queries(scope, receive, send): + num_queries = get_num_queries(scope) + row_ids = [randint(1, 10000) for _ in range(num_queries)] + worlds = [] + + connection = await pool.acquire() + try: + statement = await connection.prepare(SQL_SELECT) + for row_id in row_ids: + number = await statement.fetchval(row_id) + worlds.append({'id': row_id, 'randomNumber': number}) + finally: + await pool.release(connection) + + await send(JSON_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': json_dumps(worlds), + 'more_body': False + }) + + +async def route_fortunes(scope, receive, send): + connection = await pool.acquire() + try: + fortunes = await connection.fetch('SELECT * FROM Fortune') + 
finally: + await pool.release(connection) + + fortunes.append(ROW_ADD) + fortunes.sort(key=key) + content = template.render(fortunes=fortunes).encode('utf-8') + await send(HTML_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': content, + 'more_body': False + }) + + +async def route_updates(scope, receive, send): + num_queries = get_num_queries(scope) + updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] + worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] + + connection = await pool.acquire() + try: + statement = await connection.prepare(SQL_SELECT) + for row_id, _ in updates: + await statement.fetchval(row_id) + await connection.executemany(SQL_UPDATE, updates) + finally: + await pool.release(connection) + + await send(JSON_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': json_dumps(worlds), + 'more_body': False + }) + + +async def route_plaintext(scope, receive, send): + await send(PLAINTEXT_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': b'Hello, world!', + 'more_body': False + }) + + +async def handle_404(scope, receive, send): + await send(PLAINTEXT_RESPONSE) + await send({ + 'type': 'http.response.body', + 'body': b'Not found', + 'more_body': False + }) + + +routes = { + '/json': route_json, + '/db': route_db, + '/queries': route_queries, + '/fortunes': route_fortunes, + '/updates': route_updates, + '/plaintext': route_plaintext +} + + +def main(scope, receive, send): + handler = routes.get(scope['path'], handle_404) + return handler(scope, receive, send) diff --git a/frameworks/Python/granian/app_rsgi.py b/frameworks/Python/granian/app_rsgi.py new file mode 100644 index 00000000000..00b7683abc7 --- /dev/null +++ b/frameworks/Python/granian/app_rsgi.py @@ -0,0 +1,141 @@ +import asyncio +import os + +from operator import itemgetter +from pathlib import Path +from random import randint +from urllib.parse import parse_qs + +import asyncpg +import jinja2 +import orjson + +from granian.rsgi import Response + + +async def pg_setup(): + global pool + pool = await asyncpg.create_pool( + user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database='hello_world', + host='tfb-database', + port=5432 + ) + + +SQL_SELECT = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1' +SQL_UPDATE = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' +ROW_ADD = [0, 'Additional fortune added at request time.'] + +JSON_HEADERS = {'content-type': 'application/json'} +HTML_HEADERS = {'content-type': 'text/html; charset=utf-8'} +PLAINTEXT_HEADERS = {'content-type': 'text/plain; charset=utf-8'} + +pool = None +key = itemgetter(1) +json_dumps = orjson.dumps + +with Path('templates/fortune.html').open('r') as f: + template = jinja2.Template(f.read()) + +asyncio.get_event_loop().run_until_complete(pg_setup()) + + +def get_num_queries(scope): + try: + query_count = int(parse_qs(scope.query_string)['queries'][0]) + except (KeyError, IndexError, ValueError): + return 1 + + if query_count < 1: + return 1 + if query_count > 500: + return 500 + return query_count + + +async def route_json(scope, proto): + return Response.bytes(json_dumps({'message': 'Hello, world!'}), 200, JSON_HEADERS) + + +async def route_db(scope, proto): + row_id = randint(1, 10000) + connection = await pool.acquire() + try: + number = await connection.fetchval(SQL_SELECT, row_id) + world = {'id': row_id, 'randomNumber': number} + finally: + await pool.release(connection) + + return 
Response.bytes(json_dumps(world), 200, JSON_HEADERS) + + +async def route_queries(scope, proto): + num_queries = get_num_queries(scope) + row_ids = [randint(1, 10000) for _ in range(num_queries)] + worlds = [] + + connection = await pool.acquire() + try: + statement = await connection.prepare(SQL_SELECT) + for row_id in row_ids: + number = await statement.fetchval(row_id) + worlds.append({'id': row_id, 'randomNumber': number}) + finally: + await pool.release(connection) + + return Response.bytes(json_dumps(worlds), 200, JSON_HEADERS) + + +async def route_fortunes(scope, proto): + connection = await pool.acquire() + try: + fortunes = await connection.fetch('SELECT * FROM Fortune') + finally: + await pool.release(connection) + + fortunes.append(ROW_ADD) + fortunes.sort(key=key) + content = template.render(fortunes=fortunes) + return Response.str(content, 200, HTML_HEADERS) + + +async def route_updates(scope, proto): + num_queries = get_num_queries(scope) + updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] + worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] + + connection = await pool.acquire() + try: + statement = await connection.prepare(SQL_SELECT) + for row_id, _ in updates: + await statement.fetchval(row_id) + await connection.executemany(SQL_UPDATE, updates) + finally: + await pool.release(connection) + + return Response.bytes(json_dumps(worlds), 200, JSON_HEADERS) + + +async def route_plaintext(scope, proto): + return Response.bytes(b'Hello, world!', 200, PLAINTEXT_HEADERS) + + +async def handle_404(scope, proto): + return Response.bytes(b'Not found', 404, PLAINTEXT_HEADERS) + + +routes = { + '/json': route_json, + '/db': route_db, + '/queries': route_queries, + '/fortunes': route_fortunes, + '/updates': route_updates, + '/plaintext': route_plaintext +} + + +def main(scope, proto): + handler = routes.get(scope.path, handle_404) + return handler(scope, proto) diff --git a/frameworks/Python/granian/benchmark_config.json b/frameworks/Python/granian/benchmark_config.json new file mode 100644 index 00000000000..01a49055ca4 --- /dev/null +++ b/frameworks/Python/granian/benchmark_config.json @@ -0,0 +1,49 @@ +{ + "framework": "granian", + "tests": [{ + "default": { + "json_url": "/json", + "fortune_url": "/fortunes", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "Postgres", + "framework": "granian", + "language": "Python", + "orm": "Raw", + "platform": "None", + "webserver": "granian", + "os": "Linux", + "database_os": "Linux", + "display_name": "granian [asgi]", + "notes": "", + "versus": "uvicorn" + }, + "rsgi": { + "json_url": "/json", + "fortune_url": "/fortunes", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "Postgres", + "framework": "granian", + "language": "Python", + "orm": "Raw", + "platform": "None", + "webserver": "granian", + "os": "Linux", + "database_os": "Linux", + "display_name": "granian [rsgi]", + "notes": "", + "versus": "uvicorn" + } + }] +} diff --git a/frameworks/C++/wt/config.toml b/frameworks/Python/granian/config.toml similarity index 56% rename from frameworks/C++/wt/config.toml rename to frameworks/Python/granian/config.toml index 22ba9457b78..6bf4f6a4412 100644 --- 
a/frameworks/C++/wt/config.toml +++ b/frameworks/Python/granian/config.toml @@ -1,5 +1,5 @@ [framework] -name = "wt" +name = "granian" [main] urls.plaintext = "/plaintext" @@ -7,28 +7,30 @@ urls.json = "/json" urls.db = "/db" urls.query = "/queries?queries=" urls.update = "/updates?queries=" -urls.fortune = "/fortune" +urls.fortune = "/fortunes" approach = "Realistic" -classification = "Fullstack" -database = "MySQL" +classification = "Platform" +database = "Postgres" database_os = "Linux" os = "Linux" -orm = "Full" +orm = "Raw" platform = "None" -webserver = "None" -versus = "wt" +webserver = "granian" +versus = "uvicorn" -[postgres] +[rsgi] +urls.plaintext = "/plaintext" +urls.json = "/json" urls.db = "/db" urls.query = "/queries?queries=" urls.update = "/updates?queries=" -urls.fortune = "/fortune" +urls.fortune = "/fortunes" approach = "Realistic" -classification = "Fullstack" +classification = "Platform" database = "Postgres" database_os = "Linux" os = "Linux" -orm = "Full" +orm = "Raw" platform = "None" -webserver = "None" -versus = "wt" +webserver = "granian" +versus = "uvicorn" diff --git a/frameworks/Python/granian/granian-rsgi.dockerfile b/frameworks/Python/granian/granian-rsgi.dockerfile new file mode 100644 index 00000000000..e6c7278ed6a --- /dev/null +++ b/frameworks/Python/granian/granian-rsgi.dockerfile @@ -0,0 +1,11 @@ +FROM python:3.10-slim + +ADD ./ /granian + +WORKDIR /granian + +RUN pip install -r /granian/requirements.txt + +EXPOSE 8080 + +CMD python run.py rsgi diff --git a/frameworks/Python/granian/granian.dockerfile b/frameworks/Python/granian/granian.dockerfile new file mode 100644 index 00000000000..6e4816234fd --- /dev/null +++ b/frameworks/Python/granian/granian.dockerfile @@ -0,0 +1,11 @@ +FROM python:3.10-slim + +ADD ./ /granian + +WORKDIR /granian + +RUN pip install -r /granian/requirements.txt + +EXPOSE 8080 + +CMD python run.py asgi diff --git a/frameworks/Python/granian/requirements.txt b/frameworks/Python/granian/requirements.txt new file mode 100644 index 00000000000..6914cae94b1 --- /dev/null +++ b/frameworks/Python/granian/requirements.txt @@ -0,0 +1,5 @@ +asyncpg==0.25.0 +granian==0.1.0a3 +jinja2==3.1.2 +orjson==3.7.2 +uvloop==0.16.0 diff --git a/frameworks/Python/granian/run.py b/frameworks/Python/granian/run.py new file mode 100644 index 00000000000..e129daa067d --- /dev/null +++ b/frameworks/Python/granian/run.py @@ -0,0 +1,20 @@ +import multiprocessing +import sys + +from granian import Granian + + +if __name__ == '__main__': + interface = sys.argv[1] + threads = 1 if interface == "asgi" else None + + Granian( + f"app_{interface}:main", + address="0.0.0.0", + port=8080, + workers=multiprocessing.cpu_count(), + threads=threads, + backlog=2048, + interface=interface, + websockets=False + ).serve() diff --git a/frameworks/Python/blacksheep/templates/fortune.html b/frameworks/Python/granian/templates/fortune.html similarity index 100% rename from frameworks/Python/blacksheep/templates/fortune.html rename to frameworks/Python/granian/templates/fortune.html diff --git a/frameworks/Python/japronto/app_postgres.py b/frameworks/Python/japronto/app_postgres.py new file mode 100644 index 00000000000..d68a92f026c --- /dev/null +++ b/frameworks/Python/japronto/app_postgres.py @@ -0,0 +1,51 @@ +import multiprocessing +from wsgiref.handlers import format_date_time +import random + +import japronto +import ujson as json + +from db import init_db, close_db + + +def get_headers(): + return { + 'Server': 'Japronto/0.1.1', + 'Date': format_date_time(None), + } + + 
+def json_view(request): + return request.Response( + text=json.dumps({'message': 'Hello, world!'}), + mime_type='application/json', + headers=get_headers(), + ) + + +def plaintext_view(request): + return request.Response( + body=b'Hello, world!', + mime_type='text/plain', + headers=get_headers(), + ) + + +async def db_view(request): + async with app.db_pool.acquire() as conn: + world = await conn.fetchrow("select id,randomnumber from world where id=%s" % random.randint(1, 10000)) + return request.Response( + text=json.dumps(dict(world)), + mime_type='application/json', headers=get_headers()) + + +app = japronto.Application() +app.on_startup.append(init_db) +app.on_cleanup.append(close_db) +app.router.add_route('/json', json_view, 'GET') +app.router.add_route('/plaintext', plaintext_view, 'GET') +app.router.add_route('/db', db_view, 'GET') + + +if __name__ == '__main__': + app.run('0.0.0.0', 8080, worker_num=multiprocessing.cpu_count()) diff --git a/frameworks/Python/japronto/benchmark_config.json b/frameworks/Python/japronto/benchmark_config.json index 21bb719516d..60533689a6c 100644 --- a/frameworks/Python/japronto/benchmark_config.json +++ b/frameworks/Python/japronto/benchmark_config.json @@ -18,6 +18,26 @@ "database": "None", "display_name": "Japronto", "notes": "" + }, + "postgres": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url":"/db", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "framework": "japronto", + "database": "Postgres", + "database_os": "Linux", + "language": "Python", + "flavor": "Python3", + "platform": "None", + "webserver": "None", + "os": "Linux", + "orm": "Raw", + "database_os": "Linux", + "display_name": "Japronto", + "notes": "" } }] } diff --git a/frameworks/Python/japronto/db.py b/frameworks/Python/japronto/db.py new file mode 100644 index 00000000000..2609377ff84 --- /dev/null +++ b/frameworks/Python/japronto/db.py @@ -0,0 +1,45 @@ +import asyncio +from contextlib import asynccontextmanager +import asyncpg +import os + + +class Connection(asyncpg.Connection): + async def reset(self, *, timeout=None): + pass + + +class Pool: + def __init__(self, connect_url, max_size=10, connection_class=None): + self._connect_url = connect_url + self._connection_class = connection_class or Connection + self._queue = asyncio.LifoQueue(max_size) + + def __await__(self): + return self._async_init__().__await__() + + async def _async_init__(self): + for _ in range(self._queue.maxsize): + self._queue.put_nowait(await asyncpg.connect(self._connect_url, connection_class=self._connection_class)) + return self + + @asynccontextmanager + async def acquire(self): + conn = await self._queue.get() + try: + yield conn + finally: + self._queue.put_nowait(conn) + + async def close(self): + for _ in range(self._queue.maxsize): + conn = await self._queue.get() + await conn.close() + + +async def init_db(app): + app.db_pool = await Pool("postgresql://%s:%s@tfb-database:5432/hello_world" % (os.getenv("PGUSER", "benchmarkdbuser"), os.getenv("PSPASS", "benchmarkdbpass")), connection_class=asyncpg.Connection) + + +async def close_db(app): + await asyncio.wait_for(app.db_pool.close(), timeout=1) diff --git a/frameworks/Python/japronto/japronto-postgres.dockerfile b/frameworks/Python/japronto/japronto-postgres.dockerfile new file mode 100644 index 00000000000..d868e4b927c --- /dev/null +++ b/frameworks/Python/japronto/japronto-postgres.dockerfile @@ -0,0 +1,11 @@ +FROM python:3.9.7 + +ADD ./ /japronto + +WORKDIR /japronto + +RUN pip3 install -r 
/japronto/requirements_postgres.txt + +EXPOSE 8080 + +CMD python3 app_postgres.py diff --git a/frameworks/Python/japronto/japronto.dockerfile b/frameworks/Python/japronto/japronto.dockerfile index 82f9a9cc1aa..eb91bb3dfa1 100644 --- a/frameworks/Python/japronto/japronto.dockerfile +++ b/frameworks/Python/japronto/japronto.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6.6-stretch +FROM python:3.8.12 ADD ./ /japronto diff --git a/frameworks/Python/japronto/requirements.txt b/frameworks/Python/japronto/requirements.txt index 98f49c3ba3e..a28213a7c90 100644 --- a/frameworks/Python/japronto/requirements.txt +++ b/frameworks/Python/japronto/requirements.txt @@ -1,2 +1,2 @@ git+https://github.com/squeaky-pl/japronto.git#egg=japronto -ujson==1.35 +ujson==5.4.0 diff --git a/frameworks/Python/japronto/requirements_postgres.txt b/frameworks/Python/japronto/requirements_postgres.txt new file mode 100644 index 00000000000..9b268878800 --- /dev/null +++ b/frameworks/Python/japronto/requirements_postgres.txt @@ -0,0 +1,3 @@ +git+https://github.com/IterableTrucks/japronto.git@0d848d96dd010f6701729b14e6b8ec0330002b5c +asyncpg==0.25.0 +ujson==5.4.0 diff --git a/frameworks/Python/quart/app.py b/frameworks/Python/quart/app.py index 1a34e80e305..490909bde45 100755 --- a/frameworks/Python/quart/app.py +++ b/frameworks/Python/quart/app.py @@ -3,11 +3,11 @@ import os import asyncpg -from quart import Quart, jsonify, make_response, request, render_template +from quart import jsonify, Quart, request, render_template app = Quart(__name__) -GET_WORLD = "select id,randomnumber from world where id = $1" +GET_WORLD = "select id, randomnumber from world where id = $1" UPDATE_WORLD = "update world set randomNumber = $2 where id = $1" @@ -27,20 +27,17 @@ async def disconnect_from_db(): await app.db.close() -@app.route("/json") +@app.get("/json") async def json(): return {"message": "Hello, World!"} -@app.route("/plaintext") +@app.get("/plaintext") async def plaintext(): - response = await make_response(b"Hello, World!") - # Quart assumes string responses are 'text/html', so make a custom one - response.mimetype = "text/plain" - return response + return "Hello, World!", {"Content-Type": "text/plain"} -@app.route("/db") +@app.get("/db") async def db(): async with app.db.acquire() as conn: key = random.randint(1, 10000) @@ -48,48 +45,42 @@ async def db(): return jsonify({"id": key, "randomNumber": number}) -def get_query_count(args): - qc = args.get("queries") - - if qc is None: - return 1 - +def get_query_count(): try: - qc = int(qc) + num_queries = request.args.get("queries", 1, type=int) except ValueError: + num_queries = 1 + if num_queries < 1: return 1 - - qc = max(qc, 1) - qc = min(qc, 500) - return qc + if num_queries > 500: + return 500 + return num_queries -@app.route("/queries") +@app.get("/queries") async def queries(): - queries = get_query_count(request.args) + queries = get_query_count() worlds = [] async with app.db.acquire() as conn: pst = await conn.prepare(GET_WORLD) - for _ in range(queries): - key = random.randint(1, 10000) + for key in random.sample(range(1, 10000), queries): number = await pst.fetchval(key) worlds.append({"id": key, "randomNumber": number}) return jsonify(worlds) -@app.route("/updates") +@app.get("/updates") async def updates(): - queries = get_query_count(request.args) + queries = get_query_count() new_worlds = [] async with app.db.acquire() as conn, conn.transaction(): pst = await conn.prepare(GET_WORLD) - for _ in range(queries): - key = random.randint(1, 10000) - 
old_number = await pst.fetchval(key) + for key in random.sample(range(1, 10000), queries): + await pst.fetchval(key) new_number = random.randint(1, 10000) new_worlds.append((key, new_number)) @@ -100,7 +91,7 @@ async def updates(): ) -@app.route("/fortunes") +@app.get("/fortunes") async def fortunes(): async with app.db.acquire() as conn: rows = await conn.fetch("select * from fortune") diff --git a/frameworks/Python/quart/quart-uvicorn.dockerfile b/frameworks/Python/quart/quart-uvicorn.dockerfile index 49648800a85..b7c3f86a0e8 100644 --- a/frameworks/Python/quart/quart-uvicorn.dockerfile +++ b/frameworks/Python/quart/quart-uvicorn.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8 +FROM python:3.10 ADD ./ /quart diff --git a/frameworks/Python/quart/quart.dockerfile b/frameworks/Python/quart/quart.dockerfile index 175769cefb5..755c39bc88c 100644 --- a/frameworks/Python/quart/quart.dockerfile +++ b/frameworks/Python/quart/quart.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8 +FROM python:3.10 ADD ./ /quart @@ -8,4 +8,4 @@ RUN pip3 install -r /quart/requirements.txt EXPOSE 8080 -CMD hypercorn app:app --config=python:hypercorn_conf.py +CMD hypercorn app:app --config=file:hypercorn_conf.py diff --git a/frameworks/Python/quart/requirements-uvicorn.txt b/frameworks/Python/quart/requirements-uvicorn.txt index 92aef779a64..b5f73392577 100644 --- a/frameworks/Python/quart/requirements-uvicorn.txt +++ b/frameworks/Python/quart/requirements-uvicorn.txt @@ -1,4 +1,10 @@ -gunicorn==20.0.4 -httptools==0.1.1 -uvicorn==0.11.7 -websockets==9.1 +anyio==3.6.1 +gunicorn==20.1.0 +httptools==0.4.0 +idna==3.3 +python-dotenv==0.20.0 +PyYAML==6.0 +sniffio==1.2.0 +uvicorn==0.18.2 +watchfiles==0.16.1 +websockets==10.3 diff --git a/frameworks/Python/quart/requirements.txt b/frameworks/Python/quart/requirements.txt index 1b2a265c732..aea4a6a9ed4 100644 --- a/frameworks/Python/quart/requirements.txt +++ b/frameworks/Python/quart/requirements.txt @@ -1,19 +1,18 @@ -aiofiles==0.4.0 -asyncpg==0.21.0 -blinker==1.4 -Click==7.0 -h11==0.9.0 -h2==3.2.0 -hpack==3.0.0 -Hypercorn==0.9.2 -hyperframe==5.2.0 -itsdangerous==1.1.0 -Jinja2==2.11.3 -MarkupSafe==1.1.1 -priority==1.3.0 -Quart==0.11.3 -toml==0.10.0 -typing-extensions==3.7.4.1 -uvloop==0.14.0 -Werkzeug==1.0.0 -wsproto==0.15.0 +aiofiles==0.8.0 +asyncpg==0.26.0 +blinker==1.5 +click==8.1.3 +h11==0.13.0 +h2==4.1.0 +hpack==4.0.0 +hypercorn==0.13.2 +hyperframe==6.0.1 +itsdangerous==2.1.2 +Jinja2==3.1.2 +MarkupSafe==2.1.1 +priority==2.0.0 +quart==0.18.0 +toml==0.10.2 +uvloop==0.16.0 +Werkzeug==2.2.0 +wsproto==1.1.0 diff --git a/frameworks/Python/robyn/README.md b/frameworks/Python/robyn/README.md new file mode 100755 index 00000000000..d8483c96453 --- /dev/null +++ b/frameworks/Python/robyn/README.md @@ -0,0 +1,29 @@ +# Robyn Benchmarking Test + +This is the Robyn portion of a [benchmarking tests suite](../../) +comparing a variety of web development platforms. + +The information below is specific to Robyn. For further guidance, +review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). +Also note that there is additional information provided in +the [Python README](../). + +## Description + +[**Robyn**](https://github.com/sansyrox/robyn) is an async Python backend server with a runtime written in Rust, btw. + +The framework is in active development, therefore tests paths will be added after the framework will support them. + +## Test Paths & Sources + +All of the test implementations are located within a single file ([app.py](app.py)). 
+ +### PLAINTEXT + +http://localhost:8080/plaintext + + +## Resources + +* [Robyn source code on GitHub](https://github.com/sansyrox/robyn) +* [Robyn website - documentation](https://sansyrox.github.io/robyn/#/) diff --git a/frameworks/Python/robyn/app.py b/frameworks/Python/robyn/app.py new file mode 100755 index 00000000000..684e979a622 --- /dev/null +++ b/frameworks/Python/robyn/app.py @@ -0,0 +1,25 @@ +import multiprocessing +import os + +from robyn import Robyn + +app = Robyn(__file__) + + +@app.get('/plaintext') +async def plaintext() -> str: + return "Hello, world!" + + +if __name__ == '__main__': + _is_travis = os.environ.get('TRAVIS') == 'true' + + workers = multiprocessing.cpu_count() * 2 + 1 + if _is_travis: + workers = 2 + + app.processes = workers + app.add_header("Server", "Robyn") + app.add_header("Content-Type", "text/plain") + + app.start(url="0.0.0.0", port=8080) diff --git a/frameworks/OCaml/tiny_httpd/benchmark_config.json b/frameworks/Python/robyn/benchmark_config.json similarity index 58% rename from frameworks/OCaml/tiny_httpd/benchmark_config.json rename to frameworks/Python/robyn/benchmark_config.json index 33208d3b4fa..9360501399b 100755 --- a/frameworks/OCaml/tiny_httpd/benchmark_config.json +++ b/frameworks/Python/robyn/benchmark_config.json @@ -1,23 +1,20 @@ { - "framework": "tiny_httpd", + "framework": "robyn", "tests": [ { "default": { - "json_url": "/json", "plaintext_url": "/plaintext", "port": 8080, "approach": "Realistic", "classification": "Micro", - "database": "None", - "framework": "tiny_httpd", - "language": "OCaml", + "framework": "Robyn", + "language": "Python", "flavor": "None", - "orm": "None", + "orm": "Raw", "platform": "None", "webserver": "None", "os": "Linux", - "database_os": "Linux", - "display_name": "tiny_httpd", + "display_name": "Robyn", "notes": "", "versus": "None" } diff --git a/frameworks/D/hunt/config.toml b/frameworks/Python/robyn/config.toml similarity index 57% rename from frameworks/D/hunt/config.toml rename to frameworks/Python/robyn/config.toml index 85c314a3649..1d56c33580c 100644 --- a/frameworks/D/hunt/config.toml +++ b/frameworks/Python/robyn/config.toml @@ -1,14 +1,12 @@ [framework] -name = "hunt" +name = "Robyn" [main] urls.plaintext = "/plaintext" -urls.json = "/json" approach = "Realistic" -classification = "Platform" -database_os = "Linux" +classification = "Micro" os = "Linux" orm = "Raw" platform = "None" webserver = "None" -versus = "Hunt" +versus = "None" diff --git a/frameworks/Python/robyn/requirements.txt b/frameworks/Python/robyn/requirements.txt new file mode 100644 index 00000000000..0fd0592aede --- /dev/null +++ b/frameworks/Python/robyn/requirements.txt @@ -0,0 +1 @@ +robyn==0.11.1 diff --git a/frameworks/Python/robyn/robyn.dockerfile b/frameworks/Python/robyn/robyn.dockerfile new file mode 100644 index 00000000000..c0b4e8b2405 --- /dev/null +++ b/frameworks/Python/robyn/robyn.dockerfile @@ -0,0 +1,11 @@ +FROM python:3.9 + +ADD ./ /robyn + +WORKDIR /robyn + +RUN pip3 install -r /robyn/requirements.txt + +EXPOSE 8080 + +CMD ["python", "app.py"] diff --git a/frameworks/Python/routerling/requirements.txt b/frameworks/Python/routerling/requirements.txt index 1b6e343aa89..7b68e45f102 100644 --- a/frameworks/Python/routerling/requirements.txt +++ b/frameworks/Python/routerling/requirements.txt @@ -6,6 +6,6 @@ h11==0.12.0 Jinja2==3.0.1 MarkupSafe==2.0.1 routerling==0.3.1 -ujson==5.1.0 +ujson==5.4.0 uvloop==0.16.0 uvicorn==0.14.0 diff --git a/frameworks/Python/sanic/app.py 
b/frameworks/Python/sanic/app.py index f395aff9806..47b374ce9a3 100644 --- a/frameworks/Python/sanic/app.py +++ b/frameworks/Python/sanic/app.py @@ -2,7 +2,7 @@ import os import jinja2 from logging import getLogger -from random import randint +from random import randint, sample from operator import itemgetter import multiprocessing @@ -45,7 +45,7 @@ def get_num_queries(queries): sort_fortunes_key = itemgetter(1) template = load_fortunes_template() -app = sanic.Sanic() +app = sanic.Sanic(name=__name__) @app.listener('before_server_start') @@ -81,7 +81,7 @@ async def single_database_query_view(request): @app.get('/queries') async def multiple_database_queries_view(request): num_queries = get_num_queries(request.args.get('queries', 1)) - row_ids = [randint(1, 10000) for _ in range(num_queries)] + row_ids = sample(range(1, 10000), num_queries) worlds = [] async with connection_pool.acquire() as connection: @@ -119,8 +119,8 @@ async def database_updates_view(request): async with connection_pool.acquire() as connection: statement = await connection.prepare(READ_ROW_SQL_TO_UPDATE) - for _ in range(get_num_queries(queries)): - record = await statement.fetchrow(randint(1, 10000)) + for row_id in sample(range(1, 10000), get_num_queries(queries)): + record = await statement.fetchrow(row_id) world = dict( id=record['id'], randomNumber=record['randomnumber'] ) diff --git a/frameworks/Python/sanic/requirements.txt b/frameworks/Python/sanic/requirements.txt index 91bf9042ad4..13b48a818a3 100644 --- a/frameworks/Python/sanic/requirements.txt +++ b/frameworks/Python/sanic/requirements.txt @@ -1,4 +1,4 @@ -asyncpg==0.20.1 -Jinja2==2.11.3 -uvloop==0.14.0 -sanic==20.12.6 +asyncpg==0.25.0 +Jinja2==3.1.2 +sanic==22.3.1 +uvloop==0.16.0 diff --git a/frameworks/Python/spyne/README.md b/frameworks/Python/spyne/README.md deleted file mode 100644 index 31b5b5d3e58..00000000000 --- a/frameworks/Python/spyne/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# [Spyne](http://spyne.io/) Benchmark Test - -This is the Python Spyne portion of a [benchmarking tests suite](../../) -comparing a variety of frameworks. 
- -The latest version is at https://github.com/arskom/spyne/tree/master/examples/tfb - -All test implementations are located within ([app.py](app.py)) - -## Description - -Spyne + SQLAlchemy - -### Database - -PostgreSQL (psycopg2 on CPython) - -### Server - -* gunicorn+wsgi on CPython - -## Test URLs -### JSON Encoding - -http://localhost:8080/json - -### Single Row Random Query - -With ORM: - http://localhost:8080/dbs - -Without ORM (raw): - http://localhost:8080/dbsraw - -### Variable Row Query Test - -With ORM: - http://localhost:8080/db?queries=2 - -Without ORM (raw): - http://localhost:8080/dbraw?queries=2 diff --git a/frameworks/Python/spyne/app.py b/frameworks/Python/spyne/app.py deleted file mode 100755 index 46131ef1918..00000000000 --- a/frameworks/Python/spyne/app.py +++ /dev/null @@ -1,303 +0,0 @@ -#!/usr/bin/env python - -import sys - -import spyne.const -spyne.const.MIN_GC_INTERVAL = float('inf') - -from lxml import html - -from random import randint, shuffle, choice -from contextlib import closing -from email.utils import formatdate - -from neurons import TableModel, Application -from neurons.daemon import ServiceDefinition, HttpServer, StaticFileServer -from neurons.daemon.main import Bootstrapper - -from spyne import Integer32, Unicode, rpc, ServiceBase, Integer, Array, Any -from spyne.protocol.html import HtmlCloth -from spyne.protocol.http import HttpRpc -from spyne.protocol.json import JsonDocument -from spyne.server.wsgi import WsgiApplication - -if sys.version_info[0] == 3: - xrange = range - -_is_pypy = hasattr(sys, 'pypy_version_info') - -DBDRIVER = 'postgresql+psycopg2cffi' if _is_pypy else 'postgresql+psycopg2' -DBHOST = 'tfb-database' - - -# models -class DbSessionManager(object): - def __init__(self, config): - self.session = config.get_main_store().Session() - - def close(self, with_err): - self.session.close() - - -class DbConnectionManager(object): - def __init__(self, config): - self.conn = config.get_main_store().engine.connect() - - def close(self, with_err): - self.conn.close() - - -class World(TableModel): - __tablename__ = "world" - _type_info = [ - ('id', Integer32(primary_key=True)), - ('randomNumber', Integer32(sqla_column_args=dict(name='randomnumber'))), - ] - - -T_INDEX = html.fromstring(open('cloths/index.html', 'rb').read()) - - -class Fortune(TableModel): - __tablename__ = "fortune" - - id = Integer32(primary_key=True) - message = Unicode - - -outprot_plain = HttpRpc(mime_type='text/plain') - - -class TfbSimpleService(ServiceBase): - @rpc(_returns=Any) - def json(ctx): - ctx.transport.add_header('Date', formatdate(usegmt=True)) - return dict(message=u'Hello, World!') - - @rpc(_returns=Any) - def plaintext(ctx): - """Test 6: Plaintext""" - ctx.out_protocol = outprot_plain - return b'Hello, World!' 
- - -def _force_int(v): - try: - return min(500, max(int(v), 1)) - except: - return 1 - - -NumQueriesType = Any(sanitizer=_force_int) - - -class TfbOrmService(ServiceBase): - @rpc(_returns=World) - def db(ctx): - retval = ctx.udc.session.query(World).get(randint(1, 10000)) - return retval - - @rpc(NumQueriesType, _returns=Array(World)) - def dbs(ctx, queries): - if queries is None: - queries = 1 - - q = ctx.udc.session.query(World) - return [q.get(randint(1, 10000)) for _ in xrange(queries)] - - @rpc(_returns=Array(Fortune, html_cloth=T_INDEX), _body_style='out_bare') - def fortunes(ctx): - # This is normally specified at the application level as it's a good - # practice to group rpc endpoints with similar return types under the - # same url fragment. eg. https://example.com/api/json - ctx.out_protocol = HtmlCloth() - ctx.outprot_ctx = ctx.out_protocol.get_context(ctx, ctx.transport) - - fortunes = ctx.udc.session.query(Fortune).all() - fortunes.append( - Fortune(id=0, message=u"Additional fortune added at request time.") - ) - - fortunes.sort(key=lambda x: x.message) - - return fortunes - - @rpc(NumQueriesType, _returns=Array(World)) - def updates(ctx, queries): - """Test 5: Database Updates""" - - if queries is None: - queries = 1 - - retval = [] - q = ctx.udc.session.query(World) - for id in (randint(1, 10000) for _ in xrange(queries)): - world = q.get(id) - world.randomNumber = randint(1, 10000) - retval.append(world) - - ctx.udc.session.commit() - - return retval - - -class TfbRawService(ServiceBase): - @rpc(_returns=World) - def dbraw(ctx): - conn = ctx.udc.conn - - wid = randint(1, 10000) - return conn.execute( - "SELECT id, randomNumber FROM world WHERE id = %s", wid) \ - .fetchone() - - # returning both Any+dict or ObjectMarker+ListOfLists works - @rpc(NumQueriesType, _returns=Any) - def dbsraw(ctx, queries): - if queries is None: - queries = 1 - - retval = [] - conn = ctx.udc.conn - for i in xrange(queries): - wid = randint(1, 10000) - result = conn.execute( - "SELECT id, randomNumber FROM world WHERE id = %s", wid) \ - .fetchone() - retval.append(dict(id=result[0], randomNumber=result[1])) - - return retval - - @rpc(_returns=Array(Fortune, html_cloth=T_INDEX), _body_style='out_bare') - def fortunesraw(ctx): - # This is normally specified at the application level as it's a good - # practice to group rpc endpoints with similar return types under the - # same url fragment. eg. https://example.com/api/json - ctx.out_protocol = HtmlCloth() - ctx.outprot_ctx = ctx.out_protocol.get_context(ctx, ctx.transport) - - res = ctx.udc.conn.execute("SELECT id, message FROM fortune") - fortunes = res.fetchall() - - fortunes.append(Fortune( - id=0, - message=u"Additional fortune added at request time." 
- )) - - fortunes.sort(key=lambda x: x.message) - - return fortunes - - @rpc(NumQueriesType, _returns=Any) - def updatesraw(ctx, queries): - """Test 5: Database Updates""" - if queries is None: - queries = 1 - - conn = ctx.udc.conn - - ids = [randint(1, 10000) for _ in xrange(queries)] - - retval = [] - for i in ids: - wid, rn = conn.execute( - "SELECT id, randomNumber FROM world WHERE id=%s", i) \ - .fetchone() - - rn = randint(1, 10000) - retval.append(dict(id=wid, randomNumber=rn)) - - conn.execute("UPDATE World SET randomNumber=%s WHERE id=%s", - rn, wid) - - return retval - - -def _on_method_call_db_sess(ctx): - ctx.transport.add_header('Date', formatdate(usegmt=True)) - ctx.udc = DbSessionManager(ctx.app.config) - - -def _on_method_call_db_conn(ctx): - ctx.transport.add_header('Date', formatdate(usegmt=True)) - ctx.udc = DbConnectionManager(ctx.app.config) - - -TfbRawService.event_manager.add_listener("method_call", _on_method_call_db_conn) -TfbOrmService.event_manager.add_listener("method_call", _on_method_call_db_sess) - - -def init_app(config): - subconfig = config.services['root'] - - app = Application( - [TfbOrmService, TfbRawService, TfbSimpleService], - tns='http://techempower.com/benchmarks/Python/Spyne', - in_protocol=HttpRpc(), - out_protocol=JsonDocument(), - config=config, - ) - if subconfig.subapps is None: - subconfig.subapps = {} - - subconfig.subapps.update({'': app}) - - return subconfig.gen_site() - - -def init(config): - return { - 'root': ServiceDefinition( - init=init_app, - default=HttpServer( - type='tcp4', - host='127.0.0.1', - port=8080, - ), - ), - } - - -def parse_config(argv): - from neurons.daemon.main import boot - - retcode, config = boot('tfb', argv, init, bootstrapper=TfbBootstrap) - - if retcode is not None: - sys.exit(retcode) - - return config - - -def gen_wsgi_app(): - config = parse_config([]) - app = config.services['root'].subapps[''].app - return WsgiApplication(app) - - -words = 'some random words for you and me somebody else if then the'.split() - - -class TfbBootstrap(Bootstrapper): - # noinspection PyUnresolvedReferences - def after_tables(self, config): - print("Generating data...") - with closing(config.get_main_store().Session()) as session: - ints = list(range(10000)) - shuffle(ints) - for _ in range(10000): - session.add(World(randomNumber=ints.pop())) - - for _ in range(100): - session.add(Fortune( - message=' '.join([choice(words) - for _ in range(randint(3, 10))]) - )) - - session.commit() - - -if __name__ == '__main__': - parse_config(sys.argv) -else: - application = gen_wsgi_app() diff --git a/frameworks/Python/spyne/benchmark_config.json b/frameworks/Python/spyne/benchmark_config.json deleted file mode 100644 index 1ae01e6e631..00000000000 --- a/frameworks/Python/spyne/benchmark_config.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "framework": "spyne", - "tests": [{ - "default": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/dbs?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "postgres", - "framework": "spyne", - "language": "Python", - "flavor": "Python3", - "orm": "Full", - "platform": "Spyne", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "Spyne", - "notes": "", - "versus": "wsgi" - }, - "raw": { - "db_url": "/dbraw", - "query_url": "/dbsraw?queries=", - "fortune_url": "/fortunesraw", - "update_url": "/updatesraw?queries=", - "port": 
8080, - "approach": "Realistic", - "classification": "Micro", - "database": "postgres", - "framework": "spyne", - "language": "Python", - "flavor": "Python3", - "orm": "Raw", - "platform": "Spyne", - "webserver": "None", - "os": "Linux", - "database_os": "Linux", - "display_name": "Spyne-raw", - "notes": "", - "versus": "wsgi" - }, - "nginx-uwsgi": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/dbs?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "postgres", - "framework": "spyne", - "language": "Python", - "flavor": "Python3", - "orm": "Full", - "platform": "None", - "webserver": "nginx", - "os": "Linux", - "database_os": "Linux", - "display_name": "Spyne", - "notes": "", - "versus": "wsgi" - } - }] -} diff --git a/frameworks/Python/spyne/cloths/index.html b/frameworks/Python/spyne/cloths/index.html deleted file mode 100644 index 3ee8c872157..00000000000 --- a/frameworks/Python/spyne/cloths/index.html +++ /dev/null @@ -1,18 +0,0 @@ - - - -Fortunes - - - - - - - - - - - -
-<tr><th>id</th><th>message</th></tr>
- - diff --git a/frameworks/Python/spyne/config.toml b/frameworks/Python/spyne/config.toml deleted file mode 100644 index f6ac42a8d65..00000000000 --- a/frameworks/Python/spyne/config.toml +++ /dev/null @@ -1,51 +0,0 @@ -[framework] -name = "spyne" - -[main] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/dbs?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortunes" -approach = "Realistic" -classification = "Micro" -database = "postgres" -database_os = "Linux" -os = "Linux" -orm = "Full" -platform = "Spyne" -webserver = "None" -versus = "wsgi" - -[raw] -urls.db = "/dbraw" -urls.query = "/dbsraw?queries=" -urls.update = "/updatesraw?queries=" -urls.fortune = "/fortunesraw" -approach = "Realistic" -classification = "Micro" -database = "postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Spyne" -webserver = "None" -versus = "wsgi" - -[nginx-uwsgi] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = "/dbs?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortunes" -approach = "Realistic" -classification = "Micro" -database = "postgres" -database_os = "Linux" -os = "Linux" -orm = "Full" -platform = "None" -webserver = "nginx" -versus = "wsgi" diff --git a/frameworks/Python/spyne/gen_benchmark_config.py b/frameworks/Python/spyne/gen_benchmark_config.py deleted file mode 100755 index b4e639e99b0..00000000000 --- a/frameworks/Python/spyne/gen_benchmark_config.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import json - -from spyne import AnyUri, Unicode, ComplexModel, M, UnsignedInteger16, Array -from spyne.protocol.json import JsonDocument -from spyne.util.dictdoc import get_object_as_dict - - -class BenchmarkConfigElement(ComplexModel): - # exclude this from the output document - key = Unicode(pa={JsonDocument: dict(exc=True)}) - - display_name = M(Unicode) - notes = Unicode - versus = Unicode - - db_url = AnyUri - json_url = AnyUri - query_url = AnyUri - fortune_url = AnyUri - update_url = AnyUri - plaintext_url = AnyUri - - port = M(UnsignedInteger16(default=8080)) - - approach = M(Unicode(values=['Realistic', 'Stripped'], default='Realistic')) - classification = M(Unicode(values=['Micro', 'Fullstack', 'Platform'], default='Micro')) - database = M(Unicode(values=['none', 'mongodb', 'postgres', 'mysql'], default='none')) - orm = M(Unicode(values=['Full', 'Micro', 'None', 'Raw'])) - - framework = M(Unicode) - language = M(Unicode) - flavor = M(Unicode) - platform = M(Unicode) - webserver = M(Unicode) - - os = M(Unicode(default='Linux')) - database_os = M(Unicode(default='Linux')) - - -class BenchmarkConfig(ComplexModel): - framework = M(Unicode) - tests = Array(BenchmarkConfigElement, wrapped=False) - - -gen_raw_test = lambda: BenchmarkConfigElement( - display_name="Spyne RAW", - db_url="/dbsraw", - query_url="/dbraw?queries=", - fortune_url="/fortunesraw", - update_url="/raw-updates?queries=", - orm='Raw', -) - -gen_normal_test = lambda: BenchmarkConfigElement( - display_name="Spyne ORM", - db_url="/dbs", - query_url="/db?queries=", - fortune_url="/fortunes", - update_url="/updatesraw?queries=", - orm='Full', -) - - -def add_common(bc): - bc.port = 8080 - bc.approach = "Realistic" - bc.classification = "Micro" - bc.database = "postgres" - bc.framework = "spyne" - bc.language = "Python" - bc.platform = "Spyne" - bc.webserver = "None" - bc.os = "Linux" - bc.database_os = "Linux" - bc.versus = "wsgi" - bc.plaintext_url = 
"/plaintext" - return bc - - -config = BenchmarkConfig(framework='spyne', tests=[]) - -keys = iter(['default', 'raw', 'py3orm', 'py3raw']) - -for flav in ['CPython', 'Python3']: - bc = add_common(gen_normal_test()) - bc.flavor = flav - bc.key = next(keys) - config.tests.append(bc) - - bc = add_common(gen_raw_test()) - bc.flavor = flav - bc.key = next(keys) - config.tests.append(bc) - -data = get_object_as_dict(config, complex_as=dict) -data['tests'] = [{d['key']: d} for d in data['tests']] - -data = json.dumps(data, indent=2, sort_keys=True, separators=(',', ': ')) - -open('benchmark_config.json', 'wb').write(data) - - -print(data) diff --git a/frameworks/Python/spyne/gunicorn_conf.py b/frameworks/Python/spyne/gunicorn_conf.py deleted file mode 100644 index f9d4e4d7c31..00000000000 --- a/frameworks/Python/spyne/gunicorn_conf.py +++ /dev/null @@ -1,10 +0,0 @@ -import multiprocessing -import os -import sys - -workers = multiprocessing.cpu_count() * 3 - -bind = "0.0.0.0:8080" -keepalive = 120 -errorlog = '-' -pidfile = 'gunicorn.pid' diff --git a/frameworks/Python/spyne/nginx.conf b/frameworks/Python/spyne/nginx.conf deleted file mode 100644 index 38c0c61341c..00000000000 --- a/frameworks/Python/spyne/nginx.conf +++ /dev/null @@ -1,48 +0,0 @@ -# This file is based on /usr/local/nginx/conf/nginx.conf.default. - -# One worker process per core -error_log stderr error; - -events { - # This needed to be increased because the nginx error log said so. - # http://nginx.org/en/docs/ngx_core_module.html#worker_connections - worker_connections 65535; - multi_accept on; -} - -http { - default_type application/octet-stream; - client_body_temp_path /tmp; - - # turn off request logging for performance - access_log off; - - # I think these only options affect static file serving - sendfile on; - tcp_nopush on; - - # Allow many HTTP Keep-Alive requests in a single TCP connection before - # closing it (the default is 100). This will minimize the total number - # of TCP connections opened/closed. The problem is that this may cause - # some worker processes to be handling too connections relative to the - # other workers based on an initial imbalance, so this is disabled for - # now. 
-# keepalive_requests 1000; - - #keepalive_timeout 0; - keepalive_timeout 65; - - server { - # For information on deferred, see: - # http://nginx.org/en/docs/http/ngx_http_core_module.html#listen - # http://www.techrepublic.com/article/take-advantage-of-tcp-ip-options-to-optimize-data-transmission/ - # The backlog argument to listen() is set to match net.ipv4.tcp_max_syn_backlog and net.core.somaxconn - listen 8080 default_server deferred reuseport backlog=65535; - server_name localhost; - - location / { - uwsgi_pass unix:/var/tmp/uwsgi.sock; - include /usr/local/nginx/conf/uwsgi_params; - } - } -} diff --git a/frameworks/Python/spyne/requirements.txt b/frameworks/Python/spyne/requirements.txt deleted file mode 100644 index ff292a891ed..00000000000 --- a/frameworks/Python/spyne/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -attrs==18.2.0 -Automat==0.7.0 -colorama==0.4.1 -constantly==15.1.0 -greenlet==0.4.15 -gunicorn==19.9.0 -hyperlink==18.0.0 -idna==2.8 -incremental==17.5.0 -lxml==4.6.5 -meinheld==0.6.1 -msgpack-python==0.5.6 -neurons==0.8.4 -ply==3.11 -psycopg2==2.7.7 -pycrypto==2.6.1 -PyHamcrest==1.9.0 -pytz==2018.9 -PyYAML==5.4 -six==1.12.0 -slimit==0.8.1 -spyne==2.13.9a0 -SQLAlchemy==1.2.17 -Twisted==22.1.0 -txpostgres==1.6.0 -Werkzeug==0.15.3 -zope.interface==4.6.0 -uwsgi==2.0.18 diff --git a/frameworks/Python/spyne/spyne-nginx-uwsgi.dockerfile b/frameworks/Python/spyne/spyne-nginx-uwsgi.dockerfile deleted file mode 100644 index 27631417b34..00000000000 --- a/frameworks/Python/spyne/spyne-nginx-uwsgi.dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -FROM python:3.6.6-stretch - -RUN curl -s http://nginx.org/keys/nginx_signing.key | apt-key add - -RUN echo "deb http://nginx.org/packages/debian/ stretch nginx" >> /etc/apt/sources.list -RUN echo "deb-src http://nginx.org/packages/debian/ stretch nginx" >> /etc/apt/sources.list - -RUN apt-get update -yqq && apt-get install -yqq nginx - -ADD ./ /spyne - -WORKDIR /spyne - -RUN pip3 install -r /spyne/requirements.txt - -RUN sed -i 's|include .*/conf/uwsgi_params;|include /etc/nginx/uwsgi_params;|g' /spyne/nginx.conf - -EXPOSE 8080 - -CMD nginx -c /spyne/nginx.conf && uwsgi --ini /spyne/uwsgi.ini --processes $(($(nproc)*3)) --wsgi app:application diff --git a/frameworks/Python/spyne/spyne-raw.dockerfile b/frameworks/Python/spyne/spyne-raw.dockerfile deleted file mode 100644 index 6c0f6c2eb51..00000000000 --- a/frameworks/Python/spyne/spyne-raw.dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM python:3.6.6-stretch - -ADD ./ /spyne - -WORKDIR /spyne - -RUN pip3 install -r /spyne/requirements.txt - -EXPOSE 8080 - -CMD gunicorn app:application -c gunicorn_conf.py diff --git a/frameworks/Python/spyne/spyne.dockerfile b/frameworks/Python/spyne/spyne.dockerfile deleted file mode 100644 index 6c0f6c2eb51..00000000000 --- a/frameworks/Python/spyne/spyne.dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM python:3.6.6-stretch - -ADD ./ /spyne - -WORKDIR /spyne - -RUN pip3 install -r /spyne/requirements.txt - -EXPOSE 8080 - -CMD gunicorn app:application -c gunicorn_conf.py diff --git a/frameworks/Python/spyne/tfb.yaml b/frameworks/Python/spyne/tfb.yaml deleted file mode 100644 index fae70928eb7..00000000000 --- a/frameworks/Python/spyne/tfb.yaml +++ /dev/null @@ -1,53 +0,0 @@ -ServiceDaemon: - alert_dests: [] - autoreload: false - daemonize: false - debug: true - debug_reactor: false - file_version: 2 - log_cloth: false - log_dbconn: false - log_interface: false - log_model: false - log_orm: false - log_protocol: false - log_queries: false - log_results: false - 
log_rss: false - log_sqlalchemy: false - logger_dest_rotation_compression: gzip - logger_dest_rotation_period: WEEKLY - loggers: - - Logger: - level: ERROR - path: . - main_store: sql_main - name: tfb - services: - - HttpServer: - backlog: 50 - disabled: true - host: 127.0.0.1 - name: root - port: 8080 - subapps: - - HttpApplication: - url: '' - type: tcp4 - stores: - - RelationalStore: - async_pool: true - backend: sqlalchemy - conn_str: postgresql://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world - echo_pool: false - max_overflow: 3 - name: sql_main - pool_pre_ping: true - pool_recycle: 3600 - pool_size: 10 - pool_timeout: 30 - pool_use_lifo: false - sync_case_sensitive: false - sync_pool: true - sync_pool_type: QueuePool - uuid: fef52ff8-3103-11e9-b0e1-5453edabe249 diff --git a/frameworks/Python/spyne/uwsgi.ini b/frameworks/Python/spyne/uwsgi.ini deleted file mode 100644 index 5c9f5b80227..00000000000 --- a/frameworks/Python/spyne/uwsgi.ini +++ /dev/null @@ -1,19 +0,0 @@ -[uwsgi] -master -; Increase listen queue used for nginx connecting to uWSGI. This matches -; net.ipv4.tcp_max_syn_backlog and net.core.somaxconn. -listen = 16384 -; for performance -disable-logging -; use UNIX sockets instead of TCP loopback for performance -socket = /var/tmp/uwsgi.sock -; allow nginx to access the UNIX socket -chmod-socket = 666 -; Avoid thundering herd problem http://uwsgi-docs.readthedocs.org/en/latest/articles/SerializingAccept.html . -; This is currently disabled because when I tried it with flask, it caused a -; 20% performance hit. The CPU cores could not be saturated with thunder-lock. -; I'm not yet sure the full story, so this is presently disabled. Also, -; disabling this caused bottle to get ~13% faster. -;thunder-lock -; used by uwsgi_stop.ini -pidfile = /var/tmp/uwsgi.pid diff --git a/frameworks/Python/starlette/app.py b/frameworks/Python/starlette/app.py index a95853ee1e8..7cde26c155d 100755 --- a/frameworks/Python/starlette/app.py +++ b/frameworks/Python/starlette/app.py @@ -1,11 +1,10 @@ -import asyncio import asyncpg import os import jinja2 from starlette.applications import Starlette -from starlette.responses import HTMLResponse, UJSONResponse, PlainTextResponse +from starlette.responses import HTMLResponse, JSONResponse, PlainTextResponse from starlette.routing import Route -from random import randint +from random import randint, sample from operator import itemgetter from urllib.parse import parse_qs @@ -51,8 +50,6 @@ def get_num_queries(request): connection_pool = None sort_fortunes_key = itemgetter(1) template = load_fortunes_template() -loop = asyncio.get_event_loop() -loop.run_until_complete(setup_database()) async def single_database_query(request): @@ -61,12 +58,12 @@ async def single_database_query(request): async with connection_pool.acquire() as connection: number = await connection.fetchval(READ_ROW_SQL, row_id) - return UJSONResponse({'id': row_id, 'randomNumber': number}) + return JSONResponse({'id': row_id, 'randomNumber': number}) async def multiple_database_queries(request): num_queries = get_num_queries(request) - row_ids = [randint(1, 10000) for _ in range(num_queries)] + row_ids = sample(range(1, 10000), num_queries) worlds = [] async with connection_pool.acquire() as connection: @@ -75,7 +72,7 @@ async def multiple_database_queries(request): number = await statement.fetchval(row_id) worlds.append({'id': row_id, 'randomNumber': number}) - return UJSONResponse(worlds) + return JSONResponse(worlds) async def fortunes(request): @@ -90,7 +87,7 @@ async def 
fortunes(request): async def database_updates(request): num_queries = get_num_queries(request) - updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] + updates = [(row_id, randint(1, 10000)) for row_id in sample(range(1, 10000), num_queries)] worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] async with connection_pool.acquire() as connection: @@ -99,11 +96,11 @@ async def database_updates(request): await statement.fetchval(row_id) await connection.executemany(WRITE_ROW_SQL, updates) - return UJSONResponse(worlds) + return JSONResponse(worlds) routes = [ - Route('/json', UJSONResponse({'message': 'Hello, world!'})), + Route('/json', JSONResponse({'message': 'Hello, world!'})), Route('/db', single_database_query), Route('/queries', multiple_database_queries), Route('/fortunes', fortunes), @@ -111,4 +108,4 @@ async def database_updates(request): Route('/plaintext', PlainTextResponse(b'Hello, world!')), ] -app = Starlette(routes=routes) +app = Starlette(routes=routes, on_startup=[setup_database]) diff --git a/frameworks/Python/starlette/requirements.txt b/frameworks/Python/starlette/requirements.txt index 087f0ef57b4..a9d791c7b47 100644 --- a/frameworks/Python/starlette/requirements.txt +++ b/frameworks/Python/starlette/requirements.txt @@ -1,7 +1,7 @@ -asyncpg==0.21.0 -gunicorn==20.0.4 -Jinja2==2.11.3 -ujson==2.0.3 -uvloop==0.14.0 -uvicorn==0.11.7 -starlette==0.13.2 +asyncpg==0.25.0 +gunicorn==20.1.0 +Jinja2==3.1.2 +starlette==0.19.1 +ujson==5.4.0 +uvicorn==0.17.6 +uvloop==0.16.0 diff --git a/frameworks/Python/starlite/requirements.txt b/frameworks/Python/starlite/requirements.txt index b6375cf47f1..dcd5b6c25ff 100644 --- a/frameworks/Python/starlite/requirements.txt +++ b/frameworks/Python/starlite/requirements.txt @@ -1,5 +1,6 @@ -asyncpg==0.25.0 -Jinja2==3.0.3 -uvicorn==0.17.1 -starlite==1.0.0 -gunicorn==20.1.0 +asyncpg>=0.26.0 +Jinja2>=3.1.2 +uvicorn>=0.18.2 +starlite>=1.7.1 +gunicorn>=20.1.0 +uvloop>=0.16.0 diff --git a/frameworks/Python/turbogears/app.py b/frameworks/Python/turbogears/app.py index 13a0ffaf3ea..685fee40563 100644 --- a/frameworks/Python/turbogears/app.py +++ b/frameworks/Python/turbogears/app.py @@ -74,7 +74,7 @@ def queries(self, queries=1): return json.dumps(worlds) @expose() - def fortune(self): + def fortunes(self): fortunes = db_session.query(Fortune).all() fortunes.append(Fortune(id=0, message="Additional fortune added at request time.")) fortunes.sort(key=attrgetter("message")) diff --git a/frameworks/Python/turbogears/benchmark_config.json b/frameworks/Python/turbogears/benchmark_config.json index 598444aa955..cfd4c3efda7 100644 --- a/frameworks/Python/turbogears/benchmark_config.json +++ b/frameworks/Python/turbogears/benchmark_config.json @@ -5,7 +5,7 @@ "json_url": "/json", "db_url": "/db", "query_url": "/queries?queries=", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "update_url": "/updates?queries=", "plaintext_url": "/plaintext", "port": 8080, diff --git a/frameworks/Python/uvicorn/app.py b/frameworks/Python/uvicorn/app.py index 1f340bd3cec..a5118309828 100644 --- a/frameworks/Python/uvicorn/app.py +++ b/frameworks/Python/uvicorn/app.py @@ -3,7 +3,7 @@ import jinja2 import os import ujson -from random import randint +from random import randint, sample from operator import itemgetter from urllib.parse import parse_qs @@ -114,7 +114,7 @@ async def multiple_database_queries(scope, receive, send): Test type 3: Multiple database queries """ num_queries = get_num_queries(scope) - row_ids = 
[randint(1, 10000) for _ in range(num_queries)] + row_ids = sample(range(1, 10000), num_queries) worlds = [] connection = await pool.acquire() @@ -161,7 +161,7 @@ async def database_updates(scope, receive, send): Test type 5: Database updates """ num_queries = get_num_queries(scope) - updates = [(randint(1, 10000), randint(1, 10000)) for _ in range(num_queries)] + updates = [(row_id, randint(1, 10000)) for row_id in sample(range(1, 10000), num_queries)] worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates] connection = await pool.acquire() diff --git a/frameworks/Python/uvicorn/requirements.txt b/frameworks/Python/uvicorn/requirements.txt index 51e7b091185..e2bb7f3304a 100644 --- a/frameworks/Python/uvicorn/requirements.txt +++ b/frameworks/Python/uvicorn/requirements.txt @@ -1,6 +1,6 @@ asyncpg==0.21.0 gunicorn==20.0.4 -Jinja2==2.11.3 -ujson==2.0.3 +Jinja2==3.0.3 +ujson==5.4.0 uvloop==0.14.0 uvicorn==0.11.7 diff --git a/frameworks/Python/xpresso/README.md b/frameworks/Python/xpresso/README.md new file mode 100755 index 00000000000..1a66fe57feb --- /dev/null +++ b/frameworks/Python/xpresso/README.md @@ -0,0 +1,25 @@ +# Xpresso Benchmarking Test + +This is the Xpresso portion of a [benchmarking tests suite](../../) comparing a variety of web development platforms. + +The information below is specific to Xpresso. +For further guidance, review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). +Also note that there is additional information provided in the [Python README](../). + +## Description + +[Xpresso] is a high performance Python web framework built on top of [Starlette], [Pydantic] and [di] with a focus on composability, flexibility and minimal but well defined library APIs. + +## Test Paths & Sources + +All of the test implementations are located within a single file ([main.py](main.py)). 
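
For a quick sense of the handler style, here is a condensed sketch of the `/json` and `/plaintext` routes from [main.py](main.py); the full file adds the asyncpg-backed `/db`, `/queries`, `/fortunes`, and `/updates` handlers using the same `Path`/`Depends` pattern.

```python
# Condensed sketch of the route style used in main.py (JSON and plaintext only);
# the database, fortunes, and updates handlers follow the same pattern with an
# asyncpg pool injected via Depends.
from pydantic import BaseModel
from starlette.responses import PlainTextResponse
from xpresso import App, Path, Response


class Greeting(BaseModel):
    message: str


def json_serialization() -> Greeting:
    # Returning a pydantic model lets xpresso handle JSON serialization.
    return Greeting(message="Hello, world!")


def plaintext() -> Response:
    return PlainTextResponse(b"Hello, world!")


app = App(routes=(
    Path("/json", get=json_serialization),
    Path("/plaintext", get=plaintext),
))

# main.py serves this app programmatically with uvicorn on port 8080.
```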
+ +## Resources + +* [Xpresso source code on GitHub](https://github.com/adriangb/xpresso) +* [Xpresso website - documentation](https://xpresso-api.dev) + +[Xpresso]: https://github.com/adriangb/xpresso +[Starlette]: https://github.com/encode/starlette +[Pydantic]: https://github.com/samuelcolvin/pydantic/ +[di]: https://github.com/adriangb/di diff --git a/frameworks/Crystal/orion/benchmark_config.json b/frameworks/Python/xpresso/benchmark_config.json similarity index 71% rename from frameworks/Crystal/orion/benchmark_config.json rename to frameworks/Python/xpresso/benchmark_config.json index 1c56af66bc3..dcc76e906a7 100755 --- a/frameworks/Crystal/orion/benchmark_config.json +++ b/frameworks/Python/xpresso/benchmark_config.json @@ -1,27 +1,27 @@ { - "framework": "orion", + "framework": "xpresso", "tests": [ { "default": { "json_url": "/json", + "fortune_url": "/fortunes", "plaintext_url": "/plaintext", "db_url": "/db", "query_url": "/queries?queries=", - "fortune_url": "/fortunes", "update_url": "/updates?queries=", "port": 8080, "approach": "Realistic", "classification": "Micro", - "database": "postgres", - "framework": "orion", - "language": "Crystal", - "flavor": "None", - "orm": "Micro", + "database": "Postgres", + "framework": "Xpresso", + "language": "Python", + "flavor": "Python3", + "orm": "Raw", "platform": "None", "webserver": "None", "os": "Linux", "database_os": "Linux", - "display_name": "orion", + "display_name": "Xpresso", "notes": "", "versus": "None" } diff --git a/frameworks/Python/blacksheep/config.toml b/frameworks/Python/xpresso/config.toml similarity index 87% rename from frameworks/Python/blacksheep/config.toml rename to frameworks/Python/xpresso/config.toml index add4622d212..fe4a990a539 100644 --- a/frameworks/Python/blacksheep/config.toml +++ b/frameworks/Python/xpresso/config.toml @@ -1,5 +1,5 @@ [framework] -name = "blacksheep" +name = "xpresso" [main] urls.plaintext = "/plaintext" @@ -9,7 +9,7 @@ urls.query = "/queries?queries=" urls.update = "/updates?queries=" urls.fortune = "/fortunes" approach = "Realistic" -classification = "Platform" +classification = "Micro" database = "Postgres" database_os = "Linux" os = "Linux" diff --git a/frameworks/Python/xpresso/main.py b/frameworks/Python/xpresso/main.py new file mode 100755 index 00000000000..0862fe51c7a --- /dev/null +++ b/frameworks/Python/xpresso/main.py @@ -0,0 +1,158 @@ +import multiprocessing +import os +import pathlib +from operator import itemgetter +from random import randint, sample +from typing import Annotated, AsyncIterable, Optional + +import asyncpg # type: ignore +import jinja2 # type: ignore +import uvicorn # type: ignore +from pydantic import BaseModel, Field +from starlette.responses import HTMLResponse, PlainTextResponse +from xpresso import App, Depends, Path, Response, FromQuery + +READ_ROW_SQL = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1' +WRITE_ROW_SQL = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' +ADDITIONAL_ROW = (0, 'Additional fortune added at request time.') + + +sort_fortunes_key = itemgetter(1) + +app_dir = pathlib.Path(__file__).parent +with (app_dir / "templates" / "fortune.html").open() as template_file: + template = jinja2.Template(template_file.read()) + + +async def get_db_pool() -> AsyncIterable[asyncpg.Pool]: + async with asyncpg.create_pool( # type: ignore + user=os.getenv('PGUSER', 'benchmarkdbuser'), + password=os.getenv('PGPASS', 'benchmarkdbpass'), + database=os.getenv('PGDB', 'hello_world'), + 
host=os.getenv('PGHOST', 'tfb-database'), + port=5432, + ) as pool: + yield pool + + +DBPool = Annotated[asyncpg.Pool, Depends(get_db_pool, scope="app")] + + +def get_num_queries(queries: Optional[str]) -> int: + if not queries: + return 1 + try: + queries_num = int(queries) + except (ValueError, TypeError): + return 1 + if queries_num < 1: + return 1 + if queries_num > 500: + return 500 + return queries_num + + + +class Greeting(BaseModel): + message: str + + +def json_serialization() -> Greeting: + return Greeting(message="Hello, world!") + + +def plaintext() -> Response: + return PlainTextResponse(b"Hello, world!") + + +class QueryResult(BaseModel): + id: int + randomNumber: int + + +async def single_database_query(pool: DBPool) -> QueryResult: + row_id = randint(1, 10000) + + connection: "asyncpg.Connection" + async with pool.acquire() as connection: # type: ignore + number: int = await connection.fetchval(READ_ROW_SQL, row_id) # type: ignore + + return QueryResult.construct(id=row_id, randomNumber=number) + + +QueryCount = Annotated[str, Field(gt=0, le=500)] + + +async def multiple_database_queries( + pool: DBPool, + queries: FromQuery[str | None] = None, +) -> list[QueryResult]: + num_queries = get_num_queries(queries) + row_ids = sample(range(1, 10000), num_queries) + + connection: "asyncpg.Connection" + async with pool.acquire() as connection: # type: ignore + statement = await connection.prepare(READ_ROW_SQL) # type: ignore + return [ + QueryResult.construct( + id=row_id, + randomNumber=await statement.fetchval(row_id), # type: ignore + ) + for row_id in row_ids + ] + + +async def fortunes(pool: DBPool) -> Response: + connection: "asyncpg.Connection" + async with pool.acquire() as connection: # type: ignore + fortunes: "list[tuple[int, str]]" = await connection.fetch("SELECT * FROM Fortune") # type: ignore + + fortunes.append(ADDITIONAL_ROW) + fortunes.sort(key=sort_fortunes_key) + content = template.render(fortunes=fortunes) # type: ignore + return HTMLResponse(content) + + +async def database_updates( + pool: DBPool, + queries: FromQuery[str | None] = None, +) -> list[QueryResult]: + num_queries = get_num_queries(queries) + + updates = [(row_id, randint(1, 10000)) for row_id in sample(range(1, 10000), num_queries)] + + async with pool.acquire() as connection: + statement = await connection.prepare(READ_ROW_SQL) + for row_id, _ in updates: + await statement.fetchval(row_id) + await connection.executemany(WRITE_ROW_SQL, updates) # type: ignore + + return [QueryResult.construct(id=row_id, randomNumber=number) for row_id, number in updates] + + +routes = ( + Path("/json", get=json_serialization), + Path("/plaintext", get=plaintext), + Path("/db", get=single_database_query), + Path("/queries", get=multiple_database_queries), + Path("/fortunes", get=fortunes), + Path("/updates", get=database_updates), +) + + +app = App(routes=routes) + + +if __name__ == "__main__": + workers = multiprocessing.cpu_count() + if os.environ.get("TRAVIS") == "true": + workers = 2 + uvicorn.run( # type: ignore + "main:app", + host="0.0.0.0", + port=8080, + workers=workers, + log_level="error", + loop="uvloop", + http="httptools", + ) diff --git a/frameworks/Python/xpresso/requirements.txt b/frameworks/Python/xpresso/requirements.txt new file mode 100644 index 00000000000..5371b0a4735 --- /dev/null +++ b/frameworks/Python/xpresso/requirements.txt @@ -0,0 +1,7 @@ +asyncpg==0.26.0 +Jinja2==2.11.3 +markupsafe==2.0.1 +httptools==0.4.0 +uvloop==0.16.0 +uvicorn==0.18.2 +xpresso==0.42.0 diff --git 
a/frameworks/Python/xpresso/templates/fortune.html b/frameworks/Python/xpresso/templates/fortune.html new file mode 100644 index 00000000000..1c90834285d --- /dev/null +++ b/frameworks/Python/xpresso/templates/fortune.html @@ -0,0 +1,10 @@ + + +Fortunes + + + +{% for fortune in fortunes %} +{% endfor %}
+<tr><th>id</th><th>message</th></tr>
+<tr><td>{{ fortune[0] }}</td><td>{{ fortune[1]|e }}</td></tr>
+ + diff --git a/frameworks/Python/xpresso/xpresso.dockerfile b/frameworks/Python/xpresso/xpresso.dockerfile new file mode 100644 index 00000000000..f0aba7a9005 --- /dev/null +++ b/frameworks/Python/xpresso/xpresso.dockerfile @@ -0,0 +1,14 @@ +FROM python:3.10 + +RUN mkdir /xpresso +WORKDIR /xpresso + +COPY ./requirements.txt /xpresso/ + +RUN pip install --no-cache-dir -r /xpresso/requirements.txt + +COPY ./ /xpresso/ + +EXPOSE 8080 + +CMD ["python", "main.py"] diff --git a/frameworks/R/plumber/README.md b/frameworks/R/plumber/README.md new file mode 100755 index 00000000000..a4a7716454d --- /dev/null +++ b/frameworks/R/plumber/README.md @@ -0,0 +1,18 @@ +# [Plumber](https://www.rplumber.io/) Benchmark Test + +The information below is specific to Plumber. For further guidance, +review the [documentation](https://github.com/TechEmpower/FrameworkBenchmarks/wiki). + +This is the R Plumber portion of a [benchmarking tests suite](../../) +comparing a variety of frameworks. + +All test implementations are located within a single file +([plumber.R](plumber.R)). + +## Description + +Plumber + +### Database + +PostgresQL diff --git a/frameworks/R/plumber/benchmark_config.json b/frameworks/R/plumber/benchmark_config.json new file mode 100755 index 00000000000..ceb6f0c608a --- /dev/null +++ b/frameworks/R/plumber/benchmark_config.json @@ -0,0 +1,30 @@ +{ + "framework": "plumber", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/query?queries=", + "fortune_url": "/fortunes", + "update_url": "/updates?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "Plumber", + "language": "R", + "flavor": "None", + "orm": "Raw", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Plumber", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Crystal/orion/config.toml b/frameworks/R/plumber/config.toml similarity index 86% rename from frameworks/Crystal/orion/config.toml rename to frameworks/R/plumber/config.toml index 6fe6097ec34..9fac8d01dbb 100644 --- a/frameworks/Crystal/orion/config.toml +++ b/frameworks/R/plumber/config.toml @@ -1,5 +1,5 @@ [framework] -name = "orion" +name = "plumber" [main] urls.plaintext = "/plaintext" @@ -10,10 +10,10 @@ urls.update = "/updates?queries=" urls.fortune = "/fortunes" approach = "Realistic" classification = "Micro" -database = "postgres" +database = "Postgres" database_os = "Linux" os = "Linux" -orm = "Micro" +orm = "Raw" platform = "None" webserver = "None" versus = "None" diff --git a/frameworks/R/plumber/deploy_plumber.R b/frameworks/R/plumber/deploy_plumber.R new file mode 100644 index 00000000000..907a035ab26 --- /dev/null +++ b/frameworks/R/plumber/deploy_plumber.R @@ -0,0 +1,5 @@ +library(plumber) + +r <- plumber::plumb('plumber.R') + +r$run(port = 8080, host = '0.0.0.0') diff --git a/frameworks/R/plumber/plumber.R b/frameworks/R/plumber/plumber.R new file mode 100644 index 00000000000..a86827d437b --- /dev/null +++ b/frameworks/R/plumber/plumber.R @@ -0,0 +1,118 @@ +library(plumber) +library(dplyr) +library(DBI) + + +READ_ROW_SQL_BASE = 'SELECT "randomnumber", "id" FROM "world" WHERE id = ' +WRITE_ROW_SQL = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2' + +db_con <- dbConnect(RPostgres::Postgres(), dbname = "hello_world", host="tfb-database", port=5432, user="benchmarkdbuser", password="benchmarkdbpass") + + +get_num_queries <- 
function(queries){ + query_count <- 1 + query_count <- as.numeric(queries) + if(is.na(query_count)) query_count <- 1 + if(query_count < 1) return(1) + if(query_count > 500) return(500) + return(query_count) +} + + +#* @get /query +#* @param queries +#* @serializer json +function(req, res, queries = NULL) { + res$headers$Server <- "example" + if(is.null(queries)) queries <- 1 + num_queries = get_num_queries(queries) + row_ids = sample.int(10000, num_queries) + + output_list <- list() + for(row_id in row_ids){ + # number = dbFetch(dbSendQuery(db_con, paste0(READ_ROW_SQL_BASE, row_id))) + number = dbGetQuery(db_con, paste0(READ_ROW_SQL_BASE, row_id)) + + output_list <- c(output_list, list(list('id' = row_id, 'randomNumber'= number$randomnumber))) + } + # print(jsonlite::toJSON(plyr::ldply(output_list, as.data.frame))) + plyr::ldply(output_list, as.data.frame) +} + + +#* @get /updates +#* @param queries +#* @serializer json +function(req, res, queries = NULL) { + res$headers$Server <- "example" + if(is.null(queries)) queries <- 1 + num_queries = get_num_queries(queries) + row_ids = sample.int(10000, num_queries) + + output_list <- list() + for(row_id in row_ids){ + new_random_number <- sample.int(10000, 1) + number = dbGetQuery(db_con, paste0(READ_ROW_SQL_BASE, row_id)) + dbExecute(db_con, paste0('UPDATE "world" SET "randomnumber"=', new_random_number, ' WHERE id=', row_id)) + output_list <- c(output_list, list(list('id' = row_id, 'randomNumber'= new_random_number))) + } + + plyr::ldply(output_list, as.data.frame) +} + + +#* @get /fortunes +#* @serializer html +function(req, res) { + res$headers$Server <- "example" + + fortunes_result <- dbGetQuery(db_con, 'SELECT * FROM Fortune') + fortunes_df <- as.data.frame(fortunes_result) + fortunes_df <- rbind(fortunes_df, data.frame(id = 0, message = 'Additional fortune added at request time.')) + fortunes_df <- fortunes_df[order(fortunes_df$message), ] + output_string <- " + + + Fortunes + + + + " + + for(i in 1:nrow(fortunes_df)){ + output_string <- paste0(output_string, paste0("")) + } + paste0(output_string , "
+  <tr><th>id</th><th>message</th></tr>
+  <tr><td>", fortunes_df[i, 'id'], "</td><td>", fortunes_df[i, 'message'] %>% stringr::str_replace_all("<","&lt;") %>% stringr::str_replace_all(">","&gt;"), "</td></tr>
+ + ") +} + + +#* @get /json +#* @serializer unboxedJSON +function(req, res) { + res$headers$Server <- "example" + list('message'= 'Hello, world!') +} + +#* @get /plaintext +#* @serializer text +function(req, res) { + db_con <- dbConnect(RPostgres::Postgres(), dbname = "hello_world", host="tfb-database", port=5432, user="benchmarkdbuser", password="benchmarkdbpass") + res$headers$Server <- "example" + 'Hello, World!' +} + + +#* @get /db +#* @serializer unboxedJSON +function(req, res) { + res$headers$Server <- "example" + row_id = sample.int(10000, 1) + + # number = dbFetch(dbSendQuery(db_con, paste0(READ_ROW_SQL_BASE, row_id))) + number = dbGetQuery(db_con, paste0(READ_ROW_SQL_BASE, row_id)) + + list('id' = row_id, 'randomNumber'= number$randomnumber) +} diff --git a/frameworks/R/plumber/plumber.dockerfile b/frameworks/R/plumber/plumber.dockerfile new file mode 100644 index 00000000000..21ff6db00c5 --- /dev/null +++ b/frameworks/R/plumber/plumber.dockerfile @@ -0,0 +1,15 @@ +FROM rstudio/plumber + +RUN apt-get update + +RUN apt install postgresql postgresql-contrib -y + +RUN R -e "install.packages(c('DBI', 'RPostgres', 'plyr', 'dplyr', 'stringr'))" + +ADD ./ /plumber + +WORKDIR /plumber + +EXPOSE 8080 + +CMD ["deploy_plumber.R"] diff --git a/frameworks/Racket/racket/app.rkt b/frameworks/Racket/racket/app.rkt index 5184ade0215..d0b042bedb7 100644 --- a/frameworks/Racket/racket/app.rkt +++ b/frameworks/Racket/racket/app.rkt @@ -226,7 +226,8 @@ #:tcp@ tcp@ #:confirmation-channel ch #:safety-limits (make-safety-limits - #:max-waiting 4096 + #:max-concurrent 1000 + #:max-waiting 65535 #:request-read-timeout 16 #:response-timeout 16 #:response-send-timeout 16))) @@ -236,3 +237,10 @@ (raise ready-or-exn)) stop) + +(module+ main + (require net/tcp-unit) + (define stop (start "127.0.0.1" 8000 tcp@)) + (with-handlers ([exn:break? 
(λ (_) (stop))]) + (displayln "ready") + (sync never-evt))) diff --git a/frameworks/Racket/racket/main.rkt b/frameworks/Racket/racket/main.rkt index 26f001cbc15..c599b730a22 100644 --- a/frameworks/Racket/racket/main.rkt +++ b/frameworks/Racket/racket/main.rkt @@ -62,7 +62,7 @@ (apply choice-evt (map place-dead-evt places))) (define backlog - (* parallelism 4 1024)) + (* parallelism 65 1024)) (define listener (tcp-listen port backlog #t host)) (define stop-ch (make-channel)) @@ -71,6 +71,7 @@ (lambda () (define places* (list->vector places)) (define num-places (vector-length places*)) + (define stop-evt (choice-evt stop-ch place-fail-evt)) (let loop ([idx 0]) (sync (handle-evt @@ -79,9 +80,11 @@ (define-values (in out) (tcp-accept listener)) (place-channel-put (vector-ref places* idx) `(accept ,in, out)) + (tcp-abandon-port out) + (tcp-abandon-port in) (loop (modulo (add1 idx) num-places)))) (handle-evt - (choice-evt stop-ch place-fail-evt) + stop-evt (lambda (_) (stop-places) (tcp-close listener)))))))) diff --git a/frameworks/Racket/racket/place-tcp-unit.rkt b/frameworks/Racket/racket/place-tcp-unit.rkt index ed04f0c398d..c79da3823de 100644 --- a/frameworks/Racket/racket/place-tcp-unit.rkt +++ b/frameworks/Racket/racket/place-tcp-unit.rkt @@ -7,7 +7,11 @@ (provide make-place-tcp@) -(struct place-tcp-listener ()) +(struct place-tcp-listener (sema ch) + #:property prop:evt (lambda (self) + (handle-evt + (place-tcp-listener-sema self) + (lambda (_) self)))) (define (make-place-tcp@ accept-ch) (unit @@ -17,11 +21,11 @@ (define (tcp-abandon-port p) (tcp:tcp-abandon-port p)) - (define (tcp-accept _l) - (apply values (channel-get accept-ch))) + (define (tcp-accept l) + (apply values (channel-get (place-tcp-listener-ch l)))) - (define (tcp-accept/enable-break _l) - (apply values (sync/enable-break accept-ch))) + (define (tcp-accept/enable-break l) + (apply values (sync/enable-break (place-tcp-listener-ch l)))) (define (tcp-accept-ready? _l) (error 'tcp-accept-ready? "not supported")) @@ -50,7 +54,17 @@ [_backlog 4] [_reuse? #f] [_hostname #f]) - (place-tcp-listener)) + (define sema (make-semaphore)) + (define ch (make-channel)) + (thread + (lambda () + (let loop () + (define data (channel-get accept-ch)) + (semaphore-post sema) + (channel-put ch data) + (loop)))) + + (place-tcp-listener sema ch)) (define (tcp-listener? l) (place-tcp-listener? 
l)))) diff --git a/frameworks/Racket/racket/racket.dockerfile b/frameworks/Racket/racket/racket.dockerfile index a9c9213ad38..1c3c574514b 100644 --- a/frameworks/Racket/racket/racket.dockerfile +++ b/frameworks/Racket/racket/racket.dockerfile @@ -11,7 +11,7 @@ RUN echo 'APT::Get::Install-Recommends "false";' > /etc/apt/apt.conf.d/00-genera FROM debian AS racket -ARG RACKET_VERSION=8.3 +ARG RACKET_VERSION=8.6 RUN apt-get update -q \ && apt-get install --no-install-recommends -q -y \ @@ -28,7 +28,6 @@ ENV SSL_CERT_DIR="/etc/ssl/certs" RUN apt-get update -q \ && apt-get install --no-install-recommends -q -y redis-server - FROM racket AS builder RUN raco pkg install -D --auto --skip-installed redis-lib threading-lib diff --git a/frameworks/Ruby/grape/Gemfile b/frameworks/Ruby/grape/Gemfile index 1e69f15cd60..380df952afe 100644 --- a/frameworks/Ruby/grape/Gemfile +++ b/frameworks/Ruby/grape/Gemfile @@ -1,10 +1,10 @@ source 'http://rubygems.org' -gem 'mysql2', '0.4.10' -gem 'unicorn', '5.3.0' -gem 'puma', '3.12.6' -gem 'activerecord', '5.1.5', :require => 'active_record' -gem 'activerecord-import', "~> 0.18.1" -gem 'grape', '1.1.0' -gem 'rack', '1.6.12' -gem 'json', '2.1.0' +gem 'mysql2', '0.5.4' +gem 'unicorn', '6.1.0' +gem 'puma', '5.6.4' +gem 'activerecord', '7.0.3', :require => 'active_record' +gem 'activerecord-import', '1.4.0' +gem 'grape', '1.6.2' +gem 'rack', '2.2.3.1' +gem 'json', '2.6.2' diff --git a/frameworks/Ruby/grape/README.md b/frameworks/Ruby/grape/README.md index 77a276306e8..3b46eb6614f 100644 --- a/frameworks/Ruby/grape/README.md +++ b/frameworks/Ruby/grape/README.md @@ -6,20 +6,16 @@ For further guidance, review the Also note the additional information provided in the [Ruby README](../). This is the Ruby Grape portion of a [benchmarking test suite](../../) -comparing a variety of web servers along with JRuby/MRI. +comparing a variety of web servers. ## Infrastructure Software Versions The tests were run with: -* [Ruby 2.0.0-p0](http://www.ruby-lang.org/) -* [JRuby 1.7.8](http://jruby.org/) -* [Rubinius 2.2.10](http://rubini.us/) -* [Grape 0.8.0](http://intridea.github.io/grape/) -* [Rack 1.5.2](http://rack.github.com/) -* [Unicorn 4.8.3](http://unicorn.bogomips.org/) -* [TorqBox 0.1.7](http://torquebox.org/torqbox/) -* [Puma 3.9](http://puma.io/) -* [Thin 1.6.2](http://code.macournoyer.com/thin/) +* [Ruby 3.1](http://www.ruby-lang.org/) +* [Grape 1.6.2](http://www.ruby-grape.org/) +* [Rack 2.2.3.1](https://rack.github.io/) +* [Unicorn 6.1.0](https://yhbt.net/unicorn/) +* [Puma 5.6.4](https://puma.io/) ## Paths & Source for Tests @@ -42,4 +38,4 @@ _No experts listed, yet. 
If you're an expert, add yourself!_ ### Resources -* [Grape Micro-framework Source Code](https://github.com/intridea/grape) +* [Grape Micro-framework Source Code](https://github.com/ruby-grape/grape) diff --git a/frameworks/Ruby/grape/grape-unicorn.dockerfile b/frameworks/Ruby/grape/grape-unicorn.dockerfile index 428dd85e62d..c8c3966c36d 100644 --- a/frameworks/Ruby/grape/grape-unicorn.dockerfile +++ b/frameworks/Ruby/grape/grape-unicorn.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:3.1 RUN apt-get update -yqq && apt-get install -yqq nginx diff --git a/frameworks/Ruby/grape/grape.dockerfile b/frameworks/Ruby/grape/grape.dockerfile index ac3bb1d140b..13a86a64359 100644 --- a/frameworks/Ruby/grape/grape.dockerfile +++ b/frameworks/Ruby/grape/grape.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:3.1 ADD ./ /grape diff --git a/frameworks/Ruby/rails/Gemfile.lock b/frameworks/Ruby/rails/Gemfile.lock index f509357cba7..dfc9e4fd60e 100644 --- a/frameworks/Ruby/rails/Gemfile.lock +++ b/frameworks/Ruby/rails/Gemfile.lock @@ -80,7 +80,7 @@ GEM listen (3.7.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) - loofah (2.13.0) + loofah (2.18.0) crass (~> 1.0.2) nokogiri (>= 1.5.9) mail (2.7.1) @@ -88,7 +88,7 @@ GEM marcel (1.0.2) method_source (1.0.0) mini_mime (1.1.2) - mini_portile2 (2.7.1) + mini_portile2 (2.8.0) minitest (5.15.0) mysql2 (0.5.3) net-imap (0.2.3) @@ -107,16 +107,16 @@ GEM net-protocol timeout nio4r (2.5.8) - nokogiri (1.13.1) - mini_portile2 (~> 2.7.0) + nokogiri (1.13.6) + mini_portile2 (~> 2.8.0) racc (~> 1.4) - nokogiri (1.13.1-arm64-darwin) + nokogiri (1.13.6-arm64-darwin) racc (~> 1.4) - nokogiri (1.13.1-x86_64-linux) + nokogiri (1.13.6-x86_64-linux) racc (~> 1.4) oj (3.13.11) pg (1.2.3) - puma (5.6.2) + puma (5.6.4) nio4r (~> 2.0) racc (1.6.0) rack (2.2.3) @@ -139,7 +139,7 @@ GEM rails-dom-testing (2.0.3) activesupport (>= 4.2.0) nokogiri (>= 1.6) - rails-html-sanitizer (1.4.2) + rails-html-sanitizer (1.4.3) loofah (~> 2.3) railties (7.0.1) actionpack (= 7.0.1) diff --git a/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb b/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb index ab1812389ce..25a918d286b 100644 --- a/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb +++ b/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb @@ -11,7 +11,7 @@ def plaintext end def json - render json: { message: 'Hello, World!' 
} + render json: JSON.generate({message: 'Hello, World!'}) end def db diff --git a/frameworks/Rust/actix/Cargo.lock b/frameworks/Rust/actix/Cargo.lock index d48c2467863..e8dc41b3b1d 100644 --- a/frameworks/Rust/actix/Cargo.lock +++ b/frameworks/Rust/actix/Cargo.lock @@ -19,11 +19,11 @@ dependencies = [ "futures-util", "log", "once_cell", - "parking_lot", + "parking_lot 0.11.2", "pin-project-lite", "smallvec", "tokio", - "tokio-util", + "tokio-util 0.6.9", ] [[package]] @@ -40,7 +40,7 @@ dependencies = [ "memchr", "pin-project-lite", "tokio", - "tokio-util", + "tokio-util 0.6.9", ] [[package]] @@ -251,6 +251,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "anyhow" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" + [[package]] name = "askama" version = "0.11.0" @@ -369,6 +375,25 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bson" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60a2c7c80a7850b56df4b8e98e8e4932c34877b8add4f13e8350499cc1e4572" +dependencies = [ + "ahash", + "base64", + "chrono", + "hex", + "indexmap", + "lazy_static", + "rand", + "serde", + "serde_bytes", + "serde_json", + "uuid", +] + [[package]] name = "buf-min" version = "0.6.1" @@ -378,6 +403,12 @@ dependencies = [ "bytes", ] +[[package]] +name = "bumpalo" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" + [[package]] name = "byteorder" version = "1.4.3" @@ -453,7 +484,7 @@ dependencies = [ "ansi_term", "atty", "bitflags", - "strsim", + "strsim 0.8.0", "textwrap", "unicode-width", "vec_map", @@ -505,11 +536,97 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d6b536309245c849479fba3da410962a43ed8e51c26b729208ec0ac2798d0" +checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" dependencies = [ "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0d720b8683f8dd83c65155f0530560cba68cd2bf395f6513a483caee57ff7f4" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a340f241d2ceed1deb47ae36c4144b2707ec7dd0b649f894cb39bb595986324" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c41b3b7352feb3211a0d743dc5700a4e3b60f51bd2b368892d1e0f9a95f44b" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "data-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" + +[[package]] +name = "deadpool" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf0c5365c0925c80a838a6810a1bf38d3304ca6b4eb25829e29e33da12de786" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "tokio", +] 
+ +[[package]] +name = "deadpool-postgres" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46ff1451a33b8b31b15eedcf5401dbbb28606caed4fa94d20487eb3fac2ebd04" +dependencies = [ + "deadpool", + "log", + "tokio", + "tokio-postgres", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +dependencies = [ + "tokio", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -521,7 +638,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.4.0", "syn", ] @@ -550,13 +667,12 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b697d66081d42af4fba142d56918a3cb21dc8eb63372c6b85d14f44fb9c5979b" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ "block-buffer", "crypto-common", - "generic-array", "subtle", ] @@ -581,6 +697,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "env_logger" version = "0.9.0" @@ -762,7 +890,7 @@ dependencies = [ "indexmap", "slab", "tokio", - "tokio-util", + "tokio-util 0.6.9", "tracing", ] @@ -785,6 +913,12 @@ dependencies = [ "ahash", ] +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -794,15 +928,32 @@ dependencies = [ "libc", ] +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + [[package]] name = "hmac" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddca131f3e7f2ce2df364b57949a9d47915cfbd35e46cfee355ccebbf794d6a2" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + [[package]] name = "http" version = "0.2.6" @@ -838,6 +989,12 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "0.2.3" @@ -868,6 +1025,24 @@ dependencies = [ "cfg-if", ] +[[package]] 
+name = "ipconfig" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "723519edce41262b05d4143ceb95050e4c614f483e78e9fd9e39a8275a84ad98" +dependencies = [ + "socket2", + "widestring", + "winapi", + "winreg", +] + +[[package]] +name = "ipnet" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" + [[package]] name = "itoa" version = "0.4.8" @@ -880,6 +1055,15 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +[[package]] +name = "js-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "language-tags" version = "0.3.2" @@ -914,6 +1098,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "linked-hash-map" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" + [[package]] name = "local-channel" version = "0.1.2" @@ -950,6 +1140,21 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "lru-cache" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + [[package]] name = "matches" version = "0.1.9" @@ -958,9 +1163,9 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "md-5" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6a38fc55c8bbc10058782919516f88826e70320db6d206aebc49611d24216ae" +checksum = "658646b21e0b72f7866c7038ab086d3d5e1cd6271f060fd37defb241949d0582" dependencies = [ "digest", ] @@ -1018,6 +1223,53 @@ dependencies = [ "winapi", ] +[[package]] +name = "mongodb" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a2fe500edae1ffc8e4bbc731e54a44638e5ec59fbff967f1cb8306867f5b53" +dependencies = [ + "async-trait", + "base64", + "bitflags", + "bson", + "chrono", + "derivative", + "futures-core", + "futures-executor", + "futures-io", + "futures-util", + "hex", + "hmac", + "lazy_static", + "md-5", + "os_info", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls", + "rustls-pemfile", + "serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2", + "stringprep", + "strsim 0.10.0", + "take_mut", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util 0.7.1", + "trust-dns-proto", + "trust-dns-resolver", + "typed-builder", + "uuid", + "webpki-roots", +] + [[package]] name = "nom" version = "7.1.0" @@ -1082,6 +1334,16 @@ version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" +[[package]] +name = "os_info" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"023df84d545ef479cf67fd2f4459a613585c9db4852c2fad12ab70587859d340" +dependencies = [ + "log", + "winapi", +] + [[package]] name = "parking_lot" version = "0.11.2" @@ -1090,7 +1352,17 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.5", +] + +[[package]] +name = "parking_lot" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.2", ] [[package]] @@ -1107,12 +1379,34 @@ dependencies = [ "winapi", ] +[[package]] +name = "parking_lot_core" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "995f667a6c822200b0433ac218e05582f0e2efa1b922a3fd2fbaadc5f87bab37" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-sys", +] + [[package]] name = "paste" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5" +[[package]] +name = "pbkdf2" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271779f35b581956db91a3e55737327a03aa051e90b1c47aeb189508533adfd7" +dependencies = [ + "digest", +] + [[package]] name = "peeking_take_while" version = "0.1.2" @@ -1218,6 +1512,12 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quote" version = "1.0.15" @@ -1278,9 +1578,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.4" +version = "1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" dependencies = [ "aho-corasick", "memchr", @@ -1289,9 +1589,34 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" + +[[package]] +name = "resolv-conf" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" +dependencies = [ + "hostname", + "quick-error", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] [[package]] name = "rustc-hash" @@ -1299,15 +1624,61 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + 
"semver 0.9.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.4", ] +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", +] + +[[package]] +name = "rustls" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fbfeb8d0ddb84706bc597a5574ab8912817c52a397f819e5b614e2265206921" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-pemfile" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360" +dependencies = [ + "base64", +] + +[[package]] +name = "rustversion" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" + [[package]] name = "ryu" version = "1.0.9" @@ -1320,12 +1691,37 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + [[package]] name = "serde" version = "1.0.136" @@ -1335,6 +1731,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" version = "1.0.136" @@ -1352,6 +1757,7 @@ version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" dependencies = [ + "indexmap", "itoa 1.0.1", "ryu", "serde", @@ -1369,6 +1775,29 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec1e6ec4d8950e5b1e894eac0d360742f3b1407a6078a604a731c4b3f49cefbc" +dependencies = [ + "rustversion", + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"12e47be9471c72889ebafb5e14d5ff930d89ae7a67bbdb5f8abb564f845a927e" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sha-1" version = "0.10.0" @@ -1382,9 +1811,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99c3bd8169c58782adad9290a9af5939994036b76187f7b4f0e6de91dbbfc0ec" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" dependencies = [ "cfg-if", "cpufeatures", @@ -1497,6 +1926,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "stringprep" version = "0.1.2" @@ -1513,6 +1948,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + [[package]] name = "subtle" version = "2.4.1" @@ -1530,6 +1971,12 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + [[package]] name = "termcolor" version = "1.1.2" @@ -1559,14 +2006,17 @@ dependencies = [ "actix-server", "actix-service", "actix-web", + "anyhow", "askama", "bindgen", "bytes", + "deadpool-postgres", "diesel", "env_logger", "futures", "http", "log", + "mongodb", "num_cpus", "rand", "serde", @@ -1581,6 +2031,26 @@ dependencies = [ "yarte", ] +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "time" version = "0.1.43" @@ -1629,7 +2099,7 @@ dependencies = [ "mio 0.7.14", "num_cpus", "once_cell", - "parking_lot", + "parking_lot 0.11.2", "pin-project-lite", "signal-hook-registry", "tokio-macros", @@ -1659,7 +2129,7 @@ dependencies = [ "fallible-iterator", "futures", "log", - "parking_lot", + "parking_lot 0.11.2", "percent-encoding", "phf", "pin-project-lite", @@ -1667,7 +2137,18 @@ dependencies = [ "postgres-types", "socket2", "tokio", - "tokio-util", + "tokio-util 0.6.9", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4151fda0cf2798550ad0b34bcfc9b9dcc2a9d2471c895c68f3a8818e54f2389e" +dependencies = [ + "rustls", + "tokio", + "webpki", ] [[package]] @@ -1684,6 +2165,19 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-util" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0edfdeb067411dba2044da6d1cb2df793dd35add7888d73c16e3381ded401764" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + 
"pin-project-lite", + "tokio", +] + [[package]] name = "toml" version = "0.5.8" @@ -1713,6 +2207,62 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna", + "ipnet", + "lazy_static", + "log", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "url", +] + +[[package]] +name = "trust-dns-resolver" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lazy_static", + "log", + "lru-cache", + "parking_lot 0.12.0", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "trust-dns-proto", +] + +[[package]] +name = "typed-builder" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "typenum" version = "1.15.0" @@ -1746,6 +2296,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "url" version = "2.2.2" @@ -1758,6 +2314,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", + "serde", +] + [[package]] name = "v_escape" version = "0.18.0" @@ -1833,6 +2399,89 @@ version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" +[[package]] +name = "wasm-bindgen" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +dependencies = [ + "proc-macro2", + "quote", + "syn", + 
"wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" + +[[package]] +name = "web-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf" +dependencies = [ + "webpki", +] + [[package]] name = "which" version = "4.2.4" @@ -1844,6 +2493,12 @@ dependencies = [ "libc", ] +[[package]] +name = "widestring" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" + [[package]] name = "winapi" version = "0.3.9" @@ -1875,6 +2530,58 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5acdd78cb4ba54c0045ac14f62d8f94a03d10047904ae2a40afa1e99d8f70825" +dependencies = [ + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_msvc" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17cffbe740121affb56fad0fc0e421804adf0ae00891205213b5cecd30db881d" + +[[package]] +name = "windows_i686_gnu" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2564fde759adb79129d9b4f54be42b32c89970c18ebf93124ca8870a498688ed" + +[[package]] +name = "windows_i686_msvc" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cd9d32ba70453522332c14d38814bceeb747d80b3958676007acadd7e166956" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfce6deae227ee8d356d19effc141a509cc503dfd1f850622ec4b0f84428e1f4" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d19538ccc21819d01deaf88d6a17eae6596a12e9aafdbb97916fb49896d89de9" + +[[package]] +name = "winreg" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" +dependencies = [ + "winapi", +] + [[package]] name = "yansi-term" version = "0.1.2" diff --git a/frameworks/Rust/actix/Cargo.toml b/frameworks/Rust/actix/Cargo.toml index 22e66962d34..2ac00861353 100755 --- a/frameworks/Rust/actix/Cargo.toml +++ b/frameworks/Rust/actix/Cargo.toml @@ -19,7 +19,16 @@ path = "src/main_http.rs" name = "tfb-server" path 
= "src/main_server.rs" +[[bin]] +name = "tfb-web-mongodb" +path = "src/main_mongodb.rs" + +[[bin]] +name = "tfb-web-pg-deadpool" +path = "src/main_pg_deadpool.rs" + [dependencies] +anyhow = "1" actix = "0.12" actix-web = { version = "4.0.0-rc.3", default-features = false, features = ["macros"] } actix-http = { version = "3.0.0-rc.2", default-features = false } @@ -44,6 +53,8 @@ simd-json-derive = "0.2" snmalloc-rs = "0.2.6" tokio = { version = "1", features = ["full"] } tokio-postgres = "0.7.5" +deadpool-postgres = "0.10.1" +mongodb = "2.2.0" url = "2.1" v_htmlescape = "0.14" yarte = { version = "0.15", features = ["bytes-buf"] } diff --git a/frameworks/Rust/actix/README.md b/frameworks/Rust/actix/README.md index 1de0fe4144f..def975cde32 100644 --- a/frameworks/Rust/actix/README.md +++ b/frameworks/Rust/actix/README.md @@ -21,11 +21,15 @@ Actix web is a small, fast, pragmatic, open source rust web framework. * Multipart streams * Middlewares (Logger, Session, DefaultHeaders, CORS) -## Database +## Databases -PostgreSQL. +* PostgreSQL + * Raw driver access via [`tokio_postgres`](https://docs.rs/tokio-postgres/latest/tokio_postgres/) (actix-http test) + * ORM using [`diesel`](http://diesel.rs) (actix-web-diesel test) + * Raw driver access via [`tokio_postgres`](https://docs.rs/tokio-postgres/latest/tokio_postgres/) with connection pooling using [`deadpool_postgres`](https://docs.rs/deadpool-postgres/latest/deadpool_postgres/) (actix-web-pg-deadpool test) -* ORM using [diesel](http://diesel.rs) +* MongoDB + * Raw driver access and connection pooling via [`mongodb`](https://docs.rs/mongodb/latest/mongodb/) (actix-web-mongodb test) ## Test URLs diff --git a/frameworks/Rust/actix/actix-web-mongodb.dockerfile b/frameworks/Rust/actix/actix-web-mongodb.dockerfile new file mode 100644 index 00000000000..f89e9552660 --- /dev/null +++ b/frameworks/Rust/actix/actix-web-mongodb.dockerfile @@ -0,0 +1,15 @@ +FROM rust:1.57.0 + +ENV ACTIX_TECHEMPOWER_MONGODB_URL=mongodb://tfb-database:27017 + +RUN apt-get update -yqq && apt-get install -yqq cmake g++ + +ADD ./ /actix +WORKDIR /actix + +RUN cargo clean +RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin tfb-web-mongodb + +EXPOSE 8080 + +CMD ./target/release/tfb-web-mongodb diff --git a/frameworks/Rust/actix/actix-web-pg-deadpool.dockerfile b/frameworks/Rust/actix/actix-web-pg-deadpool.dockerfile new file mode 100644 index 00000000000..0ce0a4dbaac --- /dev/null +++ b/frameworks/Rust/actix/actix-web-pg-deadpool.dockerfile @@ -0,0 +1,13 @@ +FROM rust:1.57.0 + +RUN apt-get update -yqq && apt-get install -yqq cmake g++ + +ADD ./ /actix +WORKDIR /actix + +RUN cargo clean +RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin tfb-web-pg-deadpool + +EXPOSE 8080 + +CMD ./target/release/tfb-web-pg-deadpool \ No newline at end of file diff --git a/frameworks/Rust/actix/benchmark_config.json b/frameworks/Rust/actix/benchmark_config.json index 34906cce36a..caf6239a7e3 100755 --- a/frameworks/Rust/actix/benchmark_config.json +++ b/frameworks/Rust/actix/benchmark_config.json @@ -76,6 +76,46 @@ "display_name": "actix-server", "notes": "", "versus": "" + }, + "web-mongodb": { + "db_url": "/db", + "fortune_url": "/fortunes", + "query_url": "/queries?q=", + "update_url": "/updates?q=", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "MongoDB", + "framework": "actix", + "language": "Rust", + "orm": "Raw", + "platform": "None", + "webserver": "actix-web", + "os": "Linux", + "database_os": "Linux", + "display_name": 
"Actix Web [MongoDB]", + "notes": "", + "versus": "" + }, + "web-pg-deadpool": { + "db_url": "/db", + "fortune_url": "/fortunes", + "query_url": "/queries?q=", + "update_url": "/updates?q=", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "actix", + "language": "Rust", + "orm": "Raw", + "platform": "None", + "webserver": "actix-web", + "os": "Linux", + "database_os": "Linux", + "display_name": "Actix Web [Postgres + deadpool]", + "notes": "", + "versus": "" } }] } diff --git a/frameworks/Rust/actix/src/main_mongodb.rs b/frameworks/Rust/actix/src/main_mongodb.rs new file mode 100644 index 00000000000..251060fe2e1 --- /dev/null +++ b/frameworks/Rust/actix/src/main_mongodb.rs @@ -0,0 +1,219 @@ +mod models_mongodb; +mod utils; + +use models_mongodb::{Fortune, World}; +use utils::{Queries, Result, CONNECTION_POOL_SIZE}; + +use actix_http::{ + header::{HeaderValue, CONTENT_TYPE, SERVER}, + KeepAlive, StatusCode, +}; +use actix_web::{web, App, HttpResponse, HttpServer}; +use anyhow::bail; +use futures::{stream::FuturesUnordered, TryStreamExt}; +use mongodb::bson::doc; +use mongodb::{options::ClientOptions, Client}; +use rand::{prelude::SmallRng, Rng, SeedableRng}; +use tokio::runtime::Handle; +use yarte::ywrite_html; + +use std::time::Duration; + +struct Data { + client: Client, + tokio_runtime: tokio::runtime::Handle, +} + +async fn find_random_world(data: web::Data) -> Result { + let runtime = data.tokio_runtime.clone(); + runtime + .spawn(async move { + let mut rng = SmallRng::from_entropy(); + let id = (rng.gen::() % 10_000 + 1) as i32; + + let coll = data.client.database("hello_world").collection("world"); + let world = coll + .find_one(doc! { "id": id as f32 }, None) + .await? + .expect("should find world"); + Ok(world) + }) + .await? +} + +#[actix_web::get("/db")] +async fn db(data: web::Data) -> Result>> { + let world = find_random_world(data).await?; + let mut bytes = Vec::with_capacity(48); + serde_json::to_writer(&mut bytes, &world)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +async fn find_random_worlds(data: web::Data, num_of_worlds: usize) -> Result> { + let mut futs = FuturesUnordered::new(); + for _ in 0..num_of_worlds { + futs.push(find_random_world(data.clone())) + } + + let mut worlds = Vec::with_capacity(num_of_worlds); + while let Some(world) = futs.try_next().await? 
{ + worlds.push(world); + } + + Ok(worlds) +} + +#[actix_web::get("/queries")] +async fn queries( + data: web::Data, + query: web::Query, +) -> Result>> { + let n_queries = query.q; + + let worlds = find_random_worlds(data, n_queries).await?; + + let mut bytes = Vec::with_capacity(35 * n_queries); + serde_json::to_writer(&mut bytes, &worlds)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +#[actix_web::get("/updates")] +async fn updates( + data: web::Data, + query: web::Query, +) -> Result>> { + let tokio_runtime = data.tokio_runtime.clone(); + let client = data.client.clone(); + + let mut worlds = find_random_worlds(data, query.q).await?; + + let mut rng = SmallRng::from_entropy(); + let mut updates = Vec::new(); + for world in worlds.iter_mut() { + let new_random_number = (rng.gen::() % 10_000 + 1) as i32; + updates.push(doc! { + "q": { "id": world.id }, "u": { "$set": { "randomNumber": new_random_number }} + }); + world.random_number = new_random_number; + } + + tokio_runtime + .spawn(async move { + client + .database("hello_world") + .run_command( + doc! { + "update": "world", + "updates": updates, + "ordered": false, + }, + None, + ) + .await + }) + .await??; + + let mut bytes = Vec::with_capacity(35 * worlds.len()); + serde_json::to_writer(&mut bytes, &worlds)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +#[actix_web::get("/fortunes")] +async fn fortune(data: web::Data) -> Result>> { + async fn fetch_fortunes(client: &Client) -> Result> { + let fortunes_cursor = client + .database("hello_world") + .collection::("fortune") + .find(None, None) + .await?; + + let mut fortunes: Vec = fortunes_cursor.try_collect().await?; + fortunes.push(Fortune { + id: 0, + message: "Additional fortune added at request time.".to_string(), + }); + + fortunes.sort_by(|a, b| a.message.cmp(&b.message)); + + Ok(fortunes) + } + + let d = data.clone(); + let fortunes = data + .tokio_runtime + .spawn(async move { fetch_fortunes(&d.client).await }) + .await??; + + let mut body = Vec::with_capacity(2048); + ywrite_html!(body, "{{> fortune }}"); + + let mut res = HttpResponse::with_body(StatusCode::OK, body); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=utf-8"), + ); + + Ok(res) +} + +fn main() { + actix_web::rt::System::with_tokio_rt(|| tokio::runtime::Runtime::new().unwrap()) + .block_on(async_main()) + .unwrap(); +} + +async fn async_main() -> Result<()> { + println!("Starting http server: 0.0.0.0:8080"); + + // use a separate, multithreaded tokio runtime for db queries for better performance + let handle = Handle::current(); + + let uri = std::env::var("ACTIX_TECHEMPOWER_MONGODB_URL") + .or_else(|_| bail!("missing ACTIX_TECHEMPOWER_MONGODB_URL env variable"))?; + let mut options = ClientOptions::parse(uri).await?; + options.max_pool_size = Some(CONNECTION_POOL_SIZE as u32); + let client = Client::with_options(options)?; + + HttpServer::new(move || { + App::new() + .app_data(web::Data::new(Data { + client: client.clone(), + tokio_runtime: handle.clone(), + })) + .service(fortune) + .service(db) 
+ .service(queries) + .service(updates) + }) + .keep_alive(KeepAlive::Os) + .client_request_timeout(Duration::from_secs(0)) + .backlog(1024) + .bind("0.0.0.0:8080")? + .run() + .await?; + + Ok(()) +} diff --git a/frameworks/Rust/actix/src/main_pg_deadpool.rs b/frameworks/Rust/actix/src/main_pg_deadpool.rs new file mode 100644 index 00000000000..b24d5753d20 --- /dev/null +++ b/frameworks/Rust/actix/src/main_pg_deadpool.rs @@ -0,0 +1,210 @@ +mod models; +mod utils; + +use std::fmt::Write; +use std::time::Duration; + +use actix_http::KeepAlive; +use actix_web::{ + http::{ + header::{HeaderValue, CONTENT_TYPE, SERVER}, + StatusCode, + }, + web, App, HttpResponse, HttpServer, +}; +use deadpool_postgres::{Config, Pool, PoolConfig, Runtime}; +use futures::{stream::FuturesUnordered, TryStreamExt}; +use models::{Fortune, World}; +use rand::{prelude::SmallRng, Rng, SeedableRng}; +use tokio_postgres::{types::ToSql, NoTls}; +use utils::{Queries, Result, CONNECTION_POOL_SIZE}; +use yarte::ywrite_html; + +async fn find_random_world(pool: &Pool) -> Result { + let conn = pool.get().await?; + let world = conn + .prepare("SELECT * FROM world WHERE id=$1") + .await + .unwrap(); + + let mut rng = SmallRng::from_entropy(); + let id = (rng.gen::() % 10_000 + 1) as i32; + + let row = conn.query_one(&world, &[&id]).await?; + + Ok(World { + id: row.get(0), + randomnumber: row.get(1), + }) +} + +async fn find_random_worlds(pool: &Pool, num_of_worlds: usize) -> Result> { + let mut futs = FuturesUnordered::new(); + for _ in 0..num_of_worlds { + futs.push(find_random_world(pool)); + } + + let mut worlds = Vec::with_capacity(num_of_worlds); + while let Some(world) = futs.try_next().await? { + worlds.push(world); + } + + Ok(worlds) +} + +#[actix_web::get("/db")] +async fn db(data: web::Data) -> Result>> { + let world = find_random_world(&data).await?; + let mut bytes = Vec::with_capacity(48); + serde_json::to_writer(&mut bytes, &world)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +#[actix_web::get("/queries")] +async fn queries( + data: web::Data, + query: web::Query, +) -> Result>> { + let n_queries = query.q; + + let worlds = find_random_worlds(&data, n_queries).await?; + + let mut bytes = Vec::with_capacity(35 * n_queries); + serde_json::to_writer(&mut bytes, &worlds)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +#[actix_web::get("/updates")] +async fn updates( + data: web::Data, + query: web::Query, +) -> Result>> { + let mut worlds = find_random_worlds(&data, query.q).await?; + + let mut rng = SmallRng::from_entropy(); + + let mut updates = "UPDATE world SET randomnumber = CASE id ".to_string(); + let mut params: Vec<&(dyn ToSql + Sync)> = Vec::with_capacity(query.q as usize * 3); + + let mut n_params = 1; + for world in worlds.iter_mut() { + let new_random_number = (rng.gen::() % 10_000 + 1) as i32; + write!(&mut updates, "when ${} then ${} ", n_params, n_params + 1).unwrap(); + world.randomnumber = new_random_number; + n_params += 2; + } + + // need separate loop to borrow immutably + for world in worlds.iter() { + params.push(&world.id); + params.push(&world.randomnumber); + } + + updates.push_str("ELSE 
randomnumber END WHERE id IN ("); + for world in worlds.iter() { + write!(&mut updates, "${},", n_params).unwrap(); + params.push(&world.id); + n_params += 1; + } + + updates.pop(); // drop trailing comma + updates.push(')'); + + let conn = data.get().await?; + let stmt = conn.prepare(&updates).await?; + conn.query(&stmt, ¶ms).await?; + + let mut bytes = Vec::with_capacity(35 * worlds.len()); + serde_json::to_writer(&mut bytes, &worlds)?; + + let mut res = HttpResponse::with_body(StatusCode::OK, bytes); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut() + .insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + Ok(res) +} + +#[actix_web::get("/fortunes")] +async fn fortune(data: web::Data) -> Result>> { + let conn = data.get().await?; + let stmt = conn.prepare("SELECT * FROM Fortune").await?; + let params: &[&'static str] = &[]; + let s = conn.query_raw(&stmt, params).await?; + + let mut stream = Box::pin(s); + let mut fortunes = Vec::new(); + + while let Some(row) = stream.try_next().await? { + fortunes.push(Fortune { + id: row.get(0), + message: row.get(1), + }); + } + + fortunes.push(Fortune { + id: 0, + message: "Additional fortune added at request time.".to_string(), + }); + + fortunes.sort_by(|a, b| a.message.cmp(&b.message)); + + let mut body = Vec::with_capacity(2048); + ywrite_html!(body, "{{> fortune }}"); + + let mut res = HttpResponse::with_body(StatusCode::OK, body); + res.headers_mut() + .insert(SERVER, HeaderValue::from_static("Actix")); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=utf-8"), + ); + + Ok(res) +} + +#[actix_web::main] +async fn main() -> Result<()> { + println!("Starting http server: 0.0.0.0:8080"); + + let mut cfg = Config::new(); + cfg.host = Some("tfb-database".to_string()); + cfg.dbname = Some("hello_world".to_string()); + cfg.user = Some("benchmarkdbuser".to_string()); + cfg.password = Some("benchmarkdbpass".to_string()); + let pc = PoolConfig::new(CONNECTION_POOL_SIZE); + cfg.pool = pc.into(); + let pool = cfg.create_pool(Some(Runtime::Tokio1), NoTls).unwrap(); + + HttpServer::new(move || { + App::new() + .app_data(web::Data::new(pool.clone())) + .service(fortune) + .service(db) + .service(queries) + .service(updates) + }) + .keep_alive(KeepAlive::Os) + .client_request_timeout(Duration::from_secs(0)) + .backlog(1024) + .bind("0.0.0.0:8080")? + .run() + .await?; + + Ok(()) +} diff --git a/frameworks/Rust/actix/src/models_mongodb.rs b/frameworks/Rust/actix/src/models_mongodb.rs new file mode 100644 index 00000000000..1f9a53c6a5d --- /dev/null +++ b/frameworks/Rust/actix/src/models_mongodb.rs @@ -0,0 +1,67 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct World { + pub id: i32, + pub random_number: i32, +} + +// The ids are stored in MongoDB as floating point numbers, so need a custom deserialization implementation +// to handle converting them. 
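+//
+// For illustration only (an assumption about the seeded data, not something
+// introduced by this change): a stored document looks roughly like
+//
+//   { "_id": ObjectId("..."), "id": 4521.0, "randomNumber": 8390.0 }
+//
+// so both numeric fields arrive as BSON doubles and are narrowed to i32 by the
+// impl below.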
+impl<'de> Deserialize<'de> for World { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(rename_all = "camelCase")] + struct FloatWorld { + id: f32, + random_number: f32, + } + + let float = FloatWorld::deserialize(deserializer)?; + Ok(World { + id: float.id as i32, + random_number: float.random_number as i32, + }) + } +} + +#[allow(non_snake_case)] +#[derive(Serialize, Debug)] +pub struct Fortune { + pub id: i32, + pub message: String, +} + +// The ids are stored in MongoDB as floating point numbers, so need a custom deserialization implementation +// to handle converting them. +impl<'de> Deserialize<'de> for Fortune { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + #[derive(Deserialize)] + struct FloatFortune { + id: f32, + message: String, + } + + let float = FloatFortune::deserialize(deserializer)?; + Ok(Fortune { + id: float.id as i32, + message: float.message, + }) + } +} + +impl Default for Fortune { + fn default() -> Self { + Fortune { + id: -1, + message: "".to_string(), + } + } +} diff --git a/frameworks/Rust/actix/src/utils.rs b/frameworks/Rust/actix/src/utils.rs index 8053d21d079..9c39be346ea 100644 --- a/frameworks/Rust/actix/src/utils.rs +++ b/frameworks/Rust/actix/src/utils.rs @@ -1,6 +1,6 @@ #![allow(dead_code, unused_braces)] -use std::{borrow::Cow, cmp, io}; +use std::{borrow::Cow, cmp, fmt::Display, io}; use bytes::BufMut; use serde::{Deserialize, Serialize}; @@ -44,3 +44,55 @@ pub fn get_query_param(query: &str) -> u16 { }; cmp::min(500, cmp::max(1, q)) } + +pub const CONNECTION_POOL_SIZE: usize = 40; + +pub type Result = std::result::Result; + +#[derive(Debug)] +pub struct Error { + err: anyhow::Error, +} + +impl Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.err.fmt(f) + } +} + +impl actix_web::error::ResponseError for Error {} + +impl From for Error +where + T: Into, +{ + fn from(e: T) -> Self { + Error { err: e.into() } + } +} + +pub struct Queries { + pub q: usize, +} + +impl<'de> Deserialize<'de> for Queries { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + #[derive(Debug, Deserialize)] + struct Q { + q: Option, + } + + let q = Q::deserialize(deserializer)?; + let n = match q.q { + Some(s) => { + let i: i32 = s.parse().unwrap_or(1); + std::cmp::max(1, std::cmp::min(500, i)) as usize + } + None => 1, + }; + Ok(Queries { q: n }) + } +} diff --git a/frameworks/Rust/astra/Cargo.lock b/frameworks/Rust/astra/Cargo.lock new file mode 100644 index 00000000000..9dd5c02ed5c --- /dev/null +++ b/frameworks/Rust/astra/Cargo.lock @@ -0,0 +1,578 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "astra" +version = "0.1.0" +dependencies = [ + "astra 0.1.2", + "http", + "mimalloc", + "serde", + "simd-json", +] + +[[package]] +name = "astra" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cc681bfbc70851b718adfb683c57a0bee0f7ac99c807d32ca01d2a3df2833a5" +dependencies = [ + "futures-core", + "hyper", + "log", + "mio", + "num_cpus", + "tokio", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cc" +version = "1.0.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "futures-channel" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" + +[[package]] +name = "futures-sink" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" + +[[package]] +name = "futures-task" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" + +[[package]] +name = "futures-util" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "getrandom" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "h2" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "62eeb471aa3e3c9197aa4bfeabfe02982f6dc96f750486c0bb0009ac58b26d2b" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "halfbrown" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed39577259d319b81a15176a32673271be2786cb463889703c58c90fe83c825" +dependencies = [ + "hashbrown", + "serde", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "http" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.1", +] + +[[package]] +name = "http-body" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9100414882e15fb7feccb4897e5f0ff0ff1ca7d1a86a23208ada4d7a18e6c6c4" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043f0e083e9901b6cc658a77d1eb86f4fc650bbb977a4337dd63192826aa85dd" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 1.0.1", + "pin-project-lite", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.24" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7705fc40f6ed493f73584abbb324e74f96b358ff60dfe5659a0f8fc12c590a69" +dependencies = [ + "cc", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "mimalloc" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0dfa131390c2f6bdb3242f65ff271fcdaca5ff7b6c08f28398be7f2280e3926" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "mio" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba272f85fa0b41fc91872be579b3bbe0f56b792aa361a380eb669469f68dafb2" +dependencies = [ + "libc", + "log", + "miow", + "ntapi", + "winapi", +] + +[[package]] +name = "miow" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +dependencies = [ + "winapi", +] + +[[package]] +name = "ntapi" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +dependencies = [ + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9" + +[[package]] +name = "pin-project-lite" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +dependencies = [ + "itoa 1.0.1", + "ryu", + "serde", +] + +[[package]] +name = "simd-json" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8db546b694ee6a410ec93748bcb445051b7a74d4992f8ab4a23d94f289449bd7" +dependencies = [ + "halfbrown", + "serde", + "serde_json", + "simdutf8", + "value-trait", +] + +[[package]] +name = "simdutf8" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970da16e7c682fa90a261cf0724dee241c9f7831635ecc4e988ae8f3b505559" + +[[package]] +name = "slab" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tokio" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e992e41e0d2fb9f755b37446f20900f64446ef54874f40a60c78f021ac6144" +dependencies = [ + "autocfg", + "bytes", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "tokio-util" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower-service" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" + +[[package]] +name = "tracing" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "value-trait" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"23ba0ca16d76436ca217263f891e6cbe67e32faa8e20eda061c59e8fd7c6d34c" +dependencies = [ + "float-cmp", + "halfbrown", + "itoa 0.4.8", + "ryu", +] + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/frameworks/Rust/astra/Cargo.toml b/frameworks/Rust/astra/Cargo.toml new file mode 100755 index 00000000000..a3dae066327 --- /dev/null +++ b/frameworks/Rust/astra/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "astra" +version = "0.1.0" +edition = "2021" + +[dependencies] +astra = "0.1.2" +num_cpus = "1.13.1" +http = "0.2.6" +mimalloc = { version = "0.1", default-features = false } +simd-json = "0.4" +serde = { version = "1.0", features = ["derive"] } + +[profile.release] +lto = true +opt-level = 3 +codegen-units = 1 +panic = "abort" diff --git a/frameworks/Rust/astra/README.md b/frameworks/Rust/astra/README.md new file mode 100644 index 00000000000..6d0782a0fa2 --- /dev/null +++ b/frameworks/Rust/astra/README.md @@ -0,0 +1,15 @@ +# Astra + +## Description. + +A synchronous runtime for hyper. 
+ +## Test URLs + +### Test 1: Plaintext + + http://localhost:8080/plaintext + +### Test 2: JSON Encoding + + http://localhost:8080/json diff --git a/frameworks/Rust/iron/iron.dockerfile b/frameworks/Rust/astra/astra.dockerfile similarity index 56% rename from frameworks/Rust/iron/iron.dockerfile rename to frameworks/Rust/astra/astra.dockerfile index aac6f5ede4b..b181753050f 100644 --- a/frameworks/Rust/iron/iron.dockerfile +++ b/frameworks/Rust/astra/astra.dockerfile @@ -1,11 +1,11 @@ -FROM rust:1.44 +FROM rust:1.58 -ADD ./ /iron -WORKDIR /iron +ADD ./ /astra +WORKDIR /astra RUN cargo clean RUN RUSTFLAGS="-C target-cpu=native" cargo build --release EXPOSE 8080 -CMD ./target/release/iron +CMD ./target/release/astra diff --git a/frameworks/D/hunt/benchmark_config.json b/frameworks/Rust/astra/benchmark_config.json old mode 100644 new mode 100755 similarity index 57% rename from frameworks/D/hunt/benchmark_config.json rename to frameworks/Rust/astra/benchmark_config.json index 80c71f7bcb2..56965fe9401 --- a/frameworks/D/hunt/benchmark_config.json +++ b/frameworks/Rust/astra/benchmark_config.json @@ -1,5 +1,5 @@ { - "framework": "hunt", + "framework": "astra", "tests": [ { "default": { @@ -7,18 +7,18 @@ "plaintext_url": "/plaintext", "port": 8080, "approach": "Realistic", - "classification": "Platform", - "framework": "Hunt", - "language": "D", - "flavor": "DLang2", + "classification": "Micro", + "database": "Postgres", + "framework": "astra", + "language": "Rust", "orm": "Raw", "platform": "None", - "webserver": "None", + "webserver": "hyper", "os": "Linux", "database_os": "Linux", - "display_name": "hunt", + "display_name": "astra", "notes": "", - "versus": "Hunt" + "versus": "" } } ] diff --git a/frameworks/OCaml/tiny_httpd/config.toml b/frameworks/Rust/astra/config.toml similarity index 66% rename from frameworks/OCaml/tiny_httpd/config.toml rename to frameworks/Rust/astra/config.toml index c506c4988b0..0847915cf82 100644 --- a/frameworks/OCaml/tiny_httpd/config.toml +++ b/frameworks/Rust/astra/config.toml @@ -1,15 +1,15 @@ [framework] -name = "tiny_httpd" +name = "astra" [main] urls.plaintext = "/plaintext" urls.json = "/json" approach = "Realistic" classification = "Micro" -database = "None" +database = "Postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "None" -versus = "None" +webserver = "hyper" +versus = "" diff --git a/frameworks/Rust/astra/src/main.rs b/frameworks/Rust/astra/src/main.rs new file mode 100755 index 00000000000..0046f47e5ff --- /dev/null +++ b/frameworks/Rust/astra/src/main.rs @@ -0,0 +1,76 @@ +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +use astra::{Body, Request, Response, ResponseBuilder, Server}; +use http::StatusCode; + +fn main() { + Server::bind("0.0.0.0:8080") + .max_workers(num_cpus::get() * 20) + .http1_pipeline_flush(true) + .http1_only(true) + .serve(serve) + .expect("failed to start server"); +} + +fn serve(req: Request) -> Response { + let (req, _) = req.into_parts(); + + let mut headers = req.headers; + headers.clear(); + + let body = match req.uri.path() { + "/plaintext" => { + static HELLO_WORLD: &'static [u8] = b"Hello, world!"; + headers.insert(header::CONTENT_LENGTH, header::THIRTEEN.clone()); + headers.insert(header::CONTENT_TYPE, header::PLAIN_TEXT.clone()); + Body::new(HELLO_WORLD) + } + "/json" => { + let response = unsafe { + simd_json::to_vec(&Json { + message: "Hello, world!", + }) + .unwrap_unchecked() + }; + + headers.insert(header::CONTENT_LENGTH, 
header::TWENTY_SEVEN.clone()); + headers.insert(header::CONTENT_TYPE, header::JSON.clone()); + Body::new(response) + } + _ => { + return not_found(); + } + }; + + headers.insert(header::SERVER, header::ASTRA.clone()); + let mut res = Response::new(body); + *res.headers_mut() = headers; + res +} + +#[cold] +#[inline(never)] +fn not_found() -> Response { + unsafe { + ResponseBuilder::new() + .status(StatusCode::NOT_FOUND) + .body(Body::empty()) + .unwrap_unchecked() + } +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct Json { + message: &'static str, +} + +mod header { + pub use http::header::*; + + pub static ASTRA: HeaderValue = HeaderValue::from_static("astra"); + pub static THIRTEEN: HeaderValue = HeaderValue::from_static("13"); + pub static TWENTY_SEVEN: HeaderValue = HeaderValue::from_static("27"); + pub static PLAIN_TEXT: HeaderValue = HeaderValue::from_static("text/plain"); + pub static JSON: HeaderValue = HeaderValue::from_static("application/json"); +} diff --git a/frameworks/Rust/axum/Cargo.lock b/frameworks/Rust/axum/Cargo.lock index f7a8061f51d..f7ced7468a4 100644 --- a/frameworks/Rust/axum/Cargo.lock +++ b/frameworks/Rust/axum/Cargo.lock @@ -16,7 +16,7 @@ checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ "getrandom", "once_cell", - "version_check 0.9.3", + "version_check", ] [[package]] @@ -38,161 +38,11 @@ dependencies = [ "yansi-term", ] -[[package]] -name = "ansi_colours" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e2fb6138a49ad9f1cb3c6d8f8ccbdd5e62b4dab317c1b435a47ecd7da1d28f" -dependencies = [ - "cc", -] - -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "async-channel" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319" -dependencies = [ - "concurrent-queue", - "event-listener", - "futures-core", -] - -[[package]] -name = "async-executor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "871f9bb5e0a22eeb7e8cf16641feb87c9dc67032ccf8ff49e772eb9941d3a965" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand", - "futures-lite", - "once_cell", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9586ec52317f36de58453159d48351bc244bc24ced3effc1fce22f3d48664af6" -dependencies = [ - "async-channel", - "async-executor", - "async-io", - "async-mutex", - "blocking", - "futures-lite", - "num_cpus", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a811e6a479f2439f0c04038796b5cfb3d2ad56c230e0f2d3f7b04d68cfee607b" -dependencies = [ - "concurrent-queue", - "futures-lite", - "libc", - "log", - "once_cell", - "parking", - "polling", - "slab", - "socket2 0.4.2", - "waker-fn", - "winapi", -] - -[[package]] -name = "async-lock" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6a8ea61bf9947a1007c5cada31e647dbc77b103c679858150003ba697ea798b" -dependencies = [ - "event-listener", -] - -[[package]] -name = 
"async-mutex" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e" -dependencies = [ - "event-listener", -] - -[[package]] -name = "async-std" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8056f1455169ab86dd47b47391e4ab0cbd25410a70e9fe675544f49bafaf952" -dependencies = [ - "async-channel", - "async-global-executor", - "async-io", - "async-lock", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "num_cpus", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-stream" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "171374e7e3b2504e0e5236e3b59260560f9fe94bfe9ac39ba5e4e929c5590625" -dependencies = [ - "async-stream-impl", - "futures-core", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "648ed8c8d2ce5409ccd57453d9d1b214b342a0d69376a6feda1fd6cae3299308" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "async-task" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" - [[package]] name = "async-trait" -version = "0.1.51" +version = "0.1.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e" +checksum = "96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" dependencies = [ "proc-macro2", "quote", @@ -201,46 +51,26 @@ dependencies = [ [[package]] name = "atoi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" -dependencies = [ - "num-traits", -] - -[[package]] -name = "atomic-waker" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +checksum = "d7c57d12312ff59c811c0643f4d80830505833c9ffaebd193d819392b265be8e" dependencies = [ - "hermit-abi", - "libc", - "winapi", + "num-traits", ] [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.1.2" +version = "0.2.0" dependencies = [ - "async-std", - "async-stream", - "async-trait", - "axum 0.3.2", - "bb8", - "bb8-postgres", + "axum 0.5.13", + "deadpool", + "deadpool-postgres", "dotenv", "futures", "futures-util", @@ -249,7 +79,6 @@ dependencies = [ "num_cpus", "rand", "serde", - "serde_derive", "serde_json", "sqlx", "tokio", @@ -263,18 +92,21 @@ dependencies = [ [[package]] name = "axum" -version = "0.3.2" +version = "0.5.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e874ee652f2ec443faed3073b80f0ac7a2042a3605fc0704d28bbbf22d623c" +checksum = "6b9496f0c1d1afb7a2af4338bbe1d969cddfead41d87a9fb3aaa6d0bbc7af648" dependencies = [ "async-trait", + "axum-core", "bitflags", - "bytes 1.1.0", + "bytes", "futures-util", "http", "http-body", "hyper", + "itoa 1.0.2", "matchit", + "memchr", "mime", "percent-encoding", "pin-project-lite", @@ -283,7 +115,6 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-util", "tower", "tower-http", "tower-layer", @@ -291,84 +122,24 @@ dependencies = [ ] [[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bat" -version = "0.18.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a069bad29696ecaa51ac79d3eb87abe3b65c7808ab2b3836afd9bd6c4009362" -dependencies = [ - "ansi_colours", - "ansi_term", - "bugreport", - "clircle", - "console", - "content_inspector", - "encoding", - "error-chain", - "globset", - "grep-cli", - "path_abs", - "semver", - "serde", - "serde_yaml", - "shell-words", - "syntect", - "unicode-width", -] - -[[package]] -name = "bb8" -version = "0.7.1" +name = "axum-core" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e9f4fa9768efd269499d8fba693260cfc670891cf6de3adc935588447a77cc8" +checksum = "e4f44a0e6200e9d11a1cdc989e4b358f6e3d354fbf48478f345a17f4e43f8635" dependencies = [ "async-trait", - "futures-channel", + "bytes", "futures-util", - "parking_lot", - "tokio", -] - -[[package]] -name = "bb8-postgres" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61fdf56d52b2cca401d2380407e5c35d3d25d3560224ecf74d6e4ca13e51239b" -dependencies = [ - "async-trait", - "bb8", - "tokio", - "tokio-postgres", -] - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bit-set" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" -dependencies = [ - "bit-vec", + "http", + "http-body", + "mime", ] [[package]] -name = "bit-vec" -version = "0.6.3" +name = "base64" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bitflags" @@ -378,36 +149,21 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "block-buffer" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046e47d4b2d391b1f6f8b407b1deb8dee56c1852ccd868becf2710f601b5f427" -dependencies = [ - 
"async-channel", - "async-task", - "atomic-waker", - "fastrand", - "futures-lite", - "once_cell", -] - [[package]] name = "bson" -version = "2.0.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff58d466782b57e0001c8e97c6a70c01c2359d7e13e257a83654c0b783ecc139" +checksum = "a24ecf39f5a314493ede1bb015984735d41aa6aedb59cafb95492d40cd893330" dependencies = [ "ahash", "base64", - "chrono", "hex", "indexmap", "lazy_static", @@ -415,45 +171,21 @@ dependencies = [ "serde", "serde_bytes", "serde_json", + "time 0.3.11", "uuid", ] -[[package]] -name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", -] - [[package]] name = "buf-min" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4531c8a9fe2fb94e0d2afdf6bb4effd4797baf98dd26b6e20be71a92ac78e8d" -dependencies = [ - "bytes 0.5.6", -] - -[[package]] -name = "bugreport" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0014b4b2b4f63bfe69c3838470121290cc437fdc79785d408a761a21e8b2404c" -dependencies = [ - "git-version", - "shell-escape", - "sys-info", -] [[package]] name = "bumpalo" -version = "3.8.0" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1e260c3a9040a7c19a12468758f4c16f31a81a1fe087482be9570ec864bb6c" +checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "byteorder" @@ -463,27 +195,18 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cache-padded" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "631ae5198c9be5e753e5cc215e1bd73c2b466a3565173db433f52bb9d3e66dba" +checksum = "f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e" [[package]] name = "cc" -version = "1.0.71" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +dependencies = [ + "jobserver", +] [[package]] name = "cfg-if" @@ -500,55 +223,10 @@ dependencies = [ "libc", "num-integer", "num-traits", - "time", + "time 0.1.44", "winapi", ] -[[package]] -name = "clircle" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68bbd985a63de680ab4d1ad77b6306611a8f961b282c8b5ab513e6de934e396" -dependencies = [ - "cfg-if", - "libc", - "serde", - "winapi", -] - -[[package]] -name = "concurrent-queue" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3" -dependencies = [ - "cache-padded", -] - -[[package]] -name = "console" -version = 
"0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3993e6445baa160675931ec041a5e03ca84b9c6e32a056150d3aa2bdda0a1f45" -dependencies = [ - "encode_unicode", - "lazy_static", - "libc", - "regex", - "terminal_size", - "unicode-width", - "winapi", -] - -[[package]] -name = "content_inspector" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7bda66e858c683005a53a9a60c69a4aca7eeaa45d124526e389f7aec8e62f38" -dependencies = [ - "memchr", -] - [[package]] name = "convert_case" version = "0.4.0" @@ -557,9 +235,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "core-foundation" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -573,52 +251,42 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" dependencies = [ "libc", ] [[package]] name = "crc" -version = "2.1.0" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" -version = "1.1.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" +checksum = "2d0165d2900ae6778e36e80bbc4da3b5eefccee9ba939761f9c2882a5d9af3ff" [[package]] name = "crc32fast" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ "cfg-if", - "crossbeam-utils", ] [[package]] name = "crossbeam-queue" -version = "0.3.2" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9" +checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2" dependencies = [ "cfg-if", "crossbeam-utils", @@ -626,39 +294,29 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" dependencies = [ "cfg-if", - "lazy_static", + "once_cell", ] [[package]] -name = "crypto-mac" -version = "0.11.1" +name = "crypto-common" +version = 
"0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "subtle", -] - -[[package]] -name = "ctor" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" -dependencies = [ - "quote", - "syn", + "typenum", ] [[package]] name = "darling" -version = "0.13.0" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "757c0ded2af11d8e739c4daea1ac623dd1624b06c844cf3f5a39f1bdbd99bb12" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" dependencies = [ "darling_core", "darling_macro", @@ -666,9 +324,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.13.0" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c34d8efb62d0c2d7f60ece80f75e5c63c1588ba68032740494b0b9a996466e3" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", @@ -680,9 +338,9 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.13.0" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade7bff147130fe5e6d39f089c6bd49ec0250f35d70b2eebf72afdfc919f15cc" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core", "quote", @@ -695,6 +353,41 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" +[[package]] +name = "deadpool" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "retain_mut", + "serde", + "tokio", +] + +[[package]] +name = "deadpool-postgres" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c668a58063c6331e3437e3146970943ad82b1b36169fd979bb2645ac2088209a" +dependencies = [ + "deadpool", + "log", + "tokio", + "tokio-postgres", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +dependencies = [ + "tokio", +] + [[package]] name = "derivative" version = "2.2.0" @@ -708,40 +401,42 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.16" +version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40eebddd2156ce1bb37b20bbe5151340a31828b1f2d22ba4141f3531710e38df" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.4.0", "syn", ] [[package]] name = "digest" -version = "0.9.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ - "generic-array", + 
"block-buffer", + "crypto-common", + "subtle", ] [[package]] name = "dirs" -version = "3.0.2" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" dependencies = [ "libc", "redox_users", @@ -762,85 +457,15 @@ checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" [[package]] name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "encoding" -version = "0.2.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" -dependencies = [ - "encoding-index-japanese", - "encoding-index-korean", - "encoding-index-simpchinese", - "encoding-index-singlebyte", - "encoding-index-tradchinese", -] - -[[package]] -name = "encoding-index-japanese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-korean" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-simpchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-singlebyte" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-tradchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding_index_tests" -version = "0.1.4" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" +checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" [[package]] name = "enum-as-inner" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c5f0096a91d210159eceb2ff5e1c4da18388a170e1e3ce948aac9c8fdbbf595" +checksum = 
"21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ "heck", "proc-macro2", @@ -848,20 +473,11 @@ dependencies = [ "syn", ] -[[package]] -name = "error-chain" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" -dependencies = [ - "version_check 0.9.3", -] - [[package]] name = "event-listener" -version = "2.5.1" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7531096570974c3a9dcf9e4b8e1cede1ec26cf5046219fb3b9d897503b9be59" +checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" [[package]] name = "fallible-iterator" @@ -869,34 +485,22 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" -[[package]] -name = "fancy-regex" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6b8560a05112eb52f04b00e5d3790c0dd75d9d980eb8a122fb23b92a623ccf" -dependencies = [ - "bit-set", - "regex", -] - [[package]] name = "fastrand" -version = "1.5.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b394ed3d285a429378d3b384b9eb1285267e7df4b166df24b7a6939a04dc392e" +checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" dependencies = [ "instant", ] [[package]] name = "flate2" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if", "crc32fast", - "libc", "miniz_oxide", ] @@ -933,9 +537,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12aa0eb539080d55c3f2d45a67c3b58b6b0773c1a3ca2dfec66d58c97fd66ca" +checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" dependencies = [ "futures-channel", "futures-core", @@ -948,9 +552,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", "futures-sink", @@ -958,15 +562,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45025be030969d763025784f7f355043dc6bc74093e4ecc5000ca4dc50d8745c" +checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" dependencies = [ "futures-core", "futures-task", @@ -981,38 +585,21 @@ checksum = "62007592ac46aa7c2b6416f7deb9a8a8f63a01e0f1d6e1787d5630170db2b63e" dependencies = [ "futures-core", "lock_api", - "parking_lot", + 
"parking_lot 0.11.2", ] [[package]] name = "futures-io" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377" - -[[package]] -name = "futures-lite" -version = "1.12.0" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-macro" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb" +checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ - "autocfg", - "proc-macro-hack", "proc-macro2", "quote", "syn", @@ -1020,146 +607,78 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" -version = "0.3.17" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ - "autocfg", "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "proc-macro-hack", - "proc-macro-nested", - "slab", -] - -[[package]] -name = "generic-array" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" -dependencies = [ - "typenum", - "version_check 0.9.3", -] - -[[package]] -name = "getrandom" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "git-version" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6b0decc02f4636b9ccad390dcbe77b722a77efedfa393caf8379a51d5c61899" -dependencies = [ - "git-version-macro", - "proc-macro-hack", -] - -[[package]] -name = "git-version-macro" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe69f1cbdb6e28af2bac214e943b99ce8a0a06b447d15d3e61161b0423139f3f" -dependencies = [ - "proc-macro-hack", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "globset" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" -dependencies = [ - "aho-corasick", - "bstr", - "fnv", - "log", - "regex", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", ] [[package]] -name = "gloo-timers" -version = "0.2.1" +name = "generic-array" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47204a46aaff920a1ea58b11d03dec6f704287d27561724a4631e450654a891f" +checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", - "web-sys", + "typenum", + "version_check", ] [[package]] -name = "grep-cli" -version = "0.1.6" +name = "getrandom" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dd110c34bb4460d0de5062413b773e385cbf8a85a63fc535590110a09e79e8a" +checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" dependencies = [ - "atty", - "bstr", - "globset", - "lazy_static", - "log", - "regex", - "same-file", - "termcolor", - "winapi-util", + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] name = "hashbrown" -version = "0.11.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ "ahash", ] [[package]] name = "hashlink" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +checksum = "d452c155cb93fecdfb02a73dd57b5d8e442c2063bd7aac72f1bc5e4263a43086" dependencies = [ "hashbrown", ] [[package]] name = "heck" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" dependencies = [ "unicode-segmentation", ] @@ -1180,22 +699,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] -name = "hmac" -version = "0.11.0" +name = "hkdf" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" dependencies = [ - "crypto-mac", - "digest", + "hmac", ] [[package]] -name = "home" -version = "0.5.3" +name = "hmac" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "winapi", + "digest", ] [[package]] @@ -1211,45 +729,51 @@ dependencies = [ [[package]] name = "http" -version = "0.2.5" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.1.0", + 
"bytes", "fnv", - "itoa", + "itoa 1.0.2", ] [[package]] name = "http-body" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.1.0", + "bytes", "http", "pin-project-lite", ] +[[package]] +name = "http-range-header" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" + [[package]] name = "httparse" -version = "1.5.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" +checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" [[package]] name = "httpdate" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.14.14" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b91bb1f221b6ea1f1e4371216b70f40748774c2fb5971b450c07773fb92d26b" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ - "bytes 1.1.0", + "bytes", "futures-channel", "futures-core", "futures-util", @@ -1257,9 +781,9 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa", + "itoa 1.0.2", "pin-project-lite", - "socket2 0.4.2", + "socket2", "tokio", "tower-service", "tracing", @@ -1285,9 +809,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.7.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", "hashbrown", @@ -1304,11 +828,11 @@ dependencies = [ [[package]] name = "ipconfig" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e2f18aece9709094573a9f24f483c4f65caa4298e2f7ae1b71cc65d853fad7" +checksum = "723519edce41262b05d4143ceb95050e4c614f483e78e9fd9e39a8275a84ad98" dependencies = [ - "socket2 0.3.19", + "socket2", "widestring", "winapi", "winreg", @@ -1316,15 +840,15 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" +checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itertools" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" dependencies = [ "either", ] @@ -1336,21 +860,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] -name = "js-sys" -version = "0.3.55" +name = "itoa" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" + +[[package]] +name = "jobserver" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" +checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" dependencies = [ - "wasm-bindgen", + "libc", ] [[package]] -name = "kv-log-macro" -version = "1.0.7" +name = "js-sys" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +checksum = "c3fac17f7123a73ca62df411b1bf727ccc805daa070338fda671c86dac1bdc27" dependencies = [ - "log", + "wasm-bindgen", ] [[package]] @@ -1359,50 +889,35 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" -version = "0.2.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbe5e23404da5b4f555ef85ebed98fb4083e55a00c317800bc2a50ede9f3d219" - -[[package]] -name = "line-wrap" -version = "0.1.1" +version = "0.2.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" -dependencies = [ - "safemem", -] +checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "lock_api" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" +checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" dependencies = [ + "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.14" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", - "value-bag", ] [[package]] @@ -1428,26 +943,24 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "matchit" -version = "0.4.4" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b6f41fdfbec185dd3dff58b51e323f5bc61692c0de38419a957b0dcfccca3c" +checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" [[package]] name = "md-5" -version = "0.9.1" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5a279bb9607f9f53c22d496eade00d138d1bdcccd07d74650387cf94942a15" +checksum = "658646b21e0b72f7866c7038ab086d3d5e1cd6271f060fd37defb241949d0582" dependencies = [ - "block-buffer", "digest", - "opaque-debug", 
] [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "mime" @@ -1463,41 +976,30 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.4.4" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" dependencies = [ "adler", - "autocfg", ] [[package]] name = "mio" -version = "0.7.14" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log", - "miow", - "ntapi", - "winapi", -] - -[[package]] -name = "miow" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys", ] [[package]] name = "mongodb" -version = "2.0.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d419667771704b002e6837d52f7461f70cea853f58c077d299f132ed6f75b2ad" +checksum = "b95afe97b0c799fdf69cd960272a2cb9662d077bd6efd84eb722bb9805d47554" dependencies = [ "async-trait", "base64", @@ -1505,9 +1007,9 @@ dependencies = [ "bson", "chrono", "derivative", + "flate2", "futures-core", "futures-executor", - "futures-io", "futures-util", "hex", "hmac", @@ -1517,13 +1019,16 @@ dependencies = [ "pbkdf2", "percent-encoding", "rand", + "rustc_version_runtime", "rustls", + "rustls-pemfile", "serde", "serde_bytes", "serde_with", "sha-1", "sha2", - "socket2 0.4.2", + "snap", + "socket2", "stringprep", "strsim", "take_mut", @@ -1535,16 +1040,15 @@ dependencies = [ "trust-dns-resolver", "typed-builder", "uuid", - "version_check 0.9.3", - "webpki", "webpki-roots", + "zstd", ] [[package]] name = "native-tls" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" +checksum = "fd7e2f3618557f980e0b17e8856252eee3c97fa12c54dff0ca290fb6266ca4a9" dependencies = [ "lazy_static", "libc", @@ -1560,39 +1064,19 @@ dependencies = [ [[package]] name = "nom" -version = "4.2.3" +version = "7.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" -dependencies = [ - "memchr", - "version_check 0.1.5", -] - -[[package]] -name = "nom" -version = "7.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109" +checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36" dependencies = [ "memchr", "minimal-lexical", - "version_check 0.9.3", -] - -[[package]] -name = "ntapi" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" -dependencies = [ - "winapi", ] [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ "autocfg", "num-traits", @@ -1600,60 +1084,75 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ "hermit-abi", "libc", ] [[package]] -name = "once_cell" -version = "1.8.0" +name = "num_threads" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +dependencies = [ + "libc", +] [[package]] -name = "opaque-debug" -version = "0.3.0" +name = "once_cell" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" [[package]] name = "openssl" -version = "0.10.38" +version = "0.10.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" +checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0" dependencies = [ "bitflags", "cfg-if", "foreign-types", "libc", "once_cell", + "openssl-macros", "openssl-sys", ] +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "openssl-probe" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.70" +version = "0.9.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6517987b3f8226b5da3661dad65ff7f300cc59fb5ea8333ca191fc65fde3edf" +checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f" dependencies = [ "autocfg", "cc", @@ -1664,20 +1163,14 @@ dependencies = [ [[package]] name = "os_info" -version = "3.0.7" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ac91020bfed8cc3f8aa450d4c3b5fa1d3373fc091c8a92009f3b27749d5a227" +checksum = "0eca3ecae1481e12c3d9379ec541b238a16f0b75c9a409942daa8ec20dbfdb62" dependencies = [ "log", "winapi", ] 
-[[package]] -name = "parking" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" - [[package]] name = "parking_lot" version = "0.11.2" @@ -1686,7 +1179,17 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.5", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.3", ] [[package]] @@ -1704,21 +1207,31 @@ dependencies = [ ] [[package]] -name = "path_abs" -version = "0.5.1" +name = "parking_lot_core" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ef02f6342ac01d8a93b65f96db53fe68a92a15f41144f97fb00a9e669633c3" +checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ - "std_prelude", + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-sys", ] +[[package]] +name = "paste" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" + [[package]] name = "pbkdf2" -version = "0.8.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d95f5254224e617595d2cc3cc73ff0a5eaf2637519e25f03388154e9378b6ffa" +checksum = "271779f35b581956db91a3e55737327a03aa051e90b1c47aeb189508533adfd7" dependencies = [ - "crypto-mac", + "digest", ] [[package]] @@ -1727,20 +1240,11 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" -[[package]] -name = "pest" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" -dependencies = [ - "ucd-trie", -] - [[package]] name = "phf" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fc3db1018c4b59d7d582a739436478b6035138b6aecbce989fc91c3e98409f" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared", ] @@ -1756,18 +1260,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.0.8" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08" +checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.8" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389" +checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74" dependencies = [ "proc-macro2", "quote", @@ -1776,9 +1280,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.7" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" +checksum = 
"e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -1788,46 +1292,19 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12295df4f294471248581bc09bef3c38a5e46f1e36d6a37353621a0c6c357e1f" - -[[package]] -name = "plist" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38d026d73eeaf2ade76309d0c65db5a35ecf649e3cec428db316243ea9d6711" -dependencies = [ - "base64", - "chrono", - "indexmap", - "line-wrap", - "serde", - "xml-rs", -] - -[[package]] -name = "polling" -version = "2.2.0" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" -dependencies = [ - "cfg-if", - "libc", - "log", - "wepoll-ffi", - "winapi", -] +checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "postgres-protocol" -version = "0.6.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b145e6a4ed52cb316a27787fc20fe8a25221cb476479f61e4e0327c15b98d91a" +checksum = "878c6cbf956e03af9aa8204b407b9cbf47c072164800aa918c516cd4b056c50c" dependencies = [ "base64", "byteorder", - "bytes 1.1.0", + "bytes", "fallible-iterator", "hmac", "md-5", @@ -1839,40 +1316,38 @@ dependencies = [ [[package]] name = "postgres-types" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04619f94ba0cc80999f4fc7073607cb825bc739a883cb6d20900fc5e009d6b0d" +checksum = "ebd6e8b7189a73169290e89bd24c771071f1012d8fe6f738f5226531f0b03d89" dependencies = [ - "bytes 1.1.0", + "bytes", "fallible-iterator", "postgres-protocol", ] [[package]] name = "ppv-lite86" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" - -[[package]] -name = "proc-macro-hack" -version = "0.5.19" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] -name = "proc-macro-nested" -version = "0.1.7" +name = "prettyplease" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" +checksum = "da6ffbe862780245013cb1c0a48c4e44b7d665548088f91f6b90876d0625e4c2" +dependencies = [ + "proc-macro2", + "syn", +] [[package]] name = "proc-macro2" -version = "1.0.32" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43" +checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" dependencies = [ - "unicode-xid", + "unicode-ident", ] [[package]] @@ -1883,23 +1358,22 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.10" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" +checksum = 
"3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" dependencies = [ "proc-macro2", ] [[package]] name = "rand" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", - "rand_hc", ] [[package]] @@ -1921,56 +1395,42 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" -dependencies = [ - "rand_core", -] - [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ "bitflags", ] [[package]] name = "redox_users" -version = "0.4.0" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom", "redox_syscall", + "thiserror", ] [[package]] name = "regex" -version = "1.5.4" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" - [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "remove_dir_all" @@ -1991,6 +1451,12 @@ dependencies = [ "quick-error", ] +[[package]] +name = "retain_mut" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" + [[package]] name = "ring" version = "0.16.20" @@ -2008,61 +1474,67 @@ dependencies = [ [[package]] name = "rustc_version" -version = "0.3.3" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" dependencies = [ - "semver", + "semver 0.9.0", ] [[package]] -name = "rustls" -version = "0.19.1" +name = "rustc_version" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "base64", - "log", - "ring", - "sct", - "webpki", + "semver 1.0.12", ] [[package]] -name = "rustversion" -version = 
"1.0.5" +name = "rustc_version_runtime" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", +] [[package]] -name = "ryu" -version = "1.0.5" +name = "rustls" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] [[package]] -name = "safemem" -version = "0.3.3" +name = "rustls-pemfile" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" +checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360" +dependencies = [ + "base64", +] [[package]] -name = "same-file" -version = "1.0.6" +name = "ryu" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] +checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" [[package]] name = "schannel" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "winapi", + "windows-sys", ] [[package]] @@ -2073,9 +1545,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sct" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ "ring", "untrusted", @@ -2083,9 +1555,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.4.2" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" +checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" dependencies = [ "bitflags", "core-foundation", @@ -2096,9 +1568,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.4.2" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" +checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" dependencies = [ "core-foundation-sys", "libc", @@ -2106,45 +1578,48 @@ dependencies = [ [[package]] name = "semver" -version = "0.11.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" dependencies = [ "semver-parser", ] +[[package]] +name = "semver" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1" + [[package]] name = "semver-parser" -version = "0.10.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.130" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" +checksum = "fc855a42c7967b7c369eb5860f7164ef1f6f81c20c7cc1141f2a604e18723b03" dependencies = [ "serde_derive", ] [[package]] name = "serde_bytes" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +checksum = "212e73464ebcde48d723aa02eb270ba62eff38a9b732df31f33f1b4e145f3a54" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.130" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" +checksum = "6f2122636b9fe3b81f1cb25099fcf2d3f542cdb1d45940d56c713158884a05da" dependencies = [ "proc-macro2", "quote", @@ -2153,44 +1628,43 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.69" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e466864e431129c7e0d3476b92f20458e5879919a0596c6472738d9fa2d342f8" +checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" dependencies = [ "indexmap", - "itoa", + "itoa 1.0.2", "ryu", "serde", ] [[package]] name = "serde_urlencoded" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa", + "itoa 1.0.2", "ryu", "serde", ] [[package]] name = "serde_with" -version = "1.11.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad6056b4cb69b6e43e3a0f055def223380baecc99da683884f205bf347f7c4b3" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" dependencies = [ - "rustversion", "serde", "serde_with_macros", ] [[package]] name = "serde_with_macros" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12e47be9471c72889ebafb5e14d5ff930d89ae7a67bbdb5f8abb564f845a927e" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling", "proc-macro2", @@ -2198,56 +1672,28 @@ dependencies = [ "syn", ] -[[package]] -name = "serde_yaml" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8c608a35705a5d3cdc9fbe403147647ff34b921f8e833e49306df898f9b20af" -dependencies = [ - "dtoa", - "indexmap", - "serde", - "yaml-rust", -] - [[package]] name = "sha-1" -version = "0.9.8" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +checksum = 
"028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" dependencies = [ - "block-buffer", "cfg-if", "cpufeatures", "digest", - "opaque-debug", ] [[package]] name = "sha2" -version = "0.9.8" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" dependencies = [ - "block-buffer", "cfg-if", "cpufeatures", "digest", - "opaque-debug", ] -[[package]] -name = "shell-escape" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" - -[[package]] -name = "shell-words" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6fa3938c99da4914afedd13bf3d79bcb6c277d1b2c398d23257a304d9e1b074" - [[package]] name = "signal-hook-registry" version = "1.4.0" @@ -2259,38 +1705,36 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.7" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" [[package]] name = "slab" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg", +] [[package]] name = "smallvec" -version = "1.7.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] -name = "socket2" -version = "0.3.19" +name = "snap" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" -dependencies = [ - "cfg-if", - "libc", - "winapi", -] +checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451" [[package]] name = "socket2" -version = "0.4.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dc90fe6c7be1a323296982db1836d1ea9e47b6839496dde9a541bc496df3516" +checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" dependencies = [ "libc", "winapi", @@ -2309,15 +1753,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" dependencies = [ "itertools", - "nom 7.1.0", + "nom", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.5.9" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7911b0031a0247af40095838002999c7a52fba29d9739e93326e71a5a1bc9d43" +checksum = "1f82cbe94f41641d6c410ded25bbf5097c240cefdf8e3b06d04198d0a96af6a4" dependencies = [ "sqlx-core", "sqlx-macros", @@ -2325,37 +1769,37 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.5.9" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aec89bfaca8f7737439bad16d52b07f1ccd0730520d3bf6ae9d069fe4b641fb1" 
+checksum = "6b69bf218860335ddda60d6ce85ee39f6cf6e5630e300e19757d1de15886a093" dependencies = [ "ahash", "atoi", "base64", "bitflags", "byteorder", - "bytes 1.1.0", + "bytes", "crc", - "crossbeam-channel", "crossbeam-queue", - "crossbeam-utils", "dirs", "either", + "event-listener", "futures-channel", "futures-core", "futures-intrusive", "futures-util", "hashlink", "hex", + "hkdf", "hmac", "indexmap", - "itoa", + "itoa 1.0.2", "libc", "log", "md-5", "memchr", "once_cell", - "parking_lot", + "paste", "percent-encoding", "rand", "serde", @@ -2374,9 +1818,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.5.9" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "584866c833511b1a152e87a7ee20dee2739746f60c858b3c5209150bc4b466f5" +checksum = "f40c63177cf23d356b159b60acd27c54af7423f1736988502e36bae9a712118f" dependencies = [ "dotenv", "either", @@ -2393,9 +1837,9 @@ dependencies = [ [[package]] name = "sqlx-rt" -version = "0.5.9" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d1bd069de53442e7a320f525a6d4deb8bb0621ac7a55f7eccbc2b58b57f43d0" +checksum = "874e93a365a598dc3dadb197565952cb143ae4aa716f7bcc933a8d836f6bf89f" dependencies = [ "native-tls", "once_cell", @@ -2403,12 +1847,6 @@ dependencies = [ "tokio-native-tls", ] -[[package]] -name = "std_prelude" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8207e78455ffdf55661170876f88daf85356e4edd54e0a3dbc79586ca1e50cbe" - [[package]] name = "stringprep" version = "0.1.2" @@ -2433,13 +1871,13 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.81" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2afee18b8beb5a596ecb4a2dce128c719b4ba399d34126b9e4396e3f9860966" +checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" dependencies = [ "proc-macro2", "quote", - "unicode-xid", + "unicode-ident", ] [[package]] @@ -2448,38 +1886,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8" -[[package]] -name = "syntect" -version = "4.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b20815bbe80ee0be06e6957450a841185fcf690fe0178f14d77a05ce2caa031" -dependencies = [ - "bincode", - "bitflags", - "fancy-regex", - "flate2", - "fnv", - "lazy_static", - "lazycell", - "plist", - "regex-syntax", - "serde", - "serde_derive", - "serde_json", - "walkdir", - "yaml-rust", -] - -[[package]] -name = "sys-info" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b3a0d0aba8bf96a0e1ddfdc352fc53b3df7f39318c71854910c3c4b024ae52c" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "take_mut" version = "0.2.2" @@ -2488,51 +1894,32 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "tempfile" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" dependencies = [ "cfg-if", + "fastrand", "libc", - "rand", "redox_syscall", "remove_dir_all", "winapi", ] -[[package]] -name = 
"termcolor" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "terminal_size" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2", "quote", @@ -2541,19 +1928,38 @@ dependencies = [ [[package]] name = "time" -version = "0.1.43" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" dependencies = [ "libc", + "wasi 0.10.0+wasi-snapshot-preview1", "winapi", ] +[[package]] +name = "time" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217" +dependencies = [ + "itoa 1.0.2", + "libc", + "num_threads", + "time-macros", +] + +[[package]] +name = "time-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" + [[package]] name = "tinyvec" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] @@ -2566,29 +1972,30 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.13.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588b2d10a336da58d877567cd8fb8a14b463e2104910f8132cd054b4b96e29ee" +checksum = "57aec3cfa4c296db7255446efb4928a6be304b431a806216105542a67b6ca82e" dependencies = [ "autocfg", - "bytes 1.1.0", + "bytes", "libc", "memchr", "mio", "num_cpus", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", + "socket2", "tokio-macros", "winapi", ] [[package]] name = "tokio-macros" -version = "1.5.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "114383b041aa6212c579467afa0075fbbdd0718de036100bc0ba7961d8cb9095" +checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ "proc-macro2", "quote", @@ -2627,32 +2034,32 @@ dependencies = [ [[package]] name = "tokio-postgres" -version = "0.7.5" +version = "0.7.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6c8b33df661b548dcd8f9bf87debb8c56c05657ed291122e1188698c2ece95" +checksum = "19c88a47a23c5d2dc9ecd28fb38fba5fc7e5ddc1fe64488ec145076b0c71c8ae" dependencies = [ "async-trait", "byteorder", - "bytes 1.1.0", + "bytes", "fallible-iterator", "futures", "log", - "parking_lot", + "parking_lot 0.12.1", "percent-encoding", "phf", "pin-project-lite", "postgres-protocol", "postgres-types", - "socket2 0.4.2", + "socket2", "tokio", "tokio-util", ] [[package]] name = "tokio-rustls" -version = "0.22.0" +version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls", "tokio", @@ -2661,9 +2068,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" +checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" dependencies = [ "futures-core", "pin-project-lite", @@ -2672,52 +2079,38 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.9" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ - "bytes 1.1.0", + "bytes", "futures-core", "futures-sink", - "log", "pin-project-lite", "tokio", + "tracing", ] [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] -[[package]] -name = "toolchain_find" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e85654a10e7a07a47c6f19d93818f3f343e22927f2fa280c84f7c8042743413" -dependencies = [ - "home", - "lazy_static", - "regex", - "semver", - "walkdir", -] - [[package]] name = "tower" -version = "0.4.10" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00e500fff5fa1131c866b246041a6bf96da9c965f8fe4128cb1421f23e93c00" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", "pin-project", "pin-project-lite", "tokio", - "tokio-util", "tower-layer", "tower-service", "tracing", @@ -2725,16 +2118,19 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.1.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b56efe69aa0ad2b5da6b942e57ea9f6fe683b7a314d4ff48662e2c8838de1" +checksum = "3c530c8675c1dbf98facee631536fa116b5fb6382d7dd6dc1b118d970eafe3ba" dependencies = [ - "bytes 1.1.0", + "bitflags", + "bytes", "futures-core", "futures-util", "http", "http-body", - "pin-project", + "http-range-header", + "pin-project-lite", + "tower", "tower-layer", "tower-service", ] @@ -2747,48 +2143,36 @@ checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.29" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" dependencies = [ "cfg-if", "log", "pin-project-lite", - "tracing-attributes", "tracing-core", ] -[[package]] -name = "tracing-attributes" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "tracing-core" -version = "0.1.21" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7" dependencies = [ - "lazy_static", + "once_cell", ] [[package]] name = "trust-dns-proto" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" dependencies = [ "async-trait", "cfg-if", @@ -2811,9 +2195,9 @@ dependencies = [ [[package]] name = "trust-dns-resolver" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ad17b608a64bd0735e67bde16b0636f8aa8591f831a25d18443ed00a699770" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" dependencies = [ "cfg-if", "futures-util", @@ -2821,7 +2205,7 @@ dependencies = [ "lazy_static", "log", "lru-cache", - "parking_lot", + "parking_lot 0.12.1", "resolv-conf", "smallvec", "thiserror", @@ -2837,9 +2221,9 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "typed-builder" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a46ee5bd706ff79131be9c94e7edcb82b703c487766a114434e5790361cf08c5" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" dependencies = [ "proc-macro2", "quote", @@ -2848,36 +2232,36 @@ dependencies = [ [[package]] name = "typenum" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] -name = "ucd-trie" -version = "0.1.3" +name = "unicode-bidi" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] -name = "unicode-bidi" -version = "0.3.7" +name = "unicode-ident" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" +checksum = 
"15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "unicode-width" @@ -2887,9 +2271,9 @@ checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unicode-xid" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] name = "unicode_categories" @@ -2922,13 +2306,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ "getrandom", + "serde", ] [[package]] name = "v_escape" -version = "0.16.1" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b57701f09098e70ef300373fcfc1eda4e2961a88824f160894db534d8933a853" +checksum = "79d297315e8ca0b98255614f409699ea189e5929e820f07f69afcebf96c41f9b" dependencies = [ "buf-min", "v_escape_derive", @@ -2936,13 +2321,10 @@ dependencies = [ [[package]] name = "v_escape_derive" -version = "0.8.5" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29769400af8b264944b851c961a4a6930e76604f59b1fcd51246bab6a296c8c" +checksum = "8fe81cf194472e6ddd6545f8e91ee9780de636194c2e896b8ac201ac78389809" dependencies = [ - "nom 4.2.3", - "proc-macro2", - "quote", "syn", ] @@ -2958,24 +2340,14 @@ dependencies = [ [[package]] name = "v_htmlescape" -version = "0.13.1" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2609d928c084cd51b46a04b098bce48099278e0029a3489067b58673c720be59" +checksum = "04b32732bcd549ad15fcb01ee63ad03dd6a0289e9ba72b8164707d1f9fa80478" dependencies = [ "cfg-if", "v_escape", ] -[[package]] -name = "value-bag" -version = "1.0.0-alpha.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79923f7731dc61ebfba3633098bf3ac533bbd35ccd8c57e7088d9a5eebe0263f" -dependencies = [ - "ctor", - "version_check 0.9.3", -] - [[package]] name = "vcpkg" version = "0.2.15" @@ -2984,32 +2356,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" - -[[package]] -name = "version_check" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" - -[[package]] -name = "waker-fn" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" - -[[package]] -name = "walkdir" -version = "2.3.2" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" -dependencies = [ - "same-file", - "winapi", - "winapi-util", -] +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "want" @@ -3023,15 +2372,21 @@ dependencies = [ [[package]] name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.78" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" +checksum = "7c53b543413a17a202f4be280a7e5c62a1c69345f5de525ee64f8cfdbc954994" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3039,9 +2394,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.78" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" +checksum = "5491a68ab4500fa6b4d726bd67408630c3dbe9c4fe7bda16d5c82a1fd8c7340a" dependencies = [ "bumpalo", "lazy_static", @@ -3052,23 +2407,11 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasm-bindgen-macro" -version = "0.2.78" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" +checksum = "c441e177922bc58f1e12c022624b6216378e5febc2f0533e41ba443d505b80aa" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3076,9 +2419,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.78" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" +checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ "proc-macro2", "quote", @@ -3089,15 +2432,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.78" +version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" +checksum = "6a89911bd99e5f3659ec4acf9c4d93b0a90fe4a2a11f15328472058edc5261be" [[package]] name = "web-sys" -version = "0.3.55" +version = "0.3.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" +checksum = 
"2fed94beee57daf8dd7d51f2b15dc2bcde92d7a72304cdf662a4371008b71b90" dependencies = [ "js-sys", "wasm-bindgen", @@ -3105,9 +2448,9 @@ dependencies = [ [[package]] name = "webpki" -version = "0.21.4" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" dependencies = [ "ring", "untrusted", @@ -3115,27 +2458,18 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.21.1" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940" +checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf" dependencies = [ "webpki", ] -[[package]] -name = "wepoll-ffi" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" -dependencies = [ - "cc", -] - [[package]] name = "whoami" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c33ac5ee236a4efbf2c98967e12c6cc0c51d93a744159a52957ba206ae6ef5f7" +checksum = "524b58fa5a20a2fb3014dd6358b70e6579692a56ef6fce928834e488f42f65e8" dependencies = [ "wasm-bindgen", "web-sys", @@ -3143,9 +2477,9 @@ dependencies = [ [[package]] name = "widestring" -version = "0.4.3" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" +checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" [[package]] name = "winapi" @@ -3163,15 +2497,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" @@ -3179,27 +2504,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "winreg" -version = "0.6.2" +name = "windows-sys" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2986deb581c4fe11b621998a5e53361efe6b48a151178d0cd9eeffa4dc6acc9" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "winapi", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_msvc", ] [[package]] -name = "xml-rs" -version = "0.8.4" +name = "windows_aarch64_msvc" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" [[package]] -name = "yaml-rust" -version = "0.4.5" +name = "windows_i686_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" + +[[package]] 
+name = "windows_i686_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" + +[[package]] +name = "winreg" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" dependencies = [ - "linked-hash-map", + "winapi", ] [[package]] @@ -3213,9 +2566,9 @@ dependencies = [ [[package]] name = "yarte" -version = "0.15.5" +version = "0.15.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3e60e0183fc866895ac6ebdbf0a6e78c5cdb6cc8ff3d87a9b0a9fba5e5718f5" +checksum = "c716c25f8cee3c289a749a10255f2b8eac52f8ac7279242f99eeb25acf2b51ce" dependencies = [ "yarte_derive", "yarte_helpers", @@ -3223,9 +2576,9 @@ dependencies = [ [[package]] name = "yarte_codegen" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436ebd6f8745af7a67b597fb6ca9a9d3bd0cfce7d4db34ca84f23c5711399132" +checksum = "ba620881dbdc7e340518f23dbea0a253dd1aa567bfbde9e39b2f8e01b1ab868c" dependencies = [ "proc-macro2", "quote", @@ -3236,9 +2589,9 @@ dependencies = [ [[package]] name = "yarte_derive" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928aa038d1b77fc0332f0dfac3e97f11f8c6357f71cac3bfa02eada0f1bdfe75" +checksum = "2e4ad5cbe1874a6c8c6a0c59043a1abe02df60a441b098a9e8120b74be497bd0" dependencies = [ "proc-macro2", "quote", @@ -3251,25 +2604,24 @@ dependencies = [ [[package]] name = "yarte_helpers" -version = "0.15.3" +version = "0.15.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c98f2629461e6ceed46c61df0c43cf46f2d51b5a778b456fe9d40ab0e9e637db" +checksum = "c752e264ef064fb624c5d85e5f174fe130d273e9221d47daaad091281067b212" dependencies = [ - "bat", "dtoa", - "itoa", + "itoa 0.4.8", + "prettyplease", "serde", - "tempfile", + "syn", "toml", - "toolchain_find", "v_htmlescape", ] [[package]] name = "yarte_hir" -version = "0.15.3" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fec7622b44b5f34ad22f5186e8c214f2d8b6251259797671b819e31e1c5d3954" +checksum = "c512fd587daa46e67fa758df5cf0ff3ec143210542ae291a42ac6cc133b3a653" dependencies = [ "derive_more", "proc-macro2", @@ -3283,9 +2635,9 @@ dependencies = [ [[package]] name = "yarte_parser" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51dc059a5d3d5fcb182f8dbed76649a074f2c4f1b060744880ad97bbffb351ac" +checksum = "22462f6bb1d1b3ecaabe73ac51fc300f36868982f04f17d17eb8b90b1062efc7" dependencies = [ "annotate-snippets", "derive_more", @@ -3296,3 +2648,32 @@ dependencies = [ "unicode-xid", "yarte_helpers", ] + +[[package]] +name = "zstd" +version = "0.11.2+zstd.1.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "5.0.2+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.1+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b" +dependencies = [ + "cc", + "libc", +] diff --git a/frameworks/Rust/axum/Cargo.toml b/frameworks/Rust/axum/Cargo.toml index cc6fdf816af..dea84c72a54 100644 --- a/frameworks/Rust/axum/Cargo.toml +++ b/frameworks/Rust/axum/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "axum" -version = "0.1.2" +version = "0.2.0" authors = ["Dragos Varovici "] -edition = "2018" +edition = "2021" [[bin]] name = "axum" @@ -13,39 +13,43 @@ name = "axum-sqlx" path = "src/main_sqlx.rs" [[bin]] -name = "axum-bb8" -path = "src/main_bb8.rs" +name = "axum-pg-pool" +path = "src/main_pg_pool.rs" [[bin]] name = "axum-mongo" path = "src/main_mongo.rs" +[[bin]] +name = "axum-mongo-raw" +path = "src/main_mongo_raw.rs" + +[[bin]] +name = "axum-pg" +path = "src/main_pg.rs" + [dependencies] -num_cpus = { version = "^1.13" } -rand = { version = "^0.8", features = ["small_rng"]} -yarte = { version = "^0.15" } -async-stream = { version = "^0.3" } -async-trait = { version = "0.1" } -async-std = "1.10" -futures = { version = "^0.3" } -futures-util = { version = "^0.3" } -dotenv = { version = "^0.15" } -serde = { version = "^1", features = ["derive"] } -serde_json = { version = "^1" } -serde_derive = { version = "^1" } -axum = { version = "^0.3" } -tokio = { version = "1.0", features = ["full"] } -hyper = "0.14" -tower = { version = "0.4", features = ["util"] } -tower-http = { version = "0.1", features = ["set-header"] } -sqlx = { version = "^0.5", features = [ "postgres", "macros", "runtime-tokio-native-tls" ] } -bb8 = "0.7" -bb8-postgres = "0.7" -tokio-postgres = "0.7" -tokio-pg-mapper = "0.2" -tokio-pg-mapper-derive = "0.2" -mongodb = "2.0" +axum = { version = "0.5.1", default-features = false, features = ["json", "query"] } +deadpool = { version = "0.9.3", features = ["rt_tokio_1", "serde", "async-trait", "managed" ] } +deadpool-postgres = "0.10.2" +dotenv = "0.15.0" +futures = "0.3.21" +futures-util = "0.3.21" +hyper = { version = "0.14.18", features = ["http1"] } +mongodb = { version = "2.2.1", features = ["zstd-compression", "snappy-compression", "zlib-compression"] } +num_cpus = "1.13.1" +rand = { version = "0.8.5", features = ["small_rng"] } +serde = { version = "1.0.136", features = ["derive"] } +serde_json = "1.0.79" +sqlx = { version = "0.6.0", features = ["postgres", "macros", "runtime-tokio-native-tls"] } +tokio = { version = "1.17.0", features = ["full"] } +tokio-pg-mapper = "0.2.0" +tokio-pg-mapper-derive = "0.2.0" +tokio-postgres = "0.7.5" +tower = { version = "0.4.12", features = ["util"] } +tower-http = { version = "0.3.4", features = ["set-header"] } +yarte = "0.15.6" [profile.release] lto = true -codegen-units = 1 \ No newline at end of file +codegen-units = 1 diff --git a/frameworks/Rust/axum/README.md b/frameworks/Rust/axum/README.md index 58cb2d5a422..55ca3e0978e 100755 --- a/frameworks/Rust/axum/README.md +++ b/frameworks/Rust/axum/README.md @@ -5,8 +5,8 @@ Axum 
is a web application framework that focuses on ergonomics and modularity. -* [User Guide](https://docs.rs/axum/0.3.0/axum/) -* [API Documentation](https://docs.rs/axum/0.3.0/axum/) +* [User Guide](https://docs.rs/axum/0.3/axum/) +* [API Documentation](https://docs.rs/axum/0.3/axum/) * Cargo package: [axum](https://crates.io/crates/axum) ## Database diff --git a/frameworks/Rust/axum/axum-mongo-raw.dockerfile b/frameworks/Rust/axum/axum-mongo-raw.dockerfile new file mode 100644 index 00000000000..104d70c4be9 --- /dev/null +++ b/frameworks/Rust/axum/axum-mongo-raw.dockerfile @@ -0,0 +1,25 @@ +FROM rust:1.60-slim-buster + +ENV AXUM_TECHEMPOWER_MONGODB_URL=mongodb://tfb-database:27017 +ENV AXUM_TECHEMPOWER_MAX_POOL_SIZE=28 +ENV AXUM_TECHEMPOWER_MIN_POOL_SIZE=14 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + pkg-config libssl-dev \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /axum +COPY ./src ./src +COPY ./templates ./templates +COPY ./Cargo.toml ./Cargo.toml +COPY ./Cargo.lock ./Cargo.lock +COPY ./run.sh ./run.sh +RUN chmod +x ./run.sh + +ENV RUSTFLAGS "-C target-cpu=native" +RUN cargo build --release +RUN cp ./target/release/axum-mongo-raw ./target/release/axum-techempower + +EXPOSE 8000 + +CMD ["./run.sh"] diff --git a/frameworks/Rust/axum/axum-mongo.dockerfile b/frameworks/Rust/axum/axum-mongo.dockerfile index 91c750823a5..77604b1dd2d 100644 --- a/frameworks/Rust/axum/axum-mongo.dockerfile +++ b/frameworks/Rust/axum/axum-mongo.dockerfile @@ -1,6 +1,8 @@ -FROM rust:1.55-slim-buster +FROM rust:1.60-slim-buster ENV AXUM_TECHEMPOWER_MONGODB_URL=mongodb://tfb-database:27017 +ENV AXUM_TECHEMPOWER_MAX_POOL_SIZE=28 +ENV AXUM_TECHEMPOWER_MIN_POOL_SIZE=14 RUN apt-get update && apt-get install -y --no-install-recommends \ pkg-config libssl-dev \ diff --git a/frameworks/Rust/axum/axum-pg-pool.dockerfile b/frameworks/Rust/axum/axum-pg-pool.dockerfile new file mode 100644 index 00000000000..6486ef5fba0 --- /dev/null +++ b/frameworks/Rust/axum/axum-pg-pool.dockerfile @@ -0,0 +1,24 @@ +FROM rust:1.60-slim-buster + +ENV AXUM_TECHEMPOWER_DATABASE_URL=postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world +ENV AXUM_TECHEMPOWER_MAX_POOL_SIZE=28 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + libpq-dev pkg-config libssl-dev \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /axum +COPY ./src ./src +COPY ./templates ./templates +COPY ./Cargo.toml ./Cargo.toml +COPY ./Cargo.lock ./Cargo.lock +COPY ./run.sh ./run.sh +RUN chmod +x ./run.sh + +ENV RUSTFLAGS "-C target-cpu=native" +RUN cargo build --release +RUN cp ./target/release/axum-pg-pool ./target/release/axum-techempower + +EXPOSE 8000 + +CMD ["./run.sh"] diff --git a/frameworks/Rust/axum/axum-bb8.dockerfile b/frameworks/Rust/axum/axum-pg.dockerfile similarity index 84% rename from frameworks/Rust/axum/axum-bb8.dockerfile rename to frameworks/Rust/axum/axum-pg.dockerfile index 7919de30125..1d3bc4543d8 100644 --- a/frameworks/Rust/axum/axum-bb8.dockerfile +++ b/frameworks/Rust/axum/axum-pg.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.55-slim-buster +FROM rust:1.60-slim-buster ENV AXUM_TECHEMPOWER_DATABASE_URL=postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world @@ -16,7 +16,7 @@ RUN chmod +x ./run.sh ENV RUSTFLAGS "-C target-cpu=native" RUN cargo build --release -RUN cp ./target/release/axum-bb8 ./target/release/axum-techempower +RUN cp ./target/release/axum-pg ./target/release/axum-techempower EXPOSE 8000 diff --git a/frameworks/Rust/axum/axum-sqlx.dockerfile 
b/frameworks/Rust/axum/axum-sqlx.dockerfile index 0a3e0c21c16..4a1843375ea 100644 --- a/frameworks/Rust/axum/axum-sqlx.dockerfile +++ b/frameworks/Rust/axum/axum-sqlx.dockerfile @@ -1,6 +1,8 @@ -FROM rust:1.55-slim-buster +FROM rust:1.60-slim-buster ENV AXUM_TECHEMPOWER_DATABASE_URL=postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world +ENV AXUM_TECHEMPOWER_MAX_POOL_SIZE=56 +ENV AXUM_TECHEMPOWER_MIN_POOL_SIZE=56 RUN apt-get update && apt-get install -y --no-install-recommends \ libpq-dev pkg-config libssl-dev \ diff --git a/frameworks/Rust/axum/axum.dockerfile b/frameworks/Rust/axum/axum.dockerfile index b8dc9dfc323..984b923406d 100644 --- a/frameworks/Rust/axum/axum.dockerfile +++ b/frameworks/Rust/axum/axum.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.55-slim-buster +FROM rust:1.60-slim-buster RUN apt-get update && apt-get install -y --no-install-recommends \ pkg-config libssl-dev \ diff --git a/frameworks/Rust/axum/benchmark_config.json b/frameworks/Rust/axum/benchmark_config.json index e40431100a7..f05dd4a67dd 100755 --- a/frameworks/Rust/axum/benchmark_config.json +++ b/frameworks/Rust/axum/benchmark_config.json @@ -22,6 +22,25 @@ "versus": "None" }, "sqlx": { + "db_url": "/db", + "fortune_url": "/fortunes", + "port": 8000, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "Axum", + "language": "Rust", + "flavor": "None", + "orm": "Raw", + "platform": "Rust", + "webserver": "Hyper", + "os": "Linux", + "database_os": "Linux", + "display_name": "Axum [Postgresql - sqlx]", + "notes": "", + "versus": "None" + }, + "pg": { "db_url": "/db", "fortune_url": "/fortunes", "query_url": "/queries?queries=", @@ -33,16 +52,16 @@ "framework": "Axum", "language": "Rust", "flavor": "None", - "orm": "Full", + "orm": "Raw", "platform": "Rust", "webserver": "Hyper", "os": "Linux", "database_os": "Linux", - "display_name": "Axum [sqlx]", + "display_name": "Axum [Postgresql]", "notes": "", "versus": "None" }, - "bb8": { + "pg-pool": { "db_url": "/db", "query_url": "/queries?queries=", "update_url": "/updates?queries=", @@ -54,12 +73,12 @@ "framework": "Axum", "language": "Rust", "flavor": "None", - "orm": "Full", + "orm": "Raw", "platform": "Rust", "webserver": "Hyper", "os": "Linux", "database_os": "Linux", - "display_name": "Axum [bb8]", + "display_name": "Axum [Postgresql - deadpool]", "notes": "", "versus": "None" }, @@ -67,6 +86,7 @@ "db_url": "/db", "query_url": "/queries?queries=", "fortune_url": "/fortunes", + "update_url": "/updates?queries=", "port": 8000, "approach": "Realistic", "classification": "Fullstack", @@ -74,12 +94,32 @@ "framework": "Axum", "language": "Rust", "flavor": "None", - "orm": "Full", + "orm": "Raw", + "platform": "Rust", + "webserver": "Hyper", + "os": "Linux", + "database_os": "Linux", + "display_name": "Axum [Mongodb]", + "notes": "", + "versus": "None" + }, + "mongo-raw": { + "db_url": "/db", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "port": 8000, + "approach": "Realistic", + "classification": "Fullstack", + "database": "mongodb", + "framework": "Axum", + "language": "Rust", + "flavor": "None", + "orm": "Raw", "platform": "Rust", "webserver": "Hyper", "os": "Linux", "database_os": "Linux", - "display_name": "Axum [mongodb]", + "display_name": "Axum [Mongodb raw]", "notes": "", "versus": "None" } diff --git a/frameworks/Rust/axum/src/common.rs b/frameworks/Rust/axum/src/common.rs deleted file mode 100644 index a0de5faf5a6..00000000000 --- a/frameworks/Rust/axum/src/common.rs +++ 
/dev/null @@ -1 +0,0 @@ -pub const POOL_SIZE: u32 = 56; \ No newline at end of file diff --git a/frameworks/Rust/axum/src/database_bb8.rs b/frameworks/Rust/axum/src/database_bb8.rs deleted file mode 100644 index ae63ceb2e6c..00000000000 --- a/frameworks/Rust/axum/src/database_bb8.rs +++ /dev/null @@ -1,37 +0,0 @@ -use axum::extract::{Extension, FromRequest, RequestParts}; -use axum::http::StatusCode; -use bb8::{Pool, PooledConnection}; -use bb8_postgres::PostgresConnectionManager; -use bb8_postgres::tokio_postgres::NoTls; -use crate::common::POOL_SIZE; -use crate::utils::internal_error; - -pub type ConnectionManager = PostgresConnectionManager; -pub type ConnectionPool = Pool; -pub type Connection = PooledConnection<'static, ConnectionManager>; - -pub async fn create_bb8_pool(database_url: String) -> ConnectionPool { - let manager = PostgresConnectionManager::new_from_stringlike(database_url, NoTls).unwrap(); - - Pool::builder().max_size(POOL_SIZE).build(manager).await.unwrap() -} - -pub struct DatabaseConnection(pub Connection); - -#[async_trait] -impl FromRequest for DatabaseConnection - where - B: Send, -{ - type Rejection = (StatusCode, String); - - async fn from_request(req: &mut RequestParts) -> Result { - let Extension(pool) = Extension::::from_request(req) - .await - .map_err(internal_error)?; - - let conn = pool.get_owned().await.map_err(internal_error)?; - - Ok(Self(conn)) - } -} \ No newline at end of file diff --git a/frameworks/Rust/axum/src/database_mongo.rs b/frameworks/Rust/axum/src/database_mongo.rs index 8c0204cd1d3..920b8dc265d 100644 --- a/frameworks/Rust/axum/src/database_mongo.rs +++ b/frameworks/Rust/axum/src/database_mongo.rs @@ -1,26 +1,117 @@ +use axum::async_trait; use axum::extract::{Extension, FromRequest, RequestParts}; use axum::http::StatusCode; +use futures_util::stream::FuturesUnordered; +use futures_util::TryStreamExt; +use std::io; -use mongodb::{Client, Database}; use crate::utils::internal_error; +use crate::{Fortune, World}; +use futures_util::StreamExt; +use mongodb::bson::doc; +use mongodb::Database; pub struct DatabaseConnection(pub Database); #[async_trait] impl FromRequest for DatabaseConnection - where - B: Send, +where + B: Send, { type Rejection = (StatusCode, String); async fn from_request(req: &mut RequestParts) -> Result { - let Extension(client) = Extension::::from_request(req) + let Extension(db) = Extension::::from_request(req) .await .map_err(internal_error)?; - let database = client.database("hello_world"); + Ok(Self(db)) + } +} + +#[derive(Debug)] +pub enum MongoError { + Io(io::Error), + Mongo(mongodb::error::Error), +} - Ok(Self(database)) +impl From for MongoError { + fn from(err: io::Error) -> Self { + MongoError::Io(err) } } +impl From for MongoError { + fn from(err: mongodb::error::Error) -> Self { + MongoError::Mongo(err) + } +} + +pub async fn find_world_by_id(db: Database, id: i32) -> Result { + let world_collection = db.collection::("world"); + + let filter = doc! 
{ "_id": id as f32 }; + + let world: World = world_collection + .find_one(Some(filter), None) + .await + .unwrap() + .expect("expected world, found none"); + Ok(world) +} + +pub async fn find_worlds(db: Database, ids: Vec) -> Result, MongoError> { + let future_worlds = FuturesUnordered::new(); + + for id in ids { + future_worlds.push(find_world_by_id(db.clone(), id)); + } + + let worlds: Result, MongoError> = future_worlds.try_collect().await; + worlds +} + +pub async fn fetch_fortunes(db: Database) -> Result, MongoError> { + let fortune_collection = db.collection::("fortune"); + + let mut fortune_cursor = fortune_collection + .find(None, None) + .await + .expect("fortunes could not be loaded"); + + let mut fortunes: Vec = Vec::new(); + + while let Some(doc) = fortune_cursor.next().await { + fortunes.push(doc.expect("could not load fortune")); + } + + fortunes.push(Fortune { + id: 0.0, + message: "Additional fortune added at request time.".to_string(), + }); + + fortunes.sort_by(|a, b| a.message.cmp(&b.message)); + Ok(fortunes) +} + +pub async fn update_worlds( + db: Database, + worlds: Vec, +) -> Result { + let mut updates = Vec::new(); + + for world in worlds { + updates.push(doc! { + "q": { "id": world.id }, "u": { "$set": { "randomNumber": world.random_number }} + }); + } + + db.run_command( + doc! {"update": "world", "updates": updates, "ordered": false}, + None, + ) + .await + .expect("could not update worlds"); + + Ok(true) +} diff --git a/frameworks/Rust/axum/src/database_mongo_raw.rs b/frameworks/Rust/axum/src/database_mongo_raw.rs new file mode 100644 index 00000000000..02e191d8123 --- /dev/null +++ b/frameworks/Rust/axum/src/database_mongo_raw.rs @@ -0,0 +1,108 @@ +use axum::async_trait; +use axum::extract::{Extension, FromRequest, RequestParts}; +use axum::http::StatusCode; +use futures_util::stream::FuturesUnordered; +use futures_util::TryStreamExt; +use std::io; + +use crate::utils::internal_error; +use crate::World; +use mongodb::bson::{doc, RawDocumentBuf}; +use mongodb::Database; + +pub struct DatabaseConnection(pub Database); + +#[async_trait] +impl FromRequest for DatabaseConnection +where + B: Send, +{ + type Rejection = (StatusCode, String); + + async fn from_request(req: &mut RequestParts) -> Result { + let Extension(db) = Extension::::from_request(req) + .await + .map_err(internal_error)?; + + Ok(Self(db)) + } +} + +#[derive(Debug)] +pub enum MongoError { + Io(io::Error), + Mongo(mongodb::error::Error), +} + +impl From for MongoError { + fn from(err: io::Error) -> Self { + MongoError::Io(err) + } +} + +impl From for MongoError { + fn from(err: mongodb::error::Error) -> Self { + MongoError::Mongo(err) + } +} + +pub async fn find_world_by_id(db: Database, id: i32) -> Result { + let world_collection = db.collection::("world"); + + let filter = doc! 
{ "_id": id as f32 }; + + let raw: RawDocumentBuf = world_collection + .find_one(Some(filter), None) + .await + .unwrap() + .expect("expected world, found none"); + + Ok(World { + id: raw + .get("id") + .expect("expected to parse world id") + .expect("could not get world id") + .as_f64() + .expect("could not extract world id") as f32, + random_number: raw + .get("randomNumber") + .expect("expected to parse world randomNumber") + .expect("expected to get world randomNumber") + .as_f64() + .expect("could not extract world randomNumber") + as f32, + }) +} + +pub async fn find_worlds(db: Database, ids: Vec) -> Result, MongoError> { + let future_worlds = FuturesUnordered::new(); + + for id in ids { + future_worlds.push(find_world_by_id(db.clone(), id)); + } + + let worlds: Result, MongoError> = future_worlds.try_collect().await; + worlds +} + +pub async fn update_worlds( + db: Database, + worlds: Vec, +) -> Result { + let mut updates = Vec::new(); + + for world in worlds { + updates.push(doc! { + "q": { "id": world.id }, "u": { "$set": { "randomNumber": world.random_number }} + }); + } + + db.run_command( + doc! {"update": "world", "updates": updates, "ordered": false}, + None, + ) + .await + .expect("could not update worlds"); + + Ok(true) +} diff --git a/frameworks/Rust/axum/src/database_pg.rs b/frameworks/Rust/axum/src/database_pg.rs new file mode 100644 index 00000000000..1860b4021ab --- /dev/null +++ b/frameworks/Rust/axum/src/database_pg.rs @@ -0,0 +1,206 @@ +use axum::async_trait; +use axum::extract::{Extension, FromRequest, RequestParts}; +use axum::http::StatusCode; +use futures::{ + stream::futures_unordered::FuturesUnordered, FutureExt, StreamExt, TryStreamExt, +}; +use rand::{rngs::SmallRng, thread_rng, Rng, SeedableRng}; +use std::sync::Arc; +use std::{collections::HashMap, fmt::Write, io}; +use tokio::pin; +use tokio_postgres::{connect, types::ToSql, Client, NoTls, Statement}; + +use crate::models_pg::{Fortune, World}; +use crate::utils::internal_error; + +#[derive(Debug)] +pub enum PgError { + Io(io::Error), + Pg(tokio_postgres::Error), +} + +impl From for PgError { + fn from(err: io::Error) -> Self { + PgError::Io(err) + } +} + +impl From for PgError { + fn from(err: tokio_postgres::Error) -> Self { + PgError::Pg(err) + } +} + +/// Postgres interface +pub struct PgConnection { + client: Client, + fortune: Statement, + world: Statement, + updates: HashMap, +} + +impl PgConnection { + pub async fn connect(db_url: String) -> Arc { + let (cl, conn) = connect(&*db_url, NoTls) + .await + .expect("can not connect to postgresql"); + + // Spawn connection + tokio::spawn(async move { + if let Err(error) = conn.await { + eprintln!("Connection error: {}", error); + } + }); + + let fortune = cl.prepare("SELECT * FROM fortune").await.unwrap(); + let mut updates = HashMap::new(); + + for num in 1..=500u16 { + let mut pl = 1; + let mut q = String::new(); + + q.push_str("UPDATE world SET randomnumber = CASE id "); + + for _ in 1..=num { + let _ = write!(q, "when ${} then ${} ", pl, pl + 1); + pl += 2; + } + + q.push_str("ELSE randomnumber END WHERE id IN ("); + + for _ in 1..=num { + let _ = write!(q, "${},", pl); + pl += 1; + } + + q.pop(); + q.push(')'); + + updates.insert(num, cl.prepare(&q).await.unwrap()); + } + + let world = cl.prepare("SELECT * FROM world WHERE id=$1").await.unwrap(); + + Arc::new(PgConnection { + client: cl, + fortune, + world, + updates, + }) + } +} + +impl PgConnection { + async fn query_one_world(&self, id: i32) -> Result { + let stream = 
self.client.query_raw(&self.world, &[&id]).await?; + pin!(stream); + let row = stream.next().await.unwrap()?; + Ok(World { + id: row.get(0), + randomnumber: row.get(1), + }) + } + + pub async fn get_world(&self) -> Result { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let random_id = (rng.gen::() % 10_000 + 1) as i32; + + let world = self.query_one_world(random_id).await?; + Ok(world) + } + + pub async fn get_worlds(&self, num: usize) -> Result, PgError> { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let worlds = FuturesUnordered::new(); + + for _ in 0..num { + let w_id = (rng.gen::() % 10_000 + 1) as i32; + worlds.push(self.query_one_world(w_id)); + } + + worlds.try_collect().await + } + + pub async fn update(&self, num: u16) -> Result, PgError> { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let worlds = FuturesUnordered::new(); + + for _ in 0..num { + let id = (rng.gen::() % 10_000 + 1) as i32; + let w_id = (rng.gen::() % 10_000 + 1) as i32; + + worlds.push(self.query_one_world(w_id).map(move |res| match res { + Ok(mut world) => { + world.randomnumber = id; + Ok(world) + } + + Err(err) => Err(err), + })); + } + + let st = self.updates.get(&num).unwrap().clone(); + + let worlds: Vec = worlds.try_collect().await?; + + let mut params: Vec<&(dyn ToSql + Sync)> = Vec::with_capacity(num as usize * 3); + + for w in &worlds { + params.push(&w.id); + params.push(&w.randomnumber); + } + + for w in &worlds { + params.push(&w.id); + } + + self.client.query(&st, ¶ms[..]).await?; + + Ok(worlds) + } + + pub async fn tell_fortune(&self) -> Result, PgError> { + let mut items = vec![Fortune { + id: 0, + message: "Additional fortune added at request time.".parse().unwrap(), + }]; + + let fut = self.client.query_raw::<_, _, &[i32; 0]>(&self.fortune, &[]); + + let stream = fut.await?; + pin!(stream); + + while let Some(row) = stream.next().await { + let row = row?; + + items.push(Fortune { + id: row.get(0), + message: row.get(1), + }); + } + + items.sort_by(|it, next| it.message.cmp(&next.message)); + Ok(items) + } +} + +pub struct DatabaseConnection(pub Arc); + +#[async_trait] +impl FromRequest for DatabaseConnection +where + B: Send, +{ + type Rejection = (StatusCode, String); + + async fn from_request(req: &mut RequestParts) -> Result { + let Extension(pg_connection) = Extension::>::from_request(req) + .await + .map_err(internal_error)?; + + Ok(Self(pg_connection)) + } +} diff --git a/frameworks/Rust/axum/src/database_pg_pool.rs b/frameworks/Rust/axum/src/database_pg_pool.rs new file mode 100644 index 00000000000..0da8f9f9c3f --- /dev/null +++ b/frameworks/Rust/axum/src/database_pg_pool.rs @@ -0,0 +1,125 @@ +use axum::async_trait; +use axum::extract::{Extension, FromRequest, RequestParts}; +use axum::http::StatusCode; +use deadpool_postgres::{Client, Manager, ManagerConfig, RecyclingMethod}; +use std::io; +use std::str::FromStr; +use tokio_pg_mapper::FromTokioPostgresRow; +use tokio_postgres::{NoTls, Row, Statement}; + +use crate::utils::internal_error; +use crate::{Fortune, World}; + +#[derive(Debug)] +pub enum PgError { + Io(io::Error), + Pg(tokio_postgres::Error), +} + +impl From for PgError { + fn from(err: io::Error) -> Self { + PgError::Io(err) + } +} + +impl From for PgError { + fn from(err: tokio_postgres::Error) -> Self { + PgError::Pg(err) + } +} + +pub async fn create_pool( + database_url: String, + max_pool_size: u32, +) -> deadpool_postgres::Pool { + let pg_config = + 
tokio_postgres::Config::from_str(&*database_url).expect("invalid database url"); + + let mgr_config = ManagerConfig { + recycling_method: RecyclingMethod::Fast, + }; + let mgr = Manager::from_config(pg_config, NoTls, mgr_config); + let pool: deadpool_postgres::Pool = deadpool_postgres::Pool::builder(mgr) + .max_size(max_pool_size as usize) + .build() + .unwrap(); + + pool +} + +pub struct DatabaseClient(pub Client); + +#[async_trait] +impl FromRequest for DatabaseClient +where + B: Send, +{ + type Rejection = (StatusCode, String); + + async fn from_request(req: &mut RequestParts) -> Result { + let Extension(pool) = Extension::::from_request(req) + .await + .map_err(internal_error)?; + + let conn = pool.get().await.map_err(internal_error)?; + + Ok(Self(conn)) + } +} + +pub async fn fetch_world_by_id( + client: &Client, + number: i32, + select: &Statement, +) -> Result { + let row: Row = client.query_one(select, &[&number]).await.unwrap(); + + Ok(World::from_row(row).unwrap()) +} + +pub async fn update_world( + client: &Client, + update: &Statement, + random_id: i32, + w_id: i32, +) -> Result { + let rows_modified: u64 = client.execute(update, &[&random_id, &w_id]).await.unwrap(); + + Ok(rows_modified) +} + +pub async fn fetch_all_fortunes( + client: Client, + select: &Statement, +) -> Result, PgError> { + let rows: Vec = client.query(select, &[]).await.unwrap(); + + let mut fortunes: Vec = Vec::with_capacity(rows.capacity()); + + for row in rows { + fortunes.push(Fortune::from_row(row).unwrap()); + } + + Ok(fortunes) +} + +pub async fn prepare_fetch_all_fortunes_statement(client: &Client) -> Statement { + client + .prepare_cached("SELECT * FROM Fortune") + .await + .unwrap() +} + +pub async fn prepare_fetch_world_by_id_statement(client: &Client) -> Statement { + client + .prepare_cached("SELECT id, randomnumber FROM World WHERE id = $1") + .await + .unwrap() +} + +pub async fn prepare_update_world_by_id_statement(client: &Client) -> Statement { + client + .prepare_cached("UPDATE World SET randomnumber = $1 WHERE id = $2") + .await + .unwrap() +} diff --git a/frameworks/Rust/axum/src/database_sqlx.rs b/frameworks/Rust/axum/src/database_sqlx.rs index 7a40162d153..88ddb11c2a7 100644 --- a/frameworks/Rust/axum/src/database_sqlx.rs +++ b/frameworks/Rust/axum/src/database_sqlx.rs @@ -1,22 +1,51 @@ +use axum::async_trait; use axum::extract::{Extension, FromRequest, RequestParts}; use axum::http::StatusCode; +use std::io; -use sqlx::{PgPool, Postgres}; -use sqlx::pool::PoolConnection; -use sqlx::postgres::PgPoolOptions; -use crate::common::POOL_SIZE; use crate::utils::internal_error; +use crate::{Fortune, World}; +use sqlx::pool::PoolConnection; +use sqlx::postgres::{PgArguments, PgPoolOptions}; +use sqlx::{Arguments, PgPool, Postgres}; + +#[derive(Debug)] +pub enum PgError { + Io(io::Error), + Pg(sqlx::Error), +} -pub async fn create_pool(database_url: String) -> PgPool { - PgPoolOptions::new().max_connections(POOL_SIZE).min_connections(56).connect(&*database_url).await.unwrap() +impl From for PgError { + fn from(err: io::Error) -> Self { + PgError::Io(err) + } +} + +impl From for PgError { + fn from(err: sqlx::Error) -> Self { + PgError::Pg(err) + } +} + +pub async fn create_pool( + database_url: String, + max_pool_size: u32, + min_pool_size: u32, +) -> PgPool { + PgPoolOptions::new() + .max_connections(max_pool_size) + .min_connections(min_pool_size) + .connect(&*database_url) + .await + .unwrap() } pub struct DatabaseConnection(pub PoolConnection); #[async_trait] impl FromRequest for 
DatabaseConnection - where - B: Send, +where + B: Send, { type Rejection = (StatusCode, String); @@ -31,3 +60,27 @@ impl FromRequest for DatabaseConnection } } +pub async fn fetch_world( + mut conn: PoolConnection, + number: i32, +) -> Result { + let mut args = PgArguments::default(); + args.add(number); + + let world: World = + sqlx::query_as_with("SELECT id, randomnumber FROM World WHERE id = $1", args) + .fetch_one(&mut conn) + .await + .expect("error loading world"); + Ok(world) +} + +pub async fn fetch_fortunes( + mut conn: PoolConnection, +) -> Result, PgError> { + let fortunes: Vec = sqlx::query_as("SELECT * FROM Fortune") + .fetch_all(&mut conn) + .await + .expect("error loading Fortunes"); + Ok(fortunes) +} diff --git a/frameworks/Rust/axum/src/main.rs b/frameworks/Rust/axum/src/main.rs index f6e31e2ed92..05c870cd139 100644 --- a/frameworks/Rust/axum/src/main.rs +++ b/frameworks/Rust/axum/src/main.rs @@ -1,23 +1,15 @@ -extern crate serde_derive; -extern crate dotenv; -extern crate async_trait; -extern crate tokio_pg_mapper_derive; -extern crate tokio_pg_mapper; - mod models_common; mod server; -mod common; -use models_common::{Message}; +use models_common::Message; use axum::http::StatusCode; -use axum::Json; -use dotenv::dotenv; -use axum::{Router, routing::get}; use axum::http::{header, HeaderValue}; use axum::response::IntoResponse; +use axum::Json; +use axum::{routing::get, Router}; +use dotenv::dotenv; use tower_http::set_header::SetResponseHeaderLayer; -use hyper::Body; pub async fn plaintext() -> &'static str { "Hello, World!" @@ -35,14 +27,19 @@ pub async fn json() -> impl IntoResponse { async fn main() { dotenv().ok(); - let app = Router::new() + let server_header_value = HeaderValue::from_static("Axum"); + + let app = Router::new() .route("/plaintext", get(plaintext)) .route("/json", get(json)) - .layer(SetResponseHeaderLayer::<_, Body>::if_not_present(header::SERVER, HeaderValue::from_static("Axum"))); + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )); server::builder() .http1_pipeline_flush(true) .serve(app.into_make_service()) .await .unwrap(); -} \ No newline at end of file +} diff --git a/frameworks/Rust/axum/src/main_bb8.rs b/frameworks/Rust/axum/src/main_bb8.rs deleted file mode 100644 index 6b4f30a933e..00000000000 --- a/frameworks/Rust/axum/src/main_bb8.rs +++ /dev/null @@ -1,165 +0,0 @@ -extern crate serde_derive; -extern crate dotenv; -#[macro_use] -extern crate async_trait; - -mod models_common; -mod models_bb8; -mod database_bb8; -mod utils; -mod server; -mod common; - -use dotenv::dotenv; -use std::env; -use crate::database_bb8::{Connection, create_bb8_pool, DatabaseConnection}; -use axum::{ - extract::{Query}, - http::StatusCode, - response::IntoResponse, - routing::get, - AddExtensionLayer, Json, Router, -}; -use axum::http::{header, HeaderValue}; -use bb8_postgres::tokio_postgres::{Row, Statement}; -use tower_http::set_header::SetResponseHeaderLayer; -use hyper::Body; -use rand::rngs::SmallRng; -use rand::{SeedableRng}; -use tokio_pg_mapper::FromTokioPostgresRow; -use yarte::Template; - -use models_bb8::{World, Fortune}; -use utils::{Params, parse_params, random_number}; -use crate::utils::Utf8Html; - -async fn db(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { - let mut rng = SmallRng::from_entropy(); - let number = random_number(&mut rng); - - let select = prepare_fetch_world_by_id_statement(&conn).await; - let world = fetch_world_by_id_using_statement(&conn, number, &select).await; - - 
(StatusCode::OK, Json(world)) -} - -async fn fetch_world_by_id_using_statement(conn: &Connection, number: i32, select: &Statement) -> World { - let row: Row = conn.query_one(select, &[&number]).await.unwrap(); - - World::from_row(row).unwrap() -} - -async fn queries(DatabaseConnection(conn): DatabaseConnection, Query(params): Query) -> impl IntoResponse { - let q = parse_params(params); - - let mut rng = SmallRng::from_entropy(); - - let mut results = Vec::with_capacity(q as usize); - - let select = prepare_fetch_world_by_id_statement(&conn).await; - - for _ in 0..q { - let query_id = random_number(&mut rng); - - let result :World = fetch_world_by_id_using_statement(&conn, query_id, &select).await; - - results.push(result); - } - - (StatusCode::OK, Json(results)) -} - -async fn fortunes(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { - let select = prepare_fetch_all_fortunes_statement(&conn).await; - - let rows: Vec = conn.query(&select, &[]).await.unwrap(); - - let mut fortunes: Vec = Vec::with_capacity(rows.capacity()); - - for row in rows { - fortunes.push(Fortune::from_row(row).unwrap()); - } - - fortunes.push(Fortune { - id: 0, - message: "Additional fortune added at request time.".to_string(), - }); - - fortunes.sort_by(|a, b| a.message.cmp(&b.message)); - - Utf8Html( - FortunesTemplate { - fortunes: &fortunes, - } - .call() - .expect("error rendering template"), - ) -} - -async fn updates(DatabaseConnection(conn): DatabaseConnection, Query(params): Query) -> impl IntoResponse { - let q = parse_params(params); - - let mut rng = SmallRng::from_entropy(); - - let mut results = Vec::with_capacity(q as usize); - - let select = prepare_fetch_world_by_id_statement(&conn).await; - - for _ in 0..q { - let query_id = random_number(&mut rng); - let mut result :World = fetch_world_by_id_using_statement(&conn, query_id, &select).await; - - result.randomnumber = random_number(&mut rng); - results.push(result); - } - - let update = prepare_update_world_by_id_statement(&conn).await; - - for w in &results { - conn.execute(&update, &[&w.randomnumber, &w.id]).await.unwrap(); - } - - (StatusCode::OK, Json(results)) -} - -async fn prepare_fetch_all_fortunes_statement(conn: &Connection) -> Statement { - conn.prepare("SELECT * FROM Fortune").await.unwrap() -} - -async fn prepare_fetch_world_by_id_statement(conn: &Connection) -> Statement { - conn.prepare("SELECT id, randomnumber FROM World WHERE id = $1").await.unwrap() -} - -async fn prepare_update_world_by_id_statement(conn: &Connection) -> Statement { - conn.prepare("UPDATE World SET randomnumber = $1 WHERE id = $2").await.unwrap() -} - -#[tokio::main] -async fn main() { - dotenv().ok(); - - let database_url = env::var("AXUM_TECHEMPOWER_DATABASE_URL").ok() - .expect("AXUM_TECHEMPOWER_DATABASE_URL environment variable was not set"); - - // setup connection pool - let pool = create_bb8_pool(database_url).await; - - let router = Router::new() - .route("/fortunes", get(fortunes)) - .route("/db", get(db)) - .route("/queries", get(queries)) - .route("/updates", get(updates)) - .layer(AddExtensionLayer::new(pool)) - .layer(SetResponseHeaderLayer::<_, Body>::if_not_present(header::SERVER, HeaderValue::from_static("Axum"))); - - server::builder() - .serve(router.into_make_service()) - .await - .unwrap(); -} - -#[derive(Template)] -#[template(path = "fortunes.html.hbs")] -pub struct FortunesTemplate<'a> { - pub fortunes: &'a Vec, -} diff --git a/frameworks/Rust/axum/src/main_mongo.rs b/frameworks/Rust/axum/src/main_mongo.rs index 
f38320f0c77..6766b5e1d20 100644 --- a/frameworks/Rust/axum/src/main_mongo.rs +++ b/frameworks/Rust/axum/src/main_mongo.rs @@ -1,95 +1,114 @@ -extern crate serde_derive; -extern crate dotenv; -#[macro_use] -extern crate async_trait; - +mod database_mongo; mod models_common; mod models_mongo; -mod database_mongo; -mod utils; mod server; -mod common; +mod utils; -use dotenv::dotenv; -use std::env; -use std::time::Duration; +use axum::http::{header, HeaderValue}; use axum::{ - extract::{Query}, - http::StatusCode, - response::IntoResponse, - routing::get, - AddExtensionLayer, Json, Router, + extract::Query, http::StatusCode, response::IntoResponse, routing::get, Extension, + Json, Router, }; -use axum::http::{header, HeaderValue}; -use futures::stream::StreamExt; +use dotenv::dotenv; +use mongodb::options::{ClientOptions, Compressor}; +use mongodb::Client; +use rand::{rngs::SmallRng, thread_rng, Rng, SeedableRng}; +use std::time::Duration; use tower_http::set_header::SetResponseHeaderLayer; -use hyper::Body; -use rand::rngs::SmallRng; -use rand::{SeedableRng}; use yarte::Template; -use mongodb::{bson::doc, Client, Database}; -use mongodb::options::ClientOptions; -use models_mongo::{World, Fortune}; -use utils::{Params, parse_params, random_number, Utf8Html}; +use crate::database_mongo::{ + fetch_fortunes, find_world_by_id, find_worlds, update_worlds, +}; +use crate::utils::get_environment_variable; use database_mongo::DatabaseConnection; use models_mongo::FortuneInfo; +use models_mongo::{Fortune, World}; +use utils::{parse_params, Params, Utf8Html}; -async fn db(DatabaseConnection(mut db): DatabaseConnection) -> impl IntoResponse { - let mut rng = SmallRng::from_entropy(); - let number = random_number(&mut rng); - - let world = find_world_by_id(&mut db, number).await; - - (StatusCode::OK, Json(world)) +#[derive(Template)] +#[template(path = "fortunes.html.hbs")] +pub struct FortunesTemplate<'a> { + pub fortunes: &'a Vec, } -async fn find_world_by_id(db: &mut Database, number: i32) -> World { - let world_collection = db.collection::("world"); +async fn db(DatabaseConnection(db): DatabaseConnection) -> impl IntoResponse { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); - let filter = doc! 
{ "id": number as f32 }; + let random_id = (rng.gen::() % 10_000 + 1) as i32; - let world: World = world_collection.find_one(Some(filter), None).await.expect("world could not be found").unwrap(); - world + let world = find_world_by_id(db, random_id) + .await + .expect("world could not be found"); + + (StatusCode::OK, Json(world)) } -async fn queries(DatabaseConnection(mut db): DatabaseConnection, Query(params): Query) -> impl IntoResponse { +async fn queries( + DatabaseConnection(db): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { let q = parse_params(params); - let mut rng = SmallRng::from_entropy(); - - let mut results = Vec::with_capacity(q as usize); + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + let mut ids: Vec = Vec::with_capacity(q as usize); for _ in 0..q { - let query_id = random_number(&mut rng); + let random_id = (rng.gen::() % 10_000 + 1) as i32; - let result :World = find_world_by_id(&mut db, query_id).await; - - results.push(result); + ids.push(random_id); } + let worlds = find_worlds(db, ids).await; + let results = worlds.expect("worlds could not be retrieved"); + (StatusCode::OK, Json(results)) } -async fn fortunes(DatabaseConnection(db): DatabaseConnection) -> impl IntoResponse { - let fortune_collection = db.collection::("fortune"); +async fn updates( + DatabaseConnection(db): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); - let mut fortune_cursor = fortune_collection.find(None, None).await.expect("fortunes could not be loaded"); + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + let mut ids: Vec = Vec::with_capacity(q as usize); - let mut fortunes: Vec = Vec::with_capacity(100 as usize); + for _ in 0..q { + let random_id = (rng.gen::() % 10_000 + 1) as i32; - while let Some(doc) = fortune_cursor.next().await { - fortunes.push(doc.expect("could not load fortune")); + ids.push(random_id); } - fortunes.push(Fortune { - id: 0.0, - message: "Additional fortune added at request time.".to_string(), - }); + let worlds = find_worlds(db.clone(), ids) + .await + .expect("worlds could not be retrieved"); + let mut updated_worlds: Vec = Vec::with_capacity(q as usize); - fortunes.sort_by(|a, b| a.message.cmp(&b.message)); + for mut world in worlds { + let random_number = (rng.gen::() % 10_000 + 1) as i32; - let fortune_infos: Vec = fortunes.iter().map(|f| FortuneInfo { id: f.id as i32, message: f.message.clone() }).collect(); + world.random_number = random_number as f32; + updated_worlds.push(world); + } + + update_worlds(db.clone(), updated_worlds.clone()) + .await + .expect("could not update worlds"); + + (StatusCode::OK, Json(updated_worlds.clone())) +} + +async fn fortunes(DatabaseConnection(db): DatabaseConnection) -> impl IntoResponse { + let fortunes = fetch_fortunes(db).await.expect("could not fetch fortunes"); + + let fortune_infos: Vec = fortunes + .iter() + .map(|f| FortuneInfo { + id: f.id as i32, + message: f.message.clone(), + }) + .collect(); Utf8Html( FortunesTemplate { @@ -100,36 +119,66 @@ async fn fortunes(DatabaseConnection(db): DatabaseConnection) -> impl IntoRespon ) } -#[tokio::main] -async fn main() { +fn main() { dotenv().ok(); - let database_url = env::var("AXUM_TECHEMPOWER_MONGODB_URL").ok() - .expect("AXUM_TECHEMPOWER_MONGODB_URL environment variable was not set"); + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + + for _ in 1..num_cpus::get() { + std::thread::spawn(move || { + let rt = 
tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + rt.block_on(serve()); + }); + } + rt.block_on(serve()); +} + +async fn serve() { + let database_url: String = get_environment_variable("AXUM_TECHEMPOWER_MONGODB_URL"); + let max_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MAX_POOL_SIZE"); + let min_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MIN_POOL_SIZE"); - // setup connection pool let mut client_options = ClientOptions::parse(database_url).await.unwrap(); - client_options.max_pool_size = Some(common::POOL_SIZE); - client_options.min_pool_size = Some(common::POOL_SIZE); + + // setup connection pool + client_options.max_pool_size = Some(max_pool_size); + client_options.min_pool_size = Some(min_pool_size); client_options.connect_timeout = Some(Duration::from_millis(200)); + // the server will select the algorithm it supports from the list provided by the driver + client_options.compressors = Some(vec![ + Compressor::Snappy, + Compressor::Zlib { + level: Default::default(), + }, + Compressor::Zstd { + level: Default::default(), + }, + ]); + let client = Client::with_options(client_options).unwrap(); + let database = client.database("hello_world"); + let server_header_value = HeaderValue::from_static("Axum"); let app = Router::new() .route("/fortunes", get(fortunes)) .route("/db", get(db)) .route("/queries", get(queries)) - .layer(AddExtensionLayer::new(client)) - .layer(SetResponseHeaderLayer::<_, Body>::if_not_present(header::SERVER, HeaderValue::from_static("Axum"))); + .route("/updates", get(updates)) + .layer(Extension(database)) + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )); server::builder() .serve(app.into_make_service()) .await .unwrap(); } - -#[derive(Template)] -#[template(path = "fortunes.html.hbs")] -pub struct FortunesTemplate<'a> { - pub fortunes: &'a Vec, -} \ No newline at end of file diff --git a/frameworks/Rust/axum/src/main_mongo_raw.rs b/frameworks/Rust/axum/src/main_mongo_raw.rs new file mode 100644 index 00000000000..2254ccc4281 --- /dev/null +++ b/frameworks/Rust/axum/src/main_mongo_raw.rs @@ -0,0 +1,153 @@ +mod database_mongo_raw; +mod models_common; +mod models_mongo; +mod server; +mod utils; + +use axum::http::{header, HeaderValue}; +use axum::{ + extract::Query, http::StatusCode, response::IntoResponse, routing::get, Extension, + Json, Router, +}; +use dotenv::dotenv; +use mongodb::options::{ClientOptions, Compressor}; +use mongodb::Client; +use rand::{rngs::SmallRng, thread_rng, Rng, SeedableRng}; +use std::time::Duration; +use tower_http::set_header::SetResponseHeaderLayer; + +use database_mongo_raw::DatabaseConnection; +use database_mongo_raw::{find_world_by_id, find_worlds, update_worlds}; +use models_mongo::World; +use utils::get_environment_variable; +use utils::{parse_params, Params}; + +async fn db(DatabaseConnection(db): DatabaseConnection) -> impl IntoResponse { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let random_id = (rng.gen::() % 10_000 + 1) as i32; + + let world = find_world_by_id(db, random_id) + .await + .expect("world could not be found"); + + (StatusCode::OK, Json(world)) +} + +async fn queries( + DatabaseConnection(db): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + let mut ids: Vec = Vec::with_capacity(q as usize); + + for _ in 0..q { + let random_id = (rng.gen::() % 10_000 + 1) 
as i32; + + ids.push(random_id); + } + + let worlds = find_worlds(db, ids).await; + let results = worlds.expect("worlds could not be retrieved"); + + (StatusCode::OK, Json(results)) +} + +async fn updates( + DatabaseConnection(db): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + let mut ids: Vec = Vec::with_capacity(q as usize); + + for _ in 0..q { + let random_id = (rng.gen::() % 10_000 + 1) as i32; + + ids.push(random_id); + } + + let worlds = find_worlds(db.clone(), ids) + .await + .expect("worlds could not be retrieved"); + let mut updated_worlds: Vec = Vec::with_capacity(q as usize); + + for mut world in worlds { + let random_number = (rng.gen::() % 10_000 + 1) as i32; + + world.random_number = random_number as f32; + updated_worlds.push(world); + } + + update_worlds(db.clone(), updated_worlds.clone()) + .await + .expect("could not update worlds"); + + (StatusCode::OK, Json(updated_worlds.clone())) +} + +fn main() { + dotenv().ok(); + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + + for _ in 1..num_cpus::get() { + std::thread::spawn(move || { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + rt.block_on(serve()); + }); + } + rt.block_on(serve()); +} + +async fn serve() { + let database_url: String = get_environment_variable("AXUM_TECHEMPOWER_MONGODB_URL"); + let max_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MAX_POOL_SIZE"); + let min_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MIN_POOL_SIZE"); + + let mut client_options = ClientOptions::parse(database_url).await.unwrap(); + + // setup connection pool + client_options.max_pool_size = Some(max_pool_size); + client_options.min_pool_size = Some(min_pool_size); + client_options.connect_timeout = Some(Duration::from_millis(200)); + + // the server will select the algorithm it supports from the list provided by the driver + client_options.compressors = Some(vec![ + Compressor::Snappy, + Compressor::Zlib { + level: Default::default(), + }, + Compressor::Zstd { + level: Default::default(), + }, + ]); + + let client = Client::with_options(client_options).unwrap(); + let database = client.database("hello_world"); + let server_header_value = HeaderValue::from_static("Axum"); + + let app = Router::new() + .route("/db", get(db)) + .route("/queries", get(queries)) + .route("/updates", get(updates)) + .layer(Extension(database)) + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )); + + server::builder() + .serve(app.into_make_service()) + .await + .unwrap(); +} diff --git a/frameworks/Rust/axum/src/main_pg.rs b/frameworks/Rust/axum/src/main_pg.rs new file mode 100644 index 00000000000..8b123463eb0 --- /dev/null +++ b/frameworks/Rust/axum/src/main_pg.rs @@ -0,0 +1,114 @@ +mod database_pg; +mod models_common; +mod models_pg; +mod server; +mod utils; + +use axum::http::{header, HeaderValue}; +use axum::{ + extract::Query, http::StatusCode, response::IntoResponse, routing::get, Extension, + Json, Router, +}; +use dotenv::dotenv; +use tower_http::set_header::SetResponseHeaderLayer; +use yarte::Template; + +use crate::database_pg::{DatabaseConnection, PgConnection}; +use models_pg::Fortune; +use utils::{parse_params, Params}; + +use crate::utils::{get_environment_variable, Utf8Html}; + +#[derive(Template)] +#[template(path = "fortunes.html.hbs")] +pub struct 
FortunesTemplate<'a> { + pub fortunes: &'a Vec, +} + +async fn db(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { + let world = conn.get_world().await.expect("error loading world"); + + (StatusCode::OK, Json(world)) +} + +async fn queries( + DatabaseConnection(conn): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let results = conn + .get_worlds(q as usize) + .await + .expect("error loading worlds"); + + (StatusCode::OK, Json(results)) +} + +async fn fortunes(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { + let fortunes: Vec = + conn.tell_fortune().await.expect("error loading fortunes"); + + Utf8Html( + FortunesTemplate { + fortunes: &fortunes, + } + .call() + .expect("error rendering template"), + ) +} + +async fn updates( + DatabaseConnection(conn): DatabaseConnection, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let results = conn.update(q as u16).await.expect("error updating worlds"); + + (StatusCode::OK, Json(results)) +} + +fn main() { + dotenv().ok(); + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + + for _ in 1..num_cpus::get() { + std::thread::spawn(move || { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + rt.block_on(serve()); + }); + } + rt.block_on(serve()); +} + +async fn serve() { + let database_url: String = get_environment_variable("AXUM_TECHEMPOWER_DATABASE_URL"); + + // setup connection pool + let pg_connection = PgConnection::connect(database_url).await; + let server_header_value = HeaderValue::from_static("Axum"); + + let router = Router::new() + .route("/fortunes", get(fortunes)) + .route("/db", get(db)) + .route("/queries", get(queries)) + .route("/updates", get(updates)) + .layer(Extension(pg_connection.clone())) + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )); + + server::builder() + .serve(router.into_make_service()) + .await + .unwrap(); +} diff --git a/frameworks/Rust/axum/src/main_pg_pool.rs b/frameworks/Rust/axum/src/main_pg_pool.rs new file mode 100644 index 00000000000..3ca67b03b9f --- /dev/null +++ b/frameworks/Rust/axum/src/main_pg_pool.rs @@ -0,0 +1,164 @@ +mod database_pg_pool; +mod models_common; +mod models_pg_pool; +mod server; +mod utils; + +use crate::database_pg_pool::{ + create_pool, fetch_all_fortunes, fetch_world_by_id, + prepare_fetch_all_fortunes_statement, prepare_fetch_world_by_id_statement, + prepare_update_world_by_id_statement, update_world, DatabaseClient, PgError, +}; +use axum::http::{header, HeaderValue}; +use axum::{ + extract::Query, http::StatusCode, response::IntoResponse, routing::get, Extension, + Json, Router, +}; +use dotenv::dotenv; +use futures_util::stream::FuturesUnordered; +use futures_util::TryStreamExt; +use rand::rngs::SmallRng; +use rand::{thread_rng, Rng, SeedableRng}; +use tower_http::set_header::SetResponseHeaderLayer; +use yarte::Template; + +use crate::utils::{get_environment_variable, Utf8Html}; +use models_pg_pool::{Fortune, World}; +use utils::{parse_params, random_number, Params}; + +#[derive(Template)] +#[template(path = "fortunes.html.hbs")] +pub struct FortunesTemplate<'a> { + pub fortunes: &'a Vec, +} + +async fn db(DatabaseClient(client): DatabaseClient) -> impl IntoResponse { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let random_id = (rng.gen::() % 10_000 + 1) as i32; + + let select = 
prepare_fetch_world_by_id_statement(&client).await; + let world = fetch_world_by_id(&client, random_id, &select) + .await + .expect("could not fetch world"); + + (StatusCode::OK, Json(world)) +} + +async fn queries( + DatabaseClient(client): DatabaseClient, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); + + let select = prepare_fetch_world_by_id_statement(&client).await; + + let future_worlds = FuturesUnordered::new(); + + for _ in 0..q { + let w_id = (rng.gen::() % 10_000 + 1) as i32; + + future_worlds.push(fetch_world_by_id(&client, w_id, &select)); + } + + let worlds: Result, PgError> = future_worlds.try_collect().await; + let results = worlds.expect("worlds could not be retrieved"); + + (StatusCode::OK, Json(results)) +} + +async fn fortunes(DatabaseClient(client): DatabaseClient) -> impl IntoResponse { + let select = prepare_fetch_all_fortunes_statement(&client).await; + + let mut fortunes = fetch_all_fortunes(client, &select) + .await + .expect("could not fetch fortunes"); + + fortunes.push(Fortune { + id: 0, + message: "Additional fortune added at request time.".to_string(), + }); + + fortunes.sort_by(|a, b| a.message.cmp(&b.message)); + + Utf8Html( + FortunesTemplate { + fortunes: &fortunes, + } + .call() + .expect("error rendering template"), + ) +} + +async fn updates( + DatabaseClient(client): DatabaseClient, + Query(params): Query, +) -> impl IntoResponse { + let q = parse_params(params); + + let mut rng = SmallRng::from_entropy(); + + let select = prepare_fetch_world_by_id_statement(&client).await; + + let future_worlds = FuturesUnordered::new(); + + for _ in 0..q { + let query_id = random_number(&mut rng); + + future_worlds.push(fetch_world_by_id(&client, query_id, &select)); + } + + let worlds: Result, PgError> = future_worlds.try_collect().await; + let results = worlds.expect("worlds could not be retrieved"); + + let update = prepare_update_world_by_id_statement(&client).await; + + let future_world_updates = FuturesUnordered::new(); + + for w in &results { + let random_id = random_number(&mut rng); + let w_id = w.id; + + future_world_updates.push(update_world(&client, &update, random_id, w_id)); + } + + let world_updates: Result, PgError> = + future_world_updates.try_collect().await; + world_updates.expect("updates could not be executed"); + + (StatusCode::OK, Json(results)) +} + +#[tokio::main] +async fn main() { + dotenv().ok(); + + serve().await; +} + +async fn serve() { + let database_url: String = get_environment_variable("AXUM_TECHEMPOWER_DATABASE_URL"); + let max_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MAX_POOL_SIZE"); + + // setup Client pool + let pool = create_pool(database_url, max_pool_size).await; + let server_header_value = HeaderValue::from_static("Axum"); + + let router = Router::new() + .route("/fortunes", get(fortunes)) + .route("/db", get(db)) + .route("/queries", get(queries)) + .route("/updates", get(updates)) + .layer(Extension(pool)) + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )); + + server::builder() + .serve(router.into_make_service()) + .await + .unwrap(); +} diff --git a/frameworks/Rust/axum/src/main_sqlx.rs b/frameworks/Rust/axum/src/main_sqlx.rs index 79ad925157c..3f7debbd68d 100644 --- a/frameworks/Rust/axum/src/main_sqlx.rs +++ b/frameworks/Rust/axum/src/main_sqlx.rs @@ -1,69 +1,49 @@ -extern crate serde_derive; -extern crate dotenv; -#[macro_use] -extern crate async_trait; - 
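The queries and updates handlers in main_pg_pool.rs above push one prepared-statement lookup per requested id into a FuturesUnordered and drain it with try_collect, so the lookups are polled concurrently and the first database error aborts the whole batch. A minimal self-contained sketch of that pattern, assuming tokio and futures-util as dependencies and using a hypothetical fetch function in place of fetch_world_by_id:

use futures_util::stream::FuturesUnordered;
use futures_util::TryStreamExt;

// Hypothetical stand-in for fetch_world_by_id(&client, id, &statement).
async fn fetch(id: i32) -> Result<i32, std::io::Error> {
    Ok(id * 2)
}

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    let lookups = FuturesUnordered::new();
    for id in 1..=5 {
        // Queue one future per id; nothing executes until the stream is polled.
        lookups.push(fetch(id));
    }
    // Poll all queued futures concurrently; the first Err short-circuits the batch.
    let results: Vec<i32> = lookups.try_collect().await?;
    println!("{:?}", results);
    Ok(())
}

The same shape appears in database_pg.rs (get_worlds and update), where the statement is prepared once at connect time and each queued future reuses it.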
+mod database_sqlx; mod models_common; mod models_sqlx; -mod database_sqlx; -mod utils; mod server; -mod common; +mod utils; -use dotenv::dotenv; -use std::env; -use crate::database_sqlx::{DatabaseConnection}; +use crate::database_sqlx::{fetch_fortunes, fetch_world, DatabaseConnection}; +use axum::http::{header, HeaderValue}; use axum::{ - extract::{Query}, - http::StatusCode, - response::IntoResponse, - routing::get, - AddExtensionLayer, Json, Router, + extract::Extension, http::StatusCode, response::IntoResponse, routing::get, Json, + Router, }; -use axum::http::{header, HeaderValue}; -use tower_http::set_header::SetResponseHeaderLayer; -use hyper::Body; +use dotenv::dotenv; use rand::rngs::SmallRng; -use rand::{SeedableRng}; +use rand::{thread_rng, Rng, SeedableRng}; use sqlx::PgPool; +use tower_http::set_header::SetResponseHeaderLayer; use yarte::Template; -use models_sqlx::{World, Fortune}; +use crate::utils::get_environment_variable; use database_sqlx::create_pool; -use utils::{Params, parse_params, random_number, Utf8Html}; - -async fn db(DatabaseConnection(mut conn): DatabaseConnection) -> impl IntoResponse { - let mut rng = SmallRng::from_entropy(); - let number = random_number(&mut rng); - - let world : World = sqlx::query_as("SELECT id, randomnumber FROM World WHERE id = $1").bind(number) - .fetch_one(&mut conn).await.ok().expect("error loading world"); +use models_sqlx::{Fortune, World}; +use utils::Utf8Html; - (StatusCode::OK, Json(world)) +#[derive(Template)] +#[template(path = "fortunes.html.hbs")] +pub struct FortunesTemplate<'a> { + pub fortunes: &'a Vec, } -async fn queries(DatabaseConnection(mut conn): DatabaseConnection, Query(params): Query) -> impl IntoResponse { - let q = parse_params(params); - - let mut rng = SmallRng::from_entropy(); +async fn db(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { + let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap(); - let mut results = Vec::with_capacity(q as usize); + let random_id = (rng.gen::() % 10_000 + 1) as i32; - for _ in 0..q { - let query_id = random_number(&mut rng); - - let result :World = sqlx::query_as("SELECT * FROM World WHERE id = $1").bind(query_id) - .fetch_one(&mut conn).await.ok().expect("error loading world"); - - results.push(result); - } + let world = fetch_world(conn, random_id) + .await + .expect("could not fetch world"); - (StatusCode::OK, Json(results)) + (StatusCode::OK, Json(world)) } -async fn fortunes(DatabaseConnection(mut conn): DatabaseConnection) -> impl IntoResponse { - let mut fortunes: Vec = sqlx::query_as("SELECT * FROM Fortune").fetch_all(&mut conn).await - .ok().expect("Could not load Fortunes"); +async fn fortunes(DatabaseConnection(conn): DatabaseConnection) -> impl IntoResponse { + let mut fortunes = fetch_fortunes(conn) + .await + .expect("could not fetch fortunes"); fortunes.push(Fortune { id: 0, @@ -81,41 +61,16 @@ async fn fortunes(DatabaseConnection(mut conn): DatabaseConnection) -> impl Into ) } -async fn updates(DatabaseConnection(mut conn): DatabaseConnection, Query(params): Query) -> impl IntoResponse { - let q = parse_params(params); - - let mut rng = SmallRng::from_entropy(); - - let mut results = Vec::with_capacity(q as usize); - - for _ in 0..q { - let query_id = random_number(&mut rng); - let mut result :World = sqlx::query_as("SELECT * FROM World WHERE id = $1").bind(query_id) - .fetch_one(&mut conn).await.ok().expect("error loading world"); - - result.random_number = random_number(&mut rng); - results.push(result); - } - - for w in &results { 
- sqlx::query("UPDATE World SET randomnumber = $1 WHERE id = $2") - .bind(w.random_number).bind(w.id) - .execute(&mut conn) - .await.ok().expect("could not update world"); - } - - (StatusCode::OK, Json(results)) -} - #[tokio::main] async fn main() { dotenv().ok(); - let database_url = env::var("AXUM_TECHEMPOWER_DATABASE_URL").ok() - .expect("AXUM_TECHEMPOWER_DATABASE_URL environment variable was not set"); + let database_url: String = get_environment_variable("AXUM_TECHEMPOWER_DATABASE_URL"); + let max_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MAX_POOL_SIZE"); + let min_pool_size: u32 = get_environment_variable("AXUM_TECHEMPOWER_MIN_POOL_SIZE"); // setup connection pool - let pool = create_pool(database_url).await; + let pool = create_pool(database_url, max_pool_size, min_pool_size).await; let app = router(pool).await; @@ -126,17 +81,14 @@ async fn main() { } async fn router(pool: PgPool) -> Router { + let server_header_value = HeaderValue::from_static("Axum"); + Router::new() .route("/fortunes", get(fortunes)) .route("/db", get(db)) - .route("/queries", get(queries)) - .route("/updates", get(updates)) - .layer(AddExtensionLayer::new(pool)) - .layer(SetResponseHeaderLayer::<_, Body>::if_not_present(header::SERVER, HeaderValue::from_static("Axum"))) + .layer(Extension(pool)) + .layer(SetResponseHeaderLayer::if_not_present( + header::SERVER, + server_header_value, + )) } - -#[derive(Template)] -#[template(path = "fortunes.html.hbs")] -pub struct FortunesTemplate<'a> { - pub fortunes: &'a Vec, -} \ No newline at end of file diff --git a/frameworks/Rust/axum/src/models_common.rs b/frameworks/Rust/axum/src/models_common.rs index e638602bc2e..9aed68955a4 100644 --- a/frameworks/Rust/axum/src/models_common.rs +++ b/frameworks/Rust/axum/src/models_common.rs @@ -1,6 +1,6 @@ -use serde::{Serialize}; +use serde::Serialize; #[derive(Serialize)] pub struct Message { pub message: &'static str, -} \ No newline at end of file +} diff --git a/frameworks/Rust/axum/src/models_mongo.rs b/frameworks/Rust/axum/src/models_mongo.rs index 63681e1e653..3a8e1617e6a 100644 --- a/frameworks/Rust/axum/src/models_mongo.rs +++ b/frameworks/Rust/axum/src/models_mongo.rs @@ -3,21 +3,18 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] pub struct Fortune { pub id: f32, - pub message: String + pub message: String, } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] pub struct FortuneInfo { pub id: i32, - pub message: String + pub message: String, } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] pub struct World { pub id: f32, #[serde(rename = "randomNumber")] - pub random_number: f32 + pub random_number: f32, } - - - diff --git a/frameworks/Rust/axum/src/models_pg.rs b/frameworks/Rust/axum/src/models_pg.rs new file mode 100644 index 00000000000..0c0df6536f4 --- /dev/null +++ b/frameworks/Rust/axum/src/models_pg.rs @@ -0,0 +1,16 @@ +use serde::{Deserialize, Serialize}; + +#[allow(non_snake_case)] +#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] +pub struct Fortune { + pub id: i32, + pub message: String, +} + +#[allow(non_snake_case)] +#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] +pub struct World { + pub id: i32, + #[serde(rename = "randomNumber")] + pub randomnumber: i32, +} diff --git a/frameworks/Rust/axum/src/models_bb8.rs b/frameworks/Rust/axum/src/models_pg_pool.rs similarity index 89% rename from frameworks/Rust/axum/src/models_bb8.rs rename to frameworks/Rust/axum/src/models_pg_pool.rs index 
40977859f21..7dce3021eed 100644 --- a/frameworks/Rust/axum/src/models_bb8.rs +++ b/frameworks/Rust/axum/src/models_pg_pool.rs @@ -6,7 +6,7 @@ use tokio_pg_mapper_derive::PostgresMapper; #[pg_mapper(table = "Fortune")] pub struct Fortune { pub id: i32, - pub message: String + pub message: String, } #[allow(non_snake_case)] @@ -15,8 +15,5 @@ pub struct Fortune { pub struct World { pub id: i32, #[serde(rename = "randomNumber")] - pub randomnumber: i32 + pub randomnumber: i32, } - - - diff --git a/frameworks/Rust/axum/src/models_sqlx.rs b/frameworks/Rust/axum/src/models_sqlx.rs index 2117f0b5478..270fe941b17 100644 --- a/frameworks/Rust/axum/src/models_sqlx.rs +++ b/frameworks/Rust/axum/src/models_sqlx.rs @@ -4,7 +4,7 @@ use sqlx::FromRow; #[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)] pub struct Fortune { pub id: i32, - pub message: String + pub message: String, } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)] @@ -12,8 +12,5 @@ pub struct World { pub id: i32, #[sqlx(rename = "randomnumber")] #[serde(rename = "randomNumber")] - pub random_number: i32 + pub random_number: i32, } - - - diff --git a/frameworks/Rust/axum/src/server.rs b/frameworks/Rust/axum/src/server.rs index b6b8994ebe3..9232ef47017 100644 --- a/frameworks/Rust/axum/src/server.rs +++ b/frameworks/Rust/axum/src/server.rs @@ -1,19 +1,19 @@ - use std::io; use std::net::{Ipv4Addr, SocketAddr}; use hyper::server::conn::AddrIncoming; use tokio::net::{TcpListener, TcpSocket}; -use crate::common::POOL_SIZE; pub fn builder() -> hyper::server::Builder { let addr = SocketAddr::from((Ipv4Addr::UNSPECIFIED, 8000)); let listener = reuse_listener(addr).expect("couldn't bind to addr"); let incoming = AddrIncoming::from_listener(listener).unwrap(); - println!("Started axum server at 8000 with pool size {pool_size}", pool_size=POOL_SIZE); + println!("Started axum server at 8000"); - axum::Server::builder(incoming).http1_only(true).tcp_nodelay(true) + axum::Server::builder(incoming) + .http1_only(true) + .tcp_nodelay(true) } fn reuse_listener(addr: SocketAddr) -> io::Result { @@ -23,13 +23,13 @@ fn reuse_listener(addr: SocketAddr) -> io::Result { }; #[cfg(unix)] - { - if let Err(e) = socket.set_reuseport(true) { - eprintln!("error setting SO_REUSEPORT: {}", e); - } + { + if let Err(e) = socket.set_reuseport(true) { + eprintln!("error setting SO_REUSEPORT: {}", e); } + } socket.set_reuseaddr(true)?; socket.bind(addr)?; socket.listen(1024) -} \ No newline at end of file +} diff --git a/frameworks/Rust/axum/src/utils.rs b/frameworks/Rust/axum/src/utils.rs index af4acfd5918..161a4e07e52 100644 --- a/frameworks/Rust/axum/src/utils.rs +++ b/frameworks/Rust/axum/src/utils.rs @@ -1,50 +1,48 @@ -use std::convert::Infallible; use axum::body::{Bytes, Full}; -use axum::http::{header, HeaderValue, Response, StatusCode}; -use axum::response::IntoResponse; -use rand::Rng; +use axum::http::{header, HeaderValue, StatusCode}; +use axum::response::{IntoResponse, Response}; use rand::rngs::SmallRng; -use serde::{Deserialize}; +use rand::Rng; +use serde::Deserialize; + +use std::env; +use std::fmt::Debug; +use std::str::FromStr; + +pub fn get_environment_variable(key: &str) -> T +where + ::Err: Debug, +{ + T::from_str( + &*env::var(key).expect(&*format!("{} environment variable was not set", key)), + ) + .expect(&*format!("could not parse {}", key)) +} #[derive(Debug, Deserialize)] pub struct Params { queries: Option, } +#[allow(dead_code)] pub fn random_number(rng: &mut SmallRng) -> i32 { (rng.gen::() % 10_000 + 1) as i32 } 
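Throughout these handlers a World id is drawn from 1..=10_000, either through the random_number helper above or inline via a modulo on a 32-bit random draw (the bias introduced by the modulo is negligible at this range). A short equivalent using rand's explicit range API, assuming rand 0.8 with the small_rng feature enabled:

use rand::rngs::SmallRng;
use rand::{Rng, SeedableRng};

fn main() {
    // Same id range the benchmark handlers use, written as an inclusive range.
    let mut rng = SmallRng::from_entropy();
    let id: i32 = rng.gen_range(1..=10_000);
    println!("{}", id);
}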
+#[allow(dead_code)] pub fn parse_params(params: Params) -> i32 { - let mut q = 0; - - if params.queries.is_some() { - let queries = params.queries.ok_or("could not get value").unwrap(); - - let queries_as_int = queries.parse::(); - - match queries_as_int { - Ok(_ok) => q = queries_as_int.unwrap(), - Err(_e) => q = 1, - } - } - - let q = if q == 0 { - 1 - } else if q > 500 { - 500 - } else { - q - }; - - q + params + .queries + .and_then(|q| q.parse().ok()) + .unwrap_or(1) + .clamp(1, 500) } /// Utility function for mapping any error into a `500 Internal Server Error` /// response. pub fn internal_error(err: E) -> (StatusCode, String) - where - E: std::error::Error, +where + E: std::error::Error, { (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()) } @@ -53,16 +51,15 @@ pub fn internal_error(err: E) -> (StatusCode, String) pub struct Utf8Html(pub T); impl IntoResponse for Utf8Html - where - T: Into>, +where + T: Into>, { - type Body = Full; - type BodyError = Infallible; - - fn into_response(self) -> Response { - let mut res = Response::new(self.0.into()); - res.headers_mut() - .insert(header::CONTENT_TYPE, HeaderValue::from_static("text/html; charset=utf-8")); + fn into_response(self) -> Response { + let mut res = (StatusCode::OK, self.0.into()).into_response(); + res.headers_mut().insert( + header::CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=utf-8"), + ); res } } diff --git a/frameworks/Rust/faf/benchmark_config.json b/frameworks/Rust/faf/benchmark_config.json index 283500e21a3..476d9a3dfd6 100644 --- a/frameworks/Rust/faf/benchmark_config.json +++ b/frameworks/Rust/faf/benchmark_config.json @@ -4,7 +4,7 @@ { "default": { "plaintext_url": "/plaintext", - "port": 8089, + "port": 8080, "approach": "Realistic", "classification": "Platform", "database": "None", diff --git a/frameworks/Rust/faf/faf.dockerfile b/frameworks/Rust/faf/faf.dockerfile index 9a1d5484f62..3899c1128d8 100644 --- a/frameworks/Rust/faf/faf.dockerfile +++ b/frameworks/Rust/faf/faf.dockerfile @@ -12,5 +12,5 @@ RUN RUSTFLAGS="-Ctarget-cpu=native -Ztune-cpu=native -Zmutable-noalias=yes -Clin /root/.cargo/bin/cargo build --release --target x86_64-unknown-linux-gnu -Zbuild-std=panic_abort,core,std,alloc,proc_macro,compiler_builtins \ && strip ./target/x86_64-unknown-linux-gnu/release/faf-ex -EXPOSE 8089 +EXPOSE 8080 CMD ./target/x86_64-unknown-linux-gnu/release/faf-ex diff --git a/frameworks/Rust/faf/src/main.rs b/frameworks/Rust/faf/src/main.rs index f6cfdd48cdd..d8f4478ea76 100644 --- a/frameworks/Rust/faf/src/main.rs +++ b/frameworks/Rust/faf/src/main.rs @@ -75,5 +75,5 @@ fn cb( } pub fn main() { - faf::epoll::go(8089, cb); + faf::epoll::go(8080, cb); } diff --git a/frameworks/Rust/gotham/gotham.dockerfile b/frameworks/Rust/gotham/gotham.dockerfile index 73003b79921..8a2c96ac924 100644 --- a/frameworks/Rust/gotham/gotham.dockerfile +++ b/frameworks/Rust/gotham/gotham.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 WORKDIR /gotham COPY ./src ./src diff --git a/frameworks/Rust/hyper/Cargo.lock b/frameworks/Rust/hyper/Cargo.lock index ed33d18f6aa..119c9eb3594 100644 --- a/frameworks/Rust/hyper/Cargo.lock +++ b/frameworks/Rust/hyper/Cargo.lock @@ -14,12 +14,6 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" -[[package]] -name = "autocfg" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" - [[package]] name = "base64" version = "0.10.1" @@ -84,7 +78,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" dependencies = [ "byteorder", - "either", "iovec", ] @@ -94,6 +87,12 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + [[package]] name = "cfg-if" version = "0.1.10" @@ -182,12 +181,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - [[package]] name = "fake-simd" version = "0.1.2" @@ -241,37 +234,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] -name = "generic-array" -version = "0.12.4" +name = "futures-channel" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ - "typenum", + "futures-core", ] [[package]] -name = "h2" -version = "0.1.26" +name = "futures-core" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" + +[[package]] +name = "futures-task" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" + +[[package]] +name = "futures-util" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ - "byteorder", - "bytes 0.4.12", - "fnv", - "futures", - "http", - "indexmap", - "log 0.4.14", - "slab", - "string", - "tokio-io", + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", ] [[package]] -name = "hashbrown" -version = "0.11.2" +name = "generic-array" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] [[package]] name = "heck" @@ -303,25 +305,24 @@ dependencies = [ [[package]] name = "http" -version = "0.1.21" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 0.4.12", + "bytes 1.1.0", "fnv", - "itoa", + "itoa 1.0.2", ] [[package]] name = "http-body" -version = "0.1.0" +version = "0.4.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 0.4.12", - "futures", + "bytes 1.1.0", "http", - "tokio-buf", + "pin-project-lite", ] [[package]] @@ -330,25 +331,31 @@ version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + [[package]] name = "hyper" -version = "0.12.36" +version = "0.14.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c843caf6296fc1f93444735205af9ed4e109a539005abb2564ae1d6fad34c52" +checksum = "13f67199e765030fa08fe0bd581af683f0d5bc04ea09c2b1102012c5fb90e7fd" dependencies = [ - "bytes 0.4.12", - "futures", - "h2", + "bytes 1.1.0", + "futures-channel", + "futures-core", + "futures-util", "http", "http-body", "httparse", - "iovec", - "itoa", - "log 0.4.14", - "rustc_version", - "time", - "tokio-buf", - "tokio-io", + "httpdate", + "itoa 0.4.8", + "pin-project-lite", + "tokio", + "tower-service", + "tracing", "want", ] @@ -376,16 +383,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" -[[package]] -name = "indexmap" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" -dependencies = [ - "autocfg 1.0.1", - "hashbrown", -] - [[package]] name = "iovec" version = "0.1.4" @@ -401,6 +398,12 @@ version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" +[[package]] +name = "itoa" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" + [[package]] name = "kernel32-sys" version = "0.2.2" @@ -529,6 +532,12 @@ dependencies = [ "libc", ] +[[package]] +name = "once_cell" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" + [[package]] name = "opaque-debug" version = "0.2.3" @@ -575,6 +584,18 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "postgres-protocol" version = "0.4.1" @@ -636,7 +657,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" dependencies = [ - "autocfg 0.1.7", + "autocfg", "libc", "rand_chacha", "rand_core 0.4.2", @@ -655,7 +676,7 @@ version = "0.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" dependencies = [ - "autocfg 0.1.7", + "autocfg", "rand_core 0.3.1", ] @@ -723,7 +744,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" dependencies = [ - "autocfg 0.1.7", + "autocfg", "rand_core 0.4.2", ] @@ -751,15 +772,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05a51ad2b1c5c710fa89e6b1631068dab84ed687bc6a5fe061ad65da3d0c25b2" -[[package]] -name = "rustc_version" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -dependencies = [ - "semver", -] - [[package]] name = "ryu" version = "1.0.5" @@ -772,21 +784,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "332ffa32bf586782a3efaeb58f127980944bbc8c4d6913a86107ac2a5ab24b28" -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - [[package]] name = "serde" version = "1.0.130" @@ -810,7 +807,7 @@ version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" dependencies = [ - "itoa", + "itoa 0.4.8", "ryu", "serde", ] @@ -850,15 +847,6 @@ dependencies = [ "rent_to_own", ] -[[package]] -name = "string" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d" -dependencies = [ - "bytes 0.4.12", -] - [[package]] name = "stringprep" version = "0.1.2" @@ -897,17 +885,6 @@ dependencies = [ "unicode-xid 0.2.2", ] -[[package]] -name = "time" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" -dependencies = [ - "libc", - "wasi", - "winapi 0.3.9", -] - [[package]] name = "tinyvec" version = "1.4.0" @@ -924,14 +901,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] -name = "tokio-buf" -version = "0.1.1" +name = "tokio" +version = "1.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" +checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439" dependencies = [ - "bytes 0.4.12", - "either", - "futures", + "pin-project-lite", ] [[package]] @@ -990,6 +965,32 @@ dependencies = [ "tokio-io", ] +[[package]] +name = "tower-service" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" + +[[package]] +name = "tracing" +version = "0.1.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" +dependencies = [ + "cfg-if 1.0.0", + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921" +dependencies = [ + "once_cell", +] + [[package]] name = "try-lock" version = "0.2.3" @@ -1075,21 +1076,14 @@ checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" [[package]] name = "want" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" dependencies = [ - "futures", "log 0.4.14", "try-lock", ] -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "winapi" version = "0.2.8" diff --git a/frameworks/Rust/hyper/Cargo.toml b/frameworks/Rust/hyper/Cargo.toml index ca6d598b888..78586713e38 100644 --- a/frameworks/Rust/hyper/Cargo.toml +++ b/frameworks/Rust/hyper/Cargo.toml @@ -20,7 +20,7 @@ path = "src/main_db.rs" futures = "0.1" # Disable default runtime, so that tokio-core can be used instead. # See below for why... -hyper = { version = "0.12", default-features = false } +hyper = { version = "0.14", default-features = false } # Since no logs are allowed anyways, just compile-time turn them all off log = { version = "0.4", features = ["release_max_level_off"] } markup = "0.3.1" diff --git a/frameworks/Rust/iron/Cargo.toml b/frameworks/Rust/iron/Cargo.toml deleted file mode 100755 index 186ab9029ad..00000000000 --- a/frameworks/Rust/iron/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "iron" -version = "0.0.4" -edition = "2018" - -[dependencies] -serde = "1.0" -serde_json = "1.0" -serde_derive = "1.0" -iron = "0.5" -router = "0.5" -persistent = "0.3" -hyper = "0.10" -rand = "0.3" -r2d2 = "0.7" -postgres = "0.15.1" -r2d2_postgres = "0.13" -mustache = "0.8" -rustc-serialize = "0.3" diff --git a/frameworks/Rust/iron/benchmark_config.json b/frameworks/Rust/iron/benchmark_config.json deleted file mode 100755 index 54bcc3eb7c3..00000000000 --- a/frameworks/Rust/iron/benchmark_config.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "framework": "iron", - "tests": [{ - "default": { - "json_url": "/json", - "db_url": "/db", - "fortune_url": "/fortune", - "query_url": "/queries?queries=", - "cached_query_url": "/cached-worlds?queries=", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "Postgres", - "framework": "iron", - "language": "Rust", - "orm": "raw", - "platform": "Rust", - "webserver": "hyper", - "os": "Linux", - "database_os": "Linux", - "display_name": "iron", - "notes": "", - "versus": "" - } - }] -} diff --git a/frameworks/Rust/iron/config.toml b/frameworks/Rust/iron/config.toml deleted file mode 100644 index 0067b79dc93..00000000000 --- a/frameworks/Rust/iron/config.toml +++ /dev/null @@ -1,20 +0,0 @@ -[framework] -name = "iron" - -[main] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.db = "/db" -urls.query = 
"/queries?queries=" -urls.update = "/updates?queries=" -urls.fortune = "/fortune" -urls.cached_query = "/cached-worlds?queries=" -approach = "Realistic" -classification = "Micro" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "raw" -platform = "Rust" -webserver = "hyper" -versus = "" diff --git a/frameworks/Rust/iron/src/main.rs b/frameworks/Rust/iron/src/main.rs deleted file mode 100755 index 86ae0f67ed0..00000000000 --- a/frameworks/Rust/iron/src/main.rs +++ /dev/null @@ -1,303 +0,0 @@ -extern crate iron; -extern crate persistent; -#[macro_use] -extern crate router; -extern crate serde; -extern crate serde_json; -#[macro_use] -extern crate serde_derive; -extern crate hyper; -extern crate mustache; -extern crate postgres; -extern crate r2d2; -extern crate r2d2_postgres; -extern crate rand; -extern crate rustc_serialize; - -use hyper::header::{ContentType, Server}; -use iron::modifiers::Header; -use iron::prelude::*; -use iron::status; -use iron::typemap::Key; -use persistent::Read; -use r2d2::Pool; -use r2d2_postgres::{PostgresConnectionManager, TlsMode}; -use rand::distributions::{IndependentSample, Range}; - -#[derive(Serialize, Deserialize)] -struct Message { - message: String, -} - -#[allow(non_snake_case)] -#[derive(Serialize, Deserialize, Clone)] -struct DatabaseRow { - id: i32, - randomNumber: i32, -} - -struct CachedRows; -impl Key for CachedRows { - type Value = Vec; -} - -pub type PostgresPool = Pool; - -struct DbPool; -impl Key for DbPool { - type Value = PostgresPool; -} - -struct FortuneTemplate; -impl Key for FortuneTemplate { - type Value = mustache::Template; -} - -#[derive(RustcEncodable)] -struct FortuneRow { - id: i32, - message: String, -} - -fn main() { - let r2d2_config = r2d2::Config::default(); - let pg_conn_manager = PostgresConnectionManager::new( - "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world", - TlsMode::None, - ) - .unwrap(); - let pool = r2d2::Pool::new(r2d2_config, pg_conn_manager).unwrap(); - let template = mustache::compile_str( - " - Fortunes - - - {{#.}} - {{/.}} -
<tr><th>id</th><th>message</th></tr>
<tr><td>{{id}}</td><td>{{message}}</td></tr>
", - ) - .unwrap(); - - let mut cached_rows: Vec = Vec::with_capacity(10000); - let conn = pool.get().unwrap(); - - for num in 1..10000 { - let rows = &conn - .query("SELECT id, randomnumber FROM World WHERE id = $1", &[&num]) - .unwrap(); - let row = rows.get(0); - cached_rows.push(DatabaseRow { - id: row.get(0), - randomNumber: row.get(1), - }); - } - - let app = router!( - json: get "/json" => json_handler, - single_db_query: get "/db" => single_db_query_handler, - plaintext: get "/plaintext" => plaintext_handler, - queries: get "/queries" => queries_handler, - cachedworlds: get "/cached-worlds" => cached_queries_handler, - fortune: get "/fortune" => fortune_handler, - updates: get "/updates" => updates_handler - ); - let mut middleware = Chain::new(app); - middleware.link(Read::::both(pool)); - middleware.link(Read::::both(template)); - middleware.link(Read::::both(cached_rows)); - - println!("Starting server..."); - Iron::new(middleware).http("0.0.0.0:8080").unwrap(); -} - -fn json_handler(_: &mut Request) -> IronResult { - let message: Message = Message { - message: "Hello, World!".to_owned(), - }; - let content_type = Header(ContentType::json()); - let server = Header(Server("Iron".to_owned())); - Ok(Response::with(( - status::Ok, - serde_json::to_string(&message).unwrap(), - content_type, - server, - ))) -} - -fn plaintext_handler(_: &mut Request) -> IronResult { - let server = Header(Server("Iron".to_owned())); - Ok(Response::with((status::Ok, "Hello, World!", server))) -} - -fn single_db_query_handler(req: &mut Request) -> IronResult { - let content_type = Header(ContentType::json()); - let server = Header(Server("Iron".to_owned())); - let pool = req.get::>().unwrap(); - let conn = pool.get().unwrap(); - let row = random_row(conn); - Ok(Response::with(( - status::Ok, - serde_json::to_string(&row).unwrap(), - server, - content_type, - ))) -} - -fn queries_handler(req: &mut Request) -> IronResult { - let content_type = Header(ContentType::json()); - let server = Header(Server("Iron".to_owned())); - let pool = req.get::>().unwrap(); - let query = req.url.query().unwrap(); - let param = match get_param(query, "queries") { - Some(n) => match n.parse::() { - Ok(m) => match m { - e @ 1...500 => e, - e if e > 500 => 500, - _ => 1, - }, - _ => 1, - }, - _ => 1, - }; - let mut res: Vec = Vec::with_capacity(param); - for _ in 0..param { - let conn = pool.get().unwrap(); - res.push(random_row(conn)) - } - Ok(Response::with(( - status::Ok, - serde_json::to_string(&res).unwrap(), - server, - content_type, - ))) -} - -fn cached_queries_handler(req: &mut Request) -> IronResult { - let content_type = Header(ContentType::json()); - let server = Header(Server("Iron".to_owned())); - let cached_rows = req.get::>().unwrap().to_owned(); - let query = req.url.query().unwrap(); - let param = match get_param(query, "queries") { - Some(n) => match n.parse::() { - Ok(m) => match m { - e @ 1...500 => e, - e if e > 500 => 500, - _ => 1, - }, - _ => 1, - }, - _ => 1, - }; - - let mut res: Vec = Vec::with_capacity(param); - for _ in 0..param { - let mut rng = rand::thread_rng(); - let between = Range::new(1, 10000); - let num = between.ind_sample(&mut rng); - res.push(cached_rows[num].to_owned()) - } - Ok(Response::with(( - status::Ok, - serde_json::to_string(&res).unwrap(), - server, - content_type, - ))) -} - -fn fortune_handler(req: &mut Request) -> IronResult { - let content_type = Header(ContentType::html()); - let server = Header(Server("Iron".to_owned())); - let template = req.get::>().unwrap(); - let 
pool = req.get::>().unwrap(); - let conn = pool.get().unwrap(); - let query_res = &conn.query("SELECT id, message FROM Fortune", &[]).unwrap(); - let query_res_iter = query_res.iter(); - let mut rows: Vec = query_res_iter - .map(|row| FortuneRow { - id: row.get(0), - message: row.get(1), - }) - .collect(); - rows.push(FortuneRow { - id: 0, - message: "Additional fortune added at request time.".to_string(), - }); - rows.sort_by(|it, next| it.message.cmp(&next.message)); - let mut res = vec![]; - template.render(&mut res, &rows).unwrap(); - Ok(Response::with((status::Ok, res, server, content_type))) -} - -fn updates_handler(req: &mut Request) -> IronResult { - let mut rng = rand::thread_rng(); - let between = Range::new(1, 10000); - let content_type = Header(ContentType::json()); - let server = Header(Server("Iron".to_owned())); - let pool = req.get::>().unwrap(); - let query = req.url.query().unwrap(); - let param = match get_param(query, "queries") { - Some(n) => match n.parse::() { - Ok(m) => match m { - e @ 1...500 => e, - e if e > 500 => 500, - _ => 1, - }, - _ => 1, - }, - _ => 1, - }; - let mut dbres: Vec = Vec::with_capacity(param); - for _ in 0..param { - let conn = pool.get().unwrap(); - dbres.push(random_row(conn)) - } - let conn = pool.get().unwrap(); - let trans = conn.transaction().unwrap(); - // Sorting guarantees no deadlocks between multiple concurrent threads - dbres.sort_by_key(|it| it.id); - let mut res: Vec = Vec::with_capacity(param); - for row in dbres { - let num = between.ind_sample(&mut rng); - trans - .execute( - "UPDATE World SET randomnumber = $1 WHERE id = $2", - &[&num, &row.id], - ) - .unwrap(); - res.push(DatabaseRow { - id: row.id, - randomNumber: num, - }) - } - trans.commit().unwrap(); - Ok(Response::with(( - status::Ok, - serde_json::to_string(&res).unwrap(), - server, - content_type, - ))) -} - -fn random_row(conn: r2d2::PooledConnection) -> DatabaseRow { - let mut rng = rand::thread_rng(); - let between = Range::new(1, 10000); - let num = between.ind_sample(&mut rng); - let rows = &conn - .query("SELECT id, randomnumber FROM World WHERE id = $1", &[&num]) - .unwrap(); - let row = rows.get(0); - DatabaseRow { - id: row.get(0), - randomNumber: row.get(1), - } -} - -fn get_param<'a>(querystring: &'a str, param: &'a str) -> Option<&'a str> { - let n = querystring - .split("&") - .find(|&it| !(it.find(param).is_none())); - match n { - Some(n) => n.split("=").nth(1), - _ => n, - } -} diff --git a/frameworks/Rust/ntex/Cargo.toml b/frameworks/Rust/ntex/Cargo.toml index 39535cb4988..c160ded0d05 100755 --- a/frameworks/Rust/ntex/Cargo.toml +++ b/frameworks/Rust/ntex/Cargo.toml @@ -37,13 +37,13 @@ tokio = ["ntex/tokio"] async-std = ["ntex/async-std"] [dependencies] -ntex = "0.5.14" +ntex = "0.5.16" mimalloc = { version = "0.1.25", default-features = false } snmalloc-rs = { version = "0.2.26", features = ["1mib", "native-cpu"] } yarte = { version = "0.15", features = ["bytes-buf", "json"] } env_logger = "0.9" -nanorand = { version = "0.6", default-features = false, features = ["std", "wyrand"] } -atoi = "0.4" +nanorand = { version = "0.7", default-features = false, features = ["std", "wyrand"] } +atoi = "1.0" num_cpus = "1.13" futures = "0.3" http = "0.2" @@ -53,7 +53,7 @@ simd-json-derive = "0.2.2" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" log = { version = "0.4", features = ["release_max_level_off"] } -tok_io = {version = "=1.15.0", package = "tokio" } +tok_io = {version = "1", package = "tokio" } tokio-postgres = { 
git="https://github.com/fafhrd91/postgres.git" } [profile.release] diff --git a/frameworks/Rust/ntex/ntex-astd.dockerfile b/frameworks/Rust/ntex/ntex-astd.dockerfile index 62a923b654b..7f2b8cd3df2 100644 --- a/frameworks/Rust/ntex/ntex-astd.dockerfile +++ b/frameworks/Rust/ntex/ntex-astd.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex-db-astd.dockerfile b/frameworks/Rust/ntex/ntex-db-astd.dockerfile index 33d487e077f..9d04bb13196 100644 --- a/frameworks/Rust/ntex/ntex-db-astd.dockerfile +++ b/frameworks/Rust/ntex/ntex-db-astd.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex-db.dockerfile b/frameworks/Rust/ntex/ntex-db.dockerfile index d54dc4799af..a1811d3d679 100644 --- a/frameworks/Rust/ntex/ntex-db.dockerfile +++ b/frameworks/Rust/ntex/ntex-db.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex-plt-astd.dockerfile b/frameworks/Rust/ntex/ntex-plt-astd.dockerfile index 5f788a9dfb9..0a878b79695 100644 --- a/frameworks/Rust/ntex/ntex-plt-astd.dockerfile +++ b/frameworks/Rust/ntex/ntex-plt-astd.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex-plt.dockerfile b/frameworks/Rust/ntex/ntex-plt.dockerfile index a320203cd09..f6ee510e739 100644 --- a/frameworks/Rust/ntex/ntex-plt.dockerfile +++ b/frameworks/Rust/ntex/ntex-plt.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex.dockerfile b/frameworks/Rust/ntex/ntex.dockerfile index 960a07f5945..7d29bcd9c84 100644 --- a/frameworks/Rust/ntex/ntex.dockerfile +++ b/frameworks/Rust/ntex/ntex.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.57.0 +FROM rust:1.61.0 # Disable simd at jsonescape # ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/src/db.rs b/frameworks/Rust/ntex/src/db.rs index cea0a38aea6..e814b1d0040 100644 --- a/frameworks/Rust/ntex/src/db.rs +++ b/frameworks/Rust/ntex/src/db.rs @@ -1,7 +1,7 @@ use std::{borrow::Cow, fmt::Write as FmtWrite}; use futures::{Future, FutureExt}; -use nanorand::{WyRand, Rng}; +use nanorand::{Rng, WyRand}; use ntex::util::{join_all, Bytes, BytesMut}; use smallvec::SmallVec; use tokio_postgres::types::ToSql; diff --git a/frameworks/Rust/ntex/src/main.rs b/frameworks/Rust/ntex/src/main.rs index 4610829a25e..0fa82483b55 100644 --- a/frameworks/Rust/ntex/src/main.rs +++ b/frameworks/Rust/ntex/src/main.rs @@ -50,8 +50,8 @@ async fn main() -> std::io::Result<()> { .backlog(1024) .bind("techempower", "0.0.0.0:8080", |cfg| { cfg.memory_pool(PoolId::P1); - PoolId::P1.set_read_params(65535, 1024); - PoolId::P1.set_write_params(65535, 1024); + PoolId::P1.set_read_params(65535, 8192); + PoolId::P1.set_write_params(65535, 8192); http::HttpService::build() .keep_alive(http::KeepAlive::Os) diff --git a/frameworks/Rust/ntex/src/main_db.rs b/frameworks/Rust/ntex/src/main_db.rs index 5fbf6ffdb8d..e0ea041ccfe 100644 --- a/frameworks/Rust/ntex/src/main_db.rs +++ b/frameworks/Rust/ntex/src/main_db.rs @@ -1,3 +1,4 @@ +#[cfg(not(target_os = "macos"))] #[global_allocator] static GLOBAL: 
mimalloc::MiMalloc = mimalloc::MiMalloc; @@ -100,8 +101,8 @@ async fn main() -> std::io::Result<()> { .backlog(1024) .bind("techempower", "0.0.0.0:8080", |cfg| { cfg.memory_pool(PoolId::P1); - PoolId::P1.set_read_params(65535, 1024); - PoolId::P1.set_write_params(65535, 1024); + PoolId::P1.set_read_params(65535, 8192); + PoolId::P1.set_write_params(65535, 8192); HttpService::build() .keep_alive(KeepAlive::Os) diff --git a/frameworks/Rust/ntex/src/main_plt.rs b/frameworks/Rust/ntex/src/main_plt.rs index 28fecd97012..9b16027d4b0 100644 --- a/frameworks/Rust/ntex/src/main_plt.rs +++ b/frameworks/Rust/ntex/src/main_plt.rs @@ -88,8 +88,8 @@ async fn main() -> io::Result<()> { .backlog(1024) .bind("techempower", "0.0.0.0:8080", |cfg| { cfg.memory_pool(PoolId::P1); - PoolId::P1.set_read_params(65535, 1024); - PoolId::P1.set_write_params(65535, 1024); + PoolId::P1.set_read_params(65535, 8192); + PoolId::P1.set_write_params(65535, 8192); fn_service(|io| App { io, diff --git a/frameworks/Rust/roa/benchmark_config.json b/frameworks/Rust/roa/benchmark_config.json index 1a9189ad785..b60fbac4858 100755 --- a/frameworks/Rust/roa/benchmark_config.json +++ b/frameworks/Rust/roa/benchmark_config.json @@ -57,7 +57,7 @@ }, "diesel": { "db_url": "/db", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "query_url": "/queries?q=", "update_url": "/updates?q=", "port": 8080, @@ -77,7 +77,7 @@ }, "pg": { "db_url": "/db", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "query_url": "/queries?q=", "update_url": "/updates?q=", "port": 8080, @@ -97,7 +97,7 @@ }, "sqlx": { "db_url": "/db", - "fortune_url": "/fortune", + "fortune_url": "/fortunes", "query_url": "/queries?q=", "update_url": "/updates?q=", "port": 8080, diff --git a/frameworks/Rust/roa/roa-core.dockerfile b/frameworks/Rust/roa/roa-core.dockerfile index ef3488eb1e8..ce2fd1e0723 100644 --- a/frameworks/Rust/roa/roa-core.dockerfile +++ b/frameworks/Rust/roa/roa-core.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/roa-diesel.dockerfile b/frameworks/Rust/roa/roa-diesel.dockerfile index f641dc429fd..effe77c5c04 100644 --- a/frameworks/Rust/roa/roa-diesel.dockerfile +++ b/frameworks/Rust/roa/roa-diesel.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/roa-pg.dockerfile b/frameworks/Rust/roa/roa-pg.dockerfile index 996ee43777e..d0a93934630 100644 --- a/frameworks/Rust/roa/roa-pg.dockerfile +++ b/frameworks/Rust/roa/roa-pg.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/roa-sqlx.dockerfile b/frameworks/Rust/roa/roa-sqlx.dockerfile index 34e61f24fde..bb2468f68f8 100644 --- a/frameworks/Rust/roa/roa-sqlx.dockerfile +++ b/frameworks/Rust/roa/roa-sqlx.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/roa-tokio.dockerfile b/frameworks/Rust/roa/roa-tokio.dockerfile index 926b04af3dc..93a266e10f5 100644 --- a/frameworks/Rust/roa/roa-tokio.dockerfile +++ b/frameworks/Rust/roa/roa-tokio.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/roa.dockerfile b/frameworks/Rust/roa/roa.dockerfile index d992629c33f..1d330eef042 100644 --- 
a/frameworks/Rust/roa/roa.dockerfile +++ b/frameworks/Rust/roa/roa.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.44 +FROM rust:1.59.0 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/roa/src/main-db.rs b/frameworks/Rust/roa/src/main-db.rs index 0bbd4137c05..acb702c17fd 100644 --- a/frameworks/Rust/roa/src/main-db.rs +++ b/frameworks/Rust/roa/src/main-db.rs @@ -101,7 +101,7 @@ fn routes(prefix: &'static str) -> StdResult> { .gate(gate) .on("/db", get(db)) .on("/queries", get(queries)) - .on("/fortune", get(fortune)) + .on("/fortunes", get(fortune)) .on("/updates", get(updates)) .routes(prefix) .map_err(Into::into) diff --git a/frameworks/Rust/rocket/Cargo.toml b/frameworks/Rust/rocket/Cargo.toml index 9328c2452bd..c77294ad930 100644 --- a/frameworks/Rust/rocket/Cargo.toml +++ b/frameworks/Rust/rocket/Cargo.toml @@ -1,26 +1,32 @@ [package] name = "rocket_techempower" -version = "0.2.0" +version = "0.3.0" authors = ["Marcelo Barbosa ", "Brendan Hansknecht ", "Dragos Varovici "] -edition = "2018" +edition = "2021" + +[[bin]] +name = "rocket" +path = "src/main.rs" [dependencies] -num_cpus = { version = "^1.13" } -rand = { version = "^0.8" } -yarte = { version = "^0.15" } -lazy_static = { version = "^1.4" } -async-stream = { version = "^0.3" } -async-trait = { version = "0.1" } -futures = { version = "^0.3" } -futures-util = { version = "^0.3" } -rocket = { git = "https://github.com/SergioBenitez/Rocket", features = [ +num_cpus = "1.13.1" +rand = "0.8.5" +yarte = "0.15.6" +lazy_static = "1.4.0" +async-stream = "0.3.3" +async-trait = "0.1.53" +futures = "0.3.21" +futures-util = "0.3.21" +rocket = { version = "0.5.0-rc.2", features = [ "json", ] } -sqlx = { version = "^0.5", features = [ "postgres", "macros", "migrate", "sqlite" ] } -rocket_db_pools = { git = "https://github.com/SergioBenitez/Rocket", features = [ "sqlx_postgres" ] } -figment = { version = "^0.10" } -dotenv = { version = "^0.15" } +rocket_db_pools = { version = "0.1.0-rc.2", features = [ + "sqlx_postgres", +] } +sqlx = { version = "0.5.13", features = [ "postgres", "macros" ] } +figment = "0.10.6" +dotenv = "0.15.0" -serde = { version = "^1" } -serde_json = { version = "^1" } -serde_derive = { version = "^1" } +serde = "1.0.137" +serde_json = "1.0.81" +serde_derive = "1.0.137" \ No newline at end of file diff --git a/frameworks/Rust/rocket/db/migrations/20211027024424_create-world-table.sql b/frameworks/Rust/rocket/db/migrations/20211027024424_create-world-table.sql deleted file mode 100644 index 8c14d4bafea..00000000000 --- a/frameworks/Rust/rocket/db/migrations/20211027024424_create-world-table.sql +++ /dev/null @@ -1,10 +0,0 @@ -CREATE TABLE World ( - id INTEGER PRIMARY KEY, - randomnumber INTEGER NOT NULL -); - -INSERT INTO World (id, randomnumber) VALUES (1, 101); -INSERT INTO World (id, randomnumber) VALUES (2, 102); -INSERT INTO World (id, randomnumber) VALUES (3, 103); -INSERT INTO World (id, randomnumber) VALUES (4, 104); -INSERT INTO World (id, randomnumber) VALUES (5, 105); \ No newline at end of file diff --git a/frameworks/Rust/rocket/db/migrations/20211028024424_create-fortune-table.sql b/frameworks/Rust/rocket/db/migrations/20211028024424_create-fortune-table.sql deleted file mode 100644 index e2f68851770..00000000000 --- a/frameworks/Rust/rocket/db/migrations/20211028024424_create-fortune-table.sql +++ /dev/null @@ -1,17 +0,0 @@ -CREATE TABLE Fortune ( - id INTEGER PRIMARY KEY, - message VARCHAR NOT NULL -); - -INSERT INTO Fortune (id, message) VALUES (1, 'fortune: No 
such file or directory'); -INSERT INTO Fortune (id, message) VALUES (2, 'A computer scientist is someone who fixes things that aren''t broken.'); -INSERT INTO Fortune (id, message) VALUES (3, 'After enough decimal places, nobody gives a damn.'); -INSERT INTO Fortune (id, message) VALUES (4, 'A bad random number generator: 1, 1, 1, 1, 1, 4.33e+67, 1, 1, 1'); -INSERT INTO Fortune (id, message) VALUES (5, 'A computer program does what you tell it to do, not what you want it to do.'); -INSERT INTO Fortune (id, message) VALUES (6, 'Emacs is a nice operating system, but I prefer UNIX. — Tom Christaensen'); -INSERT INTO Fortune (id, message) VALUES (7, 'Any program that runs right is obsolete.'); -INSERT INTO Fortune (id, message) VALUES (8, 'A list is only as strong as its weakest link. — Donald Knuth'); -INSERT INTO Fortune (id, message) VALUES (9, 'Feature: A bug with seniority.'); -INSERT INTO Fortune (id, message) VALUES (10, 'Computers make very fast, very accurate mistakes.'); -INSERT INTO Fortune (id, message) VALUES (11, ''); -INSERT INTO Fortune (id, message) VALUES (12, 'フレームワークのベンチマーク'); \ No newline at end of file diff --git a/frameworks/Rust/rocket/rocket.dockerfile b/frameworks/Rust/rocket/rocket.dockerfile index a566defd5ed..1d5cba3cd5a 100644 --- a/frameworks/Rust/rocket/rocket.dockerfile +++ b/frameworks/Rust/rocket/rocket.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.55-slim-buster +FROM rust:1.60-slim ENV ROCKET_BENCHMARK_DATABASE_URL=postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world @@ -10,7 +10,8 @@ ADD ./ /rocket WORKDIR /rocket RUN RUSTFLAGS="-C target-cpu=native" cargo build --release +RUN cp ./target/release/rocket ./target/release/rocket-techempower EXPOSE 8000 -CMD ./target/release/rocket_techempower +CMD ./target/release/rocket-techempower diff --git a/frameworks/Rust/rocket/src/main.rs b/frameworks/Rust/rocket/src/main.rs index 27d1a47d6fd..54f67f03078 100644 --- a/frameworks/Rust/rocket/src/main.rs +++ b/frameworks/Rust/rocket/src/main.rs @@ -12,6 +12,7 @@ mod database; use dotenv::dotenv; use std::net::{IpAddr, Ipv4Addr}; use std::env; +use std::thread::available_parallelism; use rocket::{Rocket, Build}; use rocket::serde::json::Json; use rocket::response::content::RawHtml; @@ -156,7 +157,7 @@ pub fn launch() -> Rocket { port: 8000, keep_alive: 0, log_level: LogLevel::Off, - workers: num_cpus::get() * 16, + workers: available_parallelism().expect("could not get parallelism").get() * 16, ..Default::default() }; diff --git a/frameworks/Rust/salvo/Cargo.toml b/frameworks/Rust/salvo/Cargo.toml index 3b242e2cee1..b3ff8192c8d 100644 --- a/frameworks/Rust/salvo/Cargo.toml +++ b/frameworks/Rust/salvo/Cargo.toml @@ -16,25 +16,24 @@ name = "main-pg" path = "src/main_pg.rs" [dependencies] -anyhow = "1.0" -async-trait = "0.1.51" +anyhow = "1" +async-trait = "0.1" +bytes = "1" diesel = { version = "1.4", features = ["postgres", "r2d2"] } futures = "0.3" -markup = "0.12" -num_cpus = "1.13.0" -# mimalloc = { version = "0.1.25", default-features = false } -once_cell = "1.5.2" -rand = { version = "0.8.3", features = ["min_const_gen", "small_rng"] } -random-fast-rng = "0.1.1" -salvo = { version = "0.16", features = ["anyhow"] } -serde = { version = "1.0", features = ["derive"] } -serde_derive = "1.0.125" -serde_json = "1.0.64" -smallvec = "1.6.1" -snmalloc-rs = { version = "0.2.24", features = ["1mib", "native-cpu"] } +markup = "0.13" +# mimalloc = { version = "0.1", default-features = false } +once_cell = "1" +rand = { version = "0.8", features = ["min_const_gen", 
"small_rng"] } +random-fast-rng = "0.1" +salvo = { version = "0.31", default-features = false, features = ["anyhow"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +smallvec = "1" +snmalloc-rs = { version = "0.3", features = ["native-cpu"] } tokio = { version = "1", features = ["macros", "rt"] } -tokio-postgres = "0.7.2" -v_htmlescape = "0.14" +tokio-postgres = "0.7" +v_htmlescape = "0.15" [profile.release] lto = true diff --git a/frameworks/Rust/salvo/README.md b/frameworks/Rust/salvo/README.md index a6e6e3ff141..b3ea8295627 100644 --- a/frameworks/Rust/salvo/README.md +++ b/frameworks/Rust/salvo/README.md @@ -2,4 +2,4 @@ ## Description -Salvo is a simple but powerful web server framework written in Rust. +Salvo is a powerful and simplest web server framework in Rust world. diff --git a/frameworks/Rust/salvo/salvo-diesel.dockerfile b/frameworks/Rust/salvo/salvo-diesel.dockerfile index 345dff5047e..1736b0194b7 100644 --- a/frameworks/Rust/salvo/salvo-diesel.dockerfile +++ b/frameworks/Rust/salvo/salvo-diesel.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.58.1 +FROM rust:1.62.1 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/salvo/salvo-pg.dockerfile b/frameworks/Rust/salvo/salvo-pg.dockerfile index 67a89ea0b89..c696a92741a 100644 --- a/frameworks/Rust/salvo/salvo-pg.dockerfile +++ b/frameworks/Rust/salvo/salvo-pg.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.58.1 +FROM rust:1.62.1 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/salvo/salvo.dockerfile b/frameworks/Rust/salvo/salvo.dockerfile index 8971531d6e6..b58afb1817d 100644 --- a/frameworks/Rust/salvo/salvo.dockerfile +++ b/frameworks/Rust/salvo/salvo.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.58.1 +FROM rust:1.62.1 # Disable simd at jsonescape ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/salvo/src/main.rs b/frameworks/Rust/salvo/src/main.rs index acd9a0ca3e7..7231c340ddf 100644 --- a/frameworks/Rust/salvo/src/main.rs +++ b/frameworks/Rust/salvo/src/main.rs @@ -1,65 +1,52 @@ // #[global_allocator] -// static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; -// #[global_allocator] // static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; -#[macro_use] -extern crate serde_derive; -extern crate serde_json; - use std::sync::Arc; +use bytes::Bytes; use salvo::http::header::{self, HeaderValue}; +use salvo::http::response::Body; use salvo::prelude::*; +use serde::Serialize; mod server; -static HELLO_WORLD: &'static [u8] = b"Hello, world!"; #[derive(Serialize)] pub struct Message { pub message: &'static str, } -#[fn_handler] -async fn json(res: &mut Response) { - res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&Message { +#[handler] +fn json(res: &mut Response) { + let headers = res.headers_mut(); + headers.insert(header::SERVER, HeaderValue::from_static("S")); + headers.insert( + header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + let data = serde_json::to_vec(&Message { message: "Hello, World!", - }); + }) + .unwrap(); + res.set_body(Body::Once(Bytes::from(data))); } -#[fn_handler] -async fn plaintext(res: &mut Response) { - res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_binary(HeaderValue::from_static("text/plain"), HELLO_WORLD); +#[handler] +fn plaintext(res: &mut Response) { + let headers = res.headers_mut(); + headers.insert(header::SERVER, HeaderValue::from_static("S")); + headers.insert(header::CONTENT_TYPE, 
HeaderValue::from_static("text/plain")); + res.set_body(Body::Once(Bytes::from_static(b"Hello, world!"))); } -fn main() { +#[tokio::main] +async fn main() { let router = Arc::new( Router::new() .push(Router::with_path("plaintext").get(plaintext)) .push(Router::with_path("json").get(json)), ); - for _ in 1..num_cpus::get() { - let router = router.clone(); - std::thread::spawn(move || { - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - rt.block_on(serve(router)); - }); - } - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - rt.block_on(serve(router)); -} - -async fn serve(router: Arc) { - println!("Started http server: 127.0.0.1:8080"); server::builder() .http1_pipeline_flush(true) .serve(Service::new(router)) diff --git a/frameworks/Rust/salvo/src/main_diesel.rs b/frameworks/Rust/salvo/src/main_diesel.rs index b2b1205f786..91117613e99 100644 --- a/frameworks/Rust/salvo/src/main_diesel.rs +++ b/frameworks/Rust/salvo/src/main_diesel.rs @@ -9,6 +9,7 @@ extern crate diesel; use std::cmp; use std::fmt::Write; use std::sync::Arc; +use std::thread::available_parallelism; use anyhow::Error; use diesel::prelude::*; @@ -45,20 +46,20 @@ fn build_pool(database_url: &str, size: u32) -> Result { .build(manager) } -#[fn_handler] +#[handler] async fn world_row(res: &mut Response) -> Result<(), Error> { let mut rng = SmallRng::from_entropy(); let random_id = rng.gen_range(1..10_001); let conn = connect()?; let row = world::table.find(random_id).first::(&conn)?; res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&row); + res.render(Json(row)); Ok(()) } -#[fn_handler] +#[handler] async fn queries(req: &mut Request, res: &mut Response) -> Result<(), Error> { - let count = req.get_query::("q").unwrap_or(1); + let count = req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); let mut worlds = Vec::with_capacity(count); let mut rng = SmallRng::from_entropy(); @@ -69,13 +70,13 @@ async fn queries(req: &mut Request, res: &mut Response) -> Result<(), Error> { worlds.push(w); } res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&worlds); + res.render(Json(worlds)); Ok(()) } -#[fn_handler] +#[handler] async fn cached_queries(req: &mut Request, res: &mut Response) -> Result<(), Error> { - let count = req.get_query::("q").unwrap_or(1); + let count = req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); let mut worlds = Vec::with_capacity(count); let mut rng = SmallRng::from_entropy(); @@ -87,13 +88,13 @@ async fn cached_queries(req: &mut Request, res: &mut Response) -> Result<(), Err } } res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&worlds); + res.render(Json(worlds)); Ok(()) } -#[fn_handler] +#[handler] async fn updates(req: &mut Request, res: &mut Response) -> Result<(), Error> { - let count = req.get_query::("q").unwrap_or(1); + let count = req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); let conn = connect()?; let mut worlds = Vec::with_capacity(count); @@ -116,11 +117,11 @@ async fn updates(req: &mut Request, res: &mut Response) -> Result<(), Error> { })?; res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&worlds); + res.render(Json(worlds)); Ok(()) } -#[fn_handler] +#[handler] async fn fortunes(res: &mut Response) -> Result<(), Error> { let conn = connect()?; let mut items = 
fortune::table.get_results::(&conn)?; @@ -134,7 +135,7 @@ async fn fortunes(res: &mut Response) -> Result<(), Error> { write!(&mut body, "{}", FortunesTemplate { items }).unwrap(); res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_html_text(&body); + res.render(Text::Html(body)); Ok(()) } @@ -176,12 +177,12 @@ fn main() { .push(Router::with_path("cached_queries").get(cached_queries)) .push(Router::with_path("updates").get(updates)), ); - let cpus = num_cpus::get(); + let size = available_parallelism().map(|n| n.get()).unwrap_or(16); DB_POOL - .set(build_pool(&DB_URL, cpus as u32).expect(&format!("Error connecting to {}", &DB_URL))) + .set(build_pool(DB_URL, size as u32).unwrap_or_else(|_| panic!("Error connecting to {}", &DB_URL))) .ok(); populate_cache().expect("error cache worlds"); - for _ in 1..cpus { + for _ in 1..size { let router = router.clone(); std::thread::spawn(move || { let rt = tokio::runtime::Builder::new_current_thread() @@ -191,6 +192,7 @@ fn main() { rt.block_on(serve(router)); }); } + println!("Starting http server: 127.0.0.1:8080"); let rt = tokio::runtime::Builder::new_current_thread() .enable_all() .build() @@ -199,6 +201,5 @@ fn main() { } async fn serve(router: Arc) { - println!("Starting http server: 127.0.0.1:8080"); server::builder().serve(Service::new(router)).await.unwrap(); } diff --git a/frameworks/Rust/salvo/src/main_pg.rs b/frameworks/Rust/salvo/src/main_pg.rs index df78efbe5a8..4bab1015a4e 100644 --- a/frameworks/Rust/salvo/src/main_pg.rs +++ b/frameworks/Rust/salvo/src/main_pg.rs @@ -7,6 +7,7 @@ use std::cmp; use std::collections::HashMap; use std::fmt::Write; use std::io; +use std::thread::available_parallelism; use anyhow::Error; use async_trait::async_trait; @@ -184,8 +185,7 @@ impl WorldHandler { async fn new() -> Self { Self { conn: PgConnection::create(DB_URL) - .await - .expect(&format!("Error connecting to {}", &DB_URL)), + .await.unwrap_or_else(|_| panic!("Error connecting to {}", &DB_URL)), } } } @@ -194,7 +194,7 @@ impl Handler for WorldHandler { async fn handle(&self, _req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) { res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); let world = self.conn.get_world().await.unwrap(); - res.render_json(&world); + res.render(Json(world)); } } struct WorldsHandler { @@ -205,18 +205,18 @@ impl WorldsHandler { Self { conn: PgConnection::create(DB_URL) .await - .expect(&format!("Error connecting to {}", &DB_URL)), + .unwrap_or_else(|_| panic!("Error connecting to {}", &DB_URL)), } } } #[async_trait] impl Handler for WorldsHandler { async fn handle(&self, req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) { - let count = req.get_query::("q").unwrap_or(1); + let count = req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); let worlds = self.conn.get_worlds(count).await.unwrap(); - res.render_json(&worlds); + res.render(Json(worlds)); } } struct UpdatesHandler { @@ -227,18 +227,18 @@ impl UpdatesHandler { Self { conn: PgConnection::create(DB_URL) .await - .expect(&format!("Error connecting to {}", &DB_URL)), + .unwrap_or_else(|_| panic!("Error connecting to {}", &DB_URL)), } } } #[async_trait] impl Handler for UpdatesHandler { async fn handle(&self, req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) { - let count = req.get_query::("q").unwrap_or(1); + let count = 
req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); let worlds = self.conn.update(count).await.unwrap(); - res.render_json(&worlds); + res.render(Json(worlds)); } } struct FortunesHandler { @@ -249,7 +249,7 @@ impl FortunesHandler { Self { conn: PgConnection::create(DB_URL) .await - .expect(&format!("Error connecting to {}", &DB_URL)), + .unwrap_or_else(|_| panic!("Error connecting to {}", &DB_URL)), } } } @@ -258,15 +258,14 @@ impl Handler for FortunesHandler { async fn handle(&self, _req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) { let mut body = String::new(); write!(&mut body, "{}", self.conn.tell_fortune().await.unwrap()).unwrap(); - res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_html_text(&body); + res.render(Text::Html(body)); } } -#[fn_handler] +#[handler] async fn cached_queries(req: &mut Request, res: &mut Response) -> Result<(), Error> { - let count = req.get_query::("q").unwrap_or(1); + let count = req.query::("q").unwrap_or(1); let count = cmp::min(500, cmp::max(1, count)); let mut worlds = Vec::with_capacity(count); let mut rng = SmallRng::from_entropy(); @@ -278,7 +277,7 @@ async fn cached_queries(req: &mut Request, res: &mut Response) -> Result<(), Err } } res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_json(&worlds); + res.render(Json(worlds)); Ok(()) } @@ -297,7 +296,7 @@ fn main() { rt.block_on(async { populate_cache().await.expect("error cache worlds"); }); - for _ in 1..num_cpus::get() { + for _ in 1..available_parallelism().map(|n| n.get()).unwrap_or(16) { std::thread::spawn(move || { let rt = tokio::runtime::Builder::new_current_thread() .enable_all() @@ -306,11 +305,11 @@ fn main() { rt.block_on(serve()); }); } + println!("Started http server: 127.0.0.1:8080"); rt.block_on(serve()); } async fn serve() { - println!("Started http server: 127.0.0.1:8080"); let router = Router::new() .push(Router::with_path("db").get(WorldHandler::new().await)) .push(Router::with_path("fortunes").get(FortunesHandler::new().await)) diff --git a/frameworks/Rust/salvo/src/server.rs b/frameworks/Rust/salvo/src/server.rs index 1f53fbc3421..a56501c24d6 100644 --- a/frameworks/Rust/salvo/src/server.rs +++ b/frameworks/Rust/salvo/src/server.rs @@ -1,15 +1,17 @@ use std::io; use std::net::{Ipv4Addr, SocketAddr}; -use salvo::hyper::server::conn::AddrIncoming; use salvo::hyper; +use salvo::hyper::server::conn::AddrIncoming; use tokio::net::{TcpListener, TcpSocket}; pub fn builder() -> hyper::server::Builder { let addr = SocketAddr::from((Ipv4Addr::UNSPECIFIED, 8080)); let listener = reuse_listener(addr).expect("couldn't bind to addr"); let incoming = AddrIncoming::from_listener(listener).unwrap(); - hyper::Server::builder(incoming).http1_only(true).tcp_nodelay(true) + hyper::Server::builder(incoming) + .http1_only(true) + .tcp_nodelay(true) } fn reuse_listener(addr: SocketAddr) -> io::Result { @@ -19,13 +21,13 @@ fn reuse_listener(addr: SocketAddr) -> io::Result { }; #[cfg(unix)] - { - if let Err(e) = socket.set_reuseport(true) { - eprintln!("error setting SO_REUSEPORT: {}", e); - } + { + if let Err(e) = socket.set_reuseport(true) { + eprintln!("error setting SO_REUSEPORT: {}", e); } + } socket.set_reuseaddr(true)?; socket.bind(addr)?; socket.listen(1024) -} \ No newline at end of file +} diff --git a/frameworks/Rust/tide/Cargo.lock b/frameworks/Rust/tide/Cargo.lock index 
6d7624293fd..9b6b0b41344 100644 --- a/frameworks/Rust/tide/Cargo.lock +++ b/frameworks/Rust/tide/Cargo.lock @@ -1,5 +1,7 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. +version = 3 + [[package]] name = "aead" version = "0.3.2" @@ -637,9 +639,9 @@ checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" [[package]] name = "diesel" -version = "1.4.5" +version = "1.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2de9deab977a153492a1468d1b1c0662c1cf39e5ea87d0c060ecd59ef18d8c" +checksum = "047bfc4d5c3bd2ef6ca6f981941046113524b9a9f9a7cbdfdd7ff40f58e6f542" dependencies = [ "bitflags", "byteorder", diff --git a/frameworks/Rust/tide/Cargo.toml b/frameworks/Rust/tide/Cargo.toml index 269c10835e5..80c05675a77 100644 --- a/frameworks/Rust/tide/Cargo.toml +++ b/frameworks/Rust/tide/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" askama = "0.10.5" async-std = { version = "1.9.0", features = ["attributes"] } async-trait = "0.1.42" -diesel = { version = "1.4.5", features = ["postgres", "r2d2"] } +diesel = { version = "1.4.6", features = ["postgres", "r2d2"] } http-types = "2.10.0" rand = { version = "0.7", features = ["small_rng"] } serde = { version = "1.0.123", features = ["derive"] } diff --git a/frameworks/Rust/trillium/Cargo.lock b/frameworks/Rust/trillium/Cargo.lock index aef9e24631b..8f7e6d321d4 100644 --- a/frameworks/Rust/trillium/Cargo.lock +++ b/frameworks/Rust/trillium/Cargo.lock @@ -33,9 +33,9 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "askama" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d8f355701c672c2ba3d718acbd213f740beea577cc4eae66accdffe15be1882" +checksum = "fb98f10f371286b177db5eeb9a6e5396609555686a35e1d4f7b9a9c6d8af0139" dependencies = [ "askama_derive", "askama_escape", @@ -44,9 +44,9 @@ dependencies = [ [[package]] name = "askama_derive" -version = "0.11.0" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84704cab5b7ae0fd3a9f78ee5eb7b27f3749df445f04623db6633459ae283267" +checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71" dependencies = [ "askama_shared", "proc-macro2", @@ -55,18 +55,20 @@ dependencies = [ [[package]] name = "askama_escape" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a1bb320f97e6edf9f756bf015900038e43c7700e059688e5724a928c8f3b8d5" +checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" [[package]] name = "askama_shared" -version = "0.12.0" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dae03eebba55a2697a376e58b573a29fe36893157173ac8df312ad85f3c0e012" +checksum = "bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0" dependencies = [ "askama_escape", "humansize", + "mime", + "mime_guess", "nom", "num-traits", "percent-encoding", @@ -462,9 +464,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" dependencies = [ "cfg-if", "lazy_static", @@ -548,9 +550,9 @@ checksum = 
"f7531096570974c3a9dcf9e4b8e1cede1ec26cf5046219fb3b9d897503b9be59" [[package]] name = "fastrand" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "779d043b6a0b90cc4c0ed7ee380a6504394cee7efd7db050e3774eee387324b2" +checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" dependencies = [ "instant", ] @@ -603,9 +605,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", "futures-sink", @@ -613,9 +615,9 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" @@ -641,9 +643,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-lite" @@ -662,9 +664,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" +checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ "proc-macro2", "quote", @@ -673,21 +675,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ "futures-channel", "futures-core", @@ -1035,6 +1037,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "mime_guess" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +dependencies = [ + "mime", + "unicase", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -1188,9 +1200,9 @@ dependencies = [ [[package]] name = "ouroboros" -version = "0.11.1" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3518a68fc597f6a42f83a31e41c039c3cbaa10fa8bb239c936c235e81cce873f" 
+checksum = "71643f290d126e18ac2598876d01e1d57aed164afc78fdb6e2a0c6589a1f6662" dependencies = [ "aliasable", "ouroboros_macro", @@ -1199,9 +1211,9 @@ dependencies = [ [[package]] name = "ouroboros_macro" -version = "0.11.1" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e23813b1bcb2d41a838849a2bbae40ae5c03c85ecabf04ba97086f438484714" +checksum = "ed9a247206016d424fe8497bc611e510887af5c261fbbf977877c4bb55ca4d82" dependencies = [ "Inflector", "proc-macro-error", @@ -1460,9 +1472,9 @@ dependencies = [ [[package]] name = "routefinder" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55ef95b3a4278404aa40845bfe17ef4906cbe768c39bf4fa0886a06a07df80d" +checksum = "480a056df7cdee2fd55df6dc10ce137c70b00cc65f20a16f796d2250ef8e0cd8" dependencies = [ "smartcow", "smartstring", @@ -1479,6 +1491,12 @@ dependencies = [ "serde", ] +[[package]] +name = "rustversion" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" + [[package]] name = "ryu" version = "1.0.9" @@ -1503,15 +1521,16 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sea-orm" -version = "0.4.2" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f594c2a48a3f2c7c911187c67a39d08f63af932801073737358bef0b5f06576" +checksum = "dd24380b48dacd3ed1c3d467c7b17ffa5818555a2c04066f4a0a9e17d830abc9" dependencies = [ "async-stream", "async-trait", "chrono", "futures", "futures-util", + "once_cell", "ouroboros", "rust_decimal", "sea-orm-macros", @@ -1520,15 +1539,16 @@ dependencies = [ "serde", "serde_json", "sqlx", + "tracing", "url", "uuid", ] [[package]] name = "sea-orm-macros" -version = "0.4.2" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73786f2ccb8f697d83e80a1ddd3c580ead76dddd068ebb4349b5ab648e625cd2" +checksum = "c199fa8630b1e195d7aef24ce8944af8f4ced67c4eccffd8926453b59f2565a1" dependencies = [ "bae", "heck", @@ -1539,9 +1559,9 @@ dependencies = [ [[package]] name = "sea-query" -version = "0.19.4" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c6353d854a61e47b2691feded408c6ffd07ba9913311f0ff17c889ef2f102f" +checksum = "9088ff96158860a75d98a85a654fdd9d97b10515773af6d87339bfc48258c800" dependencies = [ "chrono", "rust_decimal", @@ -1565,22 +1585,23 @@ dependencies = [ [[package]] name = "sea-strum" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c77c6c6c8b3950fccb65edd5d04985b5377f4c8f669cb9a215553f0369ec001" +checksum = "391d06a6007842cfe79ac6f7f53911b76dfd69fc9a6769f1cf6569d12ce20e1b" dependencies = [ "sea-strum_macros", ] [[package]] name = "sea-strum_macros" -version = "0.21.2" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51c247af6c2c4ffd372fe97e9afa579b4438e4c306c9aa3f11cbf72f1e845180" +checksum = "69b4397b825df6ccf1e98bcdabef3bbcfc47ff5853983467850eeab878384f21" dependencies = [ "heck", "proc-macro2", "quote", + "rustversion", "syn", ] @@ -1609,18 +1630,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.132" +version = "1.0.136" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.132" +version = "1.0.136" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" dependencies = [ "proc-macro2", "quote", @@ -1629,9 +1650,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.73" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" dependencies = [ "indexmap", "itoa 1.0.1", @@ -2044,10 +2065,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" dependencies = [ "cfg-if", + "log", "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tracing-core" version = "0.1.21" @@ -2145,9 +2179,9 @@ dependencies = [ [[package]] name = "trillium-router" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51fd3cf61f1f6c1493ead6e304a6a295ebe09f25d502f27c66f850dc2cb0def4" +checksum = "20eb6edfe01b2df89944e0371453dbc288d94cf8b779f9fbd89c24ed923217a9" dependencies = [ "log", "routefinder", @@ -2212,6 +2246,15 @@ version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.7" diff --git a/frameworks/Rust/trillium/Cargo.toml b/frameworks/Rust/trillium/Cargo.toml index aeeaed7e698..9c1a1bcf338 100644 --- a/frameworks/Rust/trillium/Cargo.toml +++ b/frameworks/Rust/trillium/Cargo.toml @@ -4,20 +4,24 @@ version = "0.1.0" edition = "2021" [dependencies] -askama = "0.11.0" -fastrand = "1.6.0" +askama = "0.11.1" +fastrand = "1.7.0" futures-lite = "1.12.0" -futures-util = "0.3.19" -serde = { version = "1.0.132", features = ["derive"] } -serde_json = "1.0.73" +futures-util = "0.3.21" +serde = { version = "1.0.136", features = ["derive"] } +serde_json = "1.0.79" trillium = "0.2.2" trillium-api = "0.1.0" trillium-askama = "0.3.0" trillium-async-std = "0.2.0" trillium-logger = "0.4.0" -trillium-router = "0.3.1" +trillium-router = "0.3.2" [dependencies.sea-orm] -version = "0.4.2" +version = "0.6.0" default-features = false features = ["runtime-async-std-native-tls", "sqlx-postgres", "macros"] + +[profile.release] +panic = "abort" +lto = "fat" diff --git a/frameworks/Rust/trillium/benchmark_config.json b/frameworks/Rust/trillium/benchmark_config.json index 8056a8e1b3d..19076a3598f 100755 --- 
a/frameworks/Rust/trillium/benchmark_config.json +++ b/frameworks/Rust/trillium/benchmark_config.json @@ -22,7 +22,8 @@ "database_os": "Linux", "display_name": "Trillium", "notes": "", - "versus": "None" + "versus": "None", + "tags": ["verified"] } } ] diff --git a/frameworks/Rust/trillium/src/db/world.rs b/frameworks/Rust/trillium/src/db/world.rs index e1480cefc96..d8ea0b82223 100644 --- a/frameworks/Rust/trillium/src/db/world.rs +++ b/frameworks/Rust/trillium/src/db/world.rs @@ -21,19 +21,4 @@ impl RelationTrait for Relation { } } -impl TryFrom for Model { - type Error = ActiveModel; - - fn try_from(value: ActiveModel) -> Result { - if value.id.is_unchanged() && value.random_number.is_unchanged() { - Ok(Self { - id: value.id.unwrap(), - random_number: value.random_number.unwrap(), - }) - } else { - Err(value) - } - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/frameworks/Rust/trillium/src/routes/updates.rs b/frameworks/Rust/trillium/src/routes/updates.rs index e71ccae40ad..c88e5595f67 100644 --- a/frameworks/Rust/trillium/src/routes/updates.rs +++ b/frameworks/Rust/trillium/src/routes/updates.rs @@ -5,6 +5,7 @@ use crate::db::{ use futures_util::stream::{futures_unordered::FuturesUnordered, StreamExt}; use sea_orm::{entity::prelude::*, IntoActiveModel, Set}; +use std::iter; use trillium::Conn; use trillium_api::ApiConnExt; use trillium_router::RouterConnExt; @@ -20,14 +21,14 @@ pub async fn handler(conn: Conn) -> Conn { let db = conn.db(); let vec_of_worlds: Vec = - std::iter::repeat_with(|| Worlds::find_by_id(fastrand::i32(1..10000)).one(db)) + iter::repeat_with(|| Worlds::find_by_id(fastrand::i32(1..10000)).one(db)) .take(queries) .collect::>() .filter_map(|x| async move { x.ok().flatten() }) .filter_map(|w| async move { let mut am = w.clone().into_active_model(); am.random_number = Set(fastrand::i32(1..10000)); - am.update(db).await.ok().and_then(|a| a.try_into().ok()) + am.update(db).await.ok() }) .collect() .await; diff --git a/frameworks/Rust/warp-rust/Cargo.lock b/frameworks/Rust/warp-rust/Cargo.lock index 3e67ecf5697..b506c208ffd 100644 --- a/frameworks/Rust/warp-rust/Cargo.lock +++ b/frameworks/Rust/warp-rust/Cargo.lock @@ -336,9 +336,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" dependencies = [ "cfg-if", "lazy_static", @@ -1399,9 +1399,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.4" +version = "1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" dependencies = [ "aho-corasick", "memchr", @@ -1416,9 +1416,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.6.25" +version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" [[package]] name = "remove_dir_all" @@ -1898,9 +1898,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.10.1" +version = 
"1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92036be488bb6594459f2e03b60e42df6f937fe6ca5c5ffdcb539c6b84dc40f5" +checksum = "52963f91310c08d91cb7bff5786dfc8b79642ab839e188187e92105dbfb9d2c8" dependencies = [ "autocfg", "bytes 1.1.0", diff --git a/frameworks/Rust/warp-rust/Cargo.toml b/frameworks/Rust/warp-rust/Cargo.toml index 29bda01013c..b679e511ae9 100644 --- a/frameworks/Rust/warp-rust/Cargo.toml +++ b/frameworks/Rust/warp-rust/Cargo.toml @@ -9,7 +9,7 @@ futures = "0.3.12" rand = { version = "0.8.2", features = ["small_rng"] } serde = { version = "1.0.120", features = ["derive"] } sqlx = { version = "0.5.1", features = ["runtime-tokio-rustls", "postgres"] } -tokio = { version = "1.0.2", features = ["macros", "rt-multi-thread"] } +tokio = { version = "1.13.1", features = ["macros", "rt-multi-thread"] } warp = "0.3.0" yarte = "0.15.3" diff --git a/frameworks/Rust/warp-rust/warp-rust.dockerfile b/frameworks/Rust/warp-rust/warp-rust.dockerfile index 3458f649270..06379709caf 100644 --- a/frameworks/Rust/warp-rust/warp-rust.dockerfile +++ b/frameworks/Rust/warp-rust/warp-rust.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.50 +FROM rust:1.59 WORKDIR /warp-rust COPY src src diff --git a/frameworks/Rust/xitca-web/Cargo.toml b/frameworks/Rust/xitca-web/Cargo.toml index aadecdf0cfd..b07269cf612 100755 --- a/frameworks/Rust/xitca-web/Cargo.toml +++ b/frameworks/Rust/xitca-web/Cargo.toml @@ -6,32 +6,56 @@ edition = "2021" [[bin]] name = "xitca-web" path = "./src/main.rs" +required-features = ["raw", "simd"] [[bin]] name = "xitca-web-diesel" path = "./src/main_diesel.rs" +required-features = ["orm", "serde", "web"] + +[features] +# raw-pg optional +raw = ["xitca-postgres"] +# diesel-pg orm optional +orm = ["diesel", "diesel-async"] +# simd-json optional +simd = ["simd-json", "simd-json-derive"] +# serde optional +serde = ["dep:serde"] +# web optional +web = ["xitca-web"] [dependencies] -xitca-http = "0.1" -xitca-http-codegen = "0.1" +xitca-http = { version = "0.1", features = ["util-service"] } xitca-server = "0.1" xitca-service = "0.1" -xitca-web = "0.1" +xitca-unsafe-collection = "0.1" -ahash = { version = "0.7.6", features = ["compile-time-rng"] } -atoi = "0.4.0" +atoi = "1.0.0" core_affinity = "0.5.10" -diesel = { git = "https://github.com/diesel-rs/diesel.git", rev = "37ec18f46ced2d6e9197414156fdb705d7a61426", default-features = false } -diesel-async = { version = "0.1.0", features = ["postgres"], default-features = false } futures-util = { version = "0.3.18", default-features = false, features = ["alloc"] } -mimalloc = { version = "0.1.27", default-features = false } -rand = { version = "0.8", default-features = false, features = ["min_const_gen", "small_rng"] } -sailfish = "0.3.3" -serde = "1" -simd-json = "0.4.8" +mimalloc = { version = "0.1.29", default-features = false } +rand = { version = "0.8.5", default-features = false, features = ["min_const_gen", "nightly", "small_rng"] } +sailfish = "0.4" tang-rs = "0.2" -tokio = { version = "1.14", features = ["macros", "rt"] } -tokio-postgres = "0.7.5" +tokio = "1.20" + +# web optional +xitca-web = { version = "0.1", features = ["json"], optional = true } + +# raw-pg optional +xitca-postgres = { version = "0.1", default-features = false, features = ["single-thread"], optional = true } + +# diesel-pg orm optional +diesel = { version = "2.0.0-rc.0", default-features = false, features = ["i-implement-a-third-party-backend-and-opt-into-breaking-changes"], optional = true } +diesel-async = { version = 
"0.1.0", default-features = false, features = ["postgres"], optional = true } + +# simd-json optional +simd-json = { version = "0.6", default-features = false, features = ["swar-number-parsing", "hints"], optional = true } +simd-json-derive = { version = "0.4", default-features = false, optional = true } + +# serde optional +serde = { version = "1", features = ["derive"], optional = true } [profile.release] lto = true @@ -40,15 +64,13 @@ codegen-units = 1 panic = "abort" [patch.crates-io] -xitca-http = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } -xitca-http-codegen = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } -xitca-io = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } -xitca-server = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } -xitca-service = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } -xitca-web = { git = "https://github.com/fakeshadow/xitca-web.git", rev = "59827177f6c319c6fa9940fe5f146754fff90aad" } - -diesel = { git = "https://github.com/diesel-rs/diesel.git", rev = "37ec18f46ced2d6e9197414156fdb705d7a61426" } -diesel-async = { git = "https://github.com/weiznich/diesel_async.git", rev = "06b3416826dbc8ce404f6d613daea989b23549ca" } - -# this is not right. but not much can be done when the maintainer is unactive. -sailfish = { git = "https://github.com/jdrouet/sailfish", rev = "7d2b59247eaab10b67311d6c1c7d50a7d751d791" } \ No newline at end of file +xitca-http = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-io = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-postgres = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-server = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-service = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-unsafe-collection = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } +xitca-web = { git = "https://github.com/HFQR/xitca-web.git", rev = "4fd97d4bd906995c3a70a50c33fc8ac076693e97" } + +diesel = { git = "https://github.com/diesel-rs/diesel.git", rev = "53a4157776d4320fffc11fe73779cc53702843d6" } +diesel-async = { git = "https://github.com/weiznich/diesel_async.git", rev = "3c9e976c1c30d3aa3d0751b89f72b2ce43869c4f" } diff --git a/frameworks/Rust/xitca-web/src/db.rs b/frameworks/Rust/xitca-web/src/db.rs index 0b02f476776..180ef3eceed 100644 --- a/frameworks/Rust/xitca-web/src/db.rs +++ b/frameworks/Rust/xitca-web/src/db.rs @@ -1,34 +1,53 @@ -use std::{cell::RefCell, error::Error, fmt::Write}; +use std::{cell::RefCell, collections::HashMap, error::Error, fmt::Write}; -use ahash::AHashMap; use futures_util::stream::{FuturesUnordered, StreamExt, TryStreamExt}; use rand::{rngs::SmallRng, Rng, SeedableRng}; -use tokio::pin; -use tokio_postgres::{types::ToSql, NoTls, Statement}; +use xitca_postgres::{Postgres, Statement, ToSql}; +use xitca_unsafe_collection::no_hash::NoHashBuilder; use 
super::ser::{Fortune, Fortunes, World}; pub struct Client { - client: tokio_postgres::Client, + client: xitca_postgres::Client, rng: RefCell, fortune: Statement, world: Statement, - updates: AHashMap, + updates: HashMap, +} + +impl Drop for Client { + fn drop(&mut self) { + drop(self.fortune.clone().into_guarded(&self.client)); + + drop(self.world.clone().into_guarded(&self.client)); + + for (_, stmt) in std::mem::take(&mut self.updates) { + drop(stmt.into_guarded(&self.client)) + } + } } pub async fn create(config: &str) -> Client { - let (client, conn) = tokio_postgres::connect(config, NoTls).await.unwrap(); + let (client, conn) = Postgres::new(config.to_string()).connect().await.unwrap(); tokio::task::spawn_local(async move { let _ = conn.await; }); - let fortune = client.prepare("SELECT * FROM fortune").await.unwrap(); + let fortune = client + .prepare("SELECT * FROM fortune", &[]) + .await + .unwrap() + .leak(); + let world = client - .prepare("SELECT * FROM world WHERE id=$1") + .prepare("SELECT * FROM world WHERE id=$1", &[]) .await - .unwrap(); - let mut updates = AHashMap::new(); + .unwrap() + .leak(); + + let mut updates = HashMap::default(); + for num in 1..=500u16 { let mut pl = 1; let mut q = String::new(); @@ -45,7 +64,7 @@ pub async fn create(config: &str) -> Client { q.pop(); q.push(')'); - let st = client.prepare(&q).await.unwrap(); + let st = client.prepare(&q, &[]).await.unwrap().leak(); updates.insert(num, st); } @@ -62,9 +81,14 @@ type DbResult = Result>; impl Client { async fn query_one_world(&self, id: i32) -> DbResult { - let stream = self.client.query_raw(&self.world, &[&id]).await?; - pin!(stream); - let row = stream.next().await.unwrap()?; + let row = self + .client + .query_raw(&self.world, &[&id]) + .await? + .next() + .await + .unwrap()?; + Ok(World::new(row.get(0), row.get(1))) } @@ -128,13 +152,11 @@ impl Client { items.push(Fortune::new(0, "Additional fortune added at request time.")); - let stream = self + let mut stream = self .client - .query_raw::<_, _, &[i32; 0]>(&self.fortune, &[]) + .query_raw::<_, &[i32; 0]>(&self.fortune, &[]) .await?; - pin!(stream); - while let Some(row) = stream.try_next().await? { items.push(Fortune::new(row.get(0), row.get::<_, String>(1))); } diff --git a/frameworks/Rust/xitca-web/src/db_diesel.rs b/frameworks/Rust/xitca-web/src/db_diesel.rs index 22ff4bb51ee..ccd06b2b3c3 100755 --- a/frameworks/Rust/xitca-web/src/db_diesel.rs +++ b/frameworks/Rust/xitca-web/src/db_diesel.rs @@ -111,12 +111,12 @@ pub async fn create(config: &str) -> io::Result { impl DieselPool { pub async fn get_world(&self) -> DbResult { + use crate::schema::world::dsl::*; + let mut conn = self.pool.get().await?; let random_id = self.rng.borrow_mut().gen_range(1..10_001); - use crate::schema::world::dsl::*; - let w = world .filter(id.eq(random_id)) .load::(&mut *conn) @@ -128,24 +128,19 @@ impl DieselPool { } pub async fn get_worlds(&self, num: u16) -> DbResult> { + use crate::schema::world::dsl::*; + let worlds = { + let mut conn = self.pool.get().await?; + let mut rng = self.rng.borrow_mut(); (0..num) .map(|_| { let w_id = (rng.gen::() % 10_000 + 1) as i32; + let fut = world.filter(id.eq(w_id)).load::(&mut *conn); - async move { - let mut conn = self.pool.get().await?; - - use crate::schema::world::dsl::*; - - let w = world - .filter(id.eq(w_id)) - .load::(&mut *conn) - .await? 
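The loop above prepares one statement per possible query count (1 through 500) and keys it by `num`, but the hunk elides the SQL text it assembles. A minimal standalone sketch of that placeholder-generation idea, with an assumed (not the benchmark's actual) statement shape, could look like this:

// Hedged sketch: the real SQL string built inside the `for num in 1..=500u16`
// loop sits outside the hunk, so the UPDATE shape below is an assumption. What
// it illustrates is generating numbered Postgres placeholders once per possible
// query count so each statement only has to be prepared a single time.
fn batch_update_sql(num: u16) -> String {
    let mut q = String::from("UPDATE world AS w SET randomnumber = v.r FROM (VALUES ");
    let mut pl = 1; // placeholder counter, like the `pl` variable in the diff
    for _ in 0..num {
        q.push_str(&format!("(${}::int,${}::int),", pl, pl + 1));
        pl += 2;
    }
    q.pop(); // drop the trailing comma, as the diff's `q.pop()` does
    q.push_str(") AS v(id, r) WHERE w.id = v.id");
    q
}

fn main() {
    // For num = 2 this prints:
    // UPDATE world AS w SET randomnumber = v.r FROM (VALUES ($1::int,$2::int),($3::int,$4::int)) AS v(id, r) WHERE w.id = v.id
    println!("{}", batch_update_sql(2));
}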
- .pop() - .unwrap(); - + async { + let w = fut.await?.pop().unwrap(); Ok(w) } }) @@ -159,24 +154,19 @@ impl DieselPool { use crate::schema::world::dsl::*; let worlds = { + let mut conn = self.pool.get().await?; + let mut rng = self.rng.borrow_mut(); (0..num) .map(|_| { let w_id = rng.gen_range::(1..10_001); let new_id = rng.gen_range::(1..10_001); - async move { - let mut conn = self.pool.get().await?; - - let mut w = world - .filter(id.eq(w_id)) - .load::(&mut *conn) - .await? - .pop() - .unwrap(); + let fut = world.filter(id.eq(w_id)).load::(&mut *conn); + async move { + let mut w = fut.await?.pop().unwrap(); w.randomnumber = new_id; - DbResult::Ok(w) } }) @@ -187,28 +177,29 @@ impl DieselPool { worlds.sort_by_key(|w| w.id); - let mut conn = self.pool.get().await?; - - conn.transaction(move |conn| { - Box::pin(async move { - for w in &worlds { - diesel::update(world) - .filter(id.eq(w.id)) - .set(randomnumber.eq(w.randomnumber)) - .execute(conn) - .await?; - } - Ok(worlds) + self.pool + .get() + .await? + .transaction(move |conn| { + Box::pin(async move { + for w in &worlds { + diesel::update(world) + .filter(id.eq(w.id)) + .set(randomnumber.eq(w.randomnumber)) + .execute(conn) + .await?; + } + Ok(worlds) + }) }) - }) - .await + .await } pub async fn tell_fortune(&self) -> DbResult { - let mut conn = self.pool.get().await?; - use crate::schema::fortune::dsl::*; + let mut conn = self.pool.get().await?; + let mut items = fortune.load::(&mut *conn).await?; items.push(Fortune::new(0, "Additional fortune added at request time.")); diff --git a/frameworks/Rust/xitca-web/src/main.rs b/frameworks/Rust/xitca-web/src/main.rs index 3064ea11d51..50f05198215 100755 --- a/frameworks/Rust/xitca-web/src/main.rs +++ b/frameworks/Rust/xitca-web/src/main.rs @@ -1,5 +1,3 @@ -#![feature(generic_associated_types, type_alias_impl_trait)] - #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; @@ -8,180 +6,158 @@ mod ser; mod util; use std::{ + cell::RefCell, convert::Infallible, error::Error, - future::ready, - io, + fmt::Debug, sync::{Arc, Mutex}, }; -use serde::Serialize; +use simd_json_derive::Serialize; use xitca_http::{ - body::ResponseBody, - bytes::Bytes, + body::Once, + bytes::{BufMutWriter, Bytes, BytesMut}, config::HttpServiceConfig, h1::RequestBody, http::{ self, + const_header_value::{JSON, TEXT, TEXT_HTML_UTF8}, header::{CONTENT_TYPE, SERVER}, - IntoResponse, Method, + IntoResponse, + }, + request, + util::{ + middleware::TcpConfig, + service::{ + context::{object::ContextObjectConstructor, Context, ContextBuilder}, + route::get, + GenericRouter, + }, }, - util::service::Route, HttpServiceBuilder, }; -use xitca_server::Builder; +use xitca_service::{fn_service, BuildServiceExt, Service}; use self::db::Client; use self::ser::Message; -use self::util::{ - internal, not_found, AppState, QueryParse, JSON_HEADER_VALUE, SERVER_HEADER_VALUE, - TEXT_HEADER_VALUE, -}; +use self::util::{QueryParse, SERVER_HEADER_VALUE}; -type Request = http::Request; +type Response = http::Response>; +type Request = request::Request; -type Response = http::Response; +type Ctx<'a> = Context<'a, Request, State>; -#[tokio::main(flavor = "current_thread")] -async fn main() -> io::Result<()> { - let cores = core_affinity::get_core_ids().unwrap_or_else(Vec::new); +fn main() -> Result<(), Box> { + let cores = core_affinity::get_core_ids().unwrap_or_default(); let cores = Arc::new(Mutex::new(cores)); - let factory = || { - let http = Http { - config: 
"postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world", - }; + let db_url = "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world"; + let builder = || { let config = HttpServiceConfig::new() .disable_vectored_write() .max_request_headers::<8>(); - let route = Route::new(http).methods([Method::GET]); + let router = GenericRouter::with_custom_object::>() + .insert("/plaintext", get(fn_service(plain_text))) + .insert("/json", get(fn_service(json))) + .insert("/db", get(fn_service(db))) + .insert("/fortunes", get(fn_service(fortunes))) + .insert("/queries", get(fn_service(queries))) + .insert("/updates", get(fn_service(updates))) + .enclosed_fn(middleware_fn); + + let service = ContextBuilder::new(|| async { + let client = db::create(db_url).await; + let write_buf = RefCell::new(BytesMut::new()); + Ok::<_, Infallible>(State { client, write_buf }) + }) + .service(router); + + let tcp_config = TcpConfig::new().set_nodelay(true); - HttpServiceBuilder::h1(route).config(config) + HttpServiceBuilder::h1(service) + .config(config) + .enclosed(tcp_config) }; - Builder::new() + xitca_server::Builder::new() .on_worker_start(move || { if let Some(core) = cores.lock().unwrap().pop() { core_affinity::set_for_current(core); } - ready(()) + async {} }) - .bind("xitca-web", "0.0.0.0:8080", factory)? + .bind("xitca-web", "0.0.0.0:8080", builder)? .build() - .await + .wait() + .map_err(Into::into) } -#[derive(Clone)] -struct Http { - config: &'static str, +async fn middleware_fn(service: &S, req: Ctx<'_>) -> Result +where + S: for<'c> Service, Response = Response, Error = E>, + E: Debug, +{ + let mut res = service.call(req).await.unwrap(); + res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); + Ok(res) } -struct HttpService { - state: AppState, +async fn plain_text(ctx: Ctx<'_>) -> Result> { + let (req, _) = ctx.into_parts(); + let mut res = req.into_response(Bytes::from_static(b"Hello, World!")); + res.headers_mut().append(CONTENT_TYPE, TEXT); + Ok(res) } -#[xitca_http_codegen::service_impl] -impl HttpService { - async fn new_service(http: &Http, _: ()) -> Result { - let client = db::create(http.config).await; - - Ok(HttpService { - state: AppState::new(client), - }) - } - - async fn ready(&self) -> Result<(), Infallible> { - Ok(()) - } - - async fn call(&self, req: Request) -> Result { - match req.uri().path() { - "/plaintext" => self.plain_text(req), - "/json" => self.json(req), - "/db" => self.db(req).await, - "/fortunes" => self.fortunes(req).await, - "/queries" => self.queries(req).await, - "/updates" => self.updates(req).await, - _ => not_found(), - } - } +async fn json(ctx: Ctx<'_>) -> Result> { + let (req, state) = ctx.into_parts(); + _json(req, state, &Message::new()) } -impl HttpService { - fn plain_text(&self, req: Request) -> Result { - let mut res = req.into_response("Hello, World!"); - - res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); - res.headers_mut().append(CONTENT_TYPE, TEXT_HEADER_VALUE); - - Ok(res) - } +async fn db(ctx: Ctx<'_>) -> Result> { + let (req, state) = ctx.into_parts(); + let world = state.client.get_world().await?; + _json(req, state, &world) +} - #[inline] - fn json(&self, req: Request) -> Result { - self._json(req, &Message::new()) - } +async fn fortunes(ctx: Ctx<'_>) -> Result> { + let (req, state) = ctx.into_parts(); + use sailfish::TemplateOnce; + let fortunes = state.client.tell_fortune().await?.render_once()?; + let mut res = req.into_response(Bytes::from(fortunes)); + res.headers_mut().append(CONTENT_TYPE, TEXT_HTML_UTF8); + 
Ok(res) +} - async fn db(&self, req: Request) -> Result { - match self.state.client().get_world().await { - Ok(ref world) => self._json(req, world), - Err(_) => internal(), - } - } +async fn queries(ctx: Ctx<'_>) -> Result> { + let (req, state) = ctx.into_parts(); + let num = req.uri().query().parse_query(); + let worlds = state.client.get_worlds(num).await?; + _json(req, state, worlds.as_slice()) +} - async fn fortunes(&self, req: Request) -> Result { - match self._fortunes().await { - Ok(body) => { - let mut res = req.into_response(body); +async fn updates(ctx: Ctx<'_>) -> Result> { + let (req, state) = ctx.into_parts(); + let num = req.uri().query().parse_query(); + let worlds = state.client.update(num).await?; + _json(req, state, worlds.as_slice()) +} - res.headers_mut().append(SERVER, util::SERVER_HEADER_VALUE); - res.headers_mut() - .append(CONTENT_TYPE, util::HTML_HEADER_VALUE); +fn _json(req: Request, state: &State, value: &S) -> Result> +where + S: ?Sized + Serialize, +{ + let mut buf = state.write_buf.borrow_mut(); + value.json_write(&mut BufMutWriter(&mut *buf)).unwrap(); + let body = buf.split().freeze(); + let mut res = req.into_response(body); + res.headers_mut().append(CONTENT_TYPE, JSON); + Ok(res) +} - Ok(res) - } - Err(_) => internal(), - } - } - - async fn queries(&self, req: Request) -> Result { - let num = req.uri().query().parse_query(); - match self.state.client().get_worlds(num).await { - Ok(worlds) => self._json(req, worlds.as_slice()), - Err(_) => internal(), - } - } - - async fn updates(&self, req: Request) -> Result { - let num = req.uri().query().parse_query(); - match self.state.client().update(num).await { - Ok(worlds) => self._json(req, worlds.as_slice()), - Err(_) => internal(), - } - } - - #[inline] - async fn _fortunes(&self) -> Result> { - use sailfish::TemplateOnce; - let fortunes = self.state.client().tell_fortune().await?.render_once()?; - Ok(fortunes.into()) - } - - #[inline] - fn _json(&self, req: Request, value: &S) -> Result - where - S: ?Sized + Serialize, - { - let mut writer = self.state.writer(); - simd_json::to_writer(&mut writer, value).unwrap(); - let body = writer.take(); - - let mut res = req.into_response(body); - res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); - res.headers_mut().append(CONTENT_TYPE, JSON_HEADER_VALUE); - - Ok(res) - } +struct State { + client: Client, + write_buf: RefCell, } diff --git a/frameworks/Rust/xitca-web/src/main_diesel.rs b/frameworks/Rust/xitca-web/src/main_diesel.rs old mode 100755 new mode 100644 index 369127474ec..27ff808da8f --- a/frameworks/Rust/xitca-web/src/main_diesel.rs +++ b/frameworks/Rust/xitca-web/src/main_diesel.rs @@ -9,133 +9,91 @@ mod schema; mod ser; mod util; -use std::{error::Error, io}; +use std::{convert::Infallible, io}; use serde::Serialize; use xitca_web::{ - dev::{bytes::Bytes, fn_service}, - http::{ - header::{CONTENT_TYPE, SERVER}, - Method, - }, + dev::Service, + handler::{handler_service, html::Html, json::Json, state::StateRef, uri::UriRef, Responder}, + http::header::SERVER, request::WebRequest, + response::WebResponse, + route::get, App, HttpServer, }; use self::db_diesel::{create, DieselPool}; -use self::ser::Message; -use self::util::{ - internal, not_found, AppState, HandleResult, QueryParse, JSON_HEADER_VALUE, - SERVER_HEADER_VALUE, TEXT_HEADER_VALUE, -}; +use self::util::{QueryParse, SERVER_HEADER_VALUE}; + +type Error = Box; -type State = AppState; +type Request<'a> = WebRequest<'a, DieselPool>; -#[tokio::main(flavor = "current_thread")] -async fn main() -> 
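The `_json` helper above writes every body into one long-lived `BytesMut` kept in the per-thread `State`, then splits off a frozen view to use as the response body. A small sketch of that reuse pattern with the `bytes` crate, where a closure stands in for the simd-json `json_write` call, could be:

use std::cell::RefCell;

use bytes::{Bytes, BytesMut};

// Sketch of the buffer-reuse idea: serialize into a single reusable BytesMut,
// then `split().freeze()` hands back a cheap `Bytes` body while the (now empty)
// buffer keeps its capacity for the next request on the same thread.
struct WriteBuf(RefCell<BytesMut>);

impl WriteBuf {
    fn render(&self, write_body: impl FnOnce(&mut BytesMut)) -> Bytes {
        let mut buf = self.0.borrow_mut();
        write_body(&mut *buf); // stands in for `value.json_write(&mut BufMutWriter(..))`
        buf.split().freeze()   // O(1); leaves `buf` empty but still holding its capacity
    }
}

fn main() {
    let state = WriteBuf(RefCell::new(BytesMut::with_capacity(4096)));
    let body = state.render(|buf| buf.extend_from_slice(br#"{"message":"Hello, World!"}"#));
    assert_eq!(&body[..], br#"{"message":"Hello, World!"}"#);
}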
io::Result<()> { +fn main() -> io::Result<()> { let config = "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world"; HttpServer::new(move || { - App::with_async_state(move || async move { - let pool = create(config).await.map_err(|_| ())?; - Ok(AppState::new(pool)) - }) - .service(fn_service(handle)) + App::with_async_state(move || async { Ok::<_, Infallible>(create(config).await.unwrap()) }) + .at("/plaintext", get(handler_service(plain_text))) + .at("/json", get(handler_service(json))) + .at("/db", get(handler_service(db))) + .at("/fortunes", get(handler_service(fortunes))) + .at("/queries", get(handler_service(queries))) + .at("/updates", get(handler_service(updates))) + .enclosed_fn(middleware_fn) + .finish() }) - .disable_vectored_write() - .max_request_headers::<8>() .bind("0.0.0.0:8080")? .run() - .await -} - -async fn handle(req: &mut WebRequest<'_, State>) -> HandleResult { - let inner = req.req_mut(); - - match (inner.method(), inner.uri().path()) { - (&Method::GET, "/plaintext") => plain_text(req), - (&Method::GET, "/json") => json(req), - (&Method::GET, "/db") => db(req).await, - (&Method::GET, "/fortunes") => fortunes(req).await, - (&Method::GET, "/queries") => queries(req).await, - (&Method::GET, "/updates") => updates(req).await, - _ => not_found(), - } -} - -async fn db(req: &mut WebRequest<'_, State>) -> HandleResult { - match req.state().client().get_world().await { - Ok(world) => _json(req, &world), - Err(_) => internal(), - } + .wait() } -async fn fortunes(req: &mut WebRequest<'_, State>) -> HandleResult { - match _fortunes(req.state().client()).await { - Ok(body) => { - let mut res = req.as_response(body); +async fn middleware_fn(service: &S, mut ctx: Request<'_>) -> Result +where + S: for<'r> Service, Response = Result, Error = E>, + E: for<'r> Responder, Output = WebResponse>, +{ + let mut res = match service.call(ctx.reborrow()).await { + Ok(Ok(res)) => res, + Ok(Err(err)) => err.respond_to(ctx).await, + Err(err) => err.respond_to(ctx).await, + }; - res.headers_mut().append(SERVER, util::SERVER_HEADER_VALUE); - res.headers_mut() - .append(CONTENT_TYPE, util::HTML_HEADER_VALUE); + res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); - Ok(res) - } - Err(_) => internal(), - } + Ok(res) } -async fn queries(req: &mut WebRequest<'_, State>) -> HandleResult { - let num = req.req_mut().uri().query().parse_query(); - - match req.state().client().get_worlds(num).await { - Ok(worlds) => _json(req, worlds.as_slice()), - Err(_) => internal(), - } +async fn plain_text() -> Result<&'static str, Error> { + Ok("Hello, World!") } -async fn updates(req: &mut WebRequest<'_, State>) -> HandleResult { - let num = req.req_mut().uri().query().parse_query(); +async fn json() -> Result, Error> { + Ok(Json(ser::Message::new())) +} - match req.state().client().update(num).await { - Ok(worlds) => _json(req, worlds.as_slice()), - Err(_) => internal(), - } +async fn db(StateRef(pool): StateRef<'_, DieselPool>) -> Result, Error> { + pool.get_world().await.map(Json) } -#[inline] -async fn _fortunes(pool: &DieselPool) -> Result> { +async fn fortunes(StateRef(pool): StateRef<'_, DieselPool>) -> Result, Error> { use sailfish::TemplateOnce; let fortunes = pool.tell_fortune().await?.render_once()?; - Ok(fortunes.into()) + Ok(Html(fortunes)) } -fn plain_text(req: &mut WebRequest<'_, D>) -> HandleResult { - let mut res = req.as_response(Bytes::from_static(b"Hello, World!")); - - res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); - res.headers_mut().append(CONTENT_TYPE, 
TEXT_HEADER_VALUE); - - Ok(res) -} - -#[inline(always)] -fn json(req: &mut WebRequest<'_, AppState>) -> HandleResult { - _json(req, &Message::new()) +async fn queries( + StateRef(pool): StateRef<'_, DieselPool>, + UriRef(uri): UriRef<'_>, +) -> Result, Error> { + let num = uri.query().parse_query(); + pool.get_worlds(num).await.map(Json) } -#[inline] -fn _json(req: &mut WebRequest<'_, AppState>, value: &S) -> HandleResult -where - S: ?Sized + Serialize, -{ - let mut writer = req.state().writer(); - simd_json::to_writer(&mut writer, value).unwrap(); - let body = writer.take(); - - let mut res = req.as_response(body); - res.headers_mut().append(SERVER, SERVER_HEADER_VALUE); - res.headers_mut().append(CONTENT_TYPE, JSON_HEADER_VALUE); - - Ok(res) +async fn updates( + StateRef(pool): StateRef<'_, DieselPool>, + UriRef(uri): UriRef<'_>, +) -> Result, Error> { + let num = uri.query().parse_query(); + pool.update(num).await.map(Json) } diff --git a/frameworks/Rust/xitca-web/src/ser.rs b/frameworks/Rust/xitca-web/src/ser.rs index 13364d74678..39b4b59c483 100644 --- a/frameworks/Rust/xitca-web/src/ser.rs +++ b/frameworks/Rust/xitca-web/src/ser.rs @@ -1,10 +1,13 @@ use std::borrow::Cow; -use diesel::Queryable; use sailfish::TemplateOnce; -use serde::{Deserialize, Serialize}; -#[derive(Deserialize, Serialize)] +#[cfg(feature = "serde")] +use serde::Serialize; +#[cfg(feature = "simd")] +use simd_json_derive::Serialize; + +#[derive(Serialize)] pub struct Message { message: &'static str, } @@ -20,7 +23,8 @@ impl Message { } #[allow(non_snake_case)] -#[derive(Debug, Serialize, Queryable)] +#[cfg_attr(feature = "orm", derive(Queryable))] +#[derive(Debug, Serialize)] pub struct World { pub id: i32, pub randomnumber: i32, @@ -34,7 +38,7 @@ impl World { } } -#[derive(Queryable)] +#[cfg_attr(feature = "orm", derive(Queryable))] pub struct Fortune { pub id: i32, pub message: Cow<'static, str>, diff --git a/frameworks/Rust/xitca-web/src/util.rs b/frameworks/Rust/xitca-web/src/util.rs index aa597aae815..743370b09ad 100755 --- a/frameworks/Rust/xitca-web/src/util.rs +++ b/frameworks/Rust/xitca-web/src/util.rs @@ -1,46 +1,10 @@ #![allow(clippy::declare_interior_mutable_const)] -use std::{ - cell::{RefCell, RefMut}, - cmp, - convert::Infallible, - io, -}; +use std::cmp; -use xitca_web::{ - dev::bytes::{Bytes, BytesMut}, - http::{ - header::{HeaderValue, SERVER}, - StatusCode, - }, - response::{WebResponse, WebResponseBuilder}, -}; +use xitca_http::http::header::HeaderValue; -pub(super) type HandleResult = Result; - -pub(super) struct Writer<'a>(RefMut<'a, BytesMut>); - -impl Writer<'_> { - #[inline] - pub fn take(mut self) -> Bytes { - self.0.split().freeze() - } -} - -impl io::Write for &mut Writer<'_> { - #[inline] - fn write(&mut self, buf: &[u8]) -> io::Result { - self.0.extend_from_slice(buf); - Ok(buf.len()) - } - - #[inline] - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -pub(super) trait QueryParse { +pub trait QueryParse { fn parse_query(self) -> u16; } @@ -58,50 +22,4 @@ impl QueryParse for Option<&str> { } } -pub(super) struct AppState { - client: C, - // a re-usable buffer for write response data. 
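The `QueryParse` trait above keeps its `Option<&str>` implementation outside this hunk. Assuming the usual TechEmpower rules for the `queries` parameter (missing or unparsable values become 1, and the count is capped at 500), a standalone sketch might read:

use std::cmp;

// Hedged sketch: the actual `impl QueryParse for Option<&str>` body is not shown
// in the hunk above. This version assumes the standard benchmark clamping rules.
trait QueryParse {
    fn parse_query(self) -> u16;
}

impl QueryParse for Option<&str> {
    fn parse_query(self) -> u16 {
        let num = self
            .and_then(|q| {
                q.split('&')
                    .find_map(|kv| kv.strip_prefix("queries="))
                    .and_then(|v| v.parse::<u16>().ok())
            })
            .unwrap_or(1);
        cmp::max(1, cmp::min(num, 500))
    }
}

fn main() {
    assert_eq!(Some("queries=20").parse_query(), 20);
    assert_eq!(Some("queries=9999").parse_query(), 500);
    assert_eq!(None.parse_query(), 1);
}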
- write_buf: RefCell, -} - -impl AppState { - pub(super) fn new(client: C) -> Self { - let write_buf = RefCell::new(BytesMut::new()); - Self { client, write_buf } - } - - #[inline] - pub(super) fn writer(&self) -> Writer<'_> { - Writer(self.write_buf.borrow_mut()) - } - - #[inline] - pub(super) fn client(&self) -> &C { - &self.client - } -} - pub const SERVER_HEADER_VALUE: HeaderValue = HeaderValue::from_static("TFB"); - -pub const HTML_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/html; charset=utf-8"); - -pub const TEXT_HEADER_VALUE: HeaderValue = HeaderValue::from_static("text/plain"); - -pub const JSON_HEADER_VALUE: HeaderValue = HeaderValue::from_static("application/json"); - -macro_rules! error { - ($error: ident, $code: path) => { - #[cold] - #[inline(never)] - pub(super) fn $error() -> HandleResult { - Ok(WebResponseBuilder::new() - .status($code) - .header(SERVER, SERVER_HEADER_VALUE) - .body(Bytes::new().into()) - .unwrap()) - } - }; -} - -error!(not_found, StatusCode::NOT_FOUND); -error!(internal, StatusCode::INTERNAL_SERVER_ERROR); diff --git a/frameworks/Rust/xitca-web/xitca-web-diesel.dockerfile b/frameworks/Rust/xitca-web/xitca-web-diesel.dockerfile index 820b268bdf2..66cdd60342b 100644 --- a/frameworks/Rust/xitca-web/xitca-web-diesel.dockerfile +++ b/frameworks/Rust/xitca-web/xitca-web-diesel.dockerfile @@ -1,11 +1,11 @@ -FROM rust:1.58 +FROM rust:1.62 ADD ./ /xitca-web WORKDIR /xitca-web -RUN rustup default nightly-2022-01-26 +RUN rustup default nightly-2022-07-18 RUN cargo clean -RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin xitca-web-diesel +RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin xitca-web-diesel --features orm,serde,web EXPOSE 8080 diff --git a/frameworks/Rust/xitca-web/xitca-web.dockerfile b/frameworks/Rust/xitca-web/xitca-web.dockerfile index b11a24c4c9d..97b23feee32 100644 --- a/frameworks/Rust/xitca-web/xitca-web.dockerfile +++ b/frameworks/Rust/xitca-web/xitca-web.dockerfile @@ -1,11 +1,11 @@ -FROM rust:1.58 +FROM rust:1.62 ADD ./ /xitca-web WORKDIR /xitca-web -RUN rustup default nightly-2022-01-26 +RUN rustup default nightly-2022-07-18 RUN cargo clean -RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin xitca-web +RUN RUSTFLAGS="-C target-cpu=native" cargo build --release --bin xitca-web --features raw,simd EXPOSE 8080 diff --git a/frameworks/Scala/akka-http/akka-http.dockerfile b/frameworks/Scala/akka-http/akka-http.dockerfile index ec46abf4763..bcf2f9db95e 100644 --- a/frameworks/Scala/akka-http/akka-http.dockerfile +++ b/frameworks/Scala/akka-http/akka-http.dockerfile @@ -23,4 +23,4 @@ RUN sbt clean compile stage EXPOSE 9000 -CMD ["target/universal/stage/bin/akka-http-benchmark", "-Dakka.http.benchmark.mysql.dbhost=tfb-database", "-J-server", "-J-Xms2g", "-J-Xmx2g", "-J-XX:NewSize=1g", "-J-XX:MaxNewSize=1g", "-J-XX:InitialCodeCacheSize=256m", "-J-XX:ReservedCodeCacheSize=256m", "-J-XX:+UseParallelGC", "-J-XX:-UseBiasedLocking", "-J-XX:+AlwaysPreTouch"] +CMD ["target/universal/stage/bin/akka-http-benchmark", "-Dakka.http.benchmark.mysql.dbhost=tfb-database", "-J-server", "-J-Xms2g", "-J-Xmx2g", "-J-XX:NewSize=1g", "-J-XX:MaxNewSize=1g", "-J-XX:InitialCodeCacheSize=256m", "-J-XX:ReservedCodeCacheSize=256m", "-J-XX:+UseParallelGC", "-J-XX:-UseBiasedLocking", "-J-XX:+AlwaysPreTouch", "-J-XX:+UseNUMA", "-J-XX:+AggressiveOpts"] diff --git a/frameworks/Scala/akka-http/akka-http/build.sbt b/frameworks/Scala/akka-http/akka-http/build.sbt index 7a3c2da5a00..9e22525b987 100644 --- 
a/frameworks/Scala/akka-http/akka-http/build.sbt +++ b/frameworks/Scala/akka-http/akka-http/build.sbt @@ -6,13 +6,17 @@ name := "akka-http-benchmark" version := "0.1.0-SNAPSHOT" -scalaVersion := "2.13.6" +scalaVersion := "2.13.8" -resolvers += "Akka Snapshot Repository" at "http://repo.akka.io/snapshots/" +val akkaV = "2.6.19" +val akkaHttpV = "10.2.9" + +// to get latest versions +resolvers += "akka-http-snapshot-repository" at "https://oss.sonatype.org/content/repositories/snapshots" libraryDependencies ++= Seq( - "com.typesafe.akka" %% "akka-http" % "10.2.6", - "com.typesafe.akka" %% "akka-stream" % "2.6.16", + "com.typesafe.akka" %% "akka-http" % akkaHttpV, + "com.typesafe.akka" %% "akka-stream" % akkaV, "de.heikoseeberger" %% "akka-http-jsoniter-scala" % "1.34.0", "com.github.plokhotnyuk.jsoniter-scala" %% "jsoniter-scala-macros" % "2.6.0", "mysql" % "mysql-connector-java" % "8.0.21", diff --git a/frameworks/Scala/akka-http/akka-http/project/build.properties b/frameworks/Scala/akka-http/akka-http/project/build.properties index 0837f7a132d..9edb75b77c2 100644 --- a/frameworks/Scala/akka-http/akka-http/project/build.properties +++ b/frameworks/Scala/akka-http/akka-http/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.5.4 diff --git a/frameworks/Scala/akka-http/akka-http/project/plugins.sbt b/frameworks/Scala/akka-http/akka-http/project/plugins.sbt index 1c978cc4f25..864b9b25843 100644 --- a/frameworks/Scala/akka-http/akka-http/project/plugins.sbt +++ b/frameworks/Scala/akka-http/akka-http/project/plugins.sbt @@ -4,4 +4,6 @@ classpathTypes += "maven-plugin" addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.8.2") addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.16") -addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3") \ No newline at end of file +addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3") + +addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") \ No newline at end of file diff --git a/frameworks/Scala/akka-http/akka-http/src/main/resources/application.conf b/frameworks/Scala/akka-http/akka-http/src/main/resources/application.conf index 21249004463..2239c46797f 100644 --- a/frameworks/Scala/akka-http/akka-http/src/main/resources/application.conf +++ b/frameworks/Scala/akka-http/akka-http/src/main/resources/application.conf @@ -6,9 +6,11 @@ akka { parallelism-max = 64 # -- parallelism-factor = 1 # one thread per core is enough } - throughput = 64 } + internal-dispatcher = "akka.actor.default-dispatcher" } + stream.materializer.io.tcp.write-buffer-size = 128k + stream.materializer.io.tcp.coalesce-writes = 1 http { benchmark { host: 0.0.0.0 @@ -25,6 +27,7 @@ akka { } server { backlog = 1024 + request-timeout = off } } } diff --git a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/App.scala b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/App.scala index 8070eb50250..015a9bd25d4 100644 --- a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/App.scala +++ b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/App.scala @@ -10,7 +10,7 @@ import org.fusesource.scalate.TemplateEngine import scala.concurrent.ExecutionContext -class App extends Infrastructure with RandomGenerator with MySqlDataStore with PlaintextHandler with JsonHandler with DbHandler +class App extends Infrastructure with RandomGenerator with MySqlDataStore with JsonHandler with DbHandler with QueriesHandler with FortunesHandler 
with UpdatesHandler with RequestMapping with BenchmarkBootstrap with Templating { val templateEngine = new TemplateEngine() diff --git a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/Bootstrap.scala b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/Bootstrap.scala index b36961548a8..830f57859ff 100644 --- a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/Bootstrap.scala +++ b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/Bootstrap.scala @@ -1,6 +1,10 @@ package com.typesafe.akka.http.benchmark import akka.http.scaladsl.Http +import akka.http.scaladsl.model._ +import akka.http.scaladsl.util.FastFuture + +import scala.concurrent.Future trait Bootstrap { def run(): Unit @@ -12,5 +16,12 @@ trait BenchmarkBootstrap extends Bootstrap { _: Infrastructure with RequestMappi appConfig.getString("akka.http.benchmark.host"), appConfig.getInt("akka.http.benchmark.port")) .adaptSettings(settings => settings.mapHttp2Settings(_.withMaxConcurrentStreams(16))) - .bind(asRoute) + .bind(handler) + + val plainTextResponse = FastFuture.successful(HttpResponse(entity = HttpEntity("Hello, World!"))) + lazy val mainHandler: HttpRequest => Future[HttpResponse] = asRoute + lazy val handler: HttpRequest => Future[HttpResponse] = { + case HttpRequest(HttpMethods.GET, Uri.Path("/plaintext"), _, _, _) => plainTextResponse + case x => mainHandler(x) + } } \ No newline at end of file diff --git a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/RequestMapping.scala b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/RequestMapping.scala index 332f4f18a42..047c5b401f3 100644 --- a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/RequestMapping.scala +++ b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/RequestMapping.scala @@ -4,7 +4,7 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import com.typesafe.akka.http.benchmark.handlers._ -trait RequestMapping { _: PlaintextHandler with JsonHandler with DbHandler with QueriesHandler with FortunesHandler with UpdatesHandler => +trait RequestMapping { _: JsonHandler with DbHandler with QueriesHandler with FortunesHandler with UpdatesHandler => def asRoute: Route = - plainTextEndpoint ~ jsonEndpoint ~ dbEndpoint ~ queriesEndpoint ~ fortunesEndpoint ~ updatesEndpoint + jsonEndpoint ~ dbEndpoint ~ queriesEndpoint ~ fortunesEndpoint ~ updatesEndpoint } diff --git a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/handlers/PlaintextHandler.scala b/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/handlers/PlaintextHandler.scala deleted file mode 100644 index f775537bdb5..00000000000 --- a/frameworks/Scala/akka-http/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/handlers/PlaintextHandler.scala +++ /dev/null @@ -1,20 +0,0 @@ -package com.typesafe.akka.http.benchmark.handlers - -import akka.http.scaladsl.model.HttpCharsets._ -import akka.http.scaladsl.model.HttpEntity -import akka.http.scaladsl.model.HttpResponse -import akka.http.scaladsl.model.MediaType -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.Route - -trait PlaintextHandler { - // akka-http will always generate a charset parameter for text/plain, so to be competitive, we create a custom 
- // one here to save a few bytes of headers for this particular test case. This is explicitly allowed in: - // https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Framework-Tests-Overview#specific-test-requirements - val plainTextResponse = HttpResponse(entity = HttpEntity(MediaType.customWithFixedCharset("text", "plain", `UTF-8`), "Hello, World!")) - - def plainTextEndpoint: Route = - (get & path("plaintext")) { - complete(plainTextResponse) - } -} diff --git a/frameworks/Scala/finagle/build.sbt b/frameworks/Scala/finagle/build.sbt index 2616f0d6391..c26f7baa934 100644 --- a/frameworks/Scala/finagle/build.sbt +++ b/frameworks/Scala/finagle/build.sbt @@ -1,4 +1,4 @@ -lazy val finagleVersion = "22.1.0" +lazy val finagleVersion = "22.4.0" name := "finagle-benchmark" scalaVersion := "2.12.12" diff --git a/frameworks/Scala/finatra/build.sbt b/frameworks/Scala/finatra/build.sbt index be13cb61844..259df9d7d32 100644 --- a/frameworks/Scala/finatra/build.sbt +++ b/frameworks/Scala/finatra/build.sbt @@ -1,4 +1,4 @@ -lazy val finatraVersion = "22.1.0" +lazy val finatraVersion = "22.4.0" name := "techempower-benchmarks-finatra" organization := "com.twitter" diff --git a/frameworks/Scala/scalene/scalene.dockerfile b/frameworks/Scala/scalene/scalene.dockerfile index b09694a3db4..469e2c972ca 100644 --- a/frameworks/Scala/scalene/scalene.dockerfile +++ b/frameworks/Scala/scalene/scalene.dockerfile @@ -1,19 +1,14 @@ FROM adoptopenjdk/openjdk13 -ARG SBT_VERSION=1.3.7 - -RUN \ - apt-get update && \ - apt-get -y install git +RUN apt-get update -yqq +RUN apt-get install -yqq gnupg git # Install sbt -RUN \ - curl -L -o sbt-$SBT_VERSION.deb https://dl.bintray.com/sbt/debian/sbt-$SBT_VERSION.deb && \ - dpkg -i sbt-$SBT_VERSION.deb && \ - rm sbt-$SBT_VERSION.deb && \ - apt-get update && \ - apt-get install sbt && \ - sbt sbtVersion +RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian all main" | tee /etc/apt/sources.list.d/sbt.list +RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian /" | tee /etc/apt/sources.list.d/sbt_old.list +RUN curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | apt-key add +RUN apt-get update -yqq +RUN apt-get install -yqq sbt WORKDIR /scalene COPY project project diff --git a/frameworks/Scala/snunit/build.sbt b/frameworks/Scala/snunit/build.sbt index ab20a24ddaf..47b76b245ee 100644 --- a/frameworks/Scala/snunit/build.sbt +++ b/frameworks/Scala/snunit/build.sbt @@ -1,17 +1,17 @@ import scala.scalanative.build._ -scalaVersion := "2.13.7" +scalaVersion := "2.13.8" -val snunitVersion = "0.0.11" +val snunitVersion = "0.0.22" libraryDependencies ++= Seq( "com.github.lolgab" %%% "snunit" % snunitVersion, - "com.github.lolgab" %%% "snunit-async" % snunitVersion, - "com.lihaoyi" %%% "upickle" % "1.4.2" + "com.lihaoyi" %%% "upickle" % "2.0.0" ) nativeConfig ~= { _.withMode(Mode.releaseFull) .withLTO(LTO.thin) + .withGC(GC.commix) } enablePlugins(ScalaNativePlugin) diff --git a/frameworks/Scala/snunit/config.sh b/frameworks/Scala/snunit/config.sh index cb84c113540..f960a4336a5 100755 --- a/frameworks/Scala/snunit/config.sh +++ b/frameworks/Scala/snunit/config.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +processes=$(expr $(nproc) / 2) + config='{' config+=' "listeners": {' config+=' "*:8080": {' @@ -9,8 +11,11 @@ config+=' },' config+=' "applications": {' config+=' "example": {' config+=' "type": "external",' -config+=' "processes": '"$(nproc)"',' -config+=' "executable": "/app/example"' +config+=' 
"processes": '"$processes"',' +config+=' "executable": "/app/example",' +config+=' "environment": {' +config+=' "SCALANATIVE_GC_THREADS": "2"' +config+=' }' config+=' }' config+=' }' config+='}' diff --git a/frameworks/Scala/snunit/project/build.properties b/frameworks/Scala/snunit/project/build.properties index 10fd9eee04a..c8fcab543a9 100644 --- a/frameworks/Scala/snunit/project/build.properties +++ b/frameworks/Scala/snunit/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.5 +sbt.version=1.6.2 diff --git a/frameworks/Scala/snunit/project/plugins.sbt b/frameworks/Scala/snunit/project/plugins.sbt index 17f77dd73ab..af19cec208f 100644 --- a/frameworks/Scala/snunit/project/plugins.sbt +++ b/frameworks/Scala/snunit/project/plugins.sbt @@ -1 +1 @@ -addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.2") +addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.4") diff --git a/frameworks/Scala/snunit/snunit.dockerfile b/frameworks/Scala/snunit/snunit.dockerfile index 41d205cc2a6..b2a19f885cd 100644 --- a/frameworks/Scala/snunit/snunit.dockerfile +++ b/frameworks/Scala/snunit/snunit.dockerfile @@ -6,7 +6,7 @@ RUN apt-get update && apt-get install -y curl gnupg && \ curl -sL https://nginx.org/keys/nginx_signing.key | apt-key add - && \ echo "deb https://packages.nginx.org/unit/debian/ bullseye unit" > /etc/apt/sources.list.d/unit.list && \ echo "deb-src https://packages.nginx.org/unit/debian/ bullseye unit" >> /etc/apt/sources.list.d/unit.list && \ - apt-get update && apt-get install -y clang unit-dev libuv1-dev openjdk-11-jdk sbt && \ + apt-get update && apt-get install -y clang unit-dev openjdk-11-jdk sbt && \ apt-get purge -y gnupg WORKDIR /workdir @@ -15,9 +15,7 @@ COPY . . RUN sbt nativeLink -FROM nginx/unit:1.26.1-minimal - -RUN apt-get update && apt-get install -y libuv1 +FROM nginx/unit:1.27.0-minimal COPY /config.sh /docker-entrypoint.d/ COPY --from=builder /workdir/target/scala-2.13/workdir-out /app/example diff --git a/frameworks/Scala/snunit/src/main/scala/Main.scala b/frameworks/Scala/snunit/src/main/scala/Main.scala index cf019e7ac7c..c49e56a4b19 100644 --- a/frameworks/Scala/snunit/src/main/scala/Main.scala +++ b/frameworks/Scala/snunit/src/main/scala/Main.scala @@ -9,8 +9,8 @@ object Message { object Main { def main(args: Array[String]): Unit = { - AsyncServerBuilder() - .withRequestHandler(req => + val server = SyncServerBuilder + .build(req => if (req.method == Method.GET && req.path == "/plaintext") req.send( statusCode = StatusCode.OK, @@ -20,8 +20,8 @@ object Main { else if (req.method == Method.GET && req.path == "/json") req.send( statusCode = StatusCode.OK, - content = stream(Message("Hello, World!")), - headers = Seq.empty + content = write(Message("Hello, World!")), + headers = Seq("Content-Type" -> "application/json") ) else req.send( @@ -30,6 +30,7 @@ object Main { headers = Seq("Content-Type" -> "text/plain") ) ) - .build() + + server.listen() } } diff --git a/frameworks/Scala/vertx-web-scala/vertx-web-scala.dockerfile b/frameworks/Scala/vertx-web-scala/vertx-web-scala.dockerfile index 2a1a4bbdfe8..b77f9a91083 100644 --- a/frameworks/Scala/vertx-web-scala/vertx-web-scala.dockerfile +++ b/frameworks/Scala/vertx-web-scala/vertx-web-scala.dockerfile @@ -2,13 +2,12 @@ FROM openjdk:11-jdk ARG SBT_VERSION=1.2.8 -RUN \ - curl -L -o sbt-$SBT_VERSION.deb https://dl.bintray.com/sbt/debian/sbt-$SBT_VERSION.deb && \ - dpkg -i sbt-$SBT_VERSION.deb && \ - rm sbt-$SBT_VERSION.deb && \ - apt-get update && \ - apt-get install sbt && \ - sbt sbtVersion +# Install sbt 
+RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian all main" | tee /etc/apt/sources.list.d/sbt.list +RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian /" | tee /etc/apt/sources.list.d/sbt_old.list +RUN curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | apt-key add +RUN apt-get update -yqq +RUN apt-get install -yqq sbt WORKDIR /vertx COPY src src diff --git a/frameworks/Swift/hummingbird-core/hummingbird-core.dockerfile b/frameworks/Swift/hummingbird-core/hummingbird-core.dockerfile index 73d8591eef4..44ddfac32af 100644 --- a/frameworks/Swift/hummingbird-core/hummingbird-core.dockerfile +++ b/frameworks/Swift/hummingbird-core/hummingbird-core.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile b/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile index f22bcb21289..521ea653436 100644 --- a/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile +++ b/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/hummingbird/hummingbird.dockerfile b/frameworks/Swift/hummingbird/hummingbird.dockerfile index 73d8591eef4..44ddfac32af 100644 --- a/frameworks/Swift/hummingbird/hummingbird.dockerfile +++ b/frameworks/Swift/hummingbird/hummingbird.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/hummingbird/src-postgres/Package.swift b/frameworks/Swift/hummingbird/src-postgres/Package.swift index add47c2e038..242105acfbb 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Package.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Package.swift @@ -1,4 +1,4 @@ -// swift-tools-version:5.3 +// swift-tools-version:5.5 // The swift-tools-version declares the minimum version of Swift required to build this package. 
import PackageDescription @@ -10,17 +10,17 @@ let package = Package( .executable(name: "server", targets: ["server"]) ], dependencies: [ - .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.13.1")), + .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.16.0")), .package(url: "https://github.com/hummingbird-project/hummingbird-mustache.git", from: "1.0.1"), - .package(url: "https://github.com/vapor/postgres-kit.git", from: "2.3.0"), + .package(url: "https://github.com/vapor/postgres-nio.git", from: "1.8.0"), ], targets: [ - .target(name: "server", + .executableTarget(name: "server", dependencies: [ .product(name: "Hummingbird", package: "hummingbird"), .product(name: "HummingbirdFoundation", package: "hummingbird"), .product(name: "HummingbirdMustache", package: "hummingbird-mustache"), - .product(name: "PostgresKit", package: "postgres-kit"), + .product(name: "PostgresNIO", package: "postgres-nio"), ], swiftSettings: [ // Enable better optimizations when building in Release configuration. Despite the use of diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift index 18c5d63597b..0fb74080e19 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift @@ -1,6 +1,6 @@ import Hummingbird import HummingbirdMustache -import PostgresKit +import PostgresNIO struct HTML: HBResponseGenerator { let html: String @@ -11,9 +11,11 @@ struct HTML: HBResponseGenerator { } class FortunesController { + let connectionPoolGroup: HBConnectionPoolGroup let template: HBMustacheTemplate - init() { + init(connectionPoolGroup: HBConnectionPoolGroup) { + self.connectionPoolGroup = connectionPoolGroup self.template = try! HBMustacheTemplate(string: """ Fortunes{{#.}}{{/.}}
<tr><th>id</th><th>message</th></tr>
<tr><td>{{id}}</td><td>{{message}}</td></tr>
""") @@ -24,16 +26,20 @@ class FortunesController { } func fortunes(request: HBRequest) -> EventLoopFuture { - return request.db.query("SELECT id, message FROM Fortune").map { results in - var fortunes = results.map { - return Fortune( - id: $0.column("id")?.int32, - message: $0.column("message")?.string ?? "" - ) + return self.connection(for: request) { connection in + return connection.query("SELECT id, message FROM Fortune") + }.flatMapThrowing { results in + var fortunes = try results.map { result -> Fortune in + let decoded = try result.decode((Int32, String).self, context: .default) + return Fortune(id: decoded.0, message: decoded.1) } fortunes.append(.init(id: 0, message: "Additional fortune added at request time.")) let sortedFortunes = fortunes.sorted { $0.message < $1.message } return HTML(html: self.template.render(sortedFortunes) ) } } + + @discardableResult func connection(for request: HBRequest, closure: @escaping (PostgresConnection) -> EventLoopFuture) -> EventLoopFuture { + return self.connectionPoolGroup.lease(on: request.eventLoop, logger: request.logger, process: closure) + } } diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/WorldController.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/WorldController.swift index 8b244b85393..d656d64e6e6 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/WorldController.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/WorldController.swift @@ -1,7 +1,9 @@ import Hummingbird -import PostgresKit +import PostgresNIO + +struct WorldController { + let connectionPoolGroup: HBConnectionPoolGroup -class WorldController { func add(to router: HBRouter) { router.get("db", use: single) router.get("queries", use: multiple) @@ -10,14 +12,14 @@ class WorldController { func single(request: HBRequest) -> EventLoopFuture { let id = Int32.random(in: 1...10_000) - return request.db.query("SELECT id, randomnumber FROM World WHERE id = $1", [ - PostgresData(int32: id) - ]).flatMapThrowing { result -> World in + return self.connection(for: request) { connection in + return connection.query("SELECT id, randomnumber FROM World WHERE id = $1", [ + PostgresData(int32: id) + ]) + }.flatMapThrowing { result -> World in guard let firstResult = result.first else { throw HBHTTPError(.notFound) } - return World( - id: id, - randomNumber: firstResult.column("randomnumber")?.int32 ?? 0 - ) + let result = try firstResult.decode((Int32, Int32).self, context: .default) + return World(id: result.0, randomNumber: result.1) } } @@ -25,14 +27,14 @@ class WorldController { let queries = (request.uri.queryParameters.get("queries", as: Int.self) ?? 1).bound(1, 500) let futures: [EventLoopFuture] = (0 ..< queries).map { _ -> EventLoopFuture in let id = Int32.random(in: 1...10_000) - return request.db.query("SELECT id, randomnumber FROM World WHERE id = $1", [ - PostgresData(int32: id) - ]).flatMapThrowing { result -> World in + return self.connection(for: request) { connection in + return connection.query("SELECT id, randomnumber FROM World WHERE id = $1", [ + PostgresData(int32: id) + ]) + }.flatMapThrowing { result -> World in guard let firstResult = result.first else { throw HBHTTPError(.notFound) } - return World( - id: id, - randomNumber: firstResult.column("randomnumber")?.int32 ?? 
0 - ) + let result = try firstResult.decode((Int32, Int32).self, context: .default) + return World(id: result.0, randomNumber: result.1) } } return EventLoopFuture.whenAllSucceed(futures, on: request.eventLoop) @@ -44,17 +46,23 @@ class WorldController { let futures: [EventLoopFuture] = ids.map { _ -> EventLoopFuture in let id = Int32.random(in: 1...10_000) let randomNumber = Int32.random(in: 1...10_000) - return request.db.query("SELECT id, randomnumber FROM World WHERE id = $1", [ - PostgresData(int32: id) - ]).flatMap { result in - return request.db.query("UPDATE World SET randomnumber = $1 WHERE id = $2", [ - PostgresData(int32: randomNumber), + return self.connection(for: request) { connection in + return connection.query("SELECT id, randomnumber FROM World WHERE id = $1", [ PostgresData(int32: id) - ]) + ]).flatMap { result in + return connection.query("UPDATE World SET randomnumber = $1 WHERE id = $2", [ + PostgresData(int32: randomNumber), + PostgresData(int32: id) + ]) + } }.map { _ in return World(id: id, randomNumber: randomNumber) } } return EventLoopFuture.whenAllSucceed(futures, on: request.eventLoop) } + + @discardableResult func connection(for request: HBRequest, closure: @escaping (PostgresConnection) -> EventLoopFuture) -> EventLoopFuture { + return self.connectionPoolGroup.lease(on: request.eventLoop, logger: request.logger, process: closure) + } } diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/Fortune.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/Fortune.swift index f664e04dcb2..e83dfc5f976 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/Fortune.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/Fortune.swift @@ -2,7 +2,7 @@ import Hummingbird import HummingbirdMustache struct Fortune: HBResponseEncodable { - var id: Int32? + var id: Int32 var message: String } diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/World.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/World.swift index a9b795891ee..fb2d3a14ac9 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/World.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Models/World.swift @@ -1,7 +1,7 @@ import Hummingbird struct World: HBResponseEncodable { - var id: Int32? 
+ var id: Int32 var randomNumber: Int32 } diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/PostgresConnectionSource.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/PostgresConnectionSource.swift new file mode 100644 index 00000000000..fcbc6466a36 --- /dev/null +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/PostgresConnectionSource.swift @@ -0,0 +1,25 @@ +import Hummingbird +import Logging +import PostgresNIO + +extension PostgresConnection: HBConnection { + public func close(on eventLoop: EventLoop) -> EventLoopFuture { + return close().hop(to: eventLoop) + } +} + +struct PostgresConnectionSource: HBConnectionSource { + typealias Connection = PostgresConnection + + let configuration: PostgresConnection.Configuration + + init(configuration: PostgresConnection.Configuration) { + self.configuration = configuration + } + + func makeConnection(on eventLoop: EventLoop, logger: Logger) -> EventLoopFuture { + let connection = PostgresConnection.connect(on: eventLoop, configuration: self.configuration, id: 0, logger: logger) + return connection + } +} + diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/database.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/database.swift deleted file mode 100644 index 626dbb386f8..00000000000 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/database.swift +++ /dev/null @@ -1,33 +0,0 @@ -import Hummingbird -import PostgresKit - -// tfb-server (aka, citrine) uses 28 hyper-threaded cores -// postgresql.conf specifies max_connections = 2000 -// -// 2000 / (28 * 2) = 35.7 (theoretical max) -// -// https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Environment#citrine-self-hosted -// https://github.com/TechEmpower/FrameworkBenchmarks/blob/master/toolset/databases/postgres/postgresql.conf#L64 -let maxConnectionsPerEventLoop = 32 -var connectionPool: EventLoopGroupConnectionPool! 
- -extension HBApplication { - func initConnectionPool() { - connectionPool = EventLoopGroupConnectionPool( - source: PostgresConnectionSource(configuration: .init( - hostname: "tfb-database", - username: "benchmarkdbuser", - password: "benchmarkdbpass", - database: "hello_world" - )), - maxConnectionsPerEventLoop: maxConnectionsPerEventLoop, - on: self.eventLoopGroup - ) - } -} - -extension HBRequest { - var db: PostgresDatabase { - connectionPool.pool(for: self.eventLoop).database(logger: self.logger) - } -} diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift index 932ba72c45d..f24b190303d 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift @@ -1,6 +1,15 @@ import Hummingbird import HummingbirdFoundation -import PostgresKit +import PostgresNIO + +// tfb-server (aka, citrine) uses 28 hyper-threaded cores +// postgresql.conf specifies max_connections = 2000 +// +// 2000 / (28 * 2) = 35.7 (theoretical max) +// +// https://github.com/TechEmpower/FrameworkBenchmarks/wiki/Project-Information-Environment#citrine-self-hosted +// https://github.com/TechEmpower/FrameworkBenchmarks/blob/master/toolset/databases/postgres/postgresql.conf#L64 +let maxConnectionsPerEventLoop = 32 extension Int { func bound(_ minValue: Int, _ maxValue: Int) -> Int { @@ -8,6 +17,16 @@ extension Int { } } +extension HBApplication { + var postgresConnectionGroup: HBConnectionPoolGroup { + get { self.extensions.get(\.postgresConnectionGroup) } + set { + self.extensions.set(\.postgresConnectionGroup, value: newValue) { group in + try group.close().wait() + } + } + } +} func runApp() throws { let env = HBEnvironment() let serverHostName = env.get("SERVER_HOSTNAME") ?? 
"127.0.0.1" @@ -19,10 +38,22 @@ func runApp() throws { ) let app = HBApplication(configuration: configuration) app.encoder = JSONEncoder() - app.initConnectionPool() - - WorldController().add(to: app.router) - FortunesController().add(to: app.router) + + app.postgresConnectionGroup = .init( + source: .init( + configuration: .init( + connection: .init(host: "tfb-database"), + authentication: .init(username: "benchmarkdbuser", database: "hello_world", password: "benchmarkdbpass"), + tls: .disable + ) + ), + maxConnections: maxConnectionsPerEventLoop, + eventLoopGroup: app.eventLoopGroup, + logger: app.logger + ) + + WorldController(connectionPoolGroup: app.postgresConnectionGroup).add(to: app.router) + FortunesController(connectionPoolGroup: app.postgresConnectionGroup).add(to: app.router) try app.start() app.wait() diff --git a/frameworks/Swift/swift-nio/swift-nio.dockerfile b/frameworks/Swift/swift-nio/swift-nio.dockerfile index 95763a3e925..2fa664e8d27 100644 --- a/frameworks/Swift/swift-nio/swift-nio.dockerfile +++ b/frameworks/Swift/swift-nio/swift-nio.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Install Swift dependencies diff --git a/frameworks/Swift/vapor/vapor-fluent.dockerfile b/frameworks/Swift/vapor/vapor-fluent.dockerfile index 75c7382b9e6..67fe8a7e602 100644 --- a/frameworks/Swift/vapor/vapor-fluent.dockerfile +++ b/frameworks/Swift/vapor/vapor-fluent.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor-mongo-fluent.dockerfile b/frameworks/Swift/vapor/vapor-mongo-fluent.dockerfile index 71045d71f61..15d7398e5a0 100644 --- a/frameworks/Swift/vapor/vapor-mongo-fluent.dockerfile +++ b/frameworks/Swift/vapor/vapor-mongo-fluent.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor-mongo.dockerfile b/frameworks/Swift/vapor/vapor-mongo.dockerfile index 84ed85979c3..1444206913b 100644 --- a/frameworks/Swift/vapor/vapor-mongo.dockerfile +++ b/frameworks/Swift/vapor/vapor-mongo.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -18,7 +18,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run RUN apt update diff --git a/frameworks/Swift/vapor/vapor-postgres.dockerfile 
b/frameworks/Swift/vapor/vapor-postgres.dockerfile index ab4993c6d64..849ed61c864 100644 --- a/frameworks/Swift/vapor/vapor-postgres.dockerfile +++ b/frameworks/Swift/vapor/vapor-postgres.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor-sql-kit.dockerfile b/frameworks/Swift/vapor/vapor-sql-kit.dockerfile index 4198cc3cc5b..582fdc90c68 100644 --- a/frameworks/Swift/vapor/vapor-sql-kit.dockerfile +++ b/frameworks/Swift/vapor/vapor-sql-kit.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor.dockerfile b/frameworks/Swift/vapor/vapor.dockerfile index 5175552c9a6..931f5a37d3b 100644 --- a/frameworks/Swift/vapor/vapor.dockerfile +++ b/frameworks/Swift/vapor/vapor.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM swift:5.5 as build +FROM swift:5.6-focal as build WORKDIR /build # Copy entire repo into container @@ -15,7 +15,7 @@ RUN swift build \ # ================================ # Run image # ================================ -FROM swift:5.5-slim +FROM swift:5.6-focal-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/TypeScript/feathersjs/README.md b/frameworks/TypeScript/feathersjs/README.md new file mode 100755 index 00000000000..efe065524b6 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/README.md @@ -0,0 +1,40 @@ +# FeathersJS Benchmarking Test + +### Test Type Implementation Source Code + +* [JSON](src/services/json/json.service.ts) +* [PLAINTEXT](src/services/plaintext/plaintext.service.ts) +* [DB](src/services/world/world.service.ts) +* [QUERY](src/services/world/world.service.ts) +* [UPDATE](src/services/world/world.service.ts) +* [FORTUNES](src/services/fortune/fortune.service.ts) + +## Important Libraries +The tests were run with: +* [Feathers](https://github.com/feathersjs/feathers) +* [Feathers-Sequelize](https://github.com/feathersjs-ecosystem/feathers-sequelize) + +## Test URLs +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext + +### DB + +http://localhost:8080/db + +### QUERY + +http://localhost:8080/query?queries= + +### UPDATE + +http://localhost:8080/update?queries= + +### FORTUNES + +http://localhost:8080/fortunes diff --git a/frameworks/TypeScript/feathersjs/benchmark_config.json b/frameworks/TypeScript/feathersjs/benchmark_config.json new file mode 100755 index 00000000000..5d445be8601 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/benchmark_config.json @@ -0,0 +1,30 @@ +{ + "framework": "feathersjs", + "tests": [ + { + "default": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + "plaintext_url": "/plaintext", + "port": 8080, + 
"approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "FeathersJS", + "language": "TypeScript", + "flavor": "None", + "orm": "Full", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "FeathersJS", + "notes": "", + "versus": "nodejs" + } + } + ] +} diff --git a/frameworks/TypeScript/feathersjs/config/default.json b/frameworks/TypeScript/feathersjs/config/default.json new file mode 100644 index 00000000000..f7e1800949b --- /dev/null +++ b/frameworks/TypeScript/feathersjs/config/default.json @@ -0,0 +1,10 @@ +{ + "host": "localhost", + "port": 8080, + "public": "../public/", + "paginate": { + "default": 10, + "max": 50 + }, + "postgres": "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world" +} diff --git a/frameworks/TypeScript/feathersjs/feathersjs.dockerfile b/frameworks/TypeScript/feathersjs/feathersjs.dockerfile new file mode 100644 index 00000000000..e09b70e08b8 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/feathersjs.dockerfile @@ -0,0 +1,13 @@ +FROM node:16 + +COPY ./ ./ + +ENV NODE_ENV development + +RUN npm install + +ENV DATABASE_CONFIGURATION_PROFILE postgres +ENV FRAMEWORK express + +EXPOSE 8080 +CMD ["npm", "run", "start"] \ No newline at end of file diff --git a/frameworks/TypeScript/feathersjs/package.json b/frameworks/TypeScript/feathersjs/package.json new file mode 100644 index 00000000000..8d59da2bded --- /dev/null +++ b/frameworks/TypeScript/feathersjs/package.json @@ -0,0 +1,63 @@ +{ + "name": "feathers", + "description": "FeathersJS App", + "version": "0.0.0", + "main": "src", + "author": "", + "directories": { + "lib": "src", + "test": "test/", + "config": "config/" + }, + "engines": { + "node": "^16.0.0", + "npm": ">= 8.0.0" + }, + "scripts": { + "test": "npm run lint && npm run compile && npm run jest", + "lint": "eslint src/. test/. --config .eslintrc.json --ext .ts --fix", + "dev": "ts-node-dev --no-notify src/", + "start": "ts-node src/index", + "jest": "jest --forceExit", + "compile": "shx rm -rf lib/ && tsc" + }, + "types": "lib/", + "dependencies": { + "@feathersjs/configuration": "^4.5.12", + "@feathersjs/errors": "^4.5.12", + "@feathersjs/express": "^4.5.12", + "@feathersjs/feathers": "^4.5.12", + "@feathersjs/transport-commons": "^4.5.12", + "@types/express": "^4.17.13", + "compression": "^1.7.4", + "cors": "^2.8.5", + "feathers-memory": "^4.1.0", + "feathers-sequelize": "^6.3.2", + "helmet": "^4.6.0", + "mysql2": "^2.3.3", + "pg": "^8.7.3", + "pg-hstore": "^2.3.4", + "pug": "^3.0.2", + "sequelize": "^6.15.1", + "serve-favicon": "^2.5.0", + "ts-node": "^7.0.1", + "typescript": "^4.5.5", + "winston": "^3.5.1" + }, + "devDependencies": { + "@types/bluebird": "^3.5.36", + "@types/compression": "^1.7.2", + "@types/cors": "^2.8.12", + "@types/jest": "^27.0.2", + "@types/serve-favicon": "^2.5.3", + "@types/validator": "^10.11.3", + "@typescript-eslint/eslint-plugin": "^5.10.2", + "@typescript-eslint/parser": "^5.10.2", + "axios": "^0.25.0", + "eslint": "^8.8.0", + "jest": "^27.3.1", + "shx": "^0.3.4", + "ts-jest": "^27.1.3", + "ts-node-dev": "^1.1.8" + } +} diff --git a/frameworks/TypeScript/feathersjs/src/app.hooks.ts b/frameworks/TypeScript/feathersjs/src/app.hooks.ts new file mode 100644 index 00000000000..1be533832d0 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/app.hooks.ts @@ -0,0 +1,34 @@ +// Application hooks that run for every service +// Don't remove this comment. It's needed to format import lines nicely. 
+ +export default { + before: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + after: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + error: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + } +}; diff --git a/frameworks/TypeScript/feathersjs/src/app.ts b/frameworks/TypeScript/feathersjs/src/app.ts new file mode 100644 index 00000000000..a01ada71108 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/app.ts @@ -0,0 +1,64 @@ +import path from 'path'; +import favicon from 'serve-favicon'; +import compress from 'compression'; +import helmet from 'helmet'; +import cors from 'cors'; +import { Request, Response, NextFunction } from 'express'; + +import feathers from '@feathersjs/feathers'; +import configuration from '@feathersjs/configuration'; +import express from '@feathersjs/express'; + + + +import { Application } from './declarations'; +import logger from './logger'; +import middleware from './middleware'; +import services from './services'; +import appHooks from './app.hooks'; +import channels from './channels'; +import { HookContext as FeathersHookContext } from '@feathersjs/feathers'; +import sequelize from './sequelize'; +// Don't remove this comment. It's needed to format import lines nicely. + +const app: Application = express(feathers()); +export type HookContext = { app: Application } & FeathersHookContext; + +app.set('view engine', 'pug'); + +// Load app configuration +app.configure(configuration()); +// Enable security, CORS, compression, favicon and body parsing +app.use(helmet({ + contentSecurityPolicy: false +})); +app.use(cors()); +app.use(compress()); +app.use(express.json()); +app.use(express.urlencoded({ extended: true })); +app.use((req: Request, res: Response, next: NextFunction) => { + res.setHeader('Server', 'FeathersJS'); + next(); +}); + +// Set up Plugins and providers +app.configure(express.rest()); + + +app.configure(sequelize); + + +// Configure other middleware (see `middleware/index.ts`) +app.configure(middleware); +// Set up our services (see `services/index.ts`) +app.configure(services); +// Set up event channels (see channels.ts) +app.configure(channels); + +// Configure a middleware for 404s and the error handler +app.use(express.notFound()); +app.use(express.errorHandler({ logger } as any)); + +app.hooks(appHooks); + +export default app; diff --git a/frameworks/TypeScript/feathersjs/src/channels.ts b/frameworks/TypeScript/feathersjs/src/channels.ts new file mode 100644 index 00000000000..687c756ed99 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/channels.ts @@ -0,0 +1,65 @@ +import '@feathersjs/transport-commons'; +import { HookContext } from '@feathersjs/feathers'; +import { Application } from './declarations'; + +export default function(app: Application): void { + if(typeof app.channel !== 'function') { + // If no real-time functionality has been configured just return + return; + } + + app.on('connection', (connection: any): void => { + // On a new real-time connection, add it to the anonymous channel + app.channel('anonymous').join(connection); + }); + + app.on('login', (authResult: any, { connection }: any): void => { + // connection can be undefined if there is no + // real-time connection, e.g. 
when logging in via REST + if(connection) { + // Obtain the logged in user from the connection + // const user = connection.user; + + // The connection is no longer anonymous, remove it + app.channel('anonymous').leave(connection); + + // Add it to the authenticated user channel + app.channel('authenticated').join(connection); + + // Channels can be named anything and joined on any condition + + // E.g. to send real-time events only to admins use + // if(user.isAdmin) { app.channel('admins').join(connection); } + + // If the user has joined e.g. chat rooms + // if(Array.isArray(user.rooms)) user.rooms.forEach(room => app.channel(`rooms/${room.id}`).join(connection)); + + // Easily organize users by email and userid for things like messaging + // app.channel(`emails/${user.email}`).join(connection); + // app.channel(`userIds/${user.id}`).join(connection); + } + }); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + app.publish((data: any, hook: HookContext) => { + // Here you can add event publishers to channels set up in `channels.ts` + // To publish only for a specific event use `app.publish(eventname, () => {})` + + console.log('Publishing all events to all authenticated users. See `channels.ts` and https://docs.feathersjs.com/api/channels.html for more information.'); // eslint-disable-line + + // e.g. to publish all service events to all authenticated users use + return app.channel('authenticated'); + }); + + // Here you can also add service specific event publishers + // e.g. the publish the `users` service `created` event to the `admins` channel + // app.service('users').publish('created', () => app.channel('admins')); + + // With the userid and email organization from above you can easily select involved users + // app.service('messages').publish(() => { + // return [ + // app.channel(`userIds/${data.createdBy}`), + // app.channel(`emails/${data.recipientEmail}`) + // ]; + // }); +} diff --git a/frameworks/TypeScript/feathersjs/src/declarations.d.ts b/frameworks/TypeScript/feathersjs/src/declarations.d.ts new file mode 100644 index 00000000000..56550834c63 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/declarations.d.ts @@ -0,0 +1,6 @@ +import { Application as ExpressFeathers } from '@feathersjs/express'; + +// A mapping of service names to types. Will be extended in service files. 
+export interface ServiceTypes {} +// The application instance type that will be used everywhere else +export type Application = ExpressFeathers; diff --git a/frameworks/TypeScript/feathersjs/src/index.ts b/frameworks/TypeScript/feathersjs/src/index.ts new file mode 100644 index 00000000000..9baa56715f2 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/index.ts @@ -0,0 +1,30 @@ +import cluster from 'cluster'; +import os from 'os'; + +import logger from './logger'; +import app from './app'; + +if (cluster.isPrimary) { + const cpuCount: os.CpuInfo[] = os.cpus(); + + for (const cpu of cpuCount) { + cluster.fork(); + } + + cluster.on('exit', () => { + process.exit(1); + }); +} else { + + const port = app.get('port'); + const server = app.listen(port); + + process.on('unhandledRejection', (reason, p) => + logger.error('Unhandled Rejection at: Promise ', p, reason) + ); + + server.on('listening', () => + logger.info('Feathers application started on http://%s:%d', app.get('host'), port) + ); +} + diff --git a/frameworks/TypeScript/feathersjs/src/logger.ts b/frameworks/TypeScript/feathersjs/src/logger.ts new file mode 100644 index 00000000000..739c222b018 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/logger.ts @@ -0,0 +1,16 @@ +import { createLogger, format, transports } from 'winston'; + +// Configure the Winston logger. For the complete documentation see https://github.com/winstonjs/winston +const logger = createLogger({ + // To see more detailed errors, change this to 'debug' + level: 'info', + format: format.combine( + format.splat(), + format.simple() + ), + transports: [ + new transports.Console() + ], +}); + +export default logger; diff --git a/frameworks/TypeScript/feathersjs/src/middleware/index.ts b/frameworks/TypeScript/feathersjs/src/middleware/index.ts new file mode 100644 index 00000000000..e78268375c0 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/middleware/index.ts @@ -0,0 +1,6 @@ +import { Application } from '../declarations'; +// Don't remove this comment. It's needed to format import lines nicely. + +// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function +export default function (app: Application): void { +} diff --git a/frameworks/TypeScript/feathersjs/src/models/fortune.model.ts b/frameworks/TypeScript/feathersjs/src/models/fortune.model.ts new file mode 100644 index 00000000000..080b04ff7b3 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/models/fortune.model.ts @@ -0,0 +1,33 @@ +// See https://sequelize.org/master/manual/model-basics.html +// for more of what you can do here. 
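The entry point above (`src/index.ts`) forks one worker per CPU core with Node's cluster module and exits if any worker dies; the same `cluster.isPrimary` API is what the NestJS `main.ts` change later in this diff switches to, since it replaces the `isMaster` name deprecated in Node 16. A condensed sketch of the pattern, with `startServer` standing in for the `app.listen(port)` call in `src/index.ts`:

```ts
import cluster from 'cluster';
import os from 'os';

// Placeholder for the real worker start-up (app.listen(port) in src/index.ts).
declare function startServer(): void;

if (cluster.isPrimary) {
  // One worker per available CPU core.
  for (const _cpu of os.cpus()) {
    cluster.fork();
  }
  // If any worker exits, bring the whole process down so the toolset notices.
  cluster.on('exit', () => process.exit(1));
} else {
  startServer();
}
```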
+import { Sequelize, DataTypes, Model } from 'sequelize'; +import { Application } from '../declarations'; + +export interface FortuneModel { + id: number + message: string +} + +export default function (app: Application): typeof Model { + const sequelizeClient: Sequelize = app.get('sequelizeClient'); + const fortune = sequelizeClient.define('fortune', { + message: { + type: DataTypes.STRING, + allowNull: false + } + }, { + hooks: { + beforeCount(options: any): any { + options.raw = true; + } + } + }); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (fortune as any).associate = function (models: any): void { + // Define associations here + // See https://sequelize.org/master/manual/assocs.html + }; + + return fortune; +} diff --git a/frameworks/TypeScript/feathersjs/src/models/world.model.ts b/frameworks/TypeScript/feathersjs/src/models/world.model.ts new file mode 100644 index 00000000000..e09e275244d --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/models/world.model.ts @@ -0,0 +1,33 @@ +// See https://sequelize.org/master/manual/model-basics.html +// for more of what you can do here. +import { Sequelize, DataTypes, Model } from 'sequelize'; +import { Application } from '../declarations'; + +export interface WorldModel { + id: number + randomnumber: number +} + +export default function (app: Application): typeof Model { + const sequelizeClient: Sequelize = app.get('sequelizeClient'); + const world = sequelizeClient.define('world', { + randomnumber: { + type: DataTypes.INTEGER, + allowNull: false + } + }, { + hooks: { + beforeCount(options: any): any { + options.raw = true; + } + } + }); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (world as any).associate = function (models: any): void { + // Define associations here + // See https://sequelize.org/master/manual/assocs.html + }; + + return world; +} diff --git a/frameworks/TypeScript/feathersjs/src/sequelize.ts b/frameworks/TypeScript/feathersjs/src/sequelize.ts new file mode 100644 index 00000000000..5e8bfd807a7 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/sequelize.ts @@ -0,0 +1,35 @@ +import { Sequelize } from 'sequelize'; +import { Application } from './declarations'; +import { randInt } from './util'; + +export default function (app: Application): void { + const connectionString = app.get('postgres'); + const sequelize = new Sequelize(connectionString, { + dialect: 'postgres', + logging: false, + define: { + freezeTableName: true, + timestamps: false + } + }); + const oldSetup = app.setup; + + app.set('sequelizeClient', sequelize); + + app.setup = function (...args): Application { + const result = oldSetup.apply(this, args); + + // Set up data relationships + const models = sequelize.models; + Object.keys(models).forEach(name => { + if ('associate' in models[name]) { + (models[name] as any).associate(models); + } + }); + + // Sync to the database + app.set('sequelizeSync', sequelize.sync()); + + return result; + }; +} diff --git a/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.class.ts b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.class.ts new file mode 100644 index 00000000000..1c7ad500a48 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.class.ts @@ -0,0 +1,23 @@ +import { Service, SequelizeServiceOptions } from 'feathers-sequelize'; +import { Application } from '../../declarations'; +import { FortuneModel } from '../../models/fortune.model'; + +export class Fortune extends Service { + 
//eslint-disable-next-line @typescript-eslint/no-unused-vars + constructor(options: Partial, app: Application) { + super(options); + } + + async getFortunes() { + const fortunes = await this.find(); + + fortunes.push({ + id: 0, + message: 'Additional fortune added at request time.', + }); + + fortunes.sort((f, s) => (f.message < s.message ? -1 : 1)); + + return fortunes; + } +} diff --git a/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.hooks.ts b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.hooks.ts new file mode 100644 index 00000000000..3ea8af6930e --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.hooks.ts @@ -0,0 +1,33 @@ +import { HooksObject } from '@feathersjs/feathers'; + +export default { + before: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + after: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + error: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + } +}; diff --git a/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.service.ts b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.service.ts new file mode 100644 index 00000000000..dec3ece99cd --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/fortune/fortune.service.ts @@ -0,0 +1,33 @@ +// Initializes the `fortune` service on path `/fortune` +import { ServiceAddons } from '@feathersjs/feathers'; +import { Application } from '../../declarations'; +import { Request, Response } from 'express'; +import { Fortune } from './fortune.class'; +import createModel from '../../models/fortune.model'; +import hooks from './fortune.hooks'; + +// Add this service to the service type index +declare module '../../declarations' { + interface ServiceTypes { + 'fortune': Fortune & ServiceAddons; + } +} + +export default function (app: Application): void { + const options = { + Model: createModel(app) + }; + + // Initialize our service with any options it requires + app.use('/fortune', new Fortune(options, app)); + + app.get('/fortunes', async (req: Request, res: Response) => { + const fortunes = await app.service('fortune').getFortunes(); + res.render('fortunes', { fortunes }); + }); + + // Get our initialized service so that we can register hooks + const service = app.service('fortune'); + + service.hooks(hooks); +} diff --git a/frameworks/TypeScript/feathersjs/src/services/index.ts b/frameworks/TypeScript/feathersjs/src/services/index.ts new file mode 100644 index 00000000000..ce274655686 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/index.ts @@ -0,0 +1,18 @@ +import { Application } from '../declarations'; + +import plaintext from './plaintext/plaintext.service'; + +import json from './json/json.service'; + +import fortune from './fortune/fortune.service'; + +import world from './world/world.service'; + +// Don't remove this comment. It's needed to format import lines nicely. 
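The `Fortune` class above extends feathers-sequelize's `Service`, whose published v6 typings take the record type as a generic parameter and accept `Partial<SequelizeServiceOptions>` in the constructor. A sketch of the class declaration under that assumption (the generic arguments are inferred from those typings, not taken from this diff; the `World` service follows the same shape):

```ts
import { Service, SequelizeServiceOptions } from 'feathers-sequelize';
import { Application } from '../../declarations';
import { FortuneModel } from '../../models/fortune.model';

// Assumption: feathers-sequelize v6 typings, where Service<T> is parameterized
// over the record type and its options are Partial<SequelizeServiceOptions>.
export class Fortune extends Service<FortuneModel> {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  constructor(options: Partial<SequelizeServiceOptions>, app: Application) {
    super(options);
  }
}
```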
+ +export default function (app: Application): void { + app.configure(plaintext); + app.configure(json); + app.configure(fortune); + app.configure(world); +} diff --git a/frameworks/TypeScript/feathersjs/src/services/json/json.service.ts b/frameworks/TypeScript/feathersjs/src/services/json/json.service.ts new file mode 100644 index 00000000000..a7ad503be15 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/json/json.service.ts @@ -0,0 +1,17 @@ +// Initializes the `json` service on path `/json` +import { ServiceAddons } from '@feathersjs/feathers'; +import { Application } from '../../declarations'; +import { Request, Response } from 'express'; + +// Add this service to the service type index +declare module '../../declarations' { + interface ServiceTypes { + 'json': ServiceAddons; + } +} + +export default function (app: Application): void { + app.get('/json', (req: Request, res: Response) => { + res.json({ message: 'Hello, World!' }); + }); +} diff --git a/frameworks/TypeScript/feathersjs/src/services/plaintext/plaintext.service.ts b/frameworks/TypeScript/feathersjs/src/services/plaintext/plaintext.service.ts new file mode 100644 index 00000000000..e51501e5d2a --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/plaintext/plaintext.service.ts @@ -0,0 +1,18 @@ +// Initializes the `plaintext` service on path `/plaintext` +import { ServiceAddons } from '@feathersjs/feathers'; +import { Application } from '../../declarations'; +import { Request, Response } from 'express'; + +// Add this service to the service type index +declare module '../../declarations' { + interface ServiceTypes { + 'plaintext': ServiceAddons; + } +} + +export default function (app: Application): void { + app.get('/plaintext', (req: Request, res: Response) => { + res.setHeader('Content-Type', 'text/plain'); + res.send('Hello, World!'); + }); +} diff --git a/frameworks/TypeScript/feathersjs/src/services/world/world.class.ts b/frameworks/TypeScript/feathersjs/src/services/world/world.class.ts new file mode 100644 index 00000000000..4a226e66471 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/world/world.class.ts @@ -0,0 +1,42 @@ +import { Service, SequelizeServiceOptions } from 'feathers-sequelize'; +import { Application } from '../../declarations'; +import { WorldModel } from '../../models/world.model'; +import { randInt } from '../../util'; + +export class World extends Service { + //eslint-disable-next-line @typescript-eslint/no-unused-vars + constructor(options: Partial, app: Application) { + super(options); + } + + async findRandom(): Promise { + return this.get(randInt()); + } + + async findMultiple(count: number): Promise { + const worldPromises: Promise[] = []; + + for (let i = 0 ; i < count ; i++) { + worldPromises.push(this.findRandom()); + } + + return await Promise.all(worldPromises); + } + + async updateMultiple(count: number): Promise { + const worlds: WorldModel[] = []; + + for (let i = 0; i < count; i++) { + const world = await this.findRandom(); + world.randomnumber = randInt(); + worlds.push(world); + this.Model.update({ randomnumber: world.randomnumber }, { + where: { + id: world.id + } + }); + } + + return await Promise.all(worlds); + } +} diff --git a/frameworks/TypeScript/feathersjs/src/services/world/world.hooks.ts b/frameworks/TypeScript/feathersjs/src/services/world/world.hooks.ts new file mode 100644 index 00000000000..a1dc9ac7527 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/world/world.hooks.ts @@ -0,0 +1,42 @@ +import { HookContext } 
from '@feathersjs/feathers'; +import { hooks } from 'feathers-sequelize'; + +const { hydrate } = hooks; + +function rawFalse(context: HookContext) { + if (!context.params.sequelize) context.params.sequelize = {}; + Object.assign(context.params.sequelize, { raw: false }); + return context; +} + +export default { + before: { + all: [rawFalse], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + after: { + all: [hydrate()], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + }, + + error: { + all: [], + find: [], + get: [], + create: [], + update: [], + patch: [], + remove: [] + } +}; diff --git a/frameworks/TypeScript/feathersjs/src/services/world/world.service.ts b/frameworks/TypeScript/feathersjs/src/services/world/world.service.ts new file mode 100644 index 00000000000..a445e2ae0d6 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/services/world/world.service.ts @@ -0,0 +1,50 @@ +// Initializes the `world` service on path `/world` +import { Params, ServiceAddons } from '@feathersjs/feathers'; +import { Application } from '../../declarations'; +import { Request, Response } from 'express'; +import { World } from './world.class'; +import createModel from '../../models/world.model'; +import hooks from './world.hooks'; +import { randInt } from '../../util'; + +// Add this service to the service type index +declare module '../../declarations' { + interface ServiceTypes { + 'world': World & ServiceAddons; + } +} + +export default function (app: Application): void { + const options = { + Model: createModel(app), + paginate: app.get('paginate') + }; + + // Initialize our service with any options it requires + app.use('/world', new World(options, app)); + + app.get('/db', async (req: Request, res: Response) => { + const world = await app.service('world').findRandom(); + + res.json(world); + }); + + app.get('/queries', async (req: Request, res: Response) => { + const queries = Math.min(Math.max(parseInt(req.query.queries) || 1, 1), 500); + const worlds = await app.service('world').findMultiple(queries); + + res.json(worlds); + }); + + app.get('/update', async (req: Request, res: Response) => { + const queries = Math.min(Math.max(parseInt(req.query.queries) || 1, 1), 500); + const worlds = await app.service('world').updateMultiple(queries); + + res.json(worlds); + }); + + // Get our initialized service so that we can register hooks + const service = app.service('world'); + + service.hooks(hooks); +} diff --git a/frameworks/TypeScript/feathersjs/src/util.ts b/frameworks/TypeScript/feathersjs/src/util.ts new file mode 100644 index 00000000000..67391ba173c --- /dev/null +++ b/frameworks/TypeScript/feathersjs/src/util.ts @@ -0,0 +1,3 @@ +export const randInt = () => { + return Math.floor(Math.random() * 10000) + 1; +}; \ No newline at end of file diff --git a/frameworks/TypeScript/feathersjs/tsconfig.json b/frameworks/TypeScript/feathersjs/tsconfig.json new file mode 100644 index 00000000000..70dd6b8a1de --- /dev/null +++ b/frameworks/TypeScript/feathersjs/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "es2018", + "module": "commonjs", + "outDir": "./lib", + "rootDir": "./src", + "strict": true, + "esModuleInterop": true + }, + "exclude": [ + "test" + ] +} diff --git a/frameworks/TypeScript/feathersjs/views/fortunes.pug b/frameworks/TypeScript/feathersjs/views/fortunes.pug new file mode 100644 index 00000000000..537952748b8 --- /dev/null +++ b/frameworks/TypeScript/feathersjs/views/fortunes.pug @@ -0,0 +1,13 @@ 
+doctype html +html + head + title Fortunes + body + table + tr + th id + th message + each fortune in fortunes + tr + td #{fortune.id} + td #{fortune.message} \ No newline at end of file diff --git a/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile index e5aee424748..d36cbd06e91 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile index a2300b2d2ac..746b5c6497e 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-fastify.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify.dockerfile index ef8287320c4..a41d822e71b 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-mongo.dockerfile b/frameworks/TypeScript/nest/nestjs-mongo.dockerfile index 80fb72a73a1..d64ff202897 100644 --- a/frameworks/TypeScript/nest/nestjs-mongo.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-mongo.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-mysql.dockerfile b/frameworks/TypeScript/nest/nestjs-mysql.dockerfile index c0f85d84292..b26c789b8c0 100644 --- a/frameworks/TypeScript/nest/nestjs-mysql.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs.dockerfile b/frameworks/TypeScript/nest/nestjs.dockerfile index 4586dbfa13e..e2c48f77d68 100644 --- a/frameworks/TypeScript/nest/nestjs.dockerfile +++ b/frameworks/TypeScript/nest/nestjs.dockerfile @@ -1,4 +1,4 @@ -FROM node:16.13.0-slim +FROM node:16.14.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/package.json b/frameworks/TypeScript/nest/package.json index ea0f4a0c109..038b625987f 100644 --- a/frameworks/TypeScript/nest/package.json +++ b/frameworks/TypeScript/nest/package.json @@ -38,7 +38,7 @@ "@nestjs/cli": "7.5.3", "@nestjs/schematics": "7.2.4", "@types/express": "4.17.3", - "@types/node": "13.7.7", + "@types/node": "16.11.46", "@typescript-eslint/eslint-plugin": "2.22.0", "@typescript-eslint/parser": "2.22.0", "eslint": "6.8.0", diff --git a/frameworks/TypeScript/nest/src/main.ts b/frameworks/TypeScript/nest/src/main.ts index ced995ca8aa..82fa61ea07d 100644 --- a/frameworks/TypeScript/nest/src/main.ts +++ b/frameworks/TypeScript/nest/src/main.ts @@ -8,7 +8,7 @@ import { import { MongoModule } from './mongo/mongo.module'; import { join } from 'path'; import { SqlModule } from './sql/sql.module'; -import cluster = require('cluster'); +import cluster from 'cluster' import os = require('os'); const port = process.env.PORT || 8080; @@ -57,7 +57,7 @@ async function bootstrapFastify() { await app.listen(8080, '0.0.0.0'); } -if (cluster.isMaster) { +if (cluster.isPrimary) { const cpus = os.cpus().length; for (let i = 0; i < cpus; i++) { cluster.fork(); diff --git 
a/frameworks/TypeScript/nest/tsconfig.json b/frameworks/TypeScript/nest/tsconfig.json index 4e6e210f140..d35e11cf7bd 100644 --- a/frameworks/TypeScript/nest/tsconfig.json +++ b/frameworks/TypeScript/nest/tsconfig.json @@ -10,7 +10,8 @@ "outDir": "./dist", "baseUrl": "./", "incremental": true, - "skipLibCheck": true + "skipLibCheck": true, + "esModuleInterop": true }, "include": ["src/**/*"], "exclude": ["node_modules", "dist"] diff --git a/frameworks/TypeScript/oak/README.md b/frameworks/TypeScript/oak/README.md new file mode 100755 index 00000000000..7266dc9c67c --- /dev/null +++ b/frameworks/TypeScript/oak/README.md @@ -0,0 +1,48 @@ +# oak Benchmarking Test + +### Test Type Implementation Source Code + +- [PLAINTEXT](src/routes.ts#L15) +- [JSON](src/routes.ts#L16) +- [DB](src/routes.ts#L17) +- [QUERY](src/routes.ts#L24) +- [UPDATE](src/routes.ts#L37) +- [FORTUNES](src/routes.ts#L53) +- [CACHED QUERY](src/routes.ts#L67) + +## Important Libraries + +The tests were run with: + +- [deno](https://deno.land) +- [oak](https://deno.land/x/oak/) + +## Test URLs + +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext + +### DB + +http://localhost:8080/db + +### QUERY + +http://localhost:8080/query?q= + +### UPDATE + +http://localhost:8080/update?q= + +### FORTUNES + +http://localhost:8080/fortunes + +### CACHED QUERY + +http://localhost:8080/cached_query?q= \ No newline at end of file diff --git a/frameworks/TypeScript/oak/benchmark_config.json b/frameworks/TypeScript/oak/benchmark_config.json new file mode 100755 index 00000000000..8067c193462 --- /dev/null +++ b/frameworks/TypeScript/oak/benchmark_config.json @@ -0,0 +1,31 @@ +{ + "framework": "oak", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?q=", + "update_url": "/updates?q=", + "fortune_url": "/fortunes", + "cached_query_url": "/cached_queries?q=", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "postgres", + "framework": "None", + "language": "Typescript", + "flavor": "deno", + "orm": "Micro", + "platform": "deno", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "oak", + "notes": "", + "versus": "nodejs" + } + } + ] +} diff --git a/frameworks/TypeScript/oak/oak.dockerfile b/frameworks/TypeScript/oak/oak.dockerfile new file mode 100644 index 00000000000..73dbbb31452 --- /dev/null +++ b/frameworks/TypeScript/oak/oak.dockerfile @@ -0,0 +1,13 @@ +FROM denoland/deno + +EXPOSE 8080 + +WORKDIR /app + +USER deno + +COPY ./src/deps.ts . +RUN deno cache deps.ts + +ADD ./src . 
+CMD [ "run", "--allow-net", "main.ts" ] \ No newline at end of file diff --git a/frameworks/TypeScript/oak/src/deps.ts b/frameworks/TypeScript/oak/src/deps.ts new file mode 100644 index 00000000000..da220f1da7c --- /dev/null +++ b/frameworks/TypeScript/oak/src/deps.ts @@ -0,0 +1,20 @@ +export { + Application, + Context, + Router, + Status, +} from "https://deno.land/x/oak@v10.6.0/mod.ts"; +export { getQuery } from "https://deno.land/x/oak@v10.6.0/helpers.ts"; +export type { ResponseBody } from "https://deno.land/x/oak@v10.6.0/response.ts"; + +export { + Column, + connect, + DataType, + Manager, + Model, + Primary, +} from "https://deno.land/x/cotton@v0.7.5/mod.ts"; +export type { DatabaseResult } from "https://deno.land/x/cotton@v0.7.5/src/adapters/adapter.ts"; + +export { html } from "https://deno.land/x/literal_html@1.1.0/mod.ts"; diff --git a/frameworks/TypeScript/oak/src/helpers.ts b/frameworks/TypeScript/oak/src/helpers.ts new file mode 100644 index 00000000000..b016e6be7d5 --- /dev/null +++ b/frameworks/TypeScript/oak/src/helpers.ts @@ -0,0 +1,13 @@ +import { Context, ResponseBody, Status } from "./deps.ts"; + +export function Ok(ctx: Context, body: ResponseBody) { + ctx.response.status = Status.OK; + ctx.response.body = body; + return; +} + +export function NotFound(ctx: Context) { + ctx.response.status = Status.NotFound; + ctx.response.body = "Not found"; + return; +} diff --git a/frameworks/TypeScript/oak/src/main.ts b/frameworks/TypeScript/oak/src/main.ts new file mode 100644 index 00000000000..ac2896cf3ec --- /dev/null +++ b/frameworks/TypeScript/oak/src/main.ts @@ -0,0 +1,27 @@ +import { Application, DatabaseResult, Manager } from "./deps.ts"; +import { router } from "./routes.ts"; +import { getDbClient } from "./utils.ts"; + +const app = new Application< + { manager: Manager; cached_worlds: DatabaseResult[] } +>(); + +// headers +app.use(async (ctx, next) => { + ctx.response.headers.set("Date", new Date().toUTCString()); + ctx.response.headers.set("Server", "Oak"); + await next(); +}); + +// database handling +app.use(async (ctx, next) => { + const db = await getDbClient(); + ctx.state.manager = db.getManager(); + await next(); + await db.disconnect(); +}); + +app.use(router.routes()); +app.use(router.allowedMethods()); + +await app.listen({ port: 8080 }); diff --git a/frameworks/TypeScript/oak/src/models.ts b/frameworks/TypeScript/oak/src/models.ts new file mode 100644 index 00000000000..e55761f6af2 --- /dev/null +++ b/frameworks/TypeScript/oak/src/models.ts @@ -0,0 +1,19 @@ +import { Column, DataType, Model, Primary } from "./deps.ts"; + +@Model() +export class World { + @Primary() + id!: number; + + @Column({ type: DataType.Number }) + randomnumber!: number; +} + +@Model() +export class Fortune { + @Primary() + id!: number; + + @Column({ type: DataType.String }) + message!: string; +} diff --git a/frameworks/TypeScript/oak/src/routes.ts b/frameworks/TypeScript/oak/src/routes.ts new file mode 100644 index 00000000000..461ffbda991 --- /dev/null +++ b/frameworks/TypeScript/oak/src/routes.ts @@ -0,0 +1,76 @@ +import { Fortune, World } from "./models.ts"; +import { NotFound, Ok } from "./helpers.ts"; +import { + getDbClient, + parseQuery, + randomNumber, + renderTemplate, +} from "./utils.ts"; +import { Router } from "./deps.ts"; + +const cached_worlds = await (await getDbClient()).getManager().query(World) + .limit(10000).all(); + +export const router = new Router() + .get("/plaintext", (ctx) => Ok(ctx, "Hello, World!")) + .get("/json", (ctx) => Ok(ctx, { message: "Hello, 
World!" })) + .get("/db", async (ctx) => { + const world = await ctx.state.manager.query(World).where( + "id", + randomNumber(), + ).first(); + Ok(ctx, world); + }) + .get("/queries", async (ctx) => { + const worlds = []; + const queries = parseQuery(ctx); + for (let i = 0; i < queries; i++) { + const world = await ctx.state.manager.query(World).where( + "id", + randomNumber(), + ).first(); + worlds.push(world); + } + + Ok(ctx, worlds); + }) + .get("/updates", async (ctx) => { + const worlds = []; + const queries = parseQuery(ctx); + + for (let i = 0; i < queries; i++) { + const world = await ctx.state.manager.query(World).where( + "id", + randomNumber(), + ).first(); + world.randomnumber = randomNumber(); + worlds.push(world); + + await ctx.state.manager.save(world); + } + Ok(ctx, worlds); + }) + .get("/fortunes", async (ctx) => { + const fortunes: Fortune[] = await ctx.state.manager.query(Fortune).all(); + fortunes.push({ + id: 0, + message: "Additional fortune added at request time.", + }); + + fortunes.sort((a: Fortune, b: Fortune) => + a.message.localeCompare(b.message) + ); + + ctx.response.headers.set("Content-Type", "text/html; charset=utf-8"); + Ok(ctx, renderTemplate(fortunes)); + }) + .get("/cached_queries", (ctx) => { + const queries = parseQuery(ctx); + const worlds = []; + + for (let i = 0; i < queries; i++) { + worlds.push(cached_worlds[randomNumber()]); + } + Ok(ctx, worlds); + }) + .get("/(.*)", (ctx) => NotFound(ctx)); diff --git a/frameworks/TypeScript/oak/src/utils.ts b/frameworks/TypeScript/oak/src/utils.ts new file mode 100644 index 00000000000..dd097b36393 --- /dev/null +++ b/frameworks/TypeScript/oak/src/utils.ts @@ -0,0 +1,41 @@ +import { connect, Context, getQuery, html } from "./deps.ts"; +import { Fortune } from "./models.ts"; + +export const randomNumber = () => { + return Math.floor(Math.random() * 10000 + 1); +}; + +export const parseQuery = (ctx: Context) => { + return Math.min(parseInt(getQuery(ctx).q) || 1, 500); +}; + +export const renderTemplate = (fortunes: Fortune[]) => { + return ` + + Fortunes + + + + + + + ${ + fortunes.map(({ id, message }) => + html`` + ).join("") + } +
+<tr><th>id</th><th>message</th></tr>
+<tr><td>${id}</td><td>${message}</td></tr>
+ + `; +}; + +export async function getDbClient() { + return await connect({ + type: "postgres", + port: 5432, + database: "hello_world", + hostname: "tfb-database", + username: "benchmarkdbuser", + password: "benchmarkdbpass", + }); +} diff --git a/frameworks/TypeScript/oak/tsconfig.json b/frameworks/TypeScript/oak/tsconfig.json new file mode 100644 index 00000000000..6dacb8cc2c5 --- /dev/null +++ b/frameworks/TypeScript/oak/tsconfig.json @@ -0,0 +1,6 @@ +{ + "compilerOptions": { + "experimentalDecorators": true, + "emitDecoratorMetadata": true + } +} diff --git a/toolset/benchmark/benchmarker.py b/toolset/benchmark/benchmarker.py index 4b332fdba78..571921e11f3 100644 --- a/toolset/benchmark/benchmarker.py +++ b/toolset/benchmark/benchmarker.py @@ -96,10 +96,14 @@ def __exit_test(self, success, prefix, file, message=None): file=file, color=Fore.RED if success else '') self.time_logger.log_test_end(log_prefix=prefix, file=file) - if self.config.mode == "benchmark" and not self.last_test: + if self.config.mode == "benchmark": # Sleep for 60 seconds to ensure all host connects are closed log("Clean up: Sleep 60 seconds...", prefix=prefix, file=file) time.sleep(60) + # After benchmarks are complete for all test types in this test, + # let's clean up leftover test images (techempower/tfb.test.test-name) + self.docker_helper.clean() + return success def __run_test(self, test, benchmark_log): diff --git a/toolset/databases/postgres/postgres.dockerfile b/toolset/databases/postgres/postgres.dockerfile index e4119d2164f..f00cf2b0f8c 100644 --- a/toolset/databases/postgres/postgres.dockerfile +++ b/toolset/databases/postgres/postgres.dockerfile @@ -14,7 +14,7 @@ RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-k RUN apt-get -yqq update > /dev/null RUN apt-get -yqq install locales -ENV PG_VERSION 13 +ENV PG_VERSION 14 RUN locale-gen en_US.UTF-8 ENV LANG en_US.UTF-8 ENV LANGUAGE en_US:en @@ -25,6 +25,7 @@ ENV DEBIAN_FRONTEND noninteractive RUN apt-get -yqq install -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" postgresql-${PG_VERSION} postgresql-contrib-${PG_VERSION} # Make sure all the configuration files in main belong to postgres +RUN sed -i "s|PG_VERSION|${PG_VERSION}|g" postgresql.conf RUN mv postgresql.conf /etc/postgresql/${PG_VERSION}/main/postgresql.conf RUN mv pg_hba.conf /etc/postgresql/${PG_VERSION}/main/pg_hba.conf diff --git a/toolset/databases/postgres/postgresql.conf b/toolset/databases/postgres/postgresql.conf index 390afdddd8b..8e54a812f2f 100644 --- a/toolset/databases/postgres/postgresql.conf +++ b/toolset/databases/postgres/postgresql.conf @@ -40,9 +40,9 @@ data_directory = '/ssd/postgresql' # use data in another directory # (change requires restart) -hba_file = '/etc/postgresql/13/main/pg_hba.conf' # host-based authentication file +hba_file = '/etc/postgresql/PG_VERSION/main/pg_hba.conf' # host-based authentication file # (change requires restart) -ident_file = '/etc/postgresql/13/main/pg_ident.conf' # ident configuration file +ident_file = '/etc/postgresql/PG_VERSION/main/pg_ident.conf' # ident configuration file # (change requires restart) # If external_pid_file is not explicitly set, no extra PID file is written. 
@@ -81,7 +81,7 @@ ssl = false # (change requires restart) #ssl_ciphers = 'ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH' # allowed SSL ciphers # (change requires restart) #ssl_renegotiation_limit = 512MB # amount of data between renegotiations -#password_encryption = on +password_encryption = md5 #db_user_namespace = off # Kerberos and GSSAPI diff --git a/toolset/run-tests.py b/toolset/run-tests.py index e10072d454a..b2279c58c85 100644 --- a/toolset/run-tests.py +++ b/toolset/run-tests.py @@ -6,7 +6,6 @@ from toolset.benchmark.benchmarker import Benchmarker from toolset.utils.scaffolding import Scaffolding from toolset.utils.audit import Audit -from toolset.utils import cleaner from toolset.utils.benchmark_config import BenchmarkConfig from toolset.utils.output_helper import log @@ -73,11 +72,6 @@ def main(argv=None): action='store_true', default=False, help='Audits framework tests for inconsistencies') - parser.add_argument( - '--clean', - action='store_true', - default=False, - help='Removes the results directory') parser.add_argument( '--new', action='store_true', @@ -215,10 +209,6 @@ def main(argv=None): elif config.audit: Audit(benchmarker).start_audit() - elif config.clean: - cleaner.clean(benchmarker.results) - benchmarker.docker_helper.clean() - elif config.list_tests: all_tests = benchmarker.metadata.gather_tests() diff --git a/toolset/test_types/cached-query/cached-query.py b/toolset/test_types/cached-query/cached-query.py index 381a5284534..83f5302a641 100644 --- a/toolset/test_types/cached-query/cached-query.py +++ b/toolset/test_types/cached-query/cached-query.py @@ -36,7 +36,7 @@ def verify(self, base_url): if len(self.cached_query_url) < 15: problems.append( ("fail", - "Route for cached queries must be at least 8 characters, found '{}' instead".format(self.cached_query_url), + "Route for cached queries must be at least 15 characters, found '{}' instead".format(self.cached_query_url), url)) if len(problems) == 0: diff --git a/toolset/utils/benchmark_config.py b/toolset/utils/benchmark_config.py index 92550230aed..930dc2d58ea 100755 --- a/toolset/utils/benchmark_config.py +++ b/toolset/utils/benchmark_config.py @@ -30,7 +30,6 @@ def __init__(self, args): self.client_host = args.client_host self.audit = args.audit self.new = args.new - self.clean = args.clean self.mode = args.mode self.list_tests = args.list_tests self.list_tag = args.list_tag diff --git a/toolset/utils/cleaner.py b/toolset/utils/cleaner.py deleted file mode 100755 index ab9af4d8a9a..00000000000 --- a/toolset/utils/cleaner.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -import shutil - - -def clean(results): - ''' - Cleans the given directory of all files and folders - ''' - results_dir = os.path.dirname(results.directory) - if os.path.exists(results_dir): - for file in os.listdir(results_dir): - if not os.path.exists(os.path.dirname(file)): - shutil.rmtree(os.path.join(results_dir, file)) - else: - os.remove(os.path.join(results_dir, file)) diff --git a/toolset/utils/docker_helper.py b/toolset/utils/docker_helper.py index ed3f1595603..54af7c8c516 100644 --- a/toolset/utils/docker_helper.py +++ b/toolset/utils/docker_helper.py @@ -93,25 +93,16 @@ def __build(self, base_url, path, build_log_file, log_prefix, dockerfile, def clean(self): ''' - Cleans all the docker images from the system + Cleans all the docker test images from the system and prunes ''' - - self.server.images.prune() for image in self.server.images.list(): if len(image.tags) > 0: - # 'techempower/tfb.test.gemini:0.1' -> 'techempower/tfb.test.gemini' - 
image_tag = image.tags[0].split(':')[0] - if image_tag != 'techempower/tfb' and 'techempower' in image_tag: - self.server.images.remove(image.id, force=True) + if 'tfb.test.' in image.tags[0]: + try: + self.server.images.remove(image.id, force=True) + except Exception: + pass self.server.images.prune() - - self.database.images.prune() - for image in self.database.images.list(): - if len(image.tags) > 0: - # 'techempower/tfb.test.gemini:0.1' -> 'techempower/tfb.test.gemini' - image_tag = image.tags[0].split(':')[0] - if image_tag != 'techempower/tfb' and 'techempower' in image_tag: - self.database.images.remove(image.id, force=True) self.database.images.prune() def build(self, test, build_log_dir=os.devnull): diff --git a/toolset/utils/results.py b/toolset/utils/results.py index 8e3e723af14..c2543ad6824 100644 --- a/toolset/utils/results.py +++ b/toolset/utils/results.py @@ -38,6 +38,9 @@ def __init__(self, benchmarker): self.environmentDescription = self.config.results_environment try: self.git = dict() + subprocess.call('git config --global --add safe.directory {}'.format(self.config.fw_root), + shell=True, + cwd=self.config.fw_root) self.git['commitId'] = self.__get_git_commit_id() self.git['repositoryUrl'] = self.__get_git_repository_url() self.git['branchName'] = self.__get_git_branch_name()